diff --git a/.beads/.local_version b/.beads/.local_version index 787ffc30..8298bb08 100644 --- a/.beads/.local_version +++ b/.beads/.local_version @@ -1 +1 @@ -0.42.0 +0.43.0 diff --git a/.beads/issues.jsonl b/.beads/issues.jsonl index d9afdfe9..bbc2b8ad 100644 --- a/.beads/issues.jsonl +++ b/.beads/issues.jsonl @@ -2,8 +2,10 @@ {"id":"node-01y","title":"Fix executeNativeTransaction type errors (2 errors)","description":"src/libs/blockchain/routines/executeNativeTransaction.ts has 2 type errors:\n\n1. Line 43: Expected 0 arguments, but got 1\n2. Line 45: Expected 0 arguments, but got 1\n\nFunction being called with arguments it doesn't accept.","notes":"Fixed properly with runtime type check like validateTransaction.ts does. Handles both string (SDK type) and Buffer (runtime possibility) by checking typeof and using forgeToHex for conversion when needed.","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-17T13:19:11.517890581+01:00","updated_at":"2025-12-17T13:35:29.594175959+01:00","closed_at":"2025-12-17T13:34:09.752017177+01:00","close_reason":"Fixed 2 errors. Root cause: SDK types define from/to as string, but code called .toString(\"hex\") as if they were Buffers. 
Removed redundant conversion.","dependencies":[{"issue_id":"node-01y","depends_on_id":"node-tsaudit","type":"parent-child","created_at":"2025-12-17T13:19:34.843754687+01:00","created_by":"daemon","metadata":"{}"}]} {"id":"node-0eg","title":"Replace appendFile with persistent WriteStreams for log files","description":"Replace fs.promises.appendFile calls with persistent fs.createWriteStream for better performance while preserving category files.\n\nCurrent problem: Each log entry triggers 3 separate appendFile calls (all.log, level.log, category.log), creating I/O contention.\n\nSolution: Use persistent write streams with Node/Bun's built-in buffering:\n\n```typescript\nprivate fileStreams: Map\u003cstring, fs.WriteStream\u003e = new Map()\n\nprivate getOrCreateStream(filename: string): fs.WriteStream {\n if (!this.fileStreams.has(filename)) {\n const filepath = path.join(this.config.logsDir, filename)\n const stream = fs.createWriteStream(filepath, { flags: 'a' })\n this.fileStreams.set(filename, stream)\n }\n return this.fileStreams.get(filename)!\n}\n\nprivate appendToFile(filename: string, content: string): void {\n const stream = this.getOrCreateStream(filename)\n stream.write(content) // Non-blocking, kernel handles buffering\n}\n```\n\nBenefits:\n- Non-blocking writes (kernel-level buffering)\n- All category files preserved\n- Reuses existing unused `fileHandles` property\n- Bun fully compatible with fs.createWriteStream\n\nNote: Need to handle stream cleanup in closeFileHandles() and on rotation.","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-16T12:41:47.743367+01:00","updated_at":"2025-12-16T13:13:55.713387+01:00","closed_at":"2025-12-16T13:13:55.713387+01:00","close_reason":"Implemented persistent WriteStreams for log files. Added getOrCreateStream() method to lazily create and cache fs.WriteStream instances. appendToFile() now uses stream.write() instead of fs.promises.appendFile(). 
Streams are properly closed during rotation and cleanup. Benefits: non-blocking writes with kernel-level buffering, reduced file handle churn.","labels":["logging","performance"]} {"id":"node-1ao","title":"TypeScript Type Errors - Needs Investigation","description":"Type errors that require investigation and understanding of business logic before fixing. May involve SDK updates or architectural decisions.","status":"closed","priority":2,"issue_type":"epic","created_at":"2025-12-16T16:34:00.220919038+01:00","updated_at":"2025-12-16T17:02:40.832471347+01:00","closed_at":"2025-12-16T17:02:40.832471347+01:00"} +{"id":"node-1l9","title":"Create portAllocator.ts - port pool management","description":"Port pool management module with: initPortPool(), allocatePort(), releasePort(port), isPortAvailable(port). Sequential 55000→57000, then recycle freed ports.","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-03T16:47:18.903865055+01:00","updated_at":"2026-01-03T16:49:38.173961798+01:00","closed_at":"2026-01-03T16:49:38.173961798+01:00","close_reason":"Created portAllocator.ts with initPortPool, allocatePort, releasePort, isPortAvailable functions","dependencies":[{"issue_id":"node-1l9","depends_on_id":"node-y3o","type":"blocks","created_at":"2026-01-03T16:47:18.911791628+01:00","created_by":"tcsenpai"}]} {"id":"node-1q8","title":"Phase 1: Categorized Logger Utility","description":"Create a new categorized Logger utility that serves as a drop-in replacement for the current logger. 
Must support categories and be TUI-ready.","status":"closed","priority":1,"issue_type":"feature","created_at":"2025-12-04T15:45:22.238751684+01:00","updated_at":"2025-12-04T15:57:01.3507118+01:00","closed_at":"2025-12-04T15:57:01.3507118+01:00","dependencies":[{"issue_id":"node-1q8","depends_on_id":"node-wrd","type":"parent-child","created_at":"2025-12-04T15:46:41.663898616+01:00","created_by":"daemon","metadata":"{}"}]} {"id":"node-1tr","title":"OmniProtocol handler typing fixes (OmniHandler\u003cBuffer\u003e)","description":"Fixed OmniHandler generic type parameter across all handlers and registry:\n- Changed all handlers to use OmniHandler\u003cBuffer\u003e instead of OmniHandler\n- Updated HandlerDescriptor interface to accept OmniHandler\u003cBuffer\u003e\n- Updated createHttpFallbackHandler return type\n- Fixed encodeTransactionResponse → encodeTransactionEnvelope in sync.ts\n- Fixed Datasource.getInstance() usage in gcr.ts\n\nRemaining issues in transaction.ts need separate fix (default export, type mismatches).","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-16T16:57:55.948145986+01:00","updated_at":"2025-12-16T16:58:02.55714785+01:00","closed_at":"2025-12-16T16:58:02.55714785+01:00","labels":["omniprotocol","typescript"]} +{"id":"node-2bq","title":"Create proxyManager.ts - proxy lifecycle management","description":"Main proxy lifecycle: ensureWstcp(), extractDomainAndPort(), getPublicUrl(), spawnProxy(), cleanupStaleProxies(), requestProxy(), killProxy(). 
30s idle timeout with stdout activity monitoring.","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-03T16:47:19.111913563+01:00","updated_at":"2026-01-03T16:50:11.028889862+01:00","closed_at":"2026-01-03T16:50:11.028889862+01:00","close_reason":"Created proxyManager.ts with full proxy lifecycle management","dependencies":[{"issue_id":"node-2bq","depends_on_id":"node-y3o","type":"blocks","created_at":"2026-01-03T16:47:19.113060332+01:00","created_by":"tcsenpai"},{"issue_id":"node-2bq","depends_on_id":"node-1l9","type":"blocks","created_at":"2026-01-03T16:47:30.308309669+01:00","created_by":"tcsenpai"},{"issue_id":"node-2bq","depends_on_id":"node-vt5","type":"blocks","created_at":"2026-01-03T16:47:30.386692841+01:00","created_by":"tcsenpai"}]} {"id":"node-2e8","title":"Fix Utils and Test file type errors (5 errors)","description":"Various utility and test files have type errors:\n\n**showPubkey.ts** (1): Line 91: Uint8Array | PublicKey not assignable to Uint8Array\n\n**transactionTester.ts** (3):\n- Lines 46,47: BinaryBuffer not assignable to string\n- Line 53: Expected 1 arguments, but got 2\n\n**testingEnvironment.ts** (1): Line 9: Cannot find module 'src/libs/blockchain/mempool'","status":"closed","priority":3,"issue_type":"task","created_at":"2025-12-17T13:19:11.645074146+01:00","updated_at":"2025-12-17T14:00:51.604461619+01:00","closed_at":"2025-12-17T14:00:51.604461619+01:00","close_reason":"Excluded src/tests from tsconfig.json type-checking - test files don't need strict type validation","labels":["test"],"dependencies":[{"issue_id":"node-2e8","depends_on_id":"node-tsaudit","type":"parent-child","created_at":"2025-12-17T13:19:34.96711142+01:00","created_by":"daemon","metadata":"{}"}]} {"id":"node-2ja","title":"Fix TLSConnection class inheritance - private to protected","description":"TLSConnection incorrectly extends PeerConnection. 
Need to change private properties (setState, socket, peerIdentity) to protected in PeerConnection base class. 8 errors in TLSConnection.ts","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-16T16:34:21.855164384+01:00","updated_at":"2025-12-16T16:35:56.755314541+01:00","closed_at":"2025-12-16T16:35:56.755314541+01:00","dependencies":[{"issue_id":"node-2ja","depends_on_id":"node-718","type":"parent-child","created_at":"2025-12-16T16:34:21.887280095+01:00","created_by":"daemon","metadata":"{}"}]} {"id":"node-2pd","title":"Phase 1: IPFS Foundation - Docker + Skeleton","description":"Set up Kubo Docker container and create basic IPFSManager skeleton.\n\n## Tasks\n1. Add Kubo service to docker-compose.yml\n2. Create src/features/ipfs/ directory structure\n3. Implement IPFSManager class skeleton\n4. Add health check and lifecycle management\n5. Test container startup with Demos node","design":"### Docker Compose Addition\n```yaml\nipfs:\n image: ipfs/kubo:v0.26.0\n container_name: demos-ipfs\n environment:\n - IPFS_PROFILE=server\n volumes:\n - ipfs-data:/data/ipfs\n networks:\n - demos-network\n healthcheck:\n test: [\"CMD-SHELL\", \"ipfs id || exit 1\"]\n interval: 30s\n timeout: 10s\n retries: 3\n restart: unless-stopped\n```\n\n### Directory Structure\n```\nsrc/features/ipfs/\n├── index.ts\n├── IPFSManager.ts\n├── types.ts\n└── errors.ts\n```\n\n### IPFSManager Skeleton\n- constructor(apiUrl)\n- healthCheck(): Promise\u003cboolean\u003e\n- getNodeId(): Promise\u003cstring\u003e\n- Private apiUrl configuration","acceptance_criteria":"- [ ] Kubo container defined in docker-compose.yml\n- [ ] Container starts successfully with docker-compose up\n- [ ] IPFSManager class exists with health check\n- [ ] Health check returns true when container is running\n- [ ] getNodeId() returns valid peer 
ID","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-24T14:35:56.863177+01:00","updated_at":"2025-12-24T15:13:18.231786+01:00","closed_at":"2025-12-24T15:13:18.231786+01:00","close_reason":"Completed Phase 1: IPFS auto-start integration with PostgreSQL pattern, IPFSManager, docker-compose, helper scripts, and README","dependencies":[{"issue_id":"node-2pd","depends_on_id":"node-qz1","type":"parent-child","created_at":"2025-12-24T14:36:10.251508+01:00","created_by":"daemon"}]} @@ -12,33 +14,53 @@ {"id":"node-3ju","title":"Remove pretty-print JSON.stringify from hot paths","description":"Replace JSON.stringify(obj, null, 2) calls with either:\n- JSON.stringify(obj) without formatting\n- Concise field logging (e.g., log key counts/identifiers instead of full objects)\n\nFound 56 occurrences across codebase, many in consensus and peer handling hot paths.\n\nExample fix:\n- Before: log.debug(`Shard: ${JSON.stringify(manager.shard, null, 2)}`)\n- After: log.debug(`Shard: ${manager.shard.members.length} members, secretary: ${manager.shard.secretaryKey.slice(0,8)}...`)","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-16T12:40:13.982267+01:00","updated_at":"2025-12-16T13:00:18.074387+01:00","closed_at":"2025-12-16T13:00:18.074387+01:00","close_reason":"Removed pretty-print JSON.stringify(obj, null, 2) from all hot paths across 20+ files. Consensus, network, peer, sync, and utility modules all now use compact JSON.stringify(obj). ~10x faster serialization in logging paths.","labels":["logging","performance"]} {"id":"node-45p","title":"node-tscheck","description":"Run bun run type-check-ts, create an appropriate epic and add issues about the errors you find categorized","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-16T17:01:33.543856281+01:00","updated_at":"2025-12-17T13:19:42.512775764+01:00","closed_at":"2025-12-17T13:19:42.512775764+01:00","close_reason":"Completed. 
Created epic node-tsaudit with 9 categorized subtasks covering all 38 type errors."} {"id":"node-4w6","title":"Phase 1: Migrate hottest path console.log calls","description":"Convert console.log calls in the most frequently executed code paths:\n\n- src/libs/consensus/v2/PoRBFT.ts (lines 245, 332-333, 527, 533)\n- src/libs/peer/PeerManager.ts (lines 52-371)\n- src/libs/blockchain/transaction.ts (lines 115-490)\n- src/libs/blockchain/routines/validateTransaction.ts (lines 38-288)\n- src/libs/blockchain/routines/Sync.ts (lines 283, 368)\n\nPattern:\n```typescript\n// Before\nconsole.log(\"[PEER] message\", data)\n\n// After\nimport { getLogger } from \"@/utilities/tui/CategorizedLogger\"\nconst log = getLogger()\nlog.debug(\"PEER\", `message ${data}`)\n```\n\nEstimated: ~50-80 calls","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-16T13:24:07.305928+01:00","updated_at":"2025-12-16T13:47:57.060488+01:00","closed_at":"2025-12-16T13:47:57.060488+01:00","close_reason":"Phase 1 complete - migrated 34+ console.log calls in 5 hot path files","labels":["logging","performance"],"dependencies":[{"issue_id":"node-4w6","depends_on_id":"node-7d8","type":"parent-child","created_at":"2025-12-16T13:24:22.786925+01:00","created_by":"daemon","metadata":"{}"}]} +{"id":"node-5bm","title":"Wrap switch case var declarations in blocks","description":"CRITICAL: In handleNativeOperations.ts, var declarations in switch cases can leak. Affected lines: 55, 70-77, 102, 107, 124, 126, 131-138, 144-157. 
Wrap each case in blocks { }.","status":"closed","priority":0,"issue_type":"bug","created_at":"2026-01-04T18:19:40.341227726+01:00","created_by":"tcsenpai","updated_at":"2026-01-04T18:21:52.678800397+01:00","closed_at":"2026-01-04T18:21:52.678800397+01:00","close_reason":"Wrapped all switch cases in blocks and changed var to const","dependencies":[{"issue_id":"node-5bm","depends_on_id":"node-e6o","type":"parent-child","created_at":"2026-01-04T18:19:53.057748336+01:00","created_by":"tcsenpai"}]} {"id":"node-5l8","title":"Phase 5: Tokenomics - Pay to Pin, Earn to Host","description":"Implement economic model for IPFS operations.\n\n## Pricing Model\n\n### Regular Accounts\n- **Minimum**: 1 DEM\n- **Formula**: `max(1, ceil(fileSizeBytes / (100 * 1024 * 1024)))` DEM\n- **Rate**: 1 DEM per 100MB chunk\n\n| Size | Cost |\n|------|------|\n| 0-100MB | 1 DEM |\n| 100-200MB | 2 DEM |\n| 200-300MB | 3 DEM |\n| ... | +1 DEM per 100MB |\n\n### Genesis Accounts\n- **Free Tier**: 1 GB\n- **After Free**: 1 DEM per 1GB\n- **Detection**: Already flagged in genesis block\n\n## Fee Distribution\n\n| Phase | RPC Host | Treasury | Consensus Shard |\n|-------|----------|----------|-----------------|\n| **MVP** | 100% | 0% | 0% |\n| **Future** | 70% | 20% | 10% |\n\n## Storage Rules\n- **Duration**: Permanent (until user unpins)\n- **Unpin**: Allowed, no refund\n- **Replication**: Single node (user choice for multi-node later)\n\n## Transaction Flow\n1. User submits IPFS transaction with DEM fee\n2. Pre-consensus validation: Check balance \u003e= calculated fee\n3. Reject if insufficient funds (before consensus)\n4. On consensus: Deduct fee, execute IPFS op, credit host\n5. On failure: Revert fee deduction\n\n## Tasks\n1. Create ipfsTokenomics.ts with pricing calculations\n2. Add genesis account detection helper\n3. Add free allocation tracking to account IPFS state\n4. Implement balance check in transaction validation\n5. Implement fee deduction in ipfsOperations.ts\n6. 
Credit hosting RPC with 100% of fee (MVP)\n7. Add configuration for pricing constants\n8. Test pricing calculations\n\n## Future TODOs (Not This Phase)\n- [ ] Fee distribution split (70/20/10)\n- [ ] Time-based renewal option\n- [ ] Multi-node replication pricing\n- [ ] Node operator free allocations\n- [ ] DEM price calculator integration\n- [ ] Custom free allocation categories","design":"### Pricing Configuration\n```typescript\ninterface IPFSPricingConfig {\n // Regular accounts\n regularMinCost: bigint; // 1 DEM\n regularBytesPerUnit: number; // 100 * 1024 * 1024 (100MB)\n regularCostPerUnit: bigint; // 1 DEM\n \n // Genesis accounts \n genesisFreeBytes: number; // 1 * 1024 * 1024 * 1024 (1GB)\n genesisBytesPerUnit: number; // 1 * 1024 * 1024 * 1024 (1GB)\n genesisCostPerUnit: bigint; // 1 DEM\n \n // Fee distribution (MVP: 100% to host)\n hostShare: number; // 100 (percentage)\n treasuryShare: number; // 0 (percentage)\n consensusShare: number; // 0 (percentage)\n}\n```\n\n### Pricing Functions\n```typescript\nfunction calculatePinCost(\n fileSizeBytes: number, \n isGenesisAccount: boolean,\n usedFreeBytes: number\n): bigint {\n if (isGenesisAccount) {\n const freeRemaining = Math.max(0, config.genesisFreeBytes - usedFreeBytes);\n if (fileSizeBytes \u003c= freeRemaining) return 0n;\n const chargeableBytes = fileSizeBytes - freeRemaining;\n return BigInt(Math.ceil(chargeableBytes / config.genesisBytesPerUnit));\n }\n \n // Regular account\n const units = Math.ceil(fileSizeBytes / config.regularBytesPerUnit);\n return BigInt(Math.max(1, units)) * config.regularCostPerUnit;\n}\n```\n\n### Account State Extension\n```typescript\ninterface AccountIPFSState {\n // Existing fields...\n pins: PinnedContent[];\n totalPinnedBytes: number;\n \n // New tokenomics fields\n freeAllocationBytes: number; // Genesis: 1GB, Regular: 0\n usedFreeBytes: number; // Track free tier usage\n totalPaidDEM: bigint; // Lifetime paid\n earnedRewardsDEM: bigint; // Earned from hosting 
(future)\n}\n```\n\n### Fee Flow (MVP)\n```\nUser pays X DEM → ipfsOperations handler\n → deductBalance(user, X)\n → creditBalance(hostingRPC, X) // 100% MVP\n → execute IPFS operation\n → update account IPFS state\n```\n\n### Genesis Detection\n```typescript\n// Genesis accounts are already in genesis block\n// Use existing genesis address list or account flag\nfunction isGenesisAccount(address: string): boolean {\n return genesisAddresses.includes(address);\n // OR check account.isGenesis flag if exists\n}\n```","acceptance_criteria":"- [ ] Pricing correctly calculates 1 DEM per 100MB for regular accounts\n- [ ] Genesis accounts get 1GB free, then 1 DEM per GB\n- [ ] Transaction rejected pre-consensus if insufficient DEM balance\n- [ ] Fee deducted from user on successful pin\n- [ ] Fee credited to hosting RPC (100% for MVP)\n- [ ] Account IPFS state tracks free tier usage\n- [ ] Unpin does not refund DEM\n- [ ] Configuration allows future pricing adjustments","notes":"Phase 5 implementation complete:\n- ipfsTokenomics.ts: Pricing calculations (1 DEM/100MB regular, 1GB free + 1 DEM/GB genesis)\n- ipfsOperations.ts: Fee deduction \u0026 RPC credit integration\n- IPFSTypes.ts: Extended with tokenomics fields\n- GCRIPFSRoutines.ts: Updated for new IPFS state fields\n- All lint and type-check passed\n- Committed: 43bc5580","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-24T14:42:38.388881+01:00","updated_at":"2025-12-24T19:29:16.517786+01:00","closed_at":"2025-12-24T19:29:16.517786+01:00","close_reason":"Phase 5 IPFS Tokenomics implemented. Fee system integrates with ipfsAdd/ipfsPin operations. Genesis detection via content.extra.genesisData. 
Ready for Phase 6 SDK integration.","dependencies":[{"issue_id":"node-5l8","depends_on_id":"node-qz1","type":"parent-child","created_at":"2025-12-24T14:43:20.320116+01:00","created_by":"daemon"},{"issue_id":"node-5l8","depends_on_id":"node-xhh","type":"blocks","created_at":"2025-12-24T14:44:48.45804+01:00","created_by":"daemon"}]} {"id":"node-5rm","title":"Create peerlist generation script","description":"Create scripts/generate-peerlist.sh that:\n- Reads public keys from identity files\n- Generates demos_peerlist.json with Docker service names\n- Maps each pubkey to http://node-{N}:{PORT}","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-25T12:39:43.784375+01:00","updated_at":"2025-12-25T12:49:32.916473+01:00","closed_at":"2025-12-25T12:49:32.916473+01:00","close_reason":"Created generate-peerlist.sh that reads pubkeys and generates demos_peerlist.json with Docker service names","dependencies":[{"issue_id":"node-5rm","depends_on_id":"node-00o","type":"parent-child","created_at":"2025-12-25T12:40:12.027277+01:00","created_by":"daemon"},{"issue_id":"node-5rm","depends_on_id":"node-d4e","type":"blocks","created_at":"2025-12-25T12:40:34.127241+01:00","created_by":"daemon"}]} {"id":"node-65n","title":"Investigate logger signature issues (TS2345) - ~50 errors","description":"Many log.info/debug/warning/error calls pass wrong argument types. Pattern: passing unknown/number/string/Error where boolean is expected. 
Need to understand intended logger API - should second param accept data objects or just isDebug boolean?","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-16T16:34:47.830760343+01:00","updated_at":"2025-12-16T16:43:07.794967183+01:00","closed_at":"2025-12-16T16:43:07.794967183+01:00","dependencies":[{"issue_id":"node-65n","depends_on_id":"node-1ao","type":"parent-child","created_at":"2025-12-16T16:34:47.834870178+01:00","created_by":"daemon","metadata":"{}"}]} {"id":"node-66u","title":"Phase 2: TUI Framework Setup","description":"Set up the TUI framework using terminal-kit (already installed). Create the basic layout structure with panels.","status":"closed","priority":1,"issue_type":"feature","created_at":"2025-12-04T15:45:22.405530697+01:00","updated_at":"2025-12-04T16:03:17.66943608+01:00","closed_at":"2025-12-04T16:03:17.66943608+01:00","dependencies":[{"issue_id":"node-66u","depends_on_id":"node-1q8","type":"blocks","created_at":"2025-12-04T15:46:29.51715706+01:00","created_by":"daemon","metadata":"{}"},{"issue_id":"node-66u","depends_on_id":"node-wrd","type":"parent-child","created_at":"2025-12-04T15:46:41.730819864+01:00","created_by":"daemon","metadata":"{}"}]} {"id":"node-67f","title":"Phase 5: Migrate Existing Logging","description":"Replace all existing console.log, term.*, and Logger calls with the new categorized logger throughout the codebase.","notes":"Core migration complete:\n- Replaced src/utilities/logger.ts with re-export of LegacyLoggerAdapter\n- All existing log.* calls now route through CategorizedLogger\n- Migrated console.log and term.* calls in index.ts (main entry point)\n- Migrated mainLoop.ts\n\nBug fixes from tester feedback (2025-12-08):\n- Fixed scrolling: autoScroll now disables when scrolling up, re-enables when scrolling to bottom\n- Removed non-functional S/P/R controls (start/pause/restart don't apply to blockchain nodes)\n- Updated footer to show autoScroll status (ON/OFF indicator)\n- Updated README 
with TUI documentation and --no-tui flag for developers\n\nRemaining legacy calls (lower priority):\n- ~129 console.log calls in 20 files (many in tests/client/cli)\n- ~56 term.* calls in 13 files (excluding TUIManager which needs them)\n\nThe core logging infrastructure is now TUI-ready. Legacy calls will still work but bypass TUI display.","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-04T15:45:22.92693117+01:00","updated_at":"2025-12-08T14:56:11.180861659+01:00","closed_at":"2025-12-08T14:56:11.180865857+01:00","dependencies":[{"issue_id":"node-67f","depends_on_id":"node-1q8","type":"blocks","created_at":"2025-12-04T15:46:29.724713609+01:00","created_by":"daemon","metadata":"{}"},{"issue_id":"node-67f","depends_on_id":"node-s48","type":"blocks","created_at":"2025-12-04T15:46:29.777335113+01:00","created_by":"daemon","metadata":"{}"},{"issue_id":"node-67f","depends_on_id":"node-wrd","type":"parent-child","created_at":"2025-12-04T15:46:41.885331922+01:00","created_by":"daemon","metadata":"{}"}]} +{"id":"node-6ds","title":"Remove duplicate @octokit/core in package.json","description":"CRITICAL: Biome detected duplicate @octokit/core key at line 67. Remove the duplicate entry.","status":"closed","priority":0,"issue_type":"bug","created_at":"2026-01-04T18:19:40.245188084+01:00","created_by":"tcsenpai","updated_at":"2026-01-04T18:20:35.001178199+01:00","closed_at":"2026-01-04T18:20:35.001178199+01:00","close_reason":"Removed duplicate @octokit/core entry at line 67-68","dependencies":[{"issue_id":"node-6ds","depends_on_id":"node-e6o","type":"parent-child","created_at":"2026-01-04T18:19:52.974666778+01:00","created_by":"tcsenpai"}]} {"id":"node-6p0","title":"Phase 2: IPFS Core Operations - add/get/pin","description":"Implement core IPFS operations and expose via RPC endpoints.\n\n## Tasks\n1. Implement add() - add content, return CID\n2. Implement get() - retrieve content by CID\n3. Implement pin() - pin content for persistence\n4. 
Implement unpin() - remove pin\n5. Implement listPins() - list all pinned CIDs\n6. Create RPC endpoints for all operations\n7. Add error handling and validation","design":"### IPFSManager Methods\n```typescript\nasync add(content: Buffer | Uint8Array, filename?: string): Promise\u003cstring\u003e\nasync get(cid: string): Promise\u003cBuffer\u003e\nasync pin(cid: string): Promise\u003cvoid\u003e\nasync unpin(cid: string): Promise\u003cvoid\u003e\nasync listPins(): Promise\u003cstring[]\u003e\n```\n\n### RPC Endpoints\n- POST /ipfs/add (multipart form data) → { cid }\n- GET /ipfs/:cid → raw content\n- POST /ipfs/pin { cid } → { cid }\n- DELETE /ipfs/pin/:cid → { success }\n- GET /ipfs/pins → { pins: string[] }\n- GET /ipfs/status → { healthy, peerId, peers }\n\n### Kubo API Calls\n- POST /api/v0/add (multipart)\n- POST /api/v0/cat?arg={cid}\n- POST /api/v0/pin/add?arg={cid}\n- POST /api/v0/pin/rm?arg={cid}\n- POST /api/v0/pin/ls","acceptance_criteria":"- [ ] add() returns valid CID for content\n- [ ] get() retrieves exact content by CID\n- [ ] pin() successfully pins content\n- [ ] unpin() removes pin\n- [ ] listPins() returns array of pinned CIDs\n- [ ] All RPC endpoints respond correctly\n- [ ] Error handling for invalid CIDs\n- [ ] Error handling for missing content","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-24T14:35:57.736369+01:00","updated_at":"2025-12-24T17:10:07.487626+01:00","closed_at":"2025-12-24T17:10:07.487626+01:00","close_reason":"Completed: Implemented IPFSManager core methods (add/get/pin/unpin/listPins) and RPC endpoints. 
Commit b7dac5f6.","dependencies":[{"issue_id":"node-6p0","depends_on_id":"node-qz1","type":"parent-child","created_at":"2025-12-24T14:36:10.75642+01:00","created_by":"daemon"},{"issue_id":"node-6p0","depends_on_id":"node-2pd","type":"blocks","created_at":"2025-12-24T14:36:20.954338+01:00","created_by":"daemon"}]} {"id":"node-6qh","title":"Phase 5: IPFS Public Bridge - Gateway Access","description":"Add optional bridge to public IPFS network for content retrieval and publishing.\n\n## Tasks\n1. Configure optional public network connection\n2. Implement gateway fetch for public CIDs\n3. Add publish to public IPFS option\n4. Handle dual-network routing\n5. Security considerations for public exposure","design":"### Public Bridge Options\n```typescript\ninterface IPFSManagerConfig {\n privateOnly: boolean; // Default: true\n publicGateway?: string; // e.g., https://ipfs.io\n publishToPublic?: boolean; // Default: false\n}\n```\n\n### Gateway Methods\n```typescript\nasync fetchFromPublic(cid: string): Promise\u003cBuffer\u003e\nasync publishToPublic(cid: string): Promise\u003cvoid\u003e\nasync isPubliclyAvailable(cid: string): Promise\u003cboolean\u003e\n```\n\n### Routing Logic\n1. Check private network first\n2. If not found and publicGateway configured, try gateway\n3. 
For publish, optionally announce to public DHT\n\n### Security\n- Public bridge is opt-in only\n- Rate limiting for public fetches\n- No automatic public publishing","acceptance_criteria":"- [ ] Can fetch content from public IPFS gateway\n- [ ] Can optionally publish to public IPFS\n- [ ] Private network remains default\n- [ ] Clear configuration for public access\n- [ ] Rate limiting prevents abuse","status":"closed","priority":3,"issue_type":"task","created_at":"2025-12-24T14:36:00.170018+01:00","updated_at":"2025-12-25T11:23:41.42062+01:00","closed_at":"2025-12-25T11:23:41.42062+01:00","close_reason":"Completed Phase 5 - IPFS Public Bridge implementation with gateway access, publish capability, and rate limiting","dependencies":[{"issue_id":"node-6qh","depends_on_id":"node-qz1","type":"parent-child","created_at":"2025-12-24T14:36:12.364852+01:00","created_by":"daemon"},{"issue_id":"node-6qh","depends_on_id":"node-zmh","type":"blocks","created_at":"2025-12-24T14:36:22.569472+01:00","created_by":"daemon"}]} {"id":"node-718","title":"TypeScript Type Errors - Auto-Fixable (100% Confident)","description":"Type errors that can be automatically fixed with high confidence. These are clear-cut fixes with no ambiguity.","status":"closed","priority":1,"issue_type":"epic","created_at":"2025-12-16T16:34:00.157303937+01:00","updated_at":"2025-12-16T16:39:22.698926494+01:00","closed_at":"2025-12-16T16:39:22.698926494+01:00"} {"id":"node-7d8","title":"Console.log Migration to CategorizedLogger","description":"Migrate all rogue console.log/warn/error calls to use CategorizedLogger for async buffered output. 
This eliminates blocking I/O in hot paths and ensures consistent logging behavior.\n\nScope: ~400 calls across src/ (excluding ~100 acceptable CLI tools)\n\nSee CONSOLE_LOG_AUDIT.md for detailed file list.","status":"closed","priority":2,"issue_type":"epic","created_at":"2025-12-16T13:23:30.376506+01:00","updated_at":"2025-12-16T17:02:20.522894611+01:00","closed_at":"2025-12-16T15:41:29.390792179+01:00","labels":["logging","migration","performance"]} {"id":"node-93c","title":"Test and validate devnet connectivity","description":"Verify the devnet works:\n- All 4 nodes start successfully\n- Nodes discover each other via peerlist\n- HTTP RPC endpoints respond\n- OmniProtocol connections establish\n- Cross-node operations work","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-25T12:39:50.741593+01:00","updated_at":"2025-12-25T14:06:28.191498+01:00","closed_at":"2025-12-25T14:06:28.191498+01:00","close_reason":"Completed - user verified devnet works: all 4 nodes start, peer discovery works, connections established","dependencies":[{"issue_id":"node-93c","depends_on_id":"node-00o","type":"parent-child","created_at":"2025-12-25T12:40:18.319972+01:00","created_by":"daemon"},{"issue_id":"node-93c","depends_on_id":"node-vuy","type":"blocks","created_at":"2025-12-25T12:40:34.716509+01:00","created_by":"daemon"},{"issue_id":"node-93c","depends_on_id":"node-eqk","type":"blocks","created_at":"2025-12-25T12:40:35.376323+01:00","created_by":"daemon"}]} +{"id":"node-96d","title":"Add payload size validation in MessageFramer","description":"MEDIUM: MessageFramer.ts:171-211 has no max payload size check. 
Add MAX_PAYLOAD_SIZE (16MB) validation to prevent DoS.","status":"closed","priority":1,"issue_type":"bug","created_at":"2026-01-04T18:19:40.722981457+01:00","created_by":"tcsenpai","updated_at":"2026-01-04T18:23:45.934632552+01:00","closed_at":"2026-01-04T18:23:45.934632552+01:00","close_reason":"Added MAX_PAYLOAD_SIZE constant (16MB) and validation in parseHeader() to prevent DoS","dependencies":[{"issue_id":"node-96d","depends_on_id":"node-e6o","type":"parent-child","created_at":"2026-01-04T18:19:53.335780723+01:00","created_by":"tcsenpai"}]} +{"id":"node-98k","title":"Create SDK_INTEGRATION.md documentation","description":"Document requestTLSNproxy endpoint for SDK integration: request format, response format, error codes, usage examples.","status":"closed","priority":2,"issue_type":"task","created_at":"2026-01-03T16:47:19.324533126+01:00","updated_at":"2026-01-03T16:51:13.593988768+01:00","closed_at":"2026-01-03T16:51:13.593988768+01:00","close_reason":"Created SDK_INTEGRATION.md with full documentation","dependencies":[{"issue_id":"node-98k","depends_on_id":"node-y3o","type":"blocks","created_at":"2026-01-03T16:47:19.32561875+01:00","created_by":"tcsenpai"},{"issue_id":"node-98k","depends_on_id":"node-cwr","type":"blocks","created_at":"2026-01-03T16:47:30.548495387+01:00","created_by":"tcsenpai"}]} {"id":"node-9de","title":"Phase 3: Migrate MEDIUM priority console.log calls","description":"Convert MEDIUM priority console.log calls in modules with occasional execution:\n\nIdentity Module:\n- src/libs/identity/tools/twitter.ts\n- src/libs/identity/tools/discord.ts\n\nAbstraction Module:\n- src/libs/abstraction/index.ts\n- src/libs/abstraction/web2/github.ts\n- src/libs/abstraction/web2/parsers.ts\n\nCrypto Module:\n- src/libs/crypto/cryptography.ts\n- src/libs/crypto/forgeUtils.ts\n- src/libs/crypto/pqc/enigma.ts\n\nEstimated: ~50 
calls","status":"closed","priority":2,"issue_type":"task","assignee":"claude","created_at":"2025-12-16T13:24:08.792194+01:00","updated_at":"2025-12-16T17:02:20.524012017+01:00","closed_at":"2025-12-16T15:29:40.754824828+01:00","labels":["logging"],"dependencies":[{"issue_id":"node-9de","depends_on_id":"node-7d8","type":"parent-child","created_at":"2025-12-16T13:24:23.53308+01:00","created_by":"daemon","metadata":"{}"},{"issue_id":"node-9de","depends_on_id":"node-whe","type":"blocks","created_at":"2025-12-16T13:24:24.666482+01:00","created_by":"daemon","metadata":"{}"}]} {"id":"node-9n2","title":"Write devnet README documentation","description":"Complete README.md with:\n- Quick start guide\n- Architecture explanation\n- Configuration options\n- Troubleshooting section\n- Examples of common operations","status":"closed","priority":3,"issue_type":"task","created_at":"2025-12-25T12:39:53.240705+01:00","updated_at":"2025-12-25T12:51:47.658501+01:00","closed_at":"2025-12-25T12:51:47.658501+01:00","close_reason":"README.md completed with full documentation including observability section","dependencies":[{"issue_id":"node-9n2","depends_on_id":"node-00o","type":"parent-child","created_at":"2025-12-25T12:40:20.669782+01:00","created_by":"daemon"},{"issue_id":"node-9n2","depends_on_id":"node-93c","type":"blocks","created_at":"2025-12-25T12:40:35.997446+01:00","created_by":"daemon"}]} {"id":"node-9pb","title":"Phase 6: SDK Integration - sdk.ipfs module (SDK)","description":"Implement sdk.ipfs module in @kynesyslabs/demosdk (../sdks).\n\n⚠️ **SDK ONLY**: All work in ../sdks repository.\nAfter completion, user must manually publish new SDK version.\n\n## Tasks\n1. Create IPFS module structure in SDK\n2. Implement read methods (demosCall wrappers)\n3. Implement transaction builders for writes\n4. Add TypeScript types and interfaces\n5. Write unit tests\n6. Update SDK exports and documentation\n7. 
Publish new SDK version (USER ACTION)","design":"### SDK Structure (../sdks)\n```\nsrc/\n├── ipfs/\n│ ├── index.ts\n│ ├── types.ts\n│ ├── reads.ts // demosCall wrappers\n│ ├── writes.ts // Transaction builders\n│ └── utils.ts\n```\n\n### Public Interface\n```typescript\nclass IPFSModule {\n // Reads (demosCall - gas free)\n async get(cid: string): Promise\u003cBuffer\u003e\n async pins(address?: string): Promise\u003cPinInfo[]\u003e\n async status(): Promise\u003cIPFSStatus\u003e\n async rewards(address?: string): Promise\u003cbigint\u003e\n\n // Writes (Transactions)\n async add(content: Buffer, opts?: AddOptions): Promise\u003cAddResult\u003e\n async pin(cid: string, opts?: PinOptions): Promise\u003cTxResult\u003e\n async unpin(cid: string): Promise\u003cTxResult\u003e\n async claimRewards(): Promise\u003cTxResult\u003e\n}\n```\n\n### Integration\n- Attach to main SDK instance as sdk.ipfs\n- Follow existing SDK patterns\n- Use shared transaction signing","notes":"This phase is SDK-only. User must publish after completion.","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-24T14:42:39.202179+01:00","updated_at":"2025-12-24T19:43:49.257733+01:00","closed_at":"2025-12-24T19:43:49.257733+01:00","close_reason":"Phase 6 complete: SDK ipfs module created with IPFSOperations class, payload creators, and utilities. 
Build verified.","dependencies":[{"issue_id":"node-9pb","depends_on_id":"node-qz1","type":"parent-child","created_at":"2025-12-24T14:43:20.843923+01:00","created_by":"daemon"},{"issue_id":"node-9pb","depends_on_id":"node-5l8","type":"blocks","created_at":"2025-12-24T14:44:49.017806+01:00","created_by":"daemon"}]} {"id":"node-9x8","title":"Fix OmniProtocol type errors (11 errors)","description":"OmniProtocol module has 11 type errors across multiple files:\\n\\n**auth/parser.ts** (1): bigint not assignable to number\\n**integration/startup.ts** (1): TLSServer | OmniProtocolServer union issue\\n**protocol/dispatcher.ts** (1): HandlerContext\u003cTPayload\u003e not assignable to HandlerContext\u003cBuffer\u003e\\n**protocol/handlers/transaction.ts** (4): missing default export, unknown→BridgeOperation, BridgePayload missing chain\\n**tls/certificates.ts** (3): Property 'message' on unknown type (catch blocks)\\n**transport/PeerConnection.ts** (1): unknown not assignable to Buffer","notes":"Verified 2025-12-17: Updated from ~40 to 11 errors. 
Previous description mentioned sync.ts, meta.ts, gcr.ts which are now clean.","status":"closed","priority":2,"issue_type":"task","assignee":"claude","created_at":"2025-12-16T16:34:47.925168022+01:00","updated_at":"2025-12-17T14:09:25.875844789+01:00","closed_at":"2025-12-17T14:09:25.875844789+01:00","close_reason":"Fixed all 11 OmniProtocol errors: certificates.ts catch blocks (3), parser.ts bigint (1), PeerConnection.ts Buffer cast (1), startup.ts return type (1), dispatcher.ts HandlerContext (1), transaction.ts default imports and type casts (4)","dependencies":[{"issue_id":"node-9x8","depends_on_id":"node-1ao","type":"parent-child","created_at":"2025-12-16T16:34:47.926905546+01:00","created_by":"daemon","metadata":"{}"},{"issue_id":"node-9x8","depends_on_id":"node-tsaudit","type":"parent-child","created_at":"2025-12-17T13:19:34.659607906+01:00","created_by":"daemon","metadata":"{}"}]} +{"id":"node-a1j","title":"Add TLSN_REQUEST tx handler - payment + token creation","description":"Add transaction handler for TLSN_REQUEST:\n- Fee: 1 DEM\n- Data: { targetUrl } - extract domain for lock\n- On success: create token via tokenManager\n- Return tokenId in tx result","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-04T10:23:40.74061868+01:00","created_by":"tcsenpai","updated_at":"2026-01-04T10:32:01.916691816+01:00","closed_at":"2026-01-04T10:32:01.916691816+01:00","close_reason":"Implemented tlsn_request native operation handler in handleNativeOperations.ts, burns 1 DEM fee, creates token. 
Added tlsnotary.getToken and tlsnotary.getTokenStats nodeCall endpoints.","dependencies":[{"issue_id":"node-a1j","depends_on_id":"node-azu","type":"parent-child","created_at":"2026-01-04T10:24:11.559053816+01:00","created_by":"tcsenpai"},{"issue_id":"node-a1j","depends_on_id":"node-f23","type":"blocks","created_at":"2026-01-04T10:24:19.03475961+01:00","created_by":"tcsenpai"}]} +{"id":"node-a3w","title":"Update getCategories() to use ALL_CATEGORIES","description":"LOW: CategorizedLogger.ts:924-937 - getCategories() doesn't include TLSN and CMD. Fix: return [...ALL_CATEGORIES]","status":"closed","priority":2,"issue_type":"bug","created_at":"2026-01-04T18:19:40.594002545+01:00","created_by":"tcsenpai","updated_at":"2026-01-04T18:23:01.456810437+01:00","closed_at":"2026-01-04T18:23:01.456810437+01:00","close_reason":"Updated getCategories() to return [...ALL_CATEGORIES] instead of hardcoded list","dependencies":[{"issue_id":"node-a3w","depends_on_id":"node-e6o","type":"parent-child","created_at":"2026-01-04T18:19:53.242160626+01:00","created_by":"tcsenpai"}]} +{"id":"node-a95","title":"Add payload size validation to encodeMessage()","description":"MessageFramer.ts:274-309 - Senders can encode oversized payloads that receivers reject. Add size check for consistency.","status":"open","priority":1,"issue_type":"bug","created_at":"2026-01-05T15:26:25.368342253+01:00","created_by":"tcsenpai","updated_at":"2026-01-05T15:26:25.368342253+01:00","dependencies":[{"issue_id":"node-a95","depends_on_id":"node-jhq","type":"blocks","created_at":"2026-01-05T15:26:25.369487809+01:00","created_by":"tcsenpai"}]} {"id":"node-a96","title":"Fix FHE test type errors (2 errors)","description":"src/features/fhe/fhe_test.ts has 2 type errors:\n\n1. Line 30: Expected 1-2 arguments, but got 6\n2. 
Line 45: Expected 1-2 arguments, but got 6\n\nTest file - function signature mismatch with current implementation.","status":"closed","priority":3,"issue_type":"task","created_at":"2025-12-17T13:19:11.440829802+01:00","updated_at":"2025-12-17T14:12:45.448191017+01:00","closed_at":"2025-12-17T14:12:45.448191017+01:00","close_reason":"Not planned - FHE test file, low priority","labels":["test"],"dependencies":[{"issue_id":"node-a96","depends_on_id":"node-tsaudit","type":"parent-child","created_at":"2025-12-17T13:19:34.783129099+01:00","created_by":"daemon","metadata":"{}"}]} +{"id":"node-acl","title":"Integrate tokenManager with proxyManager - require valid token","description":"Modify requestTLSNproxy nodeCall:\n- Require tokenId parameter\n- Validate token (owner, domain match, not expired, retries left)\n- Consume retry on spawn attempt\n- Link proxyId to token on success\n- Reject if invalid/expired/exhausted","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-04T10:23:45.799903361+01:00","created_by":"tcsenpai","updated_at":"2026-01-04T10:40:25.816988748+01:00","closed_at":"2026-01-04T10:40:25.816988748+01:00","close_reason":"Modified requestTLSNproxy to require tokenId+owner, validate token, consume retry on success. SDK updated with tlsn_request type.","dependencies":[{"issue_id":"node-acl","depends_on_id":"node-azu","type":"parent-child","created_at":"2026-01-04T10:24:11.637048818+01:00","created_by":"tcsenpai"},{"issue_id":"node-acl","depends_on_id":"node-f23","type":"blocks","created_at":"2026-01-04T10:24:19.114387856+01:00","created_by":"tcsenpai"},{"issue_id":"node-acl","depends_on_id":"node-a1j","type":"blocks","created_at":"2026-01-04T10:24:19.189541228+01:00","created_by":"tcsenpai"}]} {"id":"node-ao8","title":"Implement async/batched terminal output in CategorizedLogger","description":"Replace synchronous console.log in writeToTerminal with a buffered queue that flushes via setImmediate. 
This is the highest impact fix for event loop blocking.\n\nCurrent problem: console.log blocks event loop on every log call (572+ calls in hot paths).\n\nSolution: Buffer terminal output and flush asynchronously using setImmediate or process.nextTick.","status":"closed","priority":0,"issue_type":"feature","created_at":"2025-12-16T12:40:12.417915+01:00","updated_at":"2025-12-16T12:51:17.689035+01:00","closed_at":"2025-12-16T12:51:17.689035+01:00","close_reason":"Implemented async/batched terminal output using setImmediate and process.stdout.write","labels":["logging","performance"]} +{"id":"node-azu","title":"TLSNotary Monetization - Token System \u0026 Storage","description":"Implement paid TLSNotary attestation system:\n- 1 DEM for proxy access (domain-locked, 30min expiry, 3 retries)\n- Storage: 1 DEM base + 1 DEM/KB (on-chain + IPFS support)\n- In-memory token store\n- SDK: single requestAttestation() call","status":"closed","priority":1,"issue_type":"epic","created_at":"2026-01-04T10:23:30.195088029+01:00","created_by":"tcsenpai","updated_at":"2026-01-04T11:04:28.065885907+01:00","closed_at":"2026-01-04T11:04:28.065885907+01:00","close_reason":"All subtasks complete: TLSN_REQUEST handler, token validation, TLSN_STORE handler, SDK module (2.7.6), documentation updated"} {"id":"node-c98","title":"Investigate web2 UrlValidationResult type issues","description":"UrlValidationResult type union needs narrowing - 4 errors:\\n- DAHR.ts:78,79 - accessing .message and .status on ok:true variant\\n- handleWeb2ProxyRequest.ts:67,69 - same issue\\n\\nFix: Add proper type guard to check ok===false before accessing error properties.","notes":"Verified 2025-12-17: 4 errors in 2 files","status":"closed","priority":3,"issue_type":"task","created_at":"2025-12-16T16:34:48.213065679+01:00","updated_at":"2025-12-17T13:29:03.336724926+01:00","closed_at":"2025-12-17T13:29:03.336724926+01:00","close_reason":"Fixed 4 errors by adding explicit type narrowing. 
Root cause: strictNullChecks: false prevents discriminated union narrowing.","dependencies":[{"issue_id":"node-c98","depends_on_id":"node-1ao","type":"parent-child","created_at":"2025-12-16T16:34:48.21368185+01:00","created_by":"daemon","metadata":"{}"},{"issue_id":"node-c98","depends_on_id":"node-tsaudit","type":"parent-child","created_at":"2025-12-17T13:19:34.602900783+01:00","created_by":"daemon","metadata":"{}"}]} {"id":"node-clk","title":"Fix deprecated crypto methods createCipher/createDecipher","description":"Replace deprecated crypto.createCipher and crypto.createDecipher with createCipheriv and createDecipheriv in src/libs/crypto/cryptography.ts. 2 errors.","notes":"Verified 2025-12-17: Still 2 errors in cryptography.ts:64,78. Requires IV migration strategy - NOT auto-fixable without breaking existing encrypted data.","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-16T16:34:21.955553906+01:00","updated_at":"2025-12-17T14:18:59.757649743+01:00","closed_at":"2025-12-17T14:18:59.757649743+01:00","close_reason":"Removed dead code - saveEncrypted/loadEncrypted functions were never called and used deprecated crypto APIs","dependencies":[{"issue_id":"node-clk","depends_on_id":"node-718","type":"parent-child","created_at":"2025-12-16T16:34:21.957145876+01:00","created_by":"daemon","metadata":"{}"},{"issue_id":"node-clk","depends_on_id":"node-1ao","type":"parent-child","created_at":"2025-12-16T16:36:20.341614803+01:00","created_by":"daemon","metadata":"{}"},{"issue_id":"node-clk","depends_on_id":"node-tsaudit","type":"parent-child","created_at":"2025-12-17T13:19:34.536151244+01:00","created_by":"daemon","metadata":"{}"}]} +{"id":"node-cqy","title":"Upgrade node-forge to 1.3.2+ (CVE fixes)","description":"CRITICAL: node-forge 1.3.1 has known CVEs (CVE-2025-66031, CVE-2025-66030, CVE-2025-12816). 
Upgrade to 1.3.2+ in package.json","status":"closed","priority":0,"issue_type":"bug","created_at":"2026-01-04T18:19:40.14428187+01:00","created_by":"tcsenpai","updated_at":"2026-01-04T18:20:34.899306016+01:00","closed_at":"2026-01-04T18:20:34.899306016+01:00","close_reason":"Upgraded node-forge from ^1.3.1 to ^1.3.3 in package.json","dependencies":[{"issue_id":"node-cqy","depends_on_id":"node-e6o","type":"parent-child","created_at":"2026-01-04T18:19:52.891795556+01:00","created_by":"tcsenpai"}]} +{"id":"node-cwr","title":"Add requestTLSNproxy nodeCall handler","description":"Add handler in manageNodeCalls for action requestTLSNproxy. Takes targetUrl, optional authentication {pubKey, signature}. Returns {websocketProxyUrl, targetDomain, expiresIn, proxyId}.","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-03T16:47:19.222700228+01:00","updated_at":"2026-01-03T16:50:32.405410673+01:00","closed_at":"2026-01-03T16:50:32.405410673+01:00","close_reason":"Added requestTLSNproxy case to manageNodeCall.ts","dependencies":[{"issue_id":"node-cwr","depends_on_id":"node-y3o","type":"blocks","created_at":"2026-01-03T16:47:19.223798545+01:00","created_by":"tcsenpai"},{"issue_id":"node-cwr","depends_on_id":"node-2bq","type":"blocks","created_at":"2026-01-03T16:47:30.467286114+01:00","created_by":"tcsenpai"}]} {"id":"node-d4e","title":"Create identity generation script","description":"Create scripts/generate-identities.sh that:\n- Generates 4 unique .demos_identity files\n- Extracts public keys for each\n- Saves to devnet/identities/node{1-4}.identity\n- Outputs public keys for peerlist generation","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-25T12:39:41.717258+01:00","updated_at":"2025-12-25T12:48:29.838821+01:00","closed_at":"2025-12-25T12:48:29.838821+01:00","close_reason":"Created identity generation scripts (generate-identities.sh + 
generate-identity-helper.ts)","dependencies":[{"issue_id":"node-d4e","depends_on_id":"node-00o","type":"parent-child","created_at":"2025-12-25T12:40:11.424393+01:00","created_by":"daemon"}]} {"id":"node-d82","title":"Phase 4: Info Panel and Controls","description":"Implement the header info panel showing node status and the footer with control commands.","status":"closed","priority":1,"issue_type":"feature","created_at":"2025-12-04T15:45:22.750471894+01:00","updated_at":"2025-12-04T16:05:56.222574924+01:00","closed_at":"2025-12-04T16:05:56.222574924+01:00","dependencies":[{"issue_id":"node-d82","depends_on_id":"node-66u","type":"blocks","created_at":"2025-12-04T15:46:29.652996097+01:00","created_by":"daemon","metadata":"{}"},{"issue_id":"node-d82","depends_on_id":"node-wrd","type":"parent-child","created_at":"2025-12-04T15:46:41.831349124+01:00","created_by":"daemon","metadata":"{}"}]} {"id":"node-dc7","title":"Check for rogue console.log outside of CategorizedLogger.ts and report","description":"Audit the codebase for console.log/warn/error calls that bypass CategorizedLogger. These defeat the async buffering optimization and should be converted to use the logger.","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-16T12:53:17.048295+01:00","updated_at":"2025-12-16T13:18:35.677635+01:00","closed_at":"2025-12-16T13:18:35.677635+01:00","close_reason":"Completed audit of rogue console.log calls. Found 500+ calls outside CategorizedLogger. Created CONSOLE_LOG_AUDIT.md categorizing: ~200 HIGH priority (hot paths in consensus, network, peer, blockchain, omniprotocol), ~100 MEDIUM (identity, abstraction, crypto), ~150 LOW (feature modules), ~50 ACCEPTABLE (standalone tools). Report provides migration path for converting to CategorizedLogger.","labels":["audit","logging"]} {"id":"node-dkx","title":"Add warn method to LegacyLoggerAdapter","description":"LegacyLoggerAdapter is missing static warn method. 
startup.ts:121,127 calls LegacyLoggerAdapter.warn which doesn't exist. 2 errors.","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-16T16:34:22.136860234+01:00","updated_at":"2025-12-16T16:37:59.976496812+01:00","closed_at":"2025-12-16T16:37:59.976496812+01:00","dependencies":[{"issue_id":"node-dkx","depends_on_id":"node-718","type":"parent-child","created_at":"2025-12-16T16:34:22.141332061+01:00","created_by":"daemon","metadata":"{}"}]} +{"id":"node-dyd","title":"Use crypto.randomUUID() for token IDs","description":"MEDIUM: tokenManager.ts:61-67 uses Math.random() which is predictable. Replace with crypto.randomUUID() for secure token generation.","status":"closed","priority":1,"issue_type":"bug","created_at":"2026-01-04T18:19:40.476487213+01:00","created_by":"tcsenpai","updated_at":"2026-01-04T18:22:25.397703554+01:00","closed_at":"2026-01-04T18:22:25.397703554+01:00","close_reason":"Replaced Math.random() with crypto.randomUUID() for secure token generation","dependencies":[{"issue_id":"node-dyd","depends_on_id":"node-e6o","type":"parent-child","created_at":"2026-01-04T18:19:53.154173884+01:00","created_by":"tcsenpai"}]} +{"id":"node-e6o","title":"TLSNotary PR #554 Review Fixes","description":"Epic tracking all validated concerns from CodeRabbit/Qodo automated reviews for PR #554","status":"closed","priority":0,"issue_type":"epic","created_at":"2026-01-04T18:19:18.116396154+01:00","created_by":"tcsenpai","updated_at":"2026-01-04T18:24:23.545302424+01:00","closed_at":"2026-01-04T18:24:23.545302424+01:00","close_reason":"All 7 validated concerns from PR #554 review have been addressed"} {"id":"node-e9e","title":"Replace sha256 imports with sha3 in omniprotocol (like ucrypto)","description":"Search for sha256 imports in omniprotocol and replace them with sha3 just as done in ucrypto. 
This is causing bun run type-check to crash.","status":"closed","priority":0,"issue_type":"bug","assignee":"claude","created_at":"2025-12-16T16:52:23.194927913+01:00","updated_at":"2025-12-16T16:56:01.756780774+01:00","closed_at":"2025-12-16T16:56:01.756780774+01:00","labels":["blocking","crypto","typescript"]} +{"id":"node-eav","title":"Update SDK_INTEGRATION.md with paid flow","description":"Update documentation with:\n- New paid flow (token required)\n- Pricing: 1 DEM access, 1 DEM base + 1 DEM/KB storage\n- SDK examples using requestAttestation() and storeProof()\n- Token lifecycle explanation","status":"closed","priority":2,"issue_type":"task","created_at":"2026-01-04T10:24:03.508983065+01:00","created_by":"tcsenpai","updated_at":"2026-01-04T11:04:22.683460137+01:00","closed_at":"2026-01-04T11:04:22.683460137+01:00","close_reason":"Updated SDK_INTEGRATION.md with paid flow, token system, pricing, new endpoints, and complete examples","dependencies":[{"issue_id":"node-eav","depends_on_id":"node-azu","type":"parent-child","created_at":"2026-01-04T10:24:11.879296368+01:00","created_by":"tcsenpai"},{"issue_id":"node-eav","depends_on_id":"node-lzp","type":"blocks","created_at":"2026-01-04T10:24:19.428267629+01:00","created_by":"tcsenpai"}]} {"id":"node-eph","title":"Investigate SDK missing exports (EncryptedTransaction, SubnetPayload)","description":"SDK missing exports causing 4 errors:\\n- EncryptedTransaction not exported from @kynesyslabs/demosdk/types (3 files: parallelNetworks.ts, handleL2PS.ts, GCRSubnetsTxs.ts)\\n- SubnetPayload not exported from @kynesyslabs/demosdk/l2ps (endpointHandlers.ts)\\n\\nMay need SDK update or different import paths.","notes":"Verified 2025-12-17: 4 errors total","status":"closed","priority":2,"issue_type":"task","assignee":"claude","created_at":"2025-12-16T16:34:48.032263681+01:00","updated_at":"2025-12-17T13:54:09.757665526+01:00","closed_at":"2025-12-17T13:54:09.757665526+01:00","close_reason":"Fixed 4 errors: Created local 
types.ts for EncryptedTransaction and SubnetPayload (SDK missing exports), updated imports in parallelNetworks.ts, handleL2PS.ts, GCRSubnetsTxs.ts, and endpointHandlers.ts","dependencies":[{"issue_id":"node-eph","depends_on_id":"node-1ao","type":"parent-child","created_at":"2025-12-16T16:34:48.033202931+01:00","created_by":"daemon","metadata":"{}"},{"issue_id":"node-eph","depends_on_id":"node-tsaudit","type":"parent-child","created_at":"2025-12-17T13:19:34.481247049+01:00","created_by":"daemon","metadata":"{}"}]} {"id":"node-eqk","title":"Write Dockerfile for node containers","description":"Create Dockerfile that:\n- Uses oven/bun base image\n- Installs system dependencies\n- Copies package.json and bun.lockb\n- Runs bun install\n- Sets up entrypoint for ./run --external-db","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-25T12:39:48.562479+01:00","updated_at":"2025-12-25T12:48:30.329626+01:00","closed_at":"2025-12-25T12:48:30.329626+01:00","close_reason":"Created Dockerfile and entrypoint.sh for devnet nodes","dependencies":[{"issue_id":"node-eqk","depends_on_id":"node-00o","type":"parent-child","created_at":"2025-12-25T12:40:16.205138+01:00","created_by":"daemon"}]} {"id":"node-eqn","title":"Phase 3: IPFS Streaming - Large Files","description":"Add streaming support for large file uploads and downloads.\n\n## Tasks\n1. Implement addStream() for chunked uploads\n2. Implement getStream() for streaming downloads\n3. Add progress callback support\n4. Ensure memory efficiency for large files\n5. 
Update RPC endpoints to support streaming","design":"### Streaming Methods\n```typescript\nasync addStream(\n stream: ReadableStream,\n options?: { filename?: string; onProgress?: (bytes: number) =\u003e void }\n): Promise\u003cstring\u003e\n\nasync getStream(\n cid: string,\n options?: { onProgress?: (bytes: number) =\u003e void }\n): Promise\u003cReadableStream\u003e\n```\n\n### RPC Streaming\n- POST /ipfs/add with Transfer-Encoding: chunked\n- GET /ipfs/:cid returns streaming response\n- Progress via X-Progress header or SSE\n\n### Memory Considerations\n- Never load full file into memory\n- Use Bun's native streaming capabilities\n- Chunk size: 256KB default","acceptance_criteria":"- [ ] Can upload 1GB+ file without memory issues\n- [ ] Can download 1GB+ file without memory issues\n- [ ] Progress callbacks fire during transfer\n- [ ] RPC endpoints support chunked encoding\n- [ ] Memory usage stays bounded during large transfers","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-24T14:35:58.493566+01:00","updated_at":"2025-12-25T10:39:44.545906+01:00","closed_at":"2025-12-25T10:39:44.545906+01:00","close_reason":"Implemented IPFS streaming support for large files: addStream() for chunked uploads and getStream() for streaming downloads with progress callbacks. Added RPC endpoints ipfsAddStream and ipfsGetStream with session-based chunk management. 
Uses 256KB chunks for memory-efficient transfers of 1GB+ files.","dependencies":[{"issue_id":"node-eqn","depends_on_id":"node-qz1","type":"parent-child","created_at":"2025-12-24T14:36:11.288685+01:00","created_by":"daemon"},{"issue_id":"node-eqn","depends_on_id":"node-6p0","type":"blocks","created_at":"2025-12-24T14:36:21.49303+01:00","created_by":"daemon"}]} +{"id":"node-f23","title":"Create tokenManager.ts - in-memory token store","description":"Create src/features/tlsnotary/tokenManager.ts:\n- AttestationToken interface (id, owner, domain, status, createdAt, expiresAt, retriesLeft, txHash, proxyId)\n- In-memory Map storage in sharedState\n- createToken(), validateToken(), consumeRetry(), markCompleted(), cleanupExpired()\n- Token expiry: 30 min, retries: 3","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-04T10:23:36.226234827+01:00","created_by":"tcsenpai","updated_at":"2026-01-04T10:25:50.305651767+01:00","closed_at":"2026-01-04T10:25:50.305651767+01:00","close_reason":"Created tokenManager.ts with full token lifecycle management","dependencies":[{"issue_id":"node-f23","depends_on_id":"node-azu","type":"parent-child","created_at":"2026-01-04T10:24:11.48502456+01:00","created_by":"tcsenpai"}]} +{"id":"node-gia","title":"Add try/catch around JSON.parse in control.ts","description":"MEDIUM: control.ts:95-98 - Parsing peer metadata can crash on malformed data. 
Wrap in try/catch.","status":"closed","priority":1,"issue_type":"bug","created_at":"2026-01-04T18:19:40.855708336+01:00","created_by":"tcsenpai","updated_at":"2026-01-04T18:24:23.455960912+01:00","closed_at":"2026-01-04T18:24:23.455960912+01:00","close_reason":"Added try/catch around JSON.parse for peer metadata to handle malformed data gracefully","dependencies":[{"issue_id":"node-gia","depends_on_id":"node-e6o","type":"parent-child","created_at":"2026-01-04T18:19:53.435072097+01:00","created_by":"tcsenpai"}]} +{"id":"node-jhq","title":"PR #554 Review Round 2 Fixes","description":"Address CodeRabbit review feedback from second review pass","status":"open","priority":1,"issue_type":"epic","created_at":"2026-01-05T15:26:12.048447123+01:00","created_by":"tcsenpai","updated_at":"2026-01-05T15:26:12.048447123+01:00"} {"id":"node-kaa","title":"Phase 7: IPFS RPC Handler Integration","description":"Connect SDK IPFS operations with node RPC transaction handlers for end-to-end functionality.\n\n## Tasks\n1. Verify SDK version updated in node package.json\n2. Integrate tokenomics into ipfsOperations.ts handlers\n3. Ensure proper cost deduction during IPFS transactions\n4. Wire up fee distribution to hosting RPC\n5. End-to-end flow testing\n\n## Files\n- src/features/ipfs/ipfsOperations.ts - Transaction handlers\n- src/libs/blockchain/routines/ipfsTokenomics.ts - Pricing calculations\n- src/libs/blockchain/routines/executeOperations.ts - Transaction dispatch","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-24T19:46:37.970243+01:00","updated_at":"2025-12-25T10:16:34.711273+01:00","closed_at":"2025-12-25T10:16:34.711273+01:00","close_reason":"Phase 7 complete - RPC handler integration verified. The tokenomics module was already fully integrated into ipfsOperations.ts handlers (ipfsAdd, ipfsPin, ipfsUnpin) with cost validation, fee distribution, and state management. 
ESLint verification passed for IPFS files.","dependencies":[{"issue_id":"node-kaa","depends_on_id":"node-qz1","type":"blocks","created_at":"2025-12-24T19:46:37.971035+01:00","created_by":"daemon"}]} +{"id":"node-lhs","title":"Token creation error should propagate failure (atomicity)","description":"handleNativeOperations.ts:78-94 - If createToken fails, fee is burned but no token created. User loses DEM. Should rethrow error after logging.","status":"in_progress","priority":0,"issue_type":"bug","created_at":"2026-01-05T15:26:25.040414844+01:00","created_by":"tcsenpai","updated_at":"2026-01-05T15:26:47.908210857+01:00","dependencies":[{"issue_id":"node-lhs","depends_on_id":"node-jhq","type":"blocks","created_at":"2026-01-05T15:26:25.057280284+01:00","created_by":"tcsenpai"}]} +{"id":"node-lzp","title":"[SDK] Add tlsnotary module - requestAttestation() + storeProof()","description":"SDK changes required in ../sdks:\n- Add tlsnotary module to SDK\n- requestAttestation({ targetUrl }): submits TLSN_REQUEST tx, waits confirm, calls requestTLSNproxy, returns { proxyUrl, tokenId, expiresAt }\n- storeProof(tokenId, proof, { storage }): submits TLSN_STORE tx\n- calculateStorageFee(proofSizeKB): 1 + (KB * 1) DEM\n\n⚠️ REQUIRES SDK PUBLISH - will wait for user confirmation before proceeding with dependent tasks","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-04T10:23:58.611107339+01:00","created_by":"tcsenpai","updated_at":"2026-01-04T11:02:56.775352583+01:00","closed_at":"2026-01-04T11:02:56.775352583+01:00","close_reason":"SDK 2.7.6 published with TLSNotaryService, helpers, and NativeTablesHashes update. 
Node hashGCR.ts updated to include native_tlsnotary in integrity hash.","dependencies":[{"issue_id":"node-lzp","depends_on_id":"node-azu","type":"parent-child","created_at":"2026-01-04T10:24:11.791222752+01:00","created_by":"tcsenpai"},{"issue_id":"node-lzp","depends_on_id":"node-nyk","type":"blocks","created_at":"2026-01-04T10:24:19.346765305+01:00","created_by":"tcsenpai"}]} +{"id":"node-nyk","title":"Add TLSN_STORE tx handler - on-chain + IPFS storage","description":"Add transaction handler for TLSN_STORE:\n- Fee: 1 DEM base + 1 DEM per KB\n- Data: { tokenId, proof, storage: \"onchain\" | \"ipfs\" }\n- Validate token is completed (attestation done)\n- On-chain: store full proof in GCR\n- IPFS: store hash on-chain, proof to IPFS (prep for Demos swarm)\n- Mark token as stored","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-04T10:23:51.621036+01:00","created_by":"tcsenpai","updated_at":"2026-01-04T10:49:45.991546+01:00","closed_at":"2026-01-04T10:51:49.537326419+01:00","dependencies":[{"issue_id":"node-nyk","depends_on_id":"node-azu","type":"parent-child","created_at":"2026-01-04T10:24:11.714861115+01:00","created_by":"tcsenpai"},{"issue_id":"node-nyk","depends_on_id":"node-acl","type":"blocks","created_at":"2026-01-04T10:24:19.267337997+01:00","created_by":"tcsenpai"}]} {"id":"node-of0","title":"Replace sha256 imports with sha3 in omniprotocol (like ucrypto)","description":"Search for sha256 imports in omniprotocol and replace them with sha3, following the same pattern used in ucrypto.","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-16T16:41:22.731257623+01:00","updated_at":"2025-12-16T17:05:26.778690573+01:00","closed_at":"2025-12-16T17:05:26.778695412+01:00"} {"id":"node-p7v","title":"Add --external-db flag to ./run script","description":"Modify the ./run script to accept --external-db or -e flag that:\n- Skips internal Postgres docker-compose management\n- Expects DATABASE_URL env var to be set\n- Skips port 
availability check for PG_PORT\n- Still runs all other checks and bun start:bun","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-25T12:39:37.046892+01:00","updated_at":"2025-12-25T12:48:28.813599+01:00","closed_at":"2025-12-25T12:48:28.813599+01:00","close_reason":"Added --external-db flag to ./run script","dependencies":[{"issue_id":"node-p7v","depends_on_id":"node-00o","type":"parent-child","created_at":"2025-12-25T12:40:10.266421+01:00","created_by":"daemon"}]} {"id":"node-qz1","title":"IPFS Integration for Demos Network","description":"Integrate IPFS (Kubo) into Demos nodes with FULL BLOCKCHAIN INTEGRATION for decentralized file storage and P2P content distribution.\n\n## Key Architecture Decisions\n- **Reads**: demosCall (gas-free) → ipfs_get, ipfs_pins, ipfs_status\n- **Writes**: Demos Transactions (on-chain) → IPFS_ADD, IPFS_PIN, IPFS_UNPIN\n- **State**: Account-level ipfs_pins field in StateDB\n- **Economics**: Full tokenomics (pay to pin, earn to host)\n- **Infrastructure**: Kubo v0.26.0 via Docker Compose (internal network)\n\n## IMPORTANT: SDK Dependency\nTransaction types are defined in `../sdks` (@kynesyslabs/demosdk). Each phase involving SDK changes requires:\n1. Make changes in ../sdks\n2. User manually publishes new SDK version\n3. Update SDK version in node package.json\n4. Continue with node implementation\n\n## Scope (MVP for Testnet)\n- Phase 1: Infrastructure (Kubo Docker + IPFSManager)\n- Phase 2: Account State Schema (ipfs_pins field)\n- Phase 3: demosCall Handlers (gas-free reads)\n- Phase 4: Transaction Types (IPFS_ADD, etc.) 
- **SDK FIRST**\n- Phase 5: Tokenomics (costs + rewards)\n- Phase 6: SDK Integration (sdk.ipfs module) - **SDK FIRST**\n- Phase 7: Streaming (large files)\n- Phase 8: Cluster Sync (private network)\n- Phase 9: Public Bridge (optional, lower priority)\n\n## Acceptance Criteria\n- Kubo container starts with Demos node\n- Account state includes ipfs_pins field\n- demosCall handlers work for reads\n- Transaction types implemented (SDK + Node)\n- Tokenomics functional (pay to pin, earn to host)\n- SDK sdk.ipfs module works end-to-end\n- Large files stream without memory issues\n- Private network isolates Demos nodes","design":"## Technical Design\n\n### Infrastructure Layer\n- Image: ipfs/kubo:v0.26.0\n- Network: Docker internal only\n- API: http://demos-ipfs:5001 (internal)\n- Storage: Dedicated block store\n\n### Account State Schema\n```typescript\ninterface AccountIPFSState {\n pins: {\n cid: string;\n size: number;\n timestamp: number;\n metadata?: Record\u003cstring, unknown\u003e;\n }[];\n totalPinnedBytes: number;\n earnedRewards: bigint;\n paidCosts: bigint;\n}\n```\n\n### demosCall Operations (Gas-Free)\n- ipfs_get(cid) → content bytes\n- ipfs_pins(address?) 
→ list of pins\n- ipfs_status() → node IPFS health\n\n### Transaction Types\n- IPFS_ADD → Upload content, auto-pin, pay cost\n- IPFS_PIN → Pin existing CID, pay cost\n- IPFS_UNPIN → Remove pin, potentially refund\n- IPFS_REQUEST_PIN → Request cluster-wide pin\n\n### Tokenomics Model\n- Cost to Pin: Based on size + duration\n- Reward to Host: Proportional to hosted bytes\n- Reward Distribution: Per epoch/block\n\n### SDK Interface (../sdks)\n- sdk.ipfs.get(cid): Promise\u003cBuffer\u003e\n- sdk.ipfs.pins(address?): Promise\u003cPinInfo[]\u003e\n- sdk.ipfs.add(content): Promise\u003c{tx, cid}\u003e\n- sdk.ipfs.pin(cid): Promise\u003c{tx}\u003e\n- sdk.ipfs.unpin(cid): Promise\u003c{tx}\u003e","acceptance_criteria":"- [ ] Kubo container starts with Demos node\n- [ ] Can add content and receive CID\n- [ ] Can retrieve content by CID\n- [ ] Can pin/unpin content\n- [ ] Large files stream without memory issues\n- [ ] Private network isolates Demos nodes\n- [ ] Optional public IPFS bridge works","status":"closed","priority":1,"issue_type":"epic","created_at":"2025-12-24T14:35:10.899456+01:00","updated_at":"2025-12-25T12:28:04.668799+01:00","closed_at":"2025-12-25T12:28:04.668799+01:00","close_reason":"All 8 phases completed: Phase 1 (Docker + IPFSManager), Phase 2 (Core Operations + Account State), Phase 3 (demosCall Handlers + Streaming), Phase 4 (Transaction Types + Cluster Sync), Phase 5 (Tokenomics + Public Bridge), Phase 6 (SDK Integration), Phase 7 (RPC Handler Integration). 
All acceptance criteria met: Kubo container integration, add/get/pin operations, large file streaming, private network isolation, and optional public gateway bridge."} @@ -46,11 +68,15 @@ {"id":"node-rgw","title":"Add observability helpers (logs, attach, tmux multi-view)","description":"Add convenience scripts for observing the devnet:\n- scripts/logs.sh: View logs from all or specific nodes\n- scripts/attach.sh: Attach to a specific node container\n- scripts/watch-all.sh: tmux-style multi-pane view of all 4 nodes","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-25T12:50:40.313401+01:00","updated_at":"2025-12-25T12:51:47.16427+01:00","closed_at":"2025-12-25T12:51:47.16427+01:00","close_reason":"Added logs.sh, attach.sh, and watch-all.sh (tmux multi-pane) observability scripts","dependencies":[{"issue_id":"node-rgw","depends_on_id":"node-00o","type":"parent-child","created_at":"2025-12-25T12:50:46.121652+01:00","created_by":"daemon"}]} {"id":"node-s48","title":"Phase 3: Log Display with Tabs","description":"Implement the tabbed log display with filtering by category. Users can switch between All logs and category-specific views.","status":"closed","priority":1,"issue_type":"feature","created_at":"2025-12-04T15:45:22.577437178+01:00","updated_at":"2025-12-04T16:05:56.159601702+01:00","closed_at":"2025-12-04T16:05:56.159601702+01:00","dependencies":[{"issue_id":"node-s48","depends_on_id":"node-66u","type":"blocks","created_at":"2025-12-04T15:46:29.57958254+01:00","created_by":"daemon","metadata":"{}"},{"issue_id":"node-s48","depends_on_id":"node-wrd","type":"parent-child","created_at":"2025-12-04T15:46:41.781338648+01:00","created_by":"daemon","metadata":"{}"}]} {"id":"node-sl9","title":"Phase 2: Account State Schema - ipfs_pins field","description":"Add IPFS-related fields to account state schema in StateDB.\n\n## Tasks\n1. Define AccountIPFSState interface\n2. Add ipfs field to account state schema\n3. Create migration if needed\n4. 
Add helper methods for pin management\n5. Test state persistence and retrieval","design":"### Account State Extension\n```typescript\ninterface AccountIPFSState {\n pins: IPFSPin[];\n totalPinnedBytes: number;\n earnedRewards: bigint;\n paidCosts: bigint;\n}\n\ninterface IPFSPin {\n cid: string;\n size: number;\n timestamp: number;\n expiresAt?: number;\n metadata?: Record\u003cstring, unknown\u003e;\n}\n```\n\n### State Location\n- Add to existing account state structure\n- Similar pattern to UD (Universal Domain) state\n\n### Helper Methods\n- addPin(address, pin): void\n- removePin(address, cid): void\n- getPins(address): IPFSPin[]\n- updateRewards(address, amount): void","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-24T14:42:35.941455+01:00","updated_at":"2025-12-24T17:19:57.279975+01:00","closed_at":"2025-12-24T17:19:57.279975+01:00","close_reason":"Completed - IPFSTypes.ts, GCR_Main.ts ipfs field, GCRIPFSRoutines.ts all implemented and committed","dependencies":[{"issue_id":"node-sl9","depends_on_id":"node-qz1","type":"parent-child","created_at":"2025-12-24T14:43:18.738305+01:00","created_by":"daemon"},{"issue_id":"node-sl9","depends_on_id":"node-2pd","type":"blocks","created_at":"2025-12-24T14:44:46.797624+01:00","created_by":"daemon"}]} +{"id":"node-tly","title":"Add validation for nativePayload.args before destructuring","description":"handleGCR.ts:419-424 - Code assumes nativePayload.args exists. 
Add validation to prevent runtime errors.","status":"open","priority":1,"issue_type":"bug","created_at":"2026-01-05T15:26:25.260059562+01:00","created_by":"tcsenpai","updated_at":"2026-01-05T15:26:25.260059562+01:00","dependencies":[{"issue_id":"node-tly","depends_on_id":"node-jhq","type":"blocks","created_at":"2026-01-05T15:26:25.261364409+01:00","created_by":"tcsenpai"}]} {"id":"node-tsaudit","title":"TypeScript Type Errors Audit (24 errors remaining)","description":"Comprehensive TypeScript type-check audit performed 2025-12-17.\n\n**Summary**: 16 type errors remaining across 4 categories (fixed 18 + excluded 4 test errors)\n\n| Category | Errors | Issue | Priority | Status |\n|----------|--------|-------|----------|--------|\n| OmniProtocol | 11 | node-9x8 | P2 | Open |\n| Deprecated Crypto | 2 | node-clk | P1 | Open |\n| FHE Test | 2 | node-a96 | P3 | Open |\n| Utils (showPubkey) | 1 | - | P3 | Untracked |\n| ~~SDK Missing Exports~~ | ~~4~~ | ~~node-eph~~ | ~~P2~~ | ✅ Fixed |\n| ~~UrlValidationResult~~ | ~~4~~ | ~~node-c98~~ | ~~P3~~ | ✅ Fixed |\n| ~~IMP Signaling~~ | ~~2~~ | ~~node-u9a~~ | ~~P2~~ | ✅ Fixed |\n| ~~Blockchain Routines~~ | ~~2~~ | ~~node-01y~~ | ~~P2~~ | ✅ Fixed |\n| ~~Network Module~~ | ~~6~~ | ~~node-tus~~ | ~~P2~~ | ✅ Fixed |\n| ~~Utils/Tests~~ | ~~4~~ | ~~node-2e8~~ | ~~P3~~ | ✅ Excluded |\n\n**Progress**: 16 errors remaining (58% reduction from 38)","notes":"Progress: 5 errors remaining (33/38 fixed, 87%). Remaining: node-clk (2 crypto), node-a96 (2 FHE), showPubkey.ts (1 untracked)","status":"closed","priority":2,"issue_type":"epic","created_at":"2025-12-17T13:19:23.96669023+01:00","updated_at":"2025-12-17T14:23:18.1940338+01:00","closed_at":"2025-12-17T14:23:18.1940338+01:00","close_reason":"TypeScript audit complete. 36/38 errors fixed (95%), 2 remaining in fhe_test.ts (closed as not planned). 
Production errors: 0."} {"id":"node-tus","title":"Fix Network module type errors (6 errors)","description":"Multiple network module files have type errors:\n\n**index.ts** (1): Module server_rpc has no exported member 'default'\n\n**manageNativeBridge.ts** (2):\n- Line 26: string not assignable to { type, data }\n- Line 43: Comparison between unrelated types (chain types vs 'EVM')\n\n**handleIdentityRequest.ts** (2):\n- Line 79: UDIdentityAssignPayload type mismatch between SDK type locations\n- Line 105: Property 'method' does not exist on type 'never'\n\n**server_rpc.ts** (1): Line 292: string[] not assignable to { username, points }[]","status":"closed","priority":2,"issue_type":"task","assignee":"claude","created_at":"2025-12-17T13:19:11.581799236+01:00","updated_at":"2025-12-17T13:48:37.501186037+01:00","closed_at":"2025-12-17T13:48:37.501186037+01:00","close_reason":"Fixed all 6 errors: (1) index.ts - changed to named exports, (2-3) manageNativeBridge.ts - fixed signature type and originChainType, (4-5) handleIdentityRequest.ts - type assertions for SDK mismatch and never type, (6) server_rpc.ts - fixed awardPoints param type","dependencies":[{"issue_id":"node-tus","depends_on_id":"node-tsaudit","type":"parent-child","created_at":"2025-12-17T13:19:34.907311538+01:00","created_by":"daemon","metadata":"{}"}]} {"id":"node-twi","title":"Phase 4: Migrate LOW priority console.log calls","description":"Convert LOW priority console.log calls in feature modules (cold paths):\n\n- src/index.ts (startup/shutdown - lines 387, 477-565)\n- src/features/multichain/*.ts\n- src/features/fhe/*.ts\n- src/features/bridges/*.ts\n- src/features/web2/*.ts\n- src/features/InstantMessagingProtocol/*.ts\n- src/features/activitypub/*.ts\n- src/features/pgp/*.ts\n\nNote: src/index.ts startup logs are acceptable but can be converted for consistency.\n\nEstimated: ~150 
calls","status":"closed","priority":3,"issue_type":"task","created_at":"2025-12-16T13:24:09.572624+01:00","updated_at":"2025-12-16T17:01:54.43950117+01:00","closed_at":"2025-12-16T17:01:54.43950117+01:00","labels":["logging"],"dependencies":[{"issue_id":"node-twi","depends_on_id":"node-7d8","type":"parent-child","created_at":"2025-12-16T13:24:23.884492+01:00","created_by":"daemon","metadata":"{}"},{"issue_id":"node-twi","depends_on_id":"node-9de","type":"blocks","created_at":"2025-12-16T13:24:25.334953+01:00","created_by":"daemon","metadata":"{}"}]} +{"id":"node-u6n","title":"Fix incorrect exhaustive check pattern causing compile error","description":"handleNativeOperations.ts:161-166 - const _exhaustiveCheck: never = nativePayload will fail to compile. Remove exhaustive check, just log warning.","status":"open","priority":0,"issue_type":"bug","created_at":"2026-01-05T15:26:25.149753763+01:00","created_by":"tcsenpai","updated_at":"2026-01-05T15:26:25.149753763+01:00","dependencies":[{"issue_id":"node-u6n","depends_on_id":"node-jhq","type":"blocks","created_at":"2026-01-05T15:26:25.150627898+01:00","created_by":"tcsenpai"}]} {"id":"node-u9a","title":"Fix IMP Signaling Server type errors (2 errors)","description":"src/features/InstantMessagingProtocol/signalingServer/signalingServer.ts has 2 type errors:\n\n1. Line 104: Expected 1-2 arguments, but got 3\n2. 
Line 292: 'signedData' does not exist in type 'signedObject'\n\nNeed to check function signatures and type definitions.","status":"closed","priority":2,"issue_type":"task","assignee":"claude","created_at":"2025-12-17T13:19:11.366110655+01:00","updated_at":"2025-12-17T13:42:08.193891167+01:00","closed_at":"2025-12-17T13:42:08.193891167+01:00","close_reason":"Fixed both errors: (1) Combined 3 log.debug args into template literal, (2) Changed signedData to signature to match SDK's signedObject type","dependencies":[{"issue_id":"node-u9a","depends_on_id":"node-tsaudit","type":"parent-child","created_at":"2025-12-17T13:19:34.72184989+01:00","created_by":"daemon","metadata":"{}"}]} +{"id":"node-uak","title":"TLSNotary Backend Integration","description":"Integrated TLSNotary feature for HTTPS attestation into Demos node.\n\n## Files Created\n- libs/tlsn/libtlsn_notary.so - Pre-built Rust library\n- src/features/tlsnotary/ffi.ts - FFI bindings\n- src/features/tlsnotary/TLSNotaryService.ts - Service class\n- src/features/tlsnotary/routes.ts - BunServer routes\n- src/features/tlsnotary/index.ts - Feature entry point\n\n## Files Modified\n- src/index.ts - Added initialization and shutdown\n- src/libs/network/server_rpc.ts - Route registration\n\n## Routes\n- GET /tlsnotary/health\n- GET /tlsnotary/info\n- POST /tlsnotary/verify","status":"closed","priority":1,"issue_type":"feature","created_at":"2026-01-03T10:09:43.384641022+01:00","updated_at":"2026-01-03T10:10:31.097839+01:00","closed_at":"2026-01-03T10:12:18.183848095+01:00"} {"id":"node-ueo","title":"Reduce consensus logging verbosity in hot paths","description":"Reduce or optimize logging in consensus module (208 log calls total, 31 in PoRBFT.ts alone, 110 in secretaryManager.ts).\n\nOptions:\n- Guard debug logs with environment check\n- Use lazy evaluation for expensive log formatting\n- Remove JSON.stringify with pretty-print from hot paths\n- Convert verbose debug logs to trace level or remove 
entirely","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-16T12:40:12.969535+01:00","updated_at":"2025-12-16T12:54:58.945823+01:00","closed_at":"2025-12-16T12:54:58.945823+01:00","close_reason":"Demoted verbose info logs to debug, removed pretty-print from 21 JSON.stringify calls in consensus module","labels":["consensus","performance"]} +{"id":"node-vt5","title":"Add tlsnotary state to sharedState.ts","description":"Add tlsnotary property with TLSNotaryState type (proxies Map, portPool). Initialize in constructor.","status":"closed","priority":1,"issue_type":"task","created_at":"2026-01-03T16:47:19.011495919+01:00","updated_at":"2026-01-03T16:49:38.26880606+01:00","closed_at":"2026-01-03T16:49:38.26880606+01:00","close_reason":"Added tlsnotary: TLSNotaryState property to SharedState class","dependencies":[{"issue_id":"node-vt5","depends_on_id":"node-y3o","type":"blocks","created_at":"2026-01-03T16:47:19.012697561+01:00","created_by":"tcsenpai"}]} {"id":"node-vuy","title":"Write docker-compose.yml with 4 nodes + postgres","description":"Create the main docker-compose.yml with:\n- postgres service (4 databases via init script)\n- node-1, node-2, node-3, node-4 services\n- Proper networking (demos-network bridge)\n- Volume mounts for source code (hybrid build)\n- Environment variables for each node\n- Health checks and dependencies","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-25T12:39:45.981822+01:00","updated_at":"2025-12-25T12:49:33.435966+01:00","closed_at":"2025-12-25T12:49:33.435966+01:00","close_reason":"Created docker-compose.yml with postgres + 4 node services, proper networking, health checks, volume 
mounts","dependencies":[{"issue_id":"node-vuy","depends_on_id":"node-00o","type":"parent-child","created_at":"2025-12-25T12:40:14.129961+01:00","created_by":"daemon"},{"issue_id":"node-vuy","depends_on_id":"node-p7v","type":"blocks","created_at":"2025-12-25T12:40:32.883249+01:00","created_by":"daemon"},{"issue_id":"node-vuy","depends_on_id":"node-362","type":"blocks","created_at":"2025-12-25T12:40:33.536282+01:00","created_by":"daemon"}]} {"id":"node-vzx","title":"Investigate multichain executor type mismatches","description":"aptos_balance_query.ts, aptos_contract_read.ts, aptos_contract_write.ts, balance_query.ts have TS2345 errors passing wrong types. Need to understand expected types.","status":"closed","priority":3,"issue_type":"task","created_at":"2025-12-16T16:34:48.131601131+01:00","updated_at":"2025-12-17T13:18:44.515877671+01:00","closed_at":"2025-12-17T13:18:44.515877671+01:00","close_reason":"No longer present in type-check output - likely fixed or code changed","dependencies":[{"issue_id":"node-vzx","depends_on_id":"node-1ao","type":"parent-child","created_at":"2025-12-16T16:34:48.132420936+01:00","created_by":"daemon","metadata":"{}"}]} {"id":"node-w8x","title":"Phase 6: Testing and Polish","description":"Final testing, edge case handling, documentation, and polish for the TUI implementation.","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-04T15:45:23.120288464+01:00","updated_at":"2025-12-08T14:56:14.859612652+01:00","closed_at":"2025-12-08T14:56:14.85961676+01:00","dependencies":[{"issue_id":"node-w8x","depends_on_id":"node-67f","type":"blocks","created_at":"2025-12-04T15:46:29.841151783+01:00","created_by":"daemon","metadata":"{}"},{"issue_id":"node-w8x","depends_on_id":"node-wrd","type":"parent-child","created_at":"2025-12-04T15:46:41.94294082+01:00","created_by":"daemon","metadata":"{}"}]} @@ -60,4 +86,6 @@ {"id":"node-wug","title":"Add CMD to LogCategory type","description":"TUIManager.ts:937 - \"CMD\" is not assignable 
to LogCategory. Need to add \"CMD\" to the LogCategory type definition. 1 error.","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-16T16:34:22.023244327+01:00","updated_at":"2025-12-16T16:38:46.053070327+01:00","closed_at":"2025-12-16T16:38:46.053070327+01:00","dependencies":[{"issue_id":"node-wug","depends_on_id":"node-718","type":"parent-child","created_at":"2025-12-16T16:34:22.024717493+01:00","created_by":"daemon","metadata":"{}"}]} {"id":"node-wzh","title":"Phase 3: demosCall Handlers - IPFS Reads","description":"Implement gas-free demosCall handlers for IPFS read operations.\n\n## Tasks\n1. Create ipfs_get handler - retrieve content by CID\n2. Create ipfs_pins handler - list pins for address\n3. Create ipfs_status handler - node IPFS health\n4. Register handlers in demosCall router\n5. Add input validation and error handling","design":"### Handler Signatures\n```typescript\n// ipfs_get - Retrieve content by CID\nipfs_get({ cid: string }): Promise\u003c{ content: string }\u003e // base64 encoded\n\n// ipfs_pins - List pins for address (or caller)\nipfs_pins({ address?: string }): Promise\u003c{ pins: IPFSPin[] }\u003e\n\n// ipfs_status - Node IPFS health\nipfs_status(): Promise\u003c{\n healthy: boolean;\n peerId: string;\n peers: number;\n repoSize: number;\n}\u003e\n```\n\n### Integration\n- Add to existing demosCall handler structure\n- Use IPFSManager for actual IPFS operations\n- Read pin metadata from account state","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-24T14:42:36.765236+01:00","updated_at":"2025-12-24T17:25:08.575406+01:00","closed_at":"2025-12-24T17:25:08.575406+01:00","close_reason":"Completed - ipfsPins handler using GCRIPFSRoutines for account-based pin 
queries","dependencies":[{"issue_id":"node-wzh","depends_on_id":"node-qz1","type":"parent-child","created_at":"2025-12-24T14:43:19.233006+01:00","created_by":"daemon"},{"issue_id":"node-wzh","depends_on_id":"node-sl9","type":"blocks","created_at":"2025-12-24T14:44:47.356856+01:00","created_by":"daemon"}]} {"id":"node-xhh","title":"Phase 4: Transaction Types - IPFS Writes (SDK + Node)","description":"Implement on-chain transaction types for IPFS write operations.\n\n⚠️ **SDK DEPENDENCY**: Transaction types must be defined in ../sdks FIRST.\nAfter SDK changes, user must manually publish new SDK version and update node package.json.\n\n## Tasks (SDK - ../sdks)\n1. Define IPFS transaction type constants in SDK\n2. Create transaction payload interfaces\n3. Add transaction builder functions\n4. Publish new SDK version (USER ACTION)\n5. Update SDK in node package.json (USER ACTION)\n\n## Tasks (Node)\n6. Implement IPFS_ADD transaction handler\n7. Implement IPFS_PIN transaction handler\n8. Implement IPFS_UNPIN transaction handler\n9. Add transaction validation logic\n10. Update account state on successful transactions\n11. Emit events for indexing","design":"### Transaction Types\n```typescript\nenum IPFSTransactionType {\n IPFS_ADD = 'IPFS_ADD', // Upload + auto-pin\n IPFS_PIN = 'IPFS_PIN', // Pin existing CID\n IPFS_UNPIN = 'IPFS_UNPIN', // Remove pin\n}\n```\n\n### Transaction Payloads\n```typescript\ninterface IPFSAddPayload {\n content: string; // base64 encoded\n filename?: string;\n metadata?: Record\u003cstring, unknown\u003e;\n}\n\ninterface IPFSPinPayload {\n cid: string;\n duration?: number; // blocks or time\n}\n\ninterface IPFSUnpinPayload {\n cid: string;\n}\n```\n\n### Handler Flow\n1. Validate transaction\n2. Calculate cost (tokenomics)\n3. Deduct from sender balance\n4. Execute IPFS operation\n5. Update account state\n6. 
Emit event","notes":"BLOCKING: User must publish SDK and update node before node-side implementation can begin.","status":"closed","priority":1,"issue_type":"task","created_at":"2025-12-24T14:42:37.58695+01:00","updated_at":"2025-12-24T18:42:27.256065+01:00","closed_at":"2025-12-24T18:42:27.256065+01:00","close_reason":"Implemented IPFS transaction handlers (ipfsOperations.ts) with ipfs_add, ipfs_pin, ipfs_unpin operations. Integrated into executeOperations.ts switch dispatch. SDK types from v2.6.0 are used.","dependencies":[{"issue_id":"node-xhh","depends_on_id":"node-qz1","type":"parent-child","created_at":"2025-12-24T14:43:19.73201+01:00","created_by":"daemon"},{"issue_id":"node-xhh","depends_on_id":"node-wzh","type":"blocks","created_at":"2025-12-24T14:44:47.911725+01:00","created_by":"daemon"}]} +{"id":"node-xqa","title":"Clean up redundant extractDomain call","description":"handleNativeOperations.ts:57-65 - Domain extracted for validation but result only used for logging. Simplify.","status":"open","priority":2,"issue_type":"chore","created_at":"2026-01-05T15:26:25.481409591+01:00","created_by":"tcsenpai","updated_at":"2026-01-05T15:26:25.481409591+01:00","dependencies":[{"issue_id":"node-xqa","depends_on_id":"node-jhq","type":"blocks","created_at":"2026-01-05T15:26:25.482344421+01:00","created_by":"tcsenpai"}]} +{"id":"node-y3o","title":"TLSNotary WebSocket Proxy Manager","description":"Dynamic wstcp proxy spawning system for domain-specific TLS attestation requests. 
Manages port pool (55000-57000), spawns wstcp processes on-demand per target domain, auto-kills idle proxies after 30s, and exposes via nodeCall requestTLSNproxy action.","status":"closed","priority":1,"issue_type":"epic","created_at":"2026-01-03T16:47:04.791583636+01:00","updated_at":"2026-01-03T16:51:13.676611832+01:00","closed_at":"2026-01-03T16:51:13.676611832+01:00","close_reason":"All subtasks completed: portAllocator.ts, proxyManager.ts, sharedState.ts update, nodeCall handler, SDK documentation"} {"id":"node-zmh","title":"Phase 4: IPFS Cluster Sync - Private Network","description":"Configure private IPFS network for Demos nodes with cluster pinning.\n\n## Tasks\n1. Generate and manage swarm key\n2. Configure bootstrap nodes\n3. Implement peer discovery using Demos node list\n4. Add cluster-wide pinning (pin on multiple nodes)\n5. Monitor peer connections","design":"### Swarm Key Management\n- Generate key: 64-byte hex string\n- Store in config or environment\n- Distribute to all Demos nodes\n\n### Bootstrap Configuration\n- Remove public bootstrap nodes\n- Add Demos bootstrap nodes dynamically\n- Use Demos node discovery for peer list\n\n### Cluster Pinning\n```typescript\nasync clusterPin(cid: string, replication?: number): Promise\u003cvoid\u003e\nasync getClusterPeers(): Promise\u003cPeerInfo[]\u003e\nasync connectPeer(multiaddr: string): Promise\u003cvoid\u003e\n```\n\n### Environment Variables\n- DEMOS_IPFS_SWARM_KEY\n- DEMOS_IPFS_BOOTSTRAP_NODES\n- LIBP2P_FORCE_PNET=1","acceptance_criteria":"- [ ] Swarm key generated and distributed\n- [ ] Nodes only connect to other Demos nodes\n- [ ] Peer discovery works via Demos network\n- [ ] Content replicates across cluster\n- [ ] Public IPFS nodes cannot connect","status":"closed","priority":2,"issue_type":"task","created_at":"2025-12-24T14:35:59.315614+01:00","updated_at":"2025-12-25T10:51:27.33254+01:00","closed_at":"2025-12-25T10:51:27.33254+01:00","close_reason":"Closed via 
update","dependencies":[{"issue_id":"node-zmh","depends_on_id":"node-qz1","type":"parent-child","created_at":"2025-12-24T14:36:11.824926+01:00","created_by":"daemon"},{"issue_id":"node-zmh","depends_on_id":"node-6p0","type":"blocks","created_at":"2025-12-24T14:36:22.014249+01:00","created_by":"daemon"}]} diff --git a/.env.example b/.env.example index 8a3101e3..4c742a29 100644 --- a/.env.example +++ b/.env.example @@ -27,3 +27,15 @@ OMNI_MAX_CONNECTIONS_PER_IP=10 OMNI_MAX_REQUESTS_PER_SECOND_PER_IP=100 OMNI_MAX_REQUESTS_PER_SECOND_PER_IDENTITY=200 +# TLSNotary HTTPS Attestation (optional - disabled by default) +# Enables MPC-TLS attestation for verifiable HTTPS proofs +TLSNOTARY_ENABLED=false +TLSNOTARY_PORT=7047 +# TLSNOTARY_SIGNING_KEY: 32-byte hex secp256k1 private key (required if enabled) +# Generate with: openssl rand -hex 32 +TLSNOTARY_SIGNING_KEY= +# WebSocket proxy port for browser TCP tunneling +TLSNOTARY_PROXY_PORT=55688 +# Optional: Adjust data limits (bytes) +TLSNOTARY_MAX_SENT_DATA=16384 +TLSNOTARY_MAX_RECV_DATA=65536 diff --git a/.gitignore b/.gitignore index 5c36ba4a..d2315cf3 100644 --- a/.gitignore +++ b/.gitignore @@ -205,3 +205,6 @@ devnet/identities/ devnet/.env devnet/postgres-data/ ipfs_53550/data_53550/ipfs +.tlsnotary-key +src/features/tlsnotary/SDK_INTEGRATION.md +src/features/tlsnotary/SDK_INTEGRATION.md diff --git a/.serena/memories/tlsnotary_integration_context.md b/.serena/memories/tlsnotary_integration_context.md new file mode 100644 index 00000000..25eefa30 --- /dev/null +++ b/.serena/memories/tlsnotary_integration_context.md @@ -0,0 +1,79 @@ +# TLSNotary Backend Integration Context + +## Beads Tracking + +- **Epic**: `node-6lo` - TLSNotary Backend Integration +- **Tasks** (in dependency order): + 1. `node-3yq` - Copy pre-built .so library (READY) + 2. `node-ebc` - Create FFI bindings + 3. `node-r72` - Create TLSNotaryService + 4. `node-9kw` - Create Fastify routes + 5. `node-mwm` - Create feature entry point + 6. 
`node-2fw` - Integrate with node startup + 7. `node-hgf` - Add SDK discovery endpoint + 8. `node-8sq` - Type check and lint + +## Reference Code Locations + +### Pre-built Binary +``` +/home/tcsenpai/tlsn/demos_tlsnotary/node/rust/target/release/libtlsn_notary.so +``` +Target: `libs/tlsn/libtlsn_notary.so` + +### FFI Reference Implementation +``` +/home/tcsenpai/tlsn/demos_tlsnotary/node/ts/TLSNotary.ts +``` +Complete working bun:ffi bindings to adapt for `src/features/tlsnotary/ffi.ts` + +### Demo App Reference +``` +/home/tcsenpai/tlsn/demos_tlsnotary/demo/src/app.tsx +``` +Browser-side attestation flow with tlsn-js WASM + +### Integration Documentation +``` +/home/tcsenpai/tlsn/demos_tlsnotary/BACKEND_INTEGRATION.md +/home/tcsenpai/tlsn/demos_tlsnotary/INTEGRATION.md +``` + +## FFI Symbols (from reference TLSNotary.ts) + +```typescript +const symbols = { + tlsn_init: { args: [], returns: FFIType.i32 }, + tlsn_notary_create: { args: [FFIType.ptr], returns: FFIType.ptr }, + tlsn_notary_start_server: { args: [FFIType.ptr, FFIType.u16], returns: FFIType.i32 }, + tlsn_notary_stop_server: { args: [FFIType.ptr], returns: FFIType.i32 }, + tlsn_verify_attestation: { args: [FFIType.ptr, FFIType.u64], returns: FFIType.ptr }, + tlsn_notary_get_public_key: { args: [FFIType.ptr, FFIType.ptr, FFIType.u64], returns: FFIType.i32 }, + tlsn_notary_destroy: { args: [FFIType.ptr], returns: FFIType.void }, + tlsn_free_verification_result: { args: [FFIType.ptr], returns: FFIType.void }, + tlsn_free_string: { args: [FFIType.ptr], returns: FFIType.void }, +}; +``` + +## FFI Struct Layouts + +### NotaryConfig (40 bytes) +- signing_key ptr (8 bytes) +- signing_key_len (8 bytes) +- max_sent_data (8 bytes) +- max_recv_data (8 bytes) +- server_port (2 bytes + padding) + +### VerificationResultFFI (40 bytes) +- status (4 bytes + 4 padding) +- server_name ptr (8 bytes) +- connection_time (8 bytes) +- sent_len (4 bytes) +- recv_len (4 bytes) +- error_message ptr (8 bytes) + +## SDK Integration 
(Already Complete) + +Package `@kynesyslabs/demosdk` v2.7.2 has `tlsnotary/` module with: +- TLSNotary class: initialize(), attest(), verify(), getTranscript() +- Located in `/home/tcsenpai/kynesys/sdks/src/tlsnotary/` diff --git a/devnet/scripts/generate-identity-helper.ts b/devnet/scripts/generate-identity-helper.ts index a4e5c1a7..d0f1dd7c 100644 --- a/devnet/scripts/generate-identity-helper.ts +++ b/devnet/scripts/generate-identity-helper.ts @@ -35,5 +35,5 @@ const identity = await ucrypto.getIdentity("ed25519") // uint8ArrayToHex already includes 0x prefix const pubkeyHex = uint8ArrayToHex(identity.publicKey) -console.log('MNEMONIC:' + mnemonic) -console.log('PUBKEY:' + pubkeyHex) +console.log("MNEMONIC:" + mnemonic) +console.log("PUBKEY:" + pubkeyHex) diff --git a/install-deps.sh b/install-deps.sh new file mode 100755 index 00000000..794e98a6 --- /dev/null +++ b/install-deps.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash +set -e +set -u +set -o pipefail + +# Verify prerequisites +command -v bun >/dev/null 2>&1 || { echo "Error: bun is not installed" >&2; exit 1; } +command -v cargo >/dev/null 2>&1 || { echo "Error: cargo is not installed" >&2; exit 1; } + +bun install +bun pm trust --all || true + +# Install wstcp only if not already present +if ! command -v wstcp >/dev/null 2>&1; then + echo "Installing wstcp..." 
+ cargo install wstcp +else + echo "wstcp already installed, skipping" +fi + +echo "All dependencies have been installed" + diff --git a/libs/tlsn/libtlsn_notary.so b/libs/tlsn/libtlsn_notary.so new file mode 100755 index 00000000..aab93f56 Binary files /dev/null and b/libs/tlsn/libtlsn_notary.so differ diff --git a/package.json b/package.json index 8e9ca165..f042a88d 100644 --- a/package.json +++ b/package.json @@ -59,14 +59,13 @@ "@fastify/cors": "^9.0.1", "@fastify/swagger": "^8.15.0", "@fastify/swagger-ui": "^4.1.0", - "@kynesyslabs/demosdk": "^2.5.13", + "@kynesyslabs/demosdk": "^2.7.10", "@metaplex-foundation/js": "^0.20.1", "@modelcontextprotocol/sdk": "^1.13.3", "@noble/ed25519": "^3.0.0", "@noble/hashes": "^2.0.1", "@octokit/core": "^6.1.5", "@scure/bip39": "^2.0.1", - "@octokit/core": "^6.1.5", "@solana/web3.js": "^1.98.4", "@types/express": "^4.17.21", "@types/http-proxy": "^1.17.14", @@ -91,7 +90,7 @@ "lodash": "^4.17.21", "node-disk-info": "^1.3.0", "node-fetch": "2", - "node-forge": "^1.3.1", + "node-forge": "^1.3.3", "node-seal": "^5.1.3", "npm-check-updates": "^16.14.18", "ntp-client": "^0.5.3", diff --git a/run b/run index 5d11c807..b4a70540 100755 --- a/run +++ b/run @@ -351,6 +351,12 @@ function ctrl_c() { docker compose down cd .. fi + # Stop TLSNotary container if running (enabled by default) + if [ "$TLSNOTARY_DISABLED" != "true" ] && [ -d "tlsnotary" ]; then + (cd tlsnotary && docker compose down --timeout 5 2>/dev/null) || true + # Force kill if still running + docker rm -f "tlsn-notary-${TLSNOTARY_PORT:-7047}" 2>/dev/null || true + fi } # Function to check if we are on the first run with the .RUN file @@ -746,6 +752,51 @@ if [ "$EXTERNAL_DB" = false ]; then fi fi +# TLSNotary Docker container management (enabled by default) +# Set TLSNOTARY_DISABLED=true to disable +if [ "$TLSNOTARY_DISABLED" != "true" ]; then + TLSNOTARY_PORT="${TLSNOTARY_PORT:-7047}" + echo "🔐 Starting TLSNotary notary container..." 
+ + if [ -d "tlsnotary" ]; then + cd tlsnotary + + # Stop any existing container + docker compose down > /dev/null 2>&1 || true + + # Start the TLSNotary container + log_verbose "Starting TLSNotary container on port $TLSNOTARY_PORT" + if ! TLSNOTARY_PORT=$TLSNOTARY_PORT docker compose up -d; then + echo "⚠️ Warning: Failed to start TLSNotary container" + echo "💡 TLSNotary attestation features will not be available" + else + echo "✅ TLSNotary container started on port $TLSNOTARY_PORT" + + # Wait for TLSNotary to be healthy (max 15 seconds) + log_verbose "Waiting for TLSNotary to be healthy..." + TLSN_TIMEOUT=15 + TLSN_COUNT=0 + while ! curl -sf "http://localhost:$TLSNOTARY_PORT/info" > /dev/null 2>&1; do + TLSN_COUNT=$((TLSN_COUNT+1)) + if [ $TLSN_COUNT -gt $TLSN_TIMEOUT ]; then + echo "⚠️ Warning: TLSNotary health check timeout" + break + fi + sleep 1 + done + + if [ $TLSN_COUNT -le $TLSN_TIMEOUT ]; then + echo "✅ TLSNotary is ready" + fi + fi + cd .. + else + echo "⚠️ Warning: tlsnotary folder not found, skipping TLSNotary setup" + fi +else + log_verbose "TLSNotary disabled (TLSNOTARY_DISABLED=true)" +fi + # Ensuring the logs folder exists mkdir -p logs @@ -809,6 +860,26 @@ if [ "$EXTERNAL_DB" = false ]; then cd .. fi +# Stop TLSNotary container if it was started (enabled by default) +if [ "$TLSNOTARY_DISABLED" != "true" ] && [ -d "tlsnotary" ]; then + echo "🛑 Stopping TLSNotary container..." + TLSN_CONTAINER="tlsn-notary-${TLSNOTARY_PORT:-7047}" + + # Try graceful shutdown first with short timeout + cd tlsnotary + docker compose down --timeout 5 2>/dev/null || true + cd .. + + # Force kill if still running + if docker ps -q -f "name=$TLSN_CONTAINER" 2>/dev/null | grep -q .; then + echo " Force stopping TLSNotary container..." 
+ docker kill "$TLSN_CONTAINER" 2>/dev/null || true + docker rm -f "$TLSN_CONTAINER" 2>/dev/null || true + fi + + echo "✅ TLSNotary stopped" +fi + echo "" echo "🏁 Demos Network node session completed" echo "💡 Thank you for running a Demos Network node!" diff --git a/src/features/tlsnotary/PROXY_MANAGER_PLAN.md b/src/features/tlsnotary/PROXY_MANAGER_PLAN.md new file mode 100644 index 00000000..2fbfb801 --- /dev/null +++ b/src/features/tlsnotary/PROXY_MANAGER_PLAN.md @@ -0,0 +1,301 @@ +# TLSNotary WebSocket Proxy Manager - Implementation Plan + +## Overview + +Dynamic wstcp proxy spawning system for domain-specific TLS attestation requests. + +## Architecture + +``` +┌─────────────────────────────────────────────────────────────────────────┐ +│ SDK Request │ +│ nodeCall({ action: "requestTLSNproxy", ... }) │ +└─────────────────────────┬───────────────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────────────────┐ +│ TLSNotary Proxy Manager │ +│ ┌─────────────────┐ ┌──────────────────┐ ┌───────────────────────┐ │ +│ │ Port Allocator │ │ Proxy Registry │ │ Lifecycle Manager │ │ +│ │ 55000-57000 │ │ (sharedState) │ │ (stdout monitor + │ │ +│ │ sequential + │ │ │ │ lazy cleanup) │ │ +│ │ recycle │ │ │ │ │ │ +│ └────────┬────────┘ └────────┬─────────┘ └───────────┬───────────┘ │ +│ │ │ │ │ +│ └────────────────────┼────────────────────────┘ │ +│ │ │ +└────────────────────────────────┼────────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────────────────┐ +│ wstcp Processes │ +│ ┌──────────────────┐ ┌──────────────────┐ ┌──────────────────┐ │ +│ │ :55000 → api.com │ │ :55001 → x.io │ │ :55002 → ... 
│ │ +│ │ (idle: 12s) │ │ (idle: 5s) │ │ (idle: 28s) │ │ +│ └──────────────────┘ └──────────────────┘ └──────────────────┘ │ +└─────────────────────────────────────────────────────────────────────────┘ +``` + +## Decisions Summary + +| Aspect | Decision | +|--------|----------| +| Proxy Granularity | One per domain (shared) | +| Port Allocation | Sequential 55000→57000, then recycle freed | +| Public URL | Auto-detect → `EXPOSED_URL` → IP fallback | +| Concurrency | Separate proxies per request | +| Failure Handling | Retry 3x with different ports, then diagnostic error | +| Usage Detection | Any wstcp stdout activity resets 30s idle timer | +| Cleanup | Lazy - on next request, clean stale proxies | +| wstcp Binary | Expect in PATH, `cargo install wstcp` if missing | +| Endpoint | nodeCall action: `requestTLSNproxy` | +| Response | Extended with proxyId, expiresIn, targetDomain | +| State | `sharedState.tlsnotary = { proxies, portPool }` | +| Persistence | None - ephemeral, dies with node | +| Port inference | :443 from https, unless URL contains explicit port | + +## Data Structures + +### sharedState.tlsnotary + +```typescript +interface TLSNotaryState { + proxies: Map // keyed by domain + portPool: { + next: number // next port to try (55000-57000) + max: number // 57000 + recycled: number[] // freed ports available for reuse + } +} + +interface ProxyInfo { + proxyId: string // uuid + domain: string // "api.example.com" + port: number // 55123 + process: ChildProcess // wstcp process handle + lastActivity: number // Date.now() timestamp + spawnedAt: number // Date.now() timestamp + websocketProxyUrl: string // "ws://node.demos.sh:55123" +} +``` + +## API Contract + +### Request (nodeCall) + +```typescript +{ + action: "requestTLSNproxy", + targetUrl: "https://api.example.com/endpoint", + authentication?: { // optional, future use + pubKey: string, + signature: string + } +} +``` + +### Success Response + +```typescript +{ + websocketProxyUrl: 
"ws://node.demos.sh:55123", + targetDomain: "api.example.com", + expiresIn: 30000, // ms until auto-cleanup (resets on activity) + proxyId: "uuid-here" +} +``` + +### Error Response + +```typescript +{ + error: "PROXY_SPAWN_FAILED", + message: "Failed to spawn proxy after 3 attempts", + targetDomain: "api.example.com", + lastError: "Port 55003 already in use" +} +``` + +## Lifecycle Flow + +``` +1. SDK calls requestTLSNproxy({ targetUrl: "https://api.example.com/..." }) + │ +2. Extract domain + port: "api.example.com:443" (443 inferred from https) + │ - If URL has explicit port like https://api.example.com:8443, use that + │ +3. Lazy cleanup: scan proxies, kill any with lastActivity > 30s ago + │ +4. Check if proxy exists for domain + │ + ├─► EXISTS & ALIVE → update lastActivity, return existing proxy info + │ + └─► NOT EXISTS + │ + 4a. Allocate port (recycled.pop() || next++) + │ + 4b. Spawn: wstcp --bind-addr 0.0.0.0:{port} {domain}:{targetPort} + │ + ├─► FAIL → retry up to 3x with new port + │ + └─► SUCCESS + │ + 4c. Attach stdout listener (any output → reset lastActivity) + │ + 4d. Register in sharedState.tlsnotary.proxies + │ + 4e. Return ProxyInfo +``` + +## Files to Create/Modify + +### New Files + +1. **src/features/tlsnotary/proxyManager.ts** - Main proxy lifecycle management + - `ensureWstcp()` - Check/install wstcp binary + - `extractDomainAndPort(url)` - Parse target URL + - `getPublicUrl(port)` - Build websocketProxyUrl + - `spawnProxy(domain, targetPort)` - Spawn wstcp process + - `cleanupStaleProxies()` - Lazy cleanup + - `requestProxy(targetUrl)` - Main entry point + - `killProxy(proxyId)` - Manual cleanup if needed + +2. **src/features/tlsnotary/portAllocator.ts** - Port pool management + - `initPortPool()` - Initialize pool state + - `allocatePort()` - Get next available port + - `releasePort(port)` - Return port to recycled pool + - `isPortAvailable(port)` - Check if port is free + +3. 
**src/features/tlsnotary/SDK_INTEGRATION.md** - SDK integration docs + +### Files to Modify + +1. **src/utilities/sharedState.ts** + - Add `tlsnotary` property with type `TLSNotaryState` + - Initialize in constructor + +2. **src/libs/network/server_rpc.ts** (or wherever nodeCall handlers are) + - Add handler for `action: "requestTLSNproxy"` + - Import and call `requestProxy()` from proxyManager + +3. **src/libs/network/docs_nodeCall.md** + - Document new `requestTLSNproxy` action + +4. **src/libs/network/methodListing.ts** + - Add to availableMethods if needed + +## Implementation Order + +1. [ ] Create `portAllocator.ts` - port pool management +2. [ ] Create `proxyManager.ts` - proxy lifecycle management +3. [ ] Modify `sharedState.ts` - add tlsnotary state +4. [ ] Add nodeCall handler for `requestTLSNproxy` +5. [ ] Test manually with curl/SDK +6. [ ] Create `SDK_INTEGRATION.md` documentation + +## Public URL Resolution Logic + +```typescript +function getPublicUrl(port: number, requestOrigin?: string): string { + // 1. Try auto-detect from request origin (if available in headers) + if (requestOrigin) { + const url = new URL(requestOrigin) + return `ws://${url.hostname}:${port}` + } + + // 2. Fall back to EXPOSED_URL + if (process.env.EXPOSED_URL) { + const url = new URL(process.env.EXPOSED_URL) + return `ws://${url.hostname}:${port}` + } + + // 3. 
Fall back to sharedState.exposedUrl or connectionString + const sharedState = SharedState.getInstance() + const url = new URL(sharedState.exposedUrl) + return `ws://${url.hostname}:${port}` +} +``` + +## wstcp Binary Check + +```typescript +async function ensureWstcp(): Promise<void> { + const { exec } = await import('child_process') + const { promisify } = await import('util') + const execAsync = promisify(exec) + + try { + await execAsync('which wstcp') + log.debug('[TLSNotary] wstcp binary found') + } catch { + log.info('[TLSNotary] wstcp not found, installing via cargo...') + try { + await execAsync('cargo install wstcp') + log.info('[TLSNotary] wstcp installed successfully') + } catch (installError) { + throw new Error(`Failed to install wstcp: ${installError.message}`) + } + } +} +``` + +## Domain/Port Extraction + +```typescript +function extractDomainAndPort(targetUrl: string): { domain: string; port: number } { + const url = new URL(targetUrl) + const domain = url.hostname + + // If explicit port in URL, use it + if (url.port) { + return { domain, port: parseInt(url.port, 10) } + } + + // Otherwise infer from protocol + const port = url.protocol === 'https:' ?
443 : 80 + return { domain, port } +} +``` + +## Stdout Activity Monitor + +```typescript +function attachActivityMonitor(process: ChildProcess, proxyInfo: ProxyInfo): void { + // Any stdout activity resets the idle timer + process.stdout?.on('data', () => { + proxyInfo.lastActivity = Date.now() + }) + + process.stderr?.on('data', () => { + proxyInfo.lastActivity = Date.now() + }) + + process.on('exit', (code) => { + log.info(`[TLSNotary] Proxy for ${proxyInfo.domain} exited with code ${code}`) + // Cleanup will happen lazily on next request + }) +} +``` + +## Constants + +```typescript +const PROXY_CONFIG = { + PORT_MIN: 55000, + PORT_MAX: 57000, + IDLE_TIMEOUT_MS: 30000, // 30 seconds + MAX_SPAWN_RETRIES: 3, + SPAWN_TIMEOUT_MS: 5000, // 5 seconds to wait for wstcp to start +} +``` + +## Error Codes + +```typescript +enum ProxyError { + PROXY_SPAWN_FAILED = 'PROXY_SPAWN_FAILED', + PORT_EXHAUSTED = 'PORT_EXHAUSTED', + INVALID_URL = 'INVALID_URL', + WSTCP_NOT_AVAILABLE = 'WSTCP_NOT_AVAILABLE', +} +``` diff --git a/src/features/tlsnotary/TLSNotaryService.ts b/src/features/tlsnotary/TLSNotaryService.ts new file mode 100644 index 00000000..80118cdb --- /dev/null +++ b/src/features/tlsnotary/TLSNotaryService.ts @@ -0,0 +1,846 @@ +/** + * TLSNotary Service for Demos Node + * + * High-level service class that wraps TLSNotary functionality with lifecycle management, + * configuration from environment, and integration with the Demos node ecosystem. 
+ * + * Supports two modes: + * - FFI Mode: Uses Rust FFI bindings (requires libtlsn_notary.so) - DEPRECATED + * - Docker Mode: Uses official Docker notary-server image (recommended) + * + * @module features/tlsnotary/TLSNotaryService + */ + +// REVIEW: TLSNotaryService - updated to support Docker mode alongside FFI +import { TLSNotaryFFI, type NotaryConfig, type VerificationResult, type NotaryHealthStatus } from "./ffi" +import { existsSync, readFileSync, writeFileSync } from "fs" +import { join } from "path" +import { randomBytes } from "crypto" +import log from "@/utilities/logger" + +// ============================================================================ +// Types +// ============================================================================ + +/** + * TLSNotary operational mode + */ +export type TLSNotaryMode = "ffi" | "docker"; + +/** + * Service configuration options + */ +export interface TLSNotaryServiceConfig { + /** Port to run the notary WebSocket server on */ + port: number; + /** 32-byte secp256k1 private key (hex string or Uint8Array) - only used in FFI mode */ + signingKey?: string | Uint8Array; + /** Maximum bytes the prover can send (default: 16KB) */ + maxSentData?: number; + /** Maximum bytes the prover can receive (default: 64KB) */ + maxRecvData?: number; + /** Whether to auto-start the server on initialization */ + autoStart?: boolean; + /** Operational mode: 'ffi' (Rust FFI) or 'docker' (Docker container) */ + mode?: TLSNotaryMode; +} + +/** + * Service status information + */ +export interface TLSNotaryServiceStatus { + /** Whether the service is enabled */ + enabled: boolean; + /** Whether the service is running */ + running: boolean; + /** Port the service is listening on */ + port: number; + /** Health status from the underlying notary */ + health: NotaryHealthStatus; + /** Operating mode: docker or ffi */ + mode?: TLSNotaryMode; +} + +// ============================================================================ +// 
Environment Configuration +// ============================================================================ + +// REVIEW: Key file path for persistent storage of auto-generated keys +const SIGNING_KEY_FILE = ".tlsnotary-key" + +/** + * Resolve the TLSNotary signing key with priority: ENV > file > auto-generate + * + * Priority order: + * 1. TLSNOTARY_SIGNING_KEY environment variable (highest priority) + * 2. .tlsnotary-key file in project root + * 3. Auto-generate and save to .tlsnotary-key file + * + * @returns 64-character hex string (32-byte key) or null on error + */ +function resolveSigningKey(): string | null { + // Priority 1: Environment variable + const envKey = process.env.TLSNOTARY_SIGNING_KEY + if (envKey && envKey.length === 64) { + log.info("[TLSNotary] Using signing key from environment variable") + return envKey + } else if (envKey && envKey.length !== 64) { + log.warning("[TLSNotary] TLSNOTARY_SIGNING_KEY must be 64 hex characters (32 bytes)") + return null + } + + // Priority 2: Key file + const keyFilePath = join(process.cwd(), SIGNING_KEY_FILE) + if (existsSync(keyFilePath)) { + try { + const fileKey = readFileSync(keyFilePath, "utf-8").trim() + if (fileKey.length === 64) { + log.info(`[TLSNotary] Using signing key from ${SIGNING_KEY_FILE}`) + return fileKey + } else { + log.warning(`[TLSNotary] Invalid key in ${SIGNING_KEY_FILE} (must be 64 hex characters)`) + return null + } + } catch (error) { + log.warning(`[TLSNotary] Failed to read ${SIGNING_KEY_FILE}: ${error}`) + return null + } + } + + // Priority 3: Auto-generate and save + try { + const generatedKey = randomBytes(32).toString("hex") + writeFileSync(keyFilePath, generatedKey, { mode: 0o600 }) // Restrictive permissions + log.info(`[TLSNotary] Auto-generated signing key saved to ${SIGNING_KEY_FILE}`) + return generatedKey + } catch (error) { + log.error(`[TLSNotary] Failed to auto-generate signing key: ${error}`) + return null + } +} + +/** + * Check if TLSNotary errors should be fatal 
(for debugging) + * When TLSNOTARY_FATAL=true, errors will cause process exit + */ +export function isTLSNotaryFatal(): boolean { + return process.env.TLSNOTARY_FATAL?.toLowerCase() === "true" +} + +/** + * Check if TLSNotary debug mode is enabled + * When TLSNOTARY_DEBUG=true, additional logging is enabled + */ +export function isTLSNotaryDebug(): boolean { + return process.env.TLSNOTARY_DEBUG?.toLowerCase() === "true" +} + +/** + * Check if TLSNotary proxy mode is enabled + * When TLSNOTARY_PROXY=true, a TCP proxy intercepts and logs all incoming data + * before forwarding to the Rust server. Useful for debugging what data is arriving. + */ +export function isTLSNotaryProxy(): boolean { + return process.env.TLSNOTARY_PROXY?.toLowerCase() === "true" +} + +/** + * Get TLSNotary configuration from environment variables + * + * Environment variables: + * - TLSNOTARY_DISABLED: Disable the service (default: false, i.e. enabled by default) + * - TLSNOTARY_MODE: Operational mode - 'docker' (default) or 'ffi' + * - TLSNOTARY_PORT: Port for the notary server (default: 7047) + * - TLSNOTARY_SIGNING_KEY: 32-byte hex-encoded secp256k1 private key (only for FFI mode) + * - TLSNOTARY_MAX_SENT_DATA: Maximum sent data bytes (default: 16384) + * - TLSNOTARY_MAX_RECV_DATA: Maximum received data bytes (default: 65536) + * - TLSNOTARY_AUTO_START: Auto-start on initialization (default: true) + * - TLSNOTARY_FATAL: Make TLSNotary errors fatal for debugging (default: false) + * - TLSNOTARY_DEBUG: Enable verbose debug logging (default: false) + * - TLSNOTARY_PROXY: Enable TCP proxy to log incoming data before forwarding (default: false) + * + * Signing Key Resolution Priority (FFI mode only): + * 1. TLSNOTARY_SIGNING_KEY environment variable + * 2. .tlsnotary-key file in project root + * 3. 
Auto-generate and save to .tlsnotary-key + * + * @returns Configuration object or null if service is disabled + */ +export function getConfigFromEnv(): TLSNotaryServiceConfig | null { + const disabled = process.env.TLSNOTARY_DISABLED?.toLowerCase() === "true" + + if (disabled) { + return null + } + + // Determine mode: default to 'docker' as it's more compatible with tlsn-js + const mode = (process.env.TLSNOTARY_MODE?.toLowerCase() === "ffi" ? "ffi" : "docker") as TLSNotaryMode + + // Only require signing key for FFI mode + let signingKey: string | undefined + if (mode === "ffi") { + signingKey = resolveSigningKey() ?? undefined + if (!signingKey) { + log.warning("[TLSNotary] Failed to resolve signing key for FFI mode") + return null + } + } + + return { + port: parseInt(process.env.TLSNOTARY_PORT ?? "7047", 10), + signingKey, + maxSentData: parseInt(process.env.TLSNOTARY_MAX_SENT_DATA ?? "16384", 10), + maxRecvData: parseInt(process.env.TLSNOTARY_MAX_RECV_DATA ?? "65536", 10), + autoStart: process.env.TLSNOTARY_AUTO_START?.toLowerCase() !== "false", + mode, + } +} + +// ============================================================================ +// TLSNotaryService Class +// ============================================================================ + +/** + * TLSNotary Service + * + * Manages the TLSNotary instance lifecycle, provides health checks, + * and exposes verification functionality. 
+ * + * @example + * ```typescript + * import { TLSNotaryService } from '@/features/tlsnotary/TLSNotaryService'; + * + * // Initialize from environment + * const service = TLSNotaryService.fromEnvironment(); + * if (service) { + * await service.start(); + * console.log('TLSNotary running on port', service.getPort()); + * console.log('Public key:', service.getPublicKeyHex()); + * } + * + * // Or with explicit config + * const service = new TLSNotaryService({ + * port: 7047, + * signingKey: '0x...', // 64 hex chars + * }); + * await service.start(); + * ``` + */ +export class TLSNotaryService { + private ffi: TLSNotaryFFI | null = null + private readonly config: TLSNotaryServiceConfig + private running = false + private dockerPublicKey: string | null = null // Cached public key from Docker notary + private proxyServer: import("net").Server | null = null + + /** + * Create a new TLSNotaryService instance + * @param config - Service configuration + */ + constructor(config: TLSNotaryServiceConfig) { + this.config = { + ...config, + mode: config.mode ?? "docker", // Default to docker mode + } + } + + /** + * Get the operational mode + */ + getMode(): TLSNotaryMode { + return this.config.mode ?? 
"docker" + } + + /** + * Create a TLSNotaryService from environment variables + * @returns Service instance or null if not enabled/configured + */ + static fromEnvironment(): TLSNotaryService | null { + const config = getConfigFromEnv() + if (!config) { + return null + } + return new TLSNotaryService(config) + } + + /** + * Initialize and optionally start the notary service + * @throws Error if initialization fails + */ + async initialize(): Promise { + const debug = isTLSNotaryDebug() + const fatal = isTLSNotaryFatal() + const mode = this.getMode() + + if (debug) { + log.info("[TLSNotary] Debug mode enabled - verbose logging active") + } + if (fatal) { + log.warning("[TLSNotary] Fatal mode enabled - errors will cause process exit") + } + + log.info(`[TLSNotary] Initializing in ${mode.toUpperCase()} mode`) + + if (mode === "docker") { + // Docker mode: just verify the container is accessible + await this.initializeDockerMode() + } else { + // FFI mode: initialize Rust FFI + await this.initializeFFIMode() + } + + // Auto-start if configured + if (this.config.autoStart) { + await this.start() + } + } + + /** + * Initialize Docker mode - verify container is running + * @private + */ + private async initializeDockerMode(): Promise { + const debug = isTLSNotaryDebug() + + if (debug) { + log.info(`[TLSNotary] Docker mode: expecting container on port ${this.config.port}`) + } + + // In Docker mode, we don't start the container here - that's handled by the run script + // We just mark as initialized and will check connectivity in start() + log.info("[TLSNotary] Docker mode initialized (container managed externally)") + + if (debug) { + log.info(`[TLSNotary] Config: port=${this.config.port}`) + log.info("[TLSNotary] Container should be started via: cd tlsnotary && docker compose up -d") + } + } + + /** + * Initialize FFI mode - load Rust library + * @private + */ + private async initializeFFIMode(): Promise { + if (this.ffi) { + log.warning("[TLSNotary] FFI already 
initialized") + return + } + + const debug = isTLSNotaryDebug() + const fatal = isTLSNotaryFatal() + + // Convert signing key to Uint8Array if it's a hex string + let signingKeyBytes: Uint8Array + if (typeof this.config.signingKey === "string") { + signingKeyBytes = Buffer.from(this.config.signingKey, "hex") + } else if (this.config.signingKey) { + signingKeyBytes = this.config.signingKey + } else { + const error = new Error("Signing key required for FFI mode") + if (fatal) { + log.error("[TLSNotary] FATAL: " + error.message) + process.exit(1) + } + throw error + } + + if (signingKeyBytes.length !== 32) { + const error = new Error("Signing key must be exactly 32 bytes") + if (fatal) { + log.error("[TLSNotary] FATAL: " + error.message) + process.exit(1) + } + throw error + } + + const ffiConfig: NotaryConfig = { + signingKey: signingKeyBytes, + maxSentData: this.config.maxSentData, + maxRecvData: this.config.maxRecvData, + } + + try { + this.ffi = new TLSNotaryFFI(ffiConfig) + log.info("[TLSNotary] FFI service initialized") + + if (debug) { + log.info(`[TLSNotary] Config: port=${this.config.port}, maxSentData=${this.config.maxSentData}, maxRecvData=${this.config.maxRecvData}`) + } + } catch (error) { + log.error("[TLSNotary] Failed to initialize FFI: " + error) + if (fatal) { + log.error("[TLSNotary] FATAL: Exiting due to initialization failure") + process.exit(1) + } + throw error + } + } + + /** + * Start the notary WebSocket server + * @throws Error if not initialized or server fails to start + */ + async start(): Promise { + const mode = this.getMode() + + if (this.running) { + log.warning("[TLSNotary] Server already running") + return + } + + if (mode === "docker") { + await this.startDockerMode() + } else { + await this.startFFIMode() + } + } + + /** + * Start in Docker mode - verify container is running and accessible + * @private + */ + private async startDockerMode(): Promise { + const debug = isTLSNotaryDebug() + const fatal = isTLSNotaryFatal() + + 
log.info(`[TLSNotary] Docker mode: checking container on port ${this.config.port}...`) + + try { + // Try to fetch /info endpoint to verify container is running + const infoUrl = `http://localhost:${this.config.port}/info` + const response = await fetch(infoUrl, { signal: AbortSignal.timeout(5000) }) + + if (!response.ok) { + throw new Error(`Notary server returned ${response.status}`) + } + + const info = await response.json() as { publicKey?: string; version?: string } + this.dockerPublicKey = info.publicKey ?? null + + this.running = true + log.info("[TLSNotary] Docker container is running and accessible") + + if (debug) { + log.info(`[TLSNotary] Notary info: ${JSON.stringify(info)}`) + } + + if (this.dockerPublicKey) { + log.info(`[TLSNotary] Notary public key: ${this.dockerPublicKey}`) + } + + } catch (error) { + const message = error instanceof Error ? error.message : String(error) + log.error(`[TLSNotary] Failed to connect to Docker notary on port ${this.config.port}: ${message}`) + log.error("[TLSNotary] Make sure the Docker container is running:") + log.error("[TLSNotary] cd tlsnotary && TLSNOTARY_PORT=${TLSNOTARY_PORT} docker compose up -d") + + if (fatal) { + log.error("[TLSNotary] FATAL: Exiting due to Docker container not available") + process.exit(1) + } + throw new Error(`Docker notary container not accessible: ${message}`) + } + } + + /** + * Start in FFI mode - start the Rust WebSocket server + * @private + */ + private async startFFIMode(): Promise { + const debug = isTLSNotaryDebug() + const fatal = isTLSNotaryFatal() + const proxyEnabled = isTLSNotaryProxy() + + if (!this.ffi) { + const error = new Error("FFI not initialized. 
Call initialize() first.") + if (fatal) { + log.error("[TLSNotary] FATAL: " + error.message) + process.exit(1) + } + throw error + } + + try { + if (debug) { + log.info(`[TLSNotary] Starting WebSocket server on port ${this.config.port}...`) + log.info("[TLSNotary] NOTE: TLSNotary only accepts WebSocket connections via HTTP GET") + log.info("[TLSNotary] Non-GET requests (POST, PUT, etc.) will fail with WebSocket upgrade error") + } + + // REVIEW: Debug proxy mode - intercepts and logs all incoming data before forwarding + if (proxyEnabled) { + await this.startWithProxy() + } else { + await this.ffi.startServer(this.config.port) + } + + this.running = true + log.info(`[TLSNotary] FFI server started on port ${this.config.port}`) + + if (debug) { + log.info(`[TLSNotary] Public key: ${this.ffi.getPublicKeyHex()}`) + log.info("[TLSNotary] Waiting for prover connections...") + } + + if (proxyEnabled) { + log.warning("[TLSNotary] DEBUG PROXY ENABLED - All incoming data will be logged!") + } + } catch (error) { + log.error(`[TLSNotary] Failed to start FFI server on port ${this.config.port}: ${error}`) + if (fatal) { + log.error("[TLSNotary] FATAL: Exiting due to server start failure") + process.exit(1) + } + throw error + } + } + + /** + * Start with a debug proxy that logs all incoming data + * The proxy listens on the configured port and forwards to Rust on port+1 + * @private + */ + private async startWithProxy(): Promise { + const net = await import("net") + const publicPort = this.config.port + const rustPort = this.config.port + 1 + + // Start Rust server on internal port + await this.ffi!.startServer(rustPort) + log.info(`[TLSNotary] Rust server started on internal port ${rustPort}`) + + // Close any previous proxy server (defensive) + if (this.proxyServer) { + try { + await new Promise((resolve, reject) => { + this.proxyServer!.once("error", reject) + this.proxyServer!.close((err) => (err ? 
reject(err) : resolve())) + }) + } catch { + // ignore + } + this.proxyServer = null + } + + // Create proxy server on public port + this.proxyServer = net.createServer((clientSocket) => { + const clientAddr = `${clientSocket.remoteAddress}:${clientSocket.remotePort}` + log.info(`[TLSNotary-Proxy] New connection from ${clientAddr}`) + + // Connect to Rust server + const rustSocket = net.connect(rustPort, "127.0.0.1", () => { + log.debug(`[TLSNotary-Proxy] Connected to Rust server for ${clientAddr}`) + }) + + // Log and forward data from client to Rust + clientSocket.on("data", (data) => { + const preview = data.slice(0, 500).toString("utf-8") + const hexPreview = data.slice(0, 100).toString("hex") + log.info(`[TLSNotary-Proxy] <<< FROM ${clientAddr} (${data.length} bytes):`) + log.info(`[TLSNotary-Proxy] Text: ${preview}`) + log.info(`[TLSNotary-Proxy] Hex: ${hexPreview}`) + rustSocket.write(data) + }) + + // Forward data from Rust to client (no logging needed) + rustSocket.on("data", (data) => { + clientSocket.write(data) + }) + + // Handle errors and close + clientSocket.on("error", (err) => { + log.warning(`[TLSNotary-Proxy] Client error ${clientAddr}: ${err.message}`) + rustSocket.destroy() + }) + + rustSocket.on("error", (err) => { + log.warning(`[TLSNotary-Proxy] Rust connection error for ${clientAddr}: ${err.message}`) + clientSocket.destroy() + }) + + clientSocket.on("close", () => { + log.debug(`[TLSNotary-Proxy] Client ${clientAddr} disconnected`) + rustSocket.destroy() + }) + + rustSocket.on("close", () => { + clientSocket.destroy() + }) + }) + + await new Promise((resolve, reject) => { + this.proxyServer!.once("error", reject) + this.proxyServer!.listen(publicPort, () => { + log.info(`[TLSNotary-Proxy] Listening on port ${publicPort}, forwarding to ${rustPort}`) + resolve() + }) + }) + } + + /** + * Stop the notary WebSocket server + * In Docker mode, this is a no-op as the container is managed externally + */ + async stop(): Promise { + if 
(!this.running) { + return + } + + const mode = this.getMode() + + if (mode === "docker") { + // In Docker mode, we don't control the container lifecycle + // Just mark as not running from our perspective + this.running = false + log.info("[TLSNotary] Docker mode - marked as stopped (container still running)") + return + } + + // FFI mode + if (!this.ffi) { + return + } + + // Close the proxy server if it exists + if (this.proxyServer) { + try { + this.proxyServer.close() + } catch { + // ignore + } + this.proxyServer = null + } + + await this.ffi.stopServer() + this.running = false + log.info("[TLSNotary] Server stopped") + } + + /** + * Shutdown the service completely + * Stops the server and releases all resources + * In Docker mode, only clears local state (container managed externally) + */ + async shutdown(): Promise { + await this.stop() + + const mode = this.getMode() + + if (mode === "docker") { + this.dockerPublicKey = null + log.info("[TLSNotary] Docker mode - service shutdown complete (container still running)") + return + } + + // FFI mode + if (this.ffi) { + this.ffi.destroy() + this.ffi = null + } + + log.info("[TLSNotary] Service shutdown complete") + } + + /** + * Verify an attestation + * @param attestation - Serialized attestation bytes (Uint8Array or base64 string) + * @returns Verification result + * @note In Docker mode, verification is not yet supported (attestations are verified client-side) + */ + verify(attestation: Uint8Array | string): VerificationResult { + const mode = this.getMode() + + if (mode === "docker") { + // Docker notary-server handles verification internally + // Client-side tlsn-js also verifies attestations + // For now, we don't have a way to verify via HTTP API + return { + success: false, + error: "Verification not supported in Docker mode - use client-side verification", + } + } + + // FFI mode + if (!this.ffi) { + return { + success: false, + error: "Service not initialized", + } + } + + let attestationBytes: 
Uint8Array + if (typeof attestation === "string") { + // Assume base64 encoded + attestationBytes = Buffer.from(attestation, "base64") + } else { + attestationBytes = attestation + } + + return this.ffi.verifyAttestation(attestationBytes) + } + + /** + * Get the notary's public key as bytes + * @returns Compressed secp256k1 public key (33 bytes) + * @throws Error if service not initialized + */ + getPublicKey(): Uint8Array { + const mode = this.getMode() + + if (mode === "docker") { + if (!this.dockerPublicKey) { + throw new Error("Docker public key not available - service not started") + } + // Convert hex string to Uint8Array + return Buffer.from(this.dockerPublicKey, "hex") + } + + // FFI mode + if (!this.ffi) { + throw new Error("Service not initialized") + } + return this.ffi.getPublicKey() + } + + /** + * Get the notary's public key as hex string + * @returns Hex-encoded compressed public key + * @throws Error if service not initialized + */ + getPublicKeyHex(): string { + const mode = this.getMode() + + if (mode === "docker") { + if (!this.dockerPublicKey) { + throw new Error("Docker public key not available - service not started") + } + return this.dockerPublicKey + } + + // FFI mode + if (!this.ffi) { + throw new Error("Service not initialized") + } + return this.ffi.getPublicKeyHex() + } + + /** + * Get the configured port + */ + getPort(): number { + return this.config.port + } + + /** + * Check if the service is running + */ + isRunning(): boolean { + return this.running + } + + /** + * Check if the service is initialized + */ + isInitialized(): boolean { + const mode = this.getMode() + + if (mode === "docker") { + return this.dockerPublicKey !== null + } + + return this.ffi !== null + } + + /** + * Get full service status + * @returns Service status object + */ + getStatus(): TLSNotaryServiceStatus { + const mode = this.getMode() + + let health: NotaryHealthStatus + + if (mode === "docker") { + health = { + healthy: this.running && this.dockerPublicKey 
!== null, + initialized: this.dockerPublicKey !== null, + serverRunning: this.running, + error: this.running ? undefined : "Docker container not accessible", + } + } else { + health = this.ffi + ? this.ffi.getHealthStatus() + : { + healthy: false, + initialized: false, + serverRunning: false, + error: "Service not initialized", + } + } + + return { + enabled: true, + running: this.running, + port: this.config.port, + health, + mode, // Include mode in status + } + } + + /** + * Health check for the service + * @returns True if service is healthy + */ + isHealthy(): boolean { + const mode = this.getMode() + + if (mode === "docker") { + return this.running && this.dockerPublicKey !== null + } + + // FFI mode + if (!this.ffi) { + return false + } + return this.ffi.getHealthStatus().healthy + } +} + +// Export singleton management +let serviceInstance: TLSNotaryService | null = null + +/** + * Get or create the global TLSNotaryService instance + * Uses environment configuration + * @returns Service instance or null if not enabled + */ +export function getTLSNotaryService(): TLSNotaryService | null { + if (serviceInstance === null) { + serviceInstance = TLSNotaryService.fromEnvironment() + } + return serviceInstance +} + +/** + * Initialize and start the global TLSNotaryService + * @returns Service instance or null if not enabled + */ +export async function initializeTLSNotaryService(): Promise<TLSNotaryService | null> { + const service = getTLSNotaryService() + if (service && !service.isInitialized()) { + await service.initialize() + } + return service +} + +/** + * Shutdown the global TLSNotaryService + */ +export async function shutdownTLSNotaryService(): Promise<void> { + if (serviceInstance) { + await serviceInstance.shutdown() + serviceInstance = null + } +} + +export default TLSNotaryService diff --git a/src/features/tlsnotary/ffi.ts b/src/features/tlsnotary/ffi.ts new file mode 100644 index 00000000..53670f94 --- /dev/null +++ b/src/features/tlsnotary/ffi.ts @@ -0,0 +1,485 @@ +/** + * 
/**
 * TLSNotary FFI Bindings for Demos Node
 *
 * Uses bun:ffi to interface with the Rust TLSNotary library.
 * Adapted from reference implementation at demos_tlsnotary/node/ts/TLSNotary.ts
 *
 * The interfaces below are the TypeScript-facing mirrors of the data the
 * Rust side consumes/produces; the raw C struct layouts are documented at
 * the call sites that build them (see createNotary / verifyAttestation).
 *
 * @module features/tlsnotary/ffi
 */

// REVIEW: TLSNotary FFI bindings - new feature for HTTPS attestation
import { dlopen, FFIType, ptr, toArrayBuffer, CString } from "bun:ffi"
import { join, dirname } from "path"

// ============================================================================
// Types
// ============================================================================

/**
 * Configuration for the TLSNotary instance
 */
export interface NotaryConfig {
    /** 32-byte secp256k1 private key for signing attestations */
    signingKey: Uint8Array;
    /** Maximum bytes the prover can send (default: 16KB, i.e. 16384) */
    maxSentData?: number;
    /** Maximum bytes the prover can receive (default: 64KB, i.e. 65536) */
    maxRecvData?: number;
}

/**
 * Result of attestation verification
 */
export interface VerificationResult {
    /** Whether verification succeeded */
    success: boolean;
    /** Server name from the TLS session (only set on success) */
    serverName?: string;
    /** Unix timestamp of the connection (only set on success) */
    connectionTime?: number;
    /** Bytes sent by the prover (only set on success) */
    sentLength?: number;
    /** Bytes received by the prover (only set on success) */
    recvLength?: number;
    /** Error message if verification failed */
    error?: string;
}

/**
 * Health check status for the notary service
 */
export interface NotaryHealthStatus {
    /** Whether the notary is operational */
    healthy: boolean;
    /** Whether the library is initialized */
    initialized: boolean;
    /** Whether the WebSocket server is running */
    serverRunning: boolean;
    /** Compressed public key (33 bytes, hex encoded) */
    publicKey?: string;
    /** Error message if unhealthy */
    error?: string;
}

// ============================================================================
// FFI Bindings
// ============================================================================
============================================================================ + +/** + * Get the path to the native TLSNotary library + * @returns Path to the shared library + */ +function getLibraryPath(): string { + // Library is stored in libs/tlsn/ at project root + // __dirname equivalent for ESM + const currentDir = dirname(new URL(import.meta.url).pathname) + // Navigate from src/features/tlsnotary to project root + const projectRoot = join(currentDir, "../../..") + const libDir = join(projectRoot, "libs/tlsn") + + switch (process.platform) { + case "darwin": + return join(libDir, "libtlsn_notary.dylib") + case "win32": + return join(libDir, "tlsn_notary.dll") + default: + // Linux and other Unix-like systems + return join(libDir, "libtlsn_notary.so") + } +} + +/** + * FFI symbols exported by the Rust library + */ +const symbols = { + tlsn_init: { + args: [] as const, + returns: FFIType.i32, + }, + tlsn_notary_create: { + args: [FFIType.ptr] as const, // NotaryConfigFFI* + returns: FFIType.ptr, // NotaryHandle* + }, + tlsn_notary_start_server: { + args: [FFIType.ptr, FFIType.u16] as const, + returns: FFIType.i32, + }, + tlsn_notary_stop_server: { + args: [FFIType.ptr] as const, + returns: FFIType.i32, + }, + tlsn_verify_attestation: { + args: [FFIType.ptr, FFIType.u64] as const, + returns: FFIType.ptr, // VerificationResultFFI* + }, + tlsn_notary_get_public_key: { + args: [FFIType.ptr, FFIType.ptr, FFIType.u64] as const, + returns: FFIType.i32, + }, + tlsn_notary_destroy: { + args: [FFIType.ptr] as const, + returns: FFIType.void, + }, + tlsn_free_verification_result: { + args: [FFIType.ptr] as const, + returns: FFIType.void, + }, + tlsn_free_string: { + args: [FFIType.ptr] as const, + returns: FFIType.void, + }, +} as const + +// Type for the loaded library +type TLSNLibrary = ReturnType>; + +// ============================================================================ +// TLSNotaryFFI Class +// 
============================================================================ + +/** + * Low-level FFI wrapper for the TLSNotary Rust library + * + * This class handles the raw FFI calls and memory management. + * Use TLSNotaryService for the high-level service interface. + * + * @example + * ```typescript + * import { TLSNotaryFFI } from '@/features/tlsnotary/ffi'; + * + * const ffi = new TLSNotaryFFI({ + * signingKey: new Uint8Array(32), // Your 32-byte secp256k1 private key + * maxSentData: 16384, + * maxRecvData: 65536, + * }); + * + * // Start WebSocket server for browser provers + * await ffi.startServer(7047); + * + * // Verify an attestation + * const result = ffi.verifyAttestation(attestationBytes); + * + * // Cleanup + * ffi.destroy(); + * ``` + */ +export class TLSNotaryFFI { + private lib: TLSNLibrary + private handle: number | null = null + private initialized = false + private serverRunning = false + private readonly config: NotaryConfig + // Strong references to buffers passed to native code to prevent GC + private _signingKey: Uint8Array | null = null + private _configBuffer: Uint8Array | null = null + + /** + * Create a new TLSNotary FFI instance + * @param config - Notary configuration + * @throws Error if signing key is invalid or library fails to load + */ + constructor(config: NotaryConfig) { + // Validate signing key + if (!config.signingKey || config.signingKey.length !== 32) { + throw new Error("signingKey must be exactly 32 bytes") + } + + this.config = config + + // Load the native library + const libPath = getLibraryPath() + try { + this.lib = dlopen(libPath, symbols) + } catch (error) { + throw new Error( + `Failed to load TLSNotary library from ${libPath}: ${error instanceof Error ? 
error.message : String(error)}`, + ) + } + + // Initialize the library + const initResult = this.lib.symbols.tlsn_init() + if (initResult !== 0) { + throw new Error(`Failed to initialize TLSNotary library: error code ${initResult}`) + } + + // Create notary instance + this.createNotary() + } + + /** + * Create the native notary instance + * @private + */ + private createNotary(): void { + // Build FFI config struct + // NotaryConfigFFI layout (40 bytes): + // signing_key: *const u8 (8 bytes) + // signing_key_len: usize (8 bytes) + // max_sent_data: usize (8 bytes) + // max_recv_data: usize (8 bytes) + // server_port: u16 (2 bytes + 6 padding) + + const configBuffer = new ArrayBuffer(40) + const configView = new DataView(configBuffer) + + // Store strong reference to signing key to prevent GC while native code holds pointer + this._signingKey = this.config.signingKey + const signingKeyPtr = ptr(this._signingKey) + + // Write struct fields (little-endian) + configView.setBigUint64(0, BigInt(signingKeyPtr), true) // signing_key ptr + configView.setBigUint64(8, BigInt(32), true) // signing_key_len + configView.setBigUint64(16, BigInt(this.config.maxSentData ?? 16384), true) // max_sent_data + configView.setBigUint64(24, BigInt(this.config.maxRecvData ?? 
65536), true) // max_recv_data + configView.setUint16(32, 0, true) // server_port (0 = don't auto-start) + + // Store strong reference to config buffer to prevent GC + this._configBuffer = new Uint8Array(configBuffer) + const configPtr = ptr(this._configBuffer) + this.handle = this.lib.symbols.tlsn_notary_create(configPtr) as number + + if (this.handle === 0 || this.handle === null) { + throw new Error("Failed to create Notary instance") + } + + this.initialized = true + } + + /** + * Start the WebSocket server for accepting prover connections + * @param port - Port to listen on (default: 7047) + * @throws Error if notary not initialized or server fails to start + */ + async startServer(port = 7047): Promise { + if (!this.initialized || !this.handle) { + throw new Error("Notary not initialized") + } + + if (this.serverRunning) { + throw new Error("Server already running") + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const result = this.lib.symbols.tlsn_notary_start_server(this.handle as any, port) + + if (result !== 0) { + throw new Error(`Failed to start server: error code ${result}`) + } + + this.serverRunning = true + } + + /** + * Stop the WebSocket server + */ + async stopServer(): Promise { + if (!this.initialized || !this.handle) { + return + } + + if (!this.serverRunning) { + return + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this.lib.symbols.tlsn_notary_stop_server(this.handle as any) + this.serverRunning = false + } + + /** + * Verify an attestation/presentation + * @param attestation - Serialized attestation bytes + * @returns Verification result with success status and metadata + */ + verifyAttestation(attestation: Uint8Array): VerificationResult { + if (!this.initialized) { + return { + success: false, + error: "Notary not initialized", + } + } + + // Handle empty attestation before FFI call (bun:ffi can't handle empty buffers) + if (attestation.length === 0) { + return { + success: false, + 
error: "Invalid attestation data: empty buffer", + } + } + + const attestationPtr = ptr(attestation) + const resultPtr = this.lib.symbols.tlsn_verify_attestation(attestationPtr, BigInt(attestation.length)) + + if (resultPtr === 0 || resultPtr === null) { + return { + success: false, + error: "Verification returned null", + } + } + + try { + // Read VerificationResultFFI struct (40 bytes) + // Layout: + // status: i32 (4 bytes + 4 padding) + // server_name: *mut c_char (8 bytes) + // connection_time: u64 (8 bytes) + // sent_len: u32 (4 bytes) + // recv_len: u32 (4 bytes) + // error_message: *mut c_char (8 bytes) + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const resultBuffer = toArrayBuffer(resultPtr as any, 0, 40) + const view = new DataView(resultBuffer) + + const status = view.getInt32(0, true) + const serverNamePtr = view.getBigUint64(8, true) + const connectionTime = view.getBigUint64(16, true) + const sentLen = view.getUint32(24, true) + const recvLen = view.getUint32(28, true) + const errorMessagePtr = view.getBigUint64(32, true) + + let serverName: string | undefined + if (serverNamePtr !== 0n) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + serverName = new CString(Number(serverNamePtr) as any).toString() + } + + let errorMessage: string | undefined + if (errorMessagePtr !== 0n) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + errorMessage = new CString(Number(errorMessagePtr) as any).toString() + } + + if (status === 0) { + return { + success: true, + serverName, + connectionTime: Number(connectionTime), + sentLength: sentLen, + recvLength: recvLen, + } + } else { + return { + success: false, + error: errorMessage ?? 
`Verification failed with status ${status}`, + } + } + } finally { + // Free the result struct + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this.lib.symbols.tlsn_free_verification_result(resultPtr as any) + } + } + + /** + * Get the notary's compressed public key (33 bytes) + * Share this with the SDK so clients can verify attestations + * @returns Compressed secp256k1 public key + * @throws Error if notary not initialized or key retrieval fails + */ + getPublicKey(): Uint8Array { + if (!this.initialized || !this.handle) { + throw new Error("Notary not initialized") + } + + const keyBuffer = new Uint8Array(33) + const keyPtr = ptr(keyBuffer) + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const result = this.lib.symbols.tlsn_notary_get_public_key( + this.handle as any, + keyPtr, + BigInt(33), + ) + + if (result < 0) { + throw new Error(`Failed to get public key: error code ${result}`) + } + + return keyBuffer.slice(0, result) + } + + /** + * Get the public key as a hex-encoded string + * @returns Hex-encoded compressed public key + */ + getPublicKeyHex(): string { + const key = this.getPublicKey() + return Buffer.from(key).toString("hex") + } + + /** + * Get health status of the notary + * @returns Health status object + */ + getHealthStatus(): NotaryHealthStatus { + if (!this.initialized) { + return { + healthy: false, + initialized: false, + serverRunning: false, + error: "Notary not initialized", + } + } + + try { + const publicKey = this.getPublicKeyHex() + return { + healthy: true, + initialized: this.initialized, + serverRunning: this.serverRunning, + publicKey, + } + } catch (error) { + return { + healthy: false, + initialized: this.initialized, + serverRunning: this.serverRunning, + error: error instanceof Error ? 
error.message : String(error), + } + } + } + + /** + * Cleanup and release resources + * Call this when shutting down the notary + */ + destroy(): void { + if (this.handle) { + // Best-effort stop if server is still running + if (this.serverRunning) { + try { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this.lib.symbols.tlsn_notary_stop_server(this.handle as any) + } finally { + this.serverRunning = false + } + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + this.lib.symbols.tlsn_notary_destroy(this.handle as any) + this.handle = null + } + // Clear buffer references after native handle is released + this._signingKey = null + this._configBuffer = null + this.initialized = false + this.serverRunning = false + } + + /** + * Check if the notary is initialized + */ + isInitialized(): boolean { + return this.initialized + } + + /** + * Check if the server is running + */ + isServerRunning(): boolean { + return this.serverRunning + } +} + +export default TLSNotaryFFI diff --git a/src/features/tlsnotary/index.ts b/src/features/tlsnotary/index.ts new file mode 100644 index 00000000..c3aff2e0 --- /dev/null +++ b/src/features/tlsnotary/index.ts @@ -0,0 +1,151 @@ +/** + * TLSNotary Feature Module + * + * Provides HTTPS attestation capabilities using TLSNotary (MPC-TLS). + * Enables verifiable proofs of web content without compromising user privacy. + * + * ## Architecture + * + * ``` + * Browser (tlsn-js WASM) <--WebSocket--> Notary Server (Rust FFI) + * │ │ + * │ attest() │ participates in MPC-TLS + * ▼ ▼ + * Generates Attestation Signs attestation with secp256k1 + * │ + * ▼ + * SDK (demosdk/tlsnotary) <--HTTP--> Node (/tlsnotary/verify) + * │ + * ▼ + * Verifies signature & data + * ``` + * + * ## Environment Variables + * + * - TLSNOTARY_DISABLED: Disable the feature (default: false, i.e. 
enabled by default) + * - TLSNOTARY_PORT: WebSocket port (default: 7047) + * - TLSNOTARY_SIGNING_KEY: 32-byte hex secp256k1 key (required if enabled) + * - TLSNOTARY_MAX_SENT_DATA: Max sent bytes (default: 16384) + * - TLSNOTARY_MAX_RECV_DATA: Max recv bytes (default: 65536) + * - TLSNOTARY_AUTO_START: Auto-start on init (default: true) + * - TLSNOTARY_FATAL: Make errors fatal for debugging (default: false) + * - TLSNOTARY_DEBUG: Enable verbose debug logging (default: false) + * - TLSNOTARY_PROXY: Enable TCP proxy to log incoming data (default: false) + * + * ## Usage + * + * ```typescript + * import { initializeTLSNotary, shutdownTLSNotary } from '@/features/tlsnotary'; + * + * // Initialize (reads from environment, optionally pass BunServer for routes) + * await initializeTLSNotary(bunServer); + * + * // On shutdown + * await shutdownTLSNotary(); + * ``` + * + * @module features/tlsnotary + */ + +// REVIEW: TLSNotary feature module - entry point for HTTPS attestation feature +import type { BunServer } from "@/libs/network/bunServer" +import { + TLSNotaryService, + getTLSNotaryService, + initializeTLSNotaryService, + shutdownTLSNotaryService, + getConfigFromEnv, +} from "./TLSNotaryService" +import { registerTLSNotaryRoutes } from "./routes" +import log from "@/utilities/logger" + +// Re-export types and classes +export { TLSNotaryService, getTLSNotaryService, getConfigFromEnv, isTLSNotaryFatal, isTLSNotaryDebug, isTLSNotaryProxy } from "./TLSNotaryService" +export { TLSNotaryFFI } from "./ffi" +export type { NotaryConfig, VerificationResult, NotaryHealthStatus } from "./ffi" +export type { TLSNotaryServiceConfig, TLSNotaryServiceStatus } from "./TLSNotaryService" + +/** + * Initialize TLSNotary feature + * + * Reads configuration from environment, initializes the service if enabled, + * and optionally registers HTTP routes with BunServer. 
+ * + * @param server - Optional BunServer instance for route registration + * @returns True if enabled and initialized successfully + */ +export async function initializeTLSNotary(server?: BunServer): Promise { + const config = getConfigFromEnv() + + if (!config) { + log.info("[TLSNotary] Feature disabled (TLSNOTARY_DISABLED=true)") + return false + } + + try { + // Initialize the service + const service = await initializeTLSNotaryService() + + if (!service) { + log.warning("[TLSNotary] Failed to create service instance") + return false + } + + // Register HTTP routes if server is provided + if (server) { + registerTLSNotaryRoutes(server) + } + + const publicKeyHex = service.getPublicKeyHex() + log.info("[TLSNotary] Feature initialized successfully") + log.info(`[TLSNotary] WebSocket server on port: ${service.getPort()}`) + log.info(`[TLSNotary] Public key: ${publicKeyHex}`) + + return true + } catch (error) { + log.error("[TLSNotary] Failed to initialize:", error) + return false + } +} + +/** + * Shutdown TLSNotary feature + * + * Stops the WebSocket server and releases all resources. 
+ */ +export async function shutdownTLSNotary(): Promise { + try { + await shutdownTLSNotaryService() + log.info("[TLSNotary] Feature shutdown complete") + } catch (error) { + log.error("[TLSNotary] Error during shutdown:", error) + } +} + +/** + * Check if TLSNotary is enabled + * @returns True if enabled in environment + */ +export function isTLSNotaryEnabled(): boolean { + return getConfigFromEnv() !== null +} + +/** + * Get TLSNotary service status + * @returns Service status or null if not enabled + */ +export function getTLSNotaryStatus() { + const service = getTLSNotaryService() + if (!service) { + return null + } + return service.getStatus() +} + +export default { + initialize: initializeTLSNotary, + shutdown: shutdownTLSNotary, + isEnabled: isTLSNotaryEnabled, + getStatus: getTLSNotaryStatus, + getService: getTLSNotaryService, +} diff --git a/src/features/tlsnotary/portAllocator.ts b/src/features/tlsnotary/portAllocator.ts new file mode 100644 index 00000000..d23b439c --- /dev/null +++ b/src/features/tlsnotary/portAllocator.ts @@ -0,0 +1,164 @@ +/** + * TLSNotary Port Allocator + * + * Manages a pool of ports (55000-57000) for wstcp proxy instances. + * Uses sequential allocation with recycling of freed ports. 
+ * + * @module features/tlsnotary/portAllocator + */ + +// REVIEW: TLSNotary port pool management for wstcp proxy instances +import * as net from "net" +import log from "@/utilities/logger" + +/** + * Configuration constants for port allocation + */ +export const PORT_CONFIG = { + PORT_MIN: 55000, + PORT_MAX: 57000, + IDLE_TIMEOUT_MS: 30000, // 30 seconds + MAX_SPAWN_RETRIES: 3, + SPAWN_TIMEOUT_MS: 5000, // 5 seconds to wait for wstcp to start +} + +/** + * Port pool state interface + */ +export interface PortPoolState { + next: number // next port to try (55000-57000) + max: number // 57000 + recycled: number[] // freed ports available for reuse +} + +/** + * Initialize a new port pool state + * @returns Fresh port pool state + */ +export function initPortPool(): PortPoolState { + return { + next: PORT_CONFIG.PORT_MIN, + max: PORT_CONFIG.PORT_MAX, + recycled: [], + } +} + +/** + * Check if a port is available by attempting to bind to it + * @param port - Port number to check + * @returns True if port is available + */ +export async function isPortAvailable(port: number): Promise { + return new Promise(resolve => { + const server = net.createServer() + let settled = false + + const timer = setTimeout(() => { + try { + server.close() + } finally { + finish(false) + } + }, PORT_CONFIG.SPAWN_TIMEOUT_MS) + + const finish = (available: boolean) => { + if (settled) return + settled = true + clearTimeout(timer) + resolve(available) + } + + server.once("error", () => { + try { + server.close() + } finally { + finish(false) + } + }) + + server.once("listening", () => { + server.close(() => finish(true)) + }) + + server.listen(port, "0.0.0.0") + }) +} + +/** + * Allocate a port from the pool + * First tries recycled ports, then sequential allocation + * @param pool - Port pool state + * @returns Allocated port number or null if exhausted + */ +export async function allocatePort( + pool: PortPoolState, +): Promise { + // First try recycled ports + while (pool.recycled.length 
> 0) { + const recycledPort = pool.recycled.pop()! + if (await isPortAvailable(recycledPort)) { + log.debug(`[TLSNotary] Allocated recycled port: ${recycledPort}`) + return recycledPort + } + // Port was recycled but is now in use, skip it + log.debug( + `[TLSNotary] Recycled port ${recycledPort} is in use, trying next`, + ) + } + + // Try sequential allocation + while (pool.next <= pool.max) { + const port = pool.next + pool.next++ + + if (await isPortAvailable(port)) { + log.debug(`[TLSNotary] Allocated sequential port: ${port}`) + return port + } + // Port in use, try next + log.debug(`[TLSNotary] Port ${port} is in use, trying next`) + } + + // All ports exhausted + log.warning("[TLSNotary] Port pool exhausted") + return null +} + +/** + * Release a port back to the recycled pool + * @param pool - Port pool state + * @param port - Port number to release + */ +export function releasePort(pool: PortPoolState, port: number): void { + // Only recycle valid ports + if (port >= PORT_CONFIG.PORT_MIN && port <= PORT_CONFIG.PORT_MAX) { + // Avoid duplicates + if (!pool.recycled.includes(port)) { + pool.recycled.push(port) + log.debug(`[TLSNotary] Released port ${port} to recycled pool`) + } + } +} + +/** + * Get current pool statistics + * @param pool - Port pool state + * @returns Pool statistics object + */ +export function getPoolStats(pool: PortPoolState): { + allocated: number + recycled: number + remaining: number + total: number +} { + const total = PORT_CONFIG.PORT_MAX - PORT_CONFIG.PORT_MIN + 1 + const remaining = pool.max - pool.next + 1 + pool.recycled.length + const allocated = total - remaining + + return { + allocated, + recycled: pool.recycled.length, + remaining, + total, + } +} diff --git a/src/features/tlsnotary/proxyManager.ts b/src/features/tlsnotary/proxyManager.ts new file mode 100644 index 00000000..86da4ac1 --- /dev/null +++ b/src/features/tlsnotary/proxyManager.ts @@ -0,0 +1,604 @@ +/** + * TLSNotary WebSocket Proxy Manager + * + * Manages wstcp 
proxy processes for domain-specific TLS attestation. + * Spawns proxies on-demand, monitors activity, and cleans up idle instances. + * + * ## Architecture + * + * ``` + * SDK Request → requestProxy(targetUrl) + * │ + * ▼ + * ┌──────────────┐ + * │ Lazy Cleanup │ ─── Kill proxies idle > 30s + * └──────────────┘ + * │ + * ▼ + * ┌──────────────────┐ + * │ Check Existing? │ + * └──────────────────┘ + * │ + * ┌────────────┴────────────┐ + * ▼ ▼ + * EXISTS NOT EXISTS + * Update lastActivity Spawn new wstcp + * Return existing Register & return + * ``` + * + * @module features/tlsnotary/proxyManager + */ + +// REVIEW: TLSNotary proxy manager - manages wstcp processes for TLS attestation +import { spawn, type ChildProcess } from "child_process" +import { exec } from "child_process" +import { promisify } from "util" +import log from "@/utilities/logger" +import { getSharedState } from "@/utilities/sharedState" +import { + PORT_CONFIG, + initPortPool, + allocatePort, + releasePort, + type PortPoolState, +} from "./portAllocator" + +const execAsync = promisify(exec) + +/** + * Error codes for proxy operations + */ +export enum ProxyError { + PROXY_SPAWN_FAILED = "PROXY_SPAWN_FAILED", + PORT_EXHAUSTED = "PORT_EXHAUSTED", + INVALID_URL = "INVALID_URL", + WSTCP_NOT_AVAILABLE = "WSTCP_NOT_AVAILABLE", +} + +/** + * Information about a running proxy + */ +export interface ProxyInfo { + proxyId: string // uuid + domain: string // "api.example.com" + targetPort: number // 443 + port: number // allocated local port (55123) + process: ChildProcess // wstcp process handle + lastActivity: number // Date.now() timestamp + spawnedAt: number // Date.now() timestamp + websocketProxyUrl: string // "ws://node.demos.sh:55123" +} + +/** + * TLSNotary state stored in sharedState + */ +export interface TLSNotaryState { + proxies: Map // keyed by "domain:port" + portPool: PortPoolState +} + +/** + * Success response for proxy request + */ +export interface ProxyRequestSuccess { + 
websocketProxyUrl: string + targetDomain: string + expiresIn: number + proxyId: string +} + +/** + * Error response for proxy request + */ +export interface ProxyRequestError { + error: ProxyError + message: string + targetDomain?: string + lastError?: string +} + +/** + * Generate a cryptographically secure UUID + */ +function generateUuid(): string { + return crypto.randomUUID() +} + +/** + * Get the TLSNotary state, initializing if needed + */ +function getTLSNotaryState(): TLSNotaryState { + const sharedState = getSharedState + if (!sharedState.tlsnotary) { + sharedState.tlsnotary = { + proxies: new Map(), + portPool: initPortPool(), + } + log.info("[TLSNotary] Initialized proxy manager state") + } + return sharedState.tlsnotary +} + +/** + * Ensure wstcp binary is available, installing if needed + * @throws Error if wstcp cannot be found or installed + */ +export async function ensureWstcp(): Promise { + try { + await execAsync("which wstcp") + log.debug("[TLSNotary] wstcp binary found") + } catch { + log.info("[TLSNotary] wstcp not found, installing via cargo...") + try { + await execAsync("cargo install wstcp") + log.info("[TLSNotary] wstcp installed successfully") + } catch (installError: any) { + throw new Error(`Failed to install wstcp: ${installError.message}`) + } + } +} + +/** + * Extract domain and port from a target URL + * @param targetUrl - Full URL like "https://api.example.com:8443/endpoint" + * @returns Domain and port extracted from URL + */ +export function extractDomainAndPort(targetUrl: string): { + domain: string + port: number +} { + try { + const url = new URL(targetUrl) + const domain = url.hostname + + // If explicit port in URL, use it + if (url.port) { + return { domain, port: parseInt(url.port, 10) } + } + + // Otherwise infer from protocol + const port = url.protocol === "https:" ? 
443 : 80 + return { domain, port } + } catch { + throw new Error(`Invalid URL: ${targetUrl}`) + } +} + +/** + * Build the public WebSocket URL for the proxy + * @param localPort - Local port the proxy is listening on + * @param requestOrigin - Optional request origin for auto-detection + * @returns WebSocket URL like "ws://node.demos.sh:55123" + */ +export function getPublicUrl(localPort: number, requestOrigin?: string): string { + const build = (base: string) => { + const url = new URL(base) + const wsScheme = url.protocol === "https:" ? "wss" : "ws" + return `${wsScheme}://${url.hostname}:${localPort}` + } + + // 1. Try auto-detect from request origin (if available in headers) + if (requestOrigin) { + try { + return build(requestOrigin) + } catch { + // Invalid origin, continue to fallback + } + } + + // 2. Fall back to EXPOSED_URL + if (process.env.EXPOSED_URL) { + try { + return build(process.env.EXPOSED_URL) + } catch { + // Invalid EXPOSED_URL, continue to fallback + } + } + + // 3. 
Fall back to sharedState.exposedUrl + const sharedState = getSharedState + try { + return build(sharedState.exposedUrl) + } catch { + // Last resort: localhost + return `ws://localhost:${localPort}` + } +} + +/** + * Attach activity monitors to the process + * Any stdout/stderr activity resets the idle timer + */ +function attachActivityMonitor( + process: ChildProcess, + proxyInfo: ProxyInfo, + state: TLSNotaryState, +): void { + // Any stdout activity resets the idle timer + process.stdout?.on("data", (data: Buffer) => { + proxyInfo.lastActivity = Date.now() + log.debug( + `[TLSNotary] Proxy ${proxyInfo.domain} stdout: ${data.toString().trim()}`, + ) + }) + + process.stderr?.on("data", (data: Buffer) => { + proxyInfo.lastActivity = Date.now() + log.debug( + `[TLSNotary] Proxy ${proxyInfo.domain} stderr: ${data.toString().trim()}`, + ) + }) + + process.on("exit", code => { + log.info( + `[TLSNotary] Proxy for ${proxyInfo.domain} exited with code ${code}`, + ) + // Remove from registry + const key = `${proxyInfo.domain}:${proxyInfo.targetPort}` + state.proxies.delete(key) + // Release port back to pool + releasePort(state.portPool, proxyInfo.port) + }) + + process.on("error", err => { + log.error(`[TLSNotary] Proxy ${proxyInfo.domain} error: ${err.message}`) + }) +} + +/** + * Spawn a new wstcp proxy process + * @param domain - Target domain + * @param targetPort - Target port (usually 443) + * @param localPort - Local port to bind + * @param requestOrigin - Optional request origin for URL building + * @returns ProxyInfo on success + */ +async function spawnProxy( + domain: string, + targetPort: number, + localPort: number, + requestOrigin?: string, +): Promise { + const state = getTLSNotaryState() + + // Spawn wstcp: wstcp --bind-addr 0.0.0.0:{port} {domain}:{targetPort} + const args = ["--bind-addr", `0.0.0.0:${localPort}`, `${domain}:${targetPort}`] + log.info(`[TLSNotary] Spawning wstcp: wstcp ${args.join(" ")}`) + + const childProcess = spawn("wstcp", args, { 
+ stdio: ["ignore", "pipe", "pipe"], + detached: false, + }) + + const proxyId = generateUuid() + const now = Date.now() + const websocketProxyUrl = getPublicUrl(localPort, requestOrigin) + + const proxyInfo: ProxyInfo = { + proxyId, + domain, + targetPort, + port: localPort, + process: childProcess, + lastActivity: now, + spawnedAt: now, + websocketProxyUrl, + } + + // Wait for either success (INFO message) or failure (panic/error) + await new Promise((resolve, reject) => { + let stderrBuffer = "" + let resolved = false + + const cleanup = () => { + resolved = true + childProcess.stderr?.removeAllListeners("data") + childProcess.removeAllListeners("error") + childProcess.removeAllListeners("exit") + } + + const timeout = setTimeout(() => { + if (!resolved) { + cleanup() + // No output after timeout - assume failure + reject(new Error(`wstcp startup timeout - no response after ${PORT_CONFIG.SPAWN_TIMEOUT_MS}ms`)) + } + }, PORT_CONFIG.SPAWN_TIMEOUT_MS) + + // wstcp writes all output to stderr (Rust tracing) + childProcess.stderr?.on("data", (data: Buffer) => { + const output = data.toString() + stderrBuffer += output + + // Check for panic (Rust panic message) + if (output.includes("panicked at") || output.includes("thread 'main'")) { + clearTimeout(timeout) + if (!resolved) { + cleanup() + // Extract useful error message + const addrInUse = stderrBuffer.includes("AddrInUse") || stderrBuffer.includes("Address already in use") + if (addrInUse) { + reject(new Error(`Port ${localPort} already in use`)) + } else { + reject(new Error(`wstcp panic: ${output.trim().substring(0, 200)}`)) + } + } + return + } + + // Check for success (INFO Starts a WebSocket proxy server) + if (output.includes("INFO") && output.includes("Starts a WebSocket")) { + clearTimeout(timeout) + if (!resolved) { + cleanup() + log.info(`[TLSNotary] wstcp started successfully on port ${localPort}`) + resolve() + } + return + } + }) + + childProcess.on("error", err => { + clearTimeout(timeout) + if 
(!resolved) { + cleanup() + reject(err) + } + }) + + childProcess.on("exit", code => { + clearTimeout(timeout) + if (!resolved) { + cleanup() + if (code !== null && code !== 0) { + reject(new Error(`wstcp exited with code ${code}: ${stderrBuffer.trim().substring(0, 200)}`)) + } + } + }) + }) + + // Attach activity monitors after successful spawn + attachActivityMonitor(childProcess, proxyInfo, state) + + return proxyInfo +} + +/** + * Clean up stale proxies (idle > 30s) + * Called lazily on each new request + */ +export function cleanupStaleProxies(): void { + const state = getTLSNotaryState() + const now = Date.now() + const staleThreshold = now - PORT_CONFIG.IDLE_TIMEOUT_MS + + for (const [key, proxy] of state.proxies) { + if (proxy.lastActivity < staleThreshold) { + log.info( + `[TLSNotary] Cleaning up stale proxy for ${proxy.domain} (idle ${Math.floor( + (now - proxy.lastActivity) / 1000, + )}s)`, + ) + // Kill the process + try { + proxy.process.kill("SIGTERM") + } catch { + // Process may have already exited + } + // Remove from registry (exit handler will also do this) + state.proxies.delete(key) + // Release port + releasePort(state.portPool, proxy.port) + } + } +} + +/** + * Check if a proxy process is still alive + */ +function isProxyAlive(proxy: ProxyInfo): boolean { + try { + // Send signal 0 to check if process exists + return proxy.process.kill(0) + } catch { + return false + } +} + +/** + * Request a proxy for the given target URL + * Main entry point for the proxy manager + * + * @param targetUrl - Full URL like "https://api.example.com/endpoint" + * @param requestOrigin - Optional request origin for URL building + * @returns Success or error response + */ +export async function requestProxy( + targetUrl: string, + requestOrigin?: string, +): Promise { + // 1. Ensure wstcp is available + try { + await ensureWstcp() + } catch (err: any) { + return { + error: ProxyError.WSTCP_NOT_AVAILABLE, + message: err.message, + } + } + + // 2. 
Extract domain and port + let domain: string + let targetPort: number + try { + const extracted = extractDomainAndPort(targetUrl) + domain = extracted.domain + targetPort = extracted.port + } catch (err: any) { + return { + error: ProxyError.INVALID_URL, + message: err.message, + } + } + + // 3. Lazy cleanup of stale proxies + cleanupStaleProxies() + + const state = getTLSNotaryState() + const key = `${domain}:${targetPort}` + + // 4. Check if proxy exists and is alive + const existingProxy = state.proxies.get(key) + if (existingProxy && isProxyAlive(existingProxy)) { + // Update lastActivity and return existing + existingProxy.lastActivity = Date.now() + log.info(`[TLSNotary] Reusing existing proxy for ${domain}:${targetPort}`) + return { + websocketProxyUrl: existingProxy.websocketProxyUrl, + targetDomain: domain, + expiresIn: PORT_CONFIG.IDLE_TIMEOUT_MS, + proxyId: existingProxy.proxyId, + } + } + + // 5. Need to spawn a new proxy - try up to MAX_SPAWN_RETRIES times + let lastError = "" + for (let attempt = 0; attempt < PORT_CONFIG.MAX_SPAWN_RETRIES; attempt++) { + // Allocate a port + const localPort = await allocatePort(state.portPool) + if (localPort === null) { + return { + error: ProxyError.PORT_EXHAUSTED, + message: "All ports in range 55000-57000 are exhausted", + targetDomain: domain, + } + } + + try { + const proxyInfo = await spawnProxy( + domain, + targetPort, + localPort, + requestOrigin, + ) + + // Register in state + state.proxies.set(key, proxyInfo) + log.info( + `[TLSNotary] Spawned proxy for ${domain}:${targetPort} on port ${localPort}`, + ) + + return { + websocketProxyUrl: proxyInfo.websocketProxyUrl, + targetDomain: domain, + expiresIn: PORT_CONFIG.IDLE_TIMEOUT_MS, + proxyId: proxyInfo.proxyId, + } + } catch (err: any) { + lastError = err.message + log.warning( + `[TLSNotary] Spawn attempt ${attempt + 1} failed for ${domain}: ${lastError}`, + ) + // Release the port since spawn failed + releasePort(state.portPool, localPort) + } + } + + // 
All attempts failed + return { + error: ProxyError.PROXY_SPAWN_FAILED, + message: `Failed to spawn proxy after ${PORT_CONFIG.MAX_SPAWN_RETRIES} attempts`, + targetDomain: domain, + lastError, + } +} + +/** + * Kill a specific proxy by ID + * @param proxyId - Proxy UUID to kill + * @returns True if found and killed + */ +export function killProxy(proxyId: string): boolean { + const state = getTLSNotaryState() + + for (const [key, proxy] of state.proxies) { + if (proxy.proxyId === proxyId) { + log.info(`[TLSNotary] Manually killing proxy ${proxyId} for ${proxy.domain}`) + try { + proxy.process.kill("SIGTERM") + } catch { + // Process may have already exited + } + state.proxies.delete(key) + releasePort(state.portPool, proxy.port) + return true + } + } + + return false +} + +/** + * Kill all active proxies (cleanup on shutdown) + */ +export function killAllProxies(): void { + const state = getTLSNotaryState() + + for (const [key, proxy] of state.proxies) { + log.info(`[TLSNotary] Killing proxy for ${proxy.domain}`) + try { + proxy.process.kill("SIGTERM") + } catch { + // Process may have already exited + } + } + + state.proxies.clear() + log.info("[TLSNotary] All proxies killed") +} + +/** + * Get current proxy manager status + */ +export function getProxyManagerStatus(): { + activeProxies: number + proxies: Array<{ + proxyId: string + domain: string + port: number + idleSeconds: number + }> + portPool: { + allocated: number + recycled: number + remaining: number + } +} { + const state = getTLSNotaryState() + const now = Date.now() + + const proxies = Array.from(state.proxies.values()).map(p => ({ + proxyId: p.proxyId, + domain: p.domain, + port: p.port, + idleSeconds: Math.floor((now - p.lastActivity) / 1000), + })) + + const total = PORT_CONFIG.PORT_MAX - PORT_CONFIG.PORT_MIN + 1 + const remaining = + state.portPool.max - + state.portPool.next + + 1 + + state.portPool.recycled.length + const allocated = total - remaining + + return { + activeProxies: 
state.proxies.size, + proxies, + portPool: { + allocated, + recycled: state.portPool.recycled.length, + remaining, + }, + } +} diff --git a/src/features/tlsnotary/routes.ts b/src/features/tlsnotary/routes.ts new file mode 100644 index 00000000..d29a5736 --- /dev/null +++ b/src/features/tlsnotary/routes.ts @@ -0,0 +1,226 @@ +/** + * TLSNotary Routes for BunServer + * + * HTTP API endpoints for TLSNotary operations: + * - GET /tlsnotary/health - Health check + * - GET /tlsnotary/info - Service info with public key + * - POST /tlsnotary/verify - Verify attestation + * + * @module features/tlsnotary/routes + */ + +// REVIEW: TLSNotary routes - new API endpoints for HTTPS attestation +import { getTLSNotaryService } from "./TLSNotaryService" +import type { BunServer } from "@/libs/network/bunServer" +import { jsonResponse } from "@/libs/network/bunServer" +import log from "@/utilities/logger" + +// ============================================================================ +// Request/Response Types +// ============================================================================ + +/** + * Verify attestation request body + */ +interface VerifyRequestBody { + /** Base64-encoded attestation bytes */ + attestation: string; +} + +/** + * Health response + */ +interface HealthResponse { + status: "healthy" | "unhealthy" | "disabled"; + service: string; + initialized?: boolean; + serverRunning?: boolean; + error?: string; +} + +/** + * Info response + */ +interface InfoResponse { + enabled: boolean; + port: number; + publicKey?: string; + running?: boolean; +} + +/** + * Verify response + */ +interface VerifyResponse { + success: boolean; + serverName?: string; + connectionTime?: number; + sentLength?: number; + recvLength?: number; + error?: string; +} + +// ============================================================================ +// Route Handlers +// ============================================================================ + +/** + * Health check handler + */ +async 
function healthHandler(): Promise { + const service = getTLSNotaryService() + + if (!service) { + const response: HealthResponse = { + status: "disabled", + service: "tlsnotary", + } + return jsonResponse(response) + } + + const status = service.getStatus() + + if (!status.health.healthy) { + const response: HealthResponse = { + status: "unhealthy", + service: "tlsnotary", + initialized: status.health.initialized, + serverRunning: status.health.serverRunning, + error: status.health.error, + } + return jsonResponse(response, 503) + } + + const response: HealthResponse = { + status: "healthy", + service: "tlsnotary", + initialized: status.health.initialized, + serverRunning: status.health.serverRunning, + } + return jsonResponse(response) +} + +/** + * Service info handler + */ +async function infoHandler(): Promise { + const service = getTLSNotaryService() + + if (!service) { + const response: InfoResponse = { + enabled: false, + port: 0, + } + return jsonResponse(response) + } + + const status = service.getStatus() + + const response: InfoResponse = { + enabled: status.enabled, + port: status.port, + publicKey: status.health.publicKey, + running: status.running, + } + return jsonResponse(response) +} + +/** + * Verify attestation handler + */ +async function verifyHandler(req: Request): Promise { + const service = getTLSNotaryService() + + if (!service) { + const response: VerifyResponse = { + success: false, + error: "TLSNotary service is not enabled", + } + return jsonResponse(response, 503) + } + + if (!service.isRunning()) { + const response: VerifyResponse = { + success: false, + error: "TLSNotary service is not running", + } + return jsonResponse(response, 503) + } + + let body: VerifyRequestBody + try { + body = await req.json() + } catch { + const response: VerifyResponse = { + success: false, + error: "Invalid JSON body", + } + return jsonResponse(response, 400) + } + + const { attestation } = body + + if (!attestation || typeof attestation !== "string") { 
+ const response: VerifyResponse = { + success: false, + error: "Missing or invalid attestation parameter", + } + return jsonResponse(response, 400) + } + + try { + const result = service.verify(attestation) + + if (result.success) { + const response: VerifyResponse = { + success: true, + serverName: result.serverName, + connectionTime: result.connectionTime, + sentLength: result.sentLength, + recvLength: result.recvLength, + } + return jsonResponse(response) + } else { + const response: VerifyResponse = { + success: false, + error: result.error, + } + return jsonResponse(response, 400) + } + } catch (error) { + const response: VerifyResponse = { + success: false, + error: error instanceof Error ? error.message : "Unknown error during verification", + } + return jsonResponse(response, 500) + } +} + +// ============================================================================ +// Route Registration +// ============================================================================ + +/** + * Register TLSNotary routes with BunServer + * + * Routes: + * - GET /tlsnotary/health - Health check endpoint + * - GET /tlsnotary/info - Service info with public key (for SDK discovery) + * - POST /tlsnotary/verify - Verify an attestation + * + * @param server - BunServer instance + */ +export function registerTLSNotaryRoutes(server: BunServer): void { + // Health check + server.get("/tlsnotary/health", healthHandler) + + // Service info (for SDK discovery) + server.get("/tlsnotary/info", infoHandler) + + // Verify attestation + server.post("/tlsnotary/verify", verifyHandler) + + log.info("[TLSNotary] Routes registered: /tlsnotary/health, /tlsnotary/info, /tlsnotary/verify") +} + +export default registerTLSNotaryRoutes diff --git a/src/features/tlsnotary/tokenManager.ts b/src/features/tlsnotary/tokenManager.ts new file mode 100644 index 00000000..39b9f0bb --- /dev/null +++ b/src/features/tlsnotary/tokenManager.ts @@ -0,0 +1,349 @@ +/** + * TLSNotary Attestation Token Manager + * 
+ * Manages in-memory tokens for paid TLSNotary attestation access. + * Tokens are domain-locked, expire after 30 minutes, and allow 3 retries. + * + * @module features/tlsnotary/tokenManager + */ + +// REVIEW: TLSNotary token management for paid attestation access +import { randomUUID } from "crypto" +import log from "@/utilities/logger" +import { getSharedState } from "@/utilities/sharedState" + +/** + * Token configuration constants + */ +export const TOKEN_CONFIG = { + EXPIRY_MS: 30 * 60 * 1000, // 30 minutes + MAX_RETRIES: 3, + CLEANUP_INTERVAL_MS: 60 * 1000, // cleanup every minute +} + +/** + * Token status enum + */ +export enum TokenStatus { + PENDING = "pending", // Created, not yet used + ACTIVE = "active", // Proxy spawned, attestation in progress + COMPLETED = "completed", // Attestation successful + STORED = "stored", // Proof stored on-chain/IPFS + EXHAUSTED = "exhausted", // Max retries reached + EXPIRED = "expired", // Time limit exceeded +} + +/** + * Attestation token structure + */ +export interface AttestationToken { + id: string + owner: string // pubkey of the payer + domain: string // locked domain (e.g., "api.example.com") + status: TokenStatus + createdAt: number // timestamp + expiresAt: number // timestamp + retriesLeft: number + txHash: string // original payment tx hash + proxyId?: string // linked proxy ID once spawned +} + +/** + * Token store state (stored in sharedState) + */ +export interface TokenStoreState { + tokens: Map + cleanupTimer?: ReturnType +} + +/** + * Generate a cryptographically secure UUID for token IDs + */ +function generateTokenId(): string { + return `tlsn_${randomUUID()}` +} + +/** + * Get or initialize the token store from sharedState + */ +function getTokenStore(): TokenStoreState { + const sharedState = getSharedState + if (!sharedState.tlsnTokenStore) { + sharedState.tlsnTokenStore = { + tokens: new Map(), + } + // Start cleanup timer + startCleanupTimer() + log.info("[TLSNotary] Initialized token store") 
+ } + return sharedState.tlsnTokenStore +} + +/** + * Start periodic cleanup of expired tokens + */ +function startCleanupTimer(): void { + const store = getSharedState.tlsnTokenStore + if (store && !store.cleanupTimer) { + store.cleanupTimer = setInterval(() => { + cleanupExpiredTokens() + }, TOKEN_CONFIG.CLEANUP_INTERVAL_MS) + log.debug("[TLSNotary] Started token cleanup timer") + } +} + +/** + * Extract domain from a URL + */ +export function extractDomain(targetUrl: string): string { + try { + const url = new URL(targetUrl) + return url.hostname + } catch { + throw new Error(`Invalid URL: ${targetUrl}`) + } +} + +/** + * Create a new attestation token + * + * @param owner - Public key of the token owner + * @param targetUrl - Target URL (domain will be extracted and locked) + * @param txHash - Transaction hash of the payment + * @returns The created token + */ +export function createToken( + owner: string, + targetUrl: string, + txHash: string, +): AttestationToken { + const store = getTokenStore() + const now = Date.now() + const domain = extractDomain(targetUrl) + + const token: AttestationToken = { + id: generateTokenId(), + owner, + domain, + status: TokenStatus.PENDING, + createdAt: now, + expiresAt: now + TOKEN_CONFIG.EXPIRY_MS, + retriesLeft: TOKEN_CONFIG.MAX_RETRIES, + txHash, + } + + store.tokens.set(token.id, token) + log.info(`[TLSNotary] Created token ${token.id} for ${domain} (owner: ${owner.substring(0, 16)}...)`) + + return token +} + +/** + * Validation result for token checks + */ +export interface TokenValidationResult { + valid: boolean + error?: string + token?: AttestationToken +} + +/** + * Validate a token for use + * + * @param tokenId - Token ID to validate + * @param owner - Public key claiming to own the token + * @param targetUrl - Target URL being requested + * @returns Validation result with token if valid + */ +export function validateToken( + tokenId: string, + owner: string, + targetUrl: string, +): TokenValidationResult { + 
const store = getTokenStore() + const token = store.tokens.get(tokenId) + + if (!token) { + return { valid: false, error: "TOKEN_NOT_FOUND" } + } + + // Check ownership + if (token.owner !== owner) { + return { valid: false, error: "TOKEN_OWNER_MISMATCH" } + } + + // Check expiry + if (Date.now() > token.expiresAt) { + token.status = TokenStatus.EXPIRED + return { valid: false, error: "TOKEN_EXPIRED" } + } + + // Check domain lock + const requestedDomain = extractDomain(targetUrl) + if (token.domain !== requestedDomain) { + return { valid: false, error: "TOKEN_DOMAIN_MISMATCH", token } + } + + // Check status + if (token.status === TokenStatus.EXHAUSTED) { + return { valid: false, error: "TOKEN_EXHAUSTED" } + } + if (token.status === TokenStatus.EXPIRED) { + return { valid: false, error: "TOKEN_EXPIRED" } + } + if (token.status === TokenStatus.STORED) { + return { valid: false, error: "TOKEN_ALREADY_STORED" } + } + + // Check retries + if (token.retriesLeft <= 0) { + token.status = TokenStatus.EXHAUSTED + return { valid: false, error: "TOKEN_NO_RETRIES_LEFT" } + } + + return { valid: true, token } +} + +/** + * Consume a retry attempt and mark token as active + * + * @param tokenId - Token ID + * @param proxyId - Proxy ID being spawned + * @returns Updated token or null if not found + */ +export function consumeRetry(tokenId: string, proxyId: string): AttestationToken | null { + const store = getTokenStore() + const token = store.tokens.get(tokenId) + + if (!token) { + return null + } + + token.retriesLeft -= 1 + token.status = TokenStatus.ACTIVE + token.proxyId = proxyId + + log.info(`[TLSNotary] Token ${tokenId} consumed retry (${token.retriesLeft} left), proxyId: ${proxyId}`) + + if (token.retriesLeft <= 0) { + log.warning(`[TLSNotary] Token ${tokenId} has no retries left`) + } + + return token +} + +/** + * Mark token as completed (attestation successful) + * + * @param tokenId - Token ID + * @returns Updated token or null if not found + */ +export function 
markCompleted(tokenId: string): AttestationToken | null { + const store = getTokenStore() + const token = store.tokens.get(tokenId) + + if (!token) { + return null + } + + token.status = TokenStatus.COMPLETED + log.info(`[TLSNotary] Token ${tokenId} marked as completed`) + + return token +} + +/** + * Mark token as stored (proof saved on-chain or IPFS) + * + * @param tokenId - Token ID + * @returns Updated token or null if not found + */ +export function markStored(tokenId: string): AttestationToken | null { + const store = getTokenStore() + const token = store.tokens.get(tokenId) + + if (!token) { + return null + } + + token.status = TokenStatus.STORED + log.info(`[TLSNotary] Token ${tokenId} marked as stored`) + + return token +} + +/** + * Get a token by ID + * + * @param tokenId - Token ID + * @returns Token or undefined + */ +export function getToken(tokenId: string): AttestationToken | undefined { + const store = getTokenStore() + return store.tokens.get(tokenId) +} + +/** + * Get token by transaction hash + * + * @param txHash - Transaction hash + * @returns Token or undefined + */ +export function getTokenByTxHash(txHash: string): AttestationToken | undefined { + const store = getTokenStore() + for (const token of store.tokens.values()) { + if (token.txHash === txHash) { + return token + } + } + return undefined +} + +/** + * Cleanup expired tokens + */ +export function cleanupExpiredTokens(): number { + const store = getTokenStore() + const now = Date.now() + let cleaned = 0 + + for (const [id, token] of store.tokens) { + if (now > token.expiresAt && token.status !== TokenStatus.STORED) { + store.tokens.delete(id) + cleaned++ + } + } + + if (cleaned > 0) { + log.debug(`[TLSNotary] Cleaned up ${cleaned} expired tokens`) + } + + return cleaned +} + +/** + * Get token store statistics + */ +export function getTokenStats(): { + total: number + byStatus: Record +} { + const store = getTokenStore() + const byStatus = { + [TokenStatus.PENDING]: 0, + 
[TokenStatus.ACTIVE]: 0, + [TokenStatus.COMPLETED]: 0, + [TokenStatus.STORED]: 0, + [TokenStatus.EXHAUSTED]: 0, + [TokenStatus.EXPIRED]: 0, + } + + for (const token of store.tokens.values()) { + byStatus[token.status]++ + } + + return { + total: store.tokens.size, + byStatus, + } +} diff --git a/src/index.ts b/src/index.ts index 7f571c2b..dc421334 100644 --- a/src/index.ts +++ b/src/index.ts @@ -53,6 +53,10 @@ const indexState: { OMNI_ENABLED: boolean OMNI_PORT: number omniServer: any + // REVIEW: TLSNotary configuration - new HTTPS attestation feature + TLSNOTARY_ENABLED: boolean + TLSNOTARY_PORT: number + tlsnotaryService: any } = { OVERRIDE_PORT: null, OVERRIDE_IS_TESTER: null, @@ -73,6 +77,10 @@ const indexState: { OMNI_ENABLED: false, OMNI_PORT: 0, omniServer: null, + // REVIEW: TLSNotary defaults - disabled by default, requires signing key + TLSNOTARY_ENABLED: process.env.TLSNOTARY_ENABLED?.toLowerCase() === "true", + TLSNOTARY_PORT: parseInt(process.env.TLSNOTARY_PORT ?? "7047", 10), + tlsnotaryService: null, } // SECTION Preparation methods @@ -119,7 +127,7 @@ async function digestArguments() { log.info("[MAIN] TUI disabled, using scrolling log output") indexState.TUI_ENABLED = false break - case "log-level": + case "log-level": { const level = param[1]?.toLowerCase() if (["debug", "info", "warning", "error", "critical"].includes(level)) { CategorizedLogger.getInstance().setMinLevel(level as "debug" | "info" | "warning" | "error" | "critical") @@ -128,6 +136,7 @@ async function digestArguments() { log.warning(`[MAIN] Invalid log level: ${param[1]}. 
Valid: debug, info, warning, error, critical`) } break + } default: log.warning("[MAIN] Invalid parameter: " + param) } @@ -523,6 +532,68 @@ async function main() { // Continue without MCP (failsafe) } } + + // REVIEW: Start TLSNotary service (failsafe - optional HTTPS attestation feature) + // Routes are registered in server_rpc.ts via registerTLSNotaryRoutes + if (indexState.TLSNOTARY_ENABLED) { + try { + const { initializeTLSNotary, getTLSNotaryService, isTLSNotaryFatal, isTLSNotaryDebug } = await import("./features/tlsnotary") + const fatal = isTLSNotaryFatal() + const debug = isTLSNotaryDebug() + + // REVIEW: Check for port collision with OmniProtocol + // OmniProtocol derives peer ports as HTTP_PORT + 1, which could collide with TLSNotary + if (indexState.OMNI_ENABLED) { + // Check if TLSNotary port could be hit by OmniProtocol peer connections + // This happens when a peer runs on HTTP port (TLSNotary port - 1) + const potentialCollisionPort = indexState.TLSNOTARY_PORT - 1 + log.warning(`[TLSNotary] ⚠️ OmniProtocol is enabled. 
If any peer runs on HTTP port ${potentialCollisionPort}, OmniProtocol will try to connect to port ${indexState.TLSNOTARY_PORT} (TLSNotary)`) + log.warning("[TLSNotary] This can cause 'WebSocket upgrade failed: Unsupported HTTP method' errors") + log.warning("[TLSNotary] Consider using a different TLSNOTARY_PORT to avoid collisions") + } + + if (debug) { + log.info("[TLSNotary] Debug mode: TLSNOTARY_DEBUG=true") + log.info(`[TLSNotary] Fatal mode: TLSNOTARY_FATAL=${fatal}`) + log.info(`[TLSNotary] Port: ${indexState.TLSNOTARY_PORT}`) + } + + // Initialize without passing BunServer - routes are registered separately in server_rpc.ts + const initialized = await initializeTLSNotary() + if (initialized) { + indexState.tlsnotaryService = getTLSNotaryService() + log.info(`[TLSNotary] WebSocket server started on port ${indexState.TLSNOTARY_PORT}`) + // Update TUI with TLSNotary info + if (indexState.TUI_ENABLED && indexState.tuiManager) { + indexState.tuiManager.updateNodeInfo({ + tlsnotary: { + enabled: true, + port: indexState.TLSNOTARY_PORT, + running: true, + }, + }) + } + } else { + const msg = "[TLSNotary] Service disabled or failed to initialize (check TLSNOTARY_SIGNING_KEY)" + if (fatal) { + log.error("[TLSNotary] FATAL: " + msg) + process.exit(1) + } + log.warning(msg) + } + } catch (error) { + log.error("[TLSNotary] Failed to start TLSNotary service: " + error) + const { isTLSNotaryFatal } = await import("./features/tlsnotary") + if (isTLSNotaryFatal()) { + log.error("[TLSNotary] FATAL: Exiting due to TLSNotary failure") + process.exit(1) + } + // Continue without TLSNotary (failsafe) + } + } else { + log.info("[TLSNotary] Service disabled (set TLSNOTARY_ENABLED=true to enable)") + } + log.info("[MAIN] ✅ Starting the background loop") // Update TUI status to running @@ -562,6 +633,17 @@ async function gracefulShutdown(signal: string) { } } + // REVIEW: Stop TLSNotary service if running + if (indexState.tlsnotaryService) { + console.log("[SHUTDOWN] Stopping 
TLSNotary service...") + try { + const { shutdownTLSNotary } = await import("./features/tlsnotary") + await shutdownTLSNotary() + } catch (error) { + console.error("[SHUTDOWN] Error stopping TLSNotary:", error) + } + } + console.log("[SHUTDOWN] Cleanup complete, exiting...") process.exit(0) } catch (error) { diff --git a/src/libs/blockchain/block.ts b/src/libs/blockchain/block.ts index 0bb9af0e..ca353a0f 100644 --- a/src/libs/blockchain/block.ts +++ b/src/libs/blockchain/block.ts @@ -43,6 +43,7 @@ export default class Block implements BlockType { native_tables_hashes: { native_gcr: "placeholder", native_subnets_txs: "placeholder", + native_tlsnotary: "placeholder", }, } this.proposer = null diff --git a/src/libs/blockchain/gcr/gcr_routines/GCRTLSNotaryRoutines.ts b/src/libs/blockchain/gcr/gcr_routines/GCRTLSNotaryRoutines.ts new file mode 100644 index 00000000..f306dcce --- /dev/null +++ b/src/libs/blockchain/gcr/gcr_routines/GCRTLSNotaryRoutines.ts @@ -0,0 +1,130 @@ +import { Repository } from "typeorm" + +import { GCREdit, GCREditTLSNotary } from "node_modules/@kynesyslabs/demosdk/build/types/blockchain/GCREdit" + +import { GCRTLSNotary } from "@/model/entities/GCRv2/GCR_TLSNotary" +import log from "@/utilities/logger" + +import { GCRResult } from "../handleGCR" + +// REVIEW: TLSNotary proof storage routines for GCR +/** + * GCRTLSNotaryRoutines handles the storage and retrieval of TLSNotary attestation proofs. + * Proofs are stored via the tlsn_store native operation after fee burning. 
+ */ +export class GCRTLSNotaryRoutines { + /** + * Apply a TLSNotary GCR edit operation (store proof) + * @param editOperation - The GCREditTLSNotary operation + * @param gcrTLSNotaryRepository - TypeORM repository for GCRTLSNotary + * @param simulate - If true, don't persist changes + */ + static async apply( + editOperation: GCREdit, + gcrTLSNotaryRepository: Repository, + simulate: boolean, + ): Promise { + if (editOperation.type !== "tlsnotary") { + return { success: false, message: "Invalid GCREdit type" } + } + + const tlsnEdit = editOperation as GCREditTLSNotary + + log.debug( + `[TLSNotary] Applying GCREdit: ${tlsnEdit.operation} for token ${tlsnEdit.data.tokenId} ` + + `(${tlsnEdit.isRollback ? "ROLLBACK" : "NORMAL"})`, + ) + + // Handle rollback: delete the stored proof + if (tlsnEdit.isRollback) { + if (!simulate) { + try { + await gcrTLSNotaryRepository.delete({ + tokenId: tlsnEdit.data.tokenId, + }) + log.info(`[TLSNotary] Rolled back proof for token ${tlsnEdit.data.tokenId}`) + } catch (error) { + log.error(`[TLSNotary] Failed to rollback proof: ${error}`) + return { success: false, message: "Failed to rollback TLSNotary proof" } + } + } + return { success: true, message: "TLSNotary proof rolled back" } + } + + // Handle store operation + if (tlsnEdit.operation === "store") { + // Check if proof already exists for this token + const existing = await gcrTLSNotaryRepository.findOneBy({ + tokenId: tlsnEdit.data.tokenId, + }) + + if (existing) { + log.warning(`[TLSNotary] Proof already exists for token ${tlsnEdit.data.tokenId}`) + return { success: false, message: "Proof already stored for this token" } + } + + // Create new proof entry + const proofEntry = new GCRTLSNotary() + proofEntry.tokenId = tlsnEdit.data.tokenId + proofEntry.owner = tlsnEdit.account + proofEntry.domain = tlsnEdit.data.domain + proofEntry.proof = tlsnEdit.data.proof + proofEntry.storageType = tlsnEdit.data.storageType + proofEntry.txhash = tlsnEdit.txhash + 
proofEntry.proofTimestamp = String(tlsnEdit.data.timestamp) + + if (!simulate) { + try { + await gcrTLSNotaryRepository.save(proofEntry) + log.info( + `[TLSNotary] Stored proof for token ${tlsnEdit.data.tokenId}, ` + + `domain: ${tlsnEdit.data.domain}, type: ${tlsnEdit.data.storageType}`, + ) + } catch (error) { + log.error(`[TLSNotary] Failed to store proof: ${error}`) + return { success: false, message: "Failed to store TLSNotary proof" } + } + } + + return { success: true, message: "TLSNotary proof stored" } + } + + return { success: false, message: `Unknown TLSNotary operation: ${tlsnEdit.operation}` } + } + + /** + * Get a stored proof by tokenId + * @param tokenId - The token ID to look up + * @param gcrTLSNotaryRepository - TypeORM repository + */ + static async getProof( + tokenId: string, + gcrTLSNotaryRepository: Repository, + ): Promise { + return gcrTLSNotaryRepository.findOneBy({ tokenId }) + } + + /** + * Get all proofs for an owner + * @param owner - The account address + * @param gcrTLSNotaryRepository - TypeORM repository + */ + static async getProofsByOwner( + owner: string, + gcrTLSNotaryRepository: Repository, + ): Promise { + return gcrTLSNotaryRepository.findBy({ owner }) + } + + /** + * Get all proofs for a domain + * @param domain - The domain to look up + * @param gcrTLSNotaryRepository - TypeORM repository + */ + static async getProofsByDomain( + domain: string, + gcrTLSNotaryRepository: Repository, + ): Promise { + return gcrTLSNotaryRepository.findBy({ domain }) + } +} diff --git a/src/libs/blockchain/gcr/gcr_routines/handleNativeOperations.ts b/src/libs/blockchain/gcr/gcr_routines/handleNativeOperations.ts index 28a3de61..b1c2c9c6 100644 --- a/src/libs/blockchain/gcr/gcr_routines/handleNativeOperations.ts +++ b/src/libs/blockchain/gcr/gcr_routines/handleNativeOperations.ts @@ -3,6 +3,12 @@ import { GCREdit } from "node_modules/@kynesyslabs/demosdk/build/types/blockchai import { Transaction } from 
"node_modules/@kynesyslabs/demosdk/build/types/blockchain/Transaction" import { INativePayload } from "node_modules/@kynesyslabs/demosdk/build/types/native" import log from "src/utilities/logger" +import { extractDomain, getToken, markStored, TokenStatus } from "@/features/tlsnotary/tokenManager" + +// REVIEW: TLSNotary native operation pricing (1 DEM = 1 unit, no decimals) +const TLSN_REQUEST_FEE = 1 +const TLSN_STORE_BASE_FEE = 1 +const TLSN_STORE_PER_KB_FEE = 1 // NOTE This class is responsible for handling native operations such as sending native tokens, etc. export class HandleNativeOperations { @@ -17,13 +23,12 @@ export class HandleNativeOperations { // Switching on the native operation type switch (nativePayload.nativeOperation) { // Balance operations for the send native method - case "send": - // eslint-disable-next-line no-var - var [to, amount] = nativePayload.args + case "send": { + const [to, amount] = nativePayload.args // First, remove the amount from the sender's balance log.debug("to: " + to) log.debug("amount: " + amount) - var subtractEdit: GCREdit = { + const subtractEdit: GCREdit = { type: "balance", operation: "remove", isRollback: isRollback, @@ -33,7 +38,7 @@ export class HandleNativeOperations { } edits.push(subtractEdit) // Then, add the amount to the receiver's balance - var addEdit: GCREdit = { + const addEdit: GCREdit = { type: "balance", operation: "add", isRollback: isRollback, @@ -43,10 +48,111 @@ export class HandleNativeOperations { } edits.push(addEdit) break - default: - log.warning("Unknown native operation: " + nativePayload.nativeOperation) // TODO Better error handling - // throw new Error("Unknown native operation: " + nativePayload.nativeOperation) + } + // REVIEW: TLSNotary attestation request - burns 1 DEM fee, creates token + case "tlsn_request": { + const [targetUrl] = nativePayload.args as [string] + log.info(`[TLSNotary] Processing tlsn_request for ${targetUrl} from ${tx.content.from}`) + + // Validate URL format + 
try { + extractDomain(targetUrl) // Validates URL format + log.debug(`[TLSNotary] URL validated: ${targetUrl}`) + } catch { + log.error(`[TLSNotary] Invalid URL in tlsn_request: ${targetUrl}`) + throw new Error("Invalid URL in tlsn_request") + } + + // Burn the fee (remove from sender, no add - effectively burns the token) + const burnFeeEdit: GCREdit = { + type: "balance", + operation: "remove", + isRollback: isRollback, + account: tx.content.from as string, + txhash: tx.hash, + amount: TLSN_REQUEST_FEE, + } + edits.push(burnFeeEdit) + + // Token creation is handled as a native side-effect during mempool simulation + // in `HandleGCR.processNativeSideEffects()` to avoid duplicate tokens. + break + } + + // REVIEW: TLSNotary proof storage - burns fee based on size, stores proof + case "tlsn_store": { + const [tokenId, proof, storageType] = nativePayload.args + log.info(`[TLSNotary] Processing tlsn_store for token ${tokenId}, storage: ${storageType}`) + + // Validate token exists and belongs to sender + const token = getToken(tokenId) + if (!token) { + log.error(`[TLSNotary] Token not found: ${tokenId}`) + throw new Error("Token not found") + } + if (token.owner !== tx.content.from) { + log.error(`[TLSNotary] Token owner mismatch: ${token.owner} !== ${tx.content.from}`) + throw new Error("Token owner mismatch") + } + // Token should be completed (attestation done) or active (in progress) + if (token.status !== TokenStatus.COMPLETED && token.status !== TokenStatus.ACTIVE) { + log.error(`[TLSNotary] Token not ready for storage: ${token.status}`) + throw new Error("Token not ready for storage") + } + + // Calculate storage fee: base + per KB (use byte length, not string length) + const proofBytes = + typeof proof === "string" + ? 
Buffer.byteLength(proof, "utf8") + : (proof as Uint8Array).byteLength + + const proofSizeKB = Math.ceil(proofBytes / 1024) + const storageFee = TLSN_STORE_BASE_FEE + (proofSizeKB * TLSN_STORE_PER_KB_FEE) + log.info(`[TLSNotary] Proof size: ${proofSizeKB}KB, fee: ${storageFee} DEM`) + + // Burn the storage fee + const burnStorageFeeEdit: GCREdit = { + type: "balance", + operation: "remove", + isRollback: isRollback, + account: tx.content.from as string, + txhash: tx.hash, + amount: storageFee, + } + edits.push(burnStorageFeeEdit) + + // Store the proof (on-chain via GCR) + // For IPFS: in future, proof will be IPFS hash, actual data stored externally + const storeProofEdit: GCREdit = { + type: "tlsnotary", + operation: "store", + account: tx.content.from as string, + data: { + tokenId: tokenId, + domain: token.domain, + proof: proof, + storageType: storageType, + timestamp: Date.now(), + }, + txhash: tx.hash, + isRollback: isRollback, + } + edits.push(storeProofEdit) + + // Mark token as stored (only if not a rollback) + if (!isRollback) { + markStored(tokenId) + log.info(`[TLSNotary] Token ${tokenId} marked as stored`) + } + break + } + + default: { + // Log unknown operations - INativePayload may have more operations than handled here + // Cast needed because TypeScript narrows to never after exhaustive switch + log.warning("Unknown native operation: " + (nativePayload as INativePayload).nativeOperation) break + } } return edits diff --git a/src/libs/blockchain/gcr/gcr_routines/hashGCR.ts b/src/libs/blockchain/gcr/gcr_routines/hashGCR.ts index d52c608e..0638de87 100644 --- a/src/libs/blockchain/gcr/gcr_routines/hashGCR.ts +++ b/src/libs/blockchain/gcr/gcr_routines/hashGCR.ts @@ -2,6 +2,7 @@ import { EntityTarget, Repository, FindOptionsOrder } from "typeorm" import Datasource from "../../../../model/datasource" import Hashing from "src/libs/crypto/hashing" import { GCRSubnetsTxs } from "../../../../model/entities/GCRv2/GCRSubnetsTxs" +import { GCRTLSNotary } from 
"../../../../model/entities/GCRv2/GCR_TLSNotary" import { GlobalChangeRegistry } from "../../../../model/entities/GCR/GlobalChangeRegistry" import { GCRHashes } from "../../../../model/entities/GCRv2/GCRHashes" import Chain from "src/libs/blockchain/chain" @@ -55,6 +56,38 @@ export async function hashSubnetsTxsTable(): Promise { return Hashing.sha256(tableString) } +// REVIEW: TLSNotary proofs table hash for integrity verification +/** + * Generates a SHA-256 hash for the GCRTLSNotary table. + * Orders by tokenId for deterministic hashing. + * + * @returns Promise - SHA-256 hash of the TLSNotary proofs table + */ +export async function hashTLSNotaryTable(): Promise { + const db = await Datasource.getInstance() + const repository = db.getDataSource().getRepository(GCRTLSNotary) + + const records = await repository.find({ + order: { + tokenId: "ASC", + }, + }) + + // Normalize to plain objects with fixed field order for deterministic hashing + const normalized = records.map(r => ({ + tokenId: r.tokenId, + owner: r.owner, + domain: r.domain, + proof: r.proof, + storageType: r.storageType, + txhash: r.txhash, + proofTimestamp: String(r.proofTimestamp), + createdAt: r.createdAt ? r.createdAt.toISOString() : null, + })) + + return Hashing.sha256(JSON.stringify(normalized)) +} + /** * Creates a combined hash of all GCR-related tables. 
* Process: @@ -72,9 +105,12 @@ export default async function hashGCRTables(): Promise { // REVIEW: The below was GCRTracker without "", which was causing an error as is not an entity const gcrHash = await hashPublicKeyTable("gcr_tracker") // Tracking the GCR hashes as they are hashes of the GCR itself const subnetsTxsHash = await hashSubnetsTxsTable() + // REVIEW: TLSNotary proofs included in GCR integrity hash + const tlsnotaryHash = await hashTLSNotaryTable() return { native_gcr: gcrHash, native_subnets_txs: subnetsTxsHash, + native_tlsnotary: tlsnotaryHash, } } diff --git a/src/libs/blockchain/gcr/handleGCR.ts b/src/libs/blockchain/gcr/handleGCR.ts index 17614054..45e4738d 100644 --- a/src/libs/blockchain/gcr/handleGCR.ts +++ b/src/libs/blockchain/gcr/handleGCR.ts @@ -48,7 +48,12 @@ import GCRNonceRoutines from "./gcr_routines/GCRNonceRoutines" import Chain from "../chain" import { Repository } from "typeorm" import GCRIdentityRoutines from "./gcr_routines/GCRIdentityRoutines" +import { GCRTLSNotaryRoutines } from "./gcr_routines/GCRTLSNotaryRoutines" +import { GCRTLSNotary } from "@/model/entities/GCRv2/GCR_TLSNotary" import { Referrals } from "@/features/incentive/referrals" +// REVIEW: TLSNotary token management for native operations +import { createToken, extractDomain } from "@/features/tlsnotary/tokenManager" +import { INativePayload } from "@kynesyslabs/demosdk/types" export type GetNativeStatusOptions = { balance?: boolean @@ -279,6 +284,19 @@ export default class HandleGCR { // TODO implementations log.debug(`Assigning GCREdit ${editOperation.type}`) return { success: true, message: "Not implemented" } + case "smartContract": + case "storageProgram": + case "escrow": + // TODO implementations + log.debug(`GCREdit ${editOperation.type} not yet implemented`) + return { success: true, message: "Not implemented" } + // REVIEW: TLSNotary attestation proof storage + case "tlsnotary": + return GCRTLSNotaryRoutines.apply( + editOperation, + 
repositories.tlsnotary as Repository, + simulate, + ) default: return { success: false, message: "Invalid GCREdit type" } } @@ -369,9 +387,77 @@ export default class HandleGCR { } } + // REVIEW: Post-processing hook for native transaction side-effects + // This handles side-effects that aren't part of GCR edits (e.g., token creation) + // Token creation happens during simulation (mempool entry) so user can immediately use it + // The token is created optimistically - if tx fails consensus, token will expire unused + if (!isRollback && tx.content.type === "native") { + try { + await this.processNativeSideEffects(tx, simulate) + } catch (sideEffectError) { + log.error(`[applyToTx] Native side-effect error (non-fatal): ${sideEffectError}`) + // Side-effect errors are logged but don't fail the transaction + // The GCR edits (fee burning) have already been applied + } + } + return { success: true, message: "" } } + /** + * Process side-effects for native transactions that aren't captured in GCR edits + * Currently handles: + * - tlsn_request: Creates attestation token when tx enters mempool (simulate=true) + * so user can immediately use the proxy + * + * Token creation is idempotent - if token already exists for this tx, it's skipped + */ + private static async processNativeSideEffects( + tx: Transaction, + simulate = false, + ): Promise { + const nativeData = tx.content.data as ["native", INativePayload] + const nativePayload = nativeData[1] + + // Validate args exists before any destructuring + if (!nativePayload.args || !Array.isArray(nativePayload.args)) { + log.error(`[TLSNotary] Invalid nativePayload.args: ${JSON.stringify(nativePayload.args)}`) + return + } + + switch (nativePayload.nativeOperation) { + case "tlsn_request": { + const [targetUrl] = nativePayload.args + + // Only create token once - during simulation (mempool entry) + // Skip if called again during block finalization + if (!simulate) { + log.debug(`[TLSNotary] Skipping token creation for finalized 
tx ${tx.hash} (already created at mempool entry)`) + break + } + + log.info(`[TLSNotary] Processing tlsn_request side-effect for ${targetUrl}`) + + // Validate URL and extract domain + const domain = extractDomain(targetUrl) + log.debug(`[TLSNotary] Domain extracted: ${domain}`) + + // Create the attestation token (idempotent - tokenManager handles duplicates) + const token = createToken( + tx.content.from as string, + targetUrl, + tx.hash, + ) + log.info(`[TLSNotary] Created token ${token.id} for tx ${tx.hash}`) + break + } + // tlsn_store side-effects are handled in GCRTLSNotaryRoutines.apply() + default: + // No side-effects for other native operations + break + } + } + /** * Rolls back a transaction by reversing the order of applied GCR edits * @param tx The transaction to rollback @@ -462,6 +548,7 @@ export default class HandleGCR { hashes: dataSource.getRepository(GCRHashes), subnetsTxs: dataSource.getRepository(GCRSubnetsTxs), tracker: dataSource.getRepository(GCRTracker), + tlsnotary: dataSource.getRepository(GCRTLSNotary), } } diff --git a/src/libs/network/manageNodeCall.ts b/src/libs/network/manageNodeCall.ts index 720e208f..466e44d8 100644 --- a/src/libs/network/manageNodeCall.ts +++ b/src/libs/network/manageNodeCall.ts @@ -454,6 +454,226 @@ export async function manageNodeCall(content: NodeCall): Promise { // break // } + // REVIEW: TLSNotary proxy request endpoint for SDK (requires valid token) + case "requestTLSNproxy": { + try { + const { requestProxy, ProxyError } = await import("@/features/tlsnotary/proxyManager") + const { validateToken, consumeRetry } = await import("@/features/tlsnotary/tokenManager") + + // Require tokenId and owner (pubkey) for paid access + if (!data.tokenId || !data.owner) { + response.result = 400 + response.response = { + error: "INVALID_REQUEST", + message: "Missing tokenId or owner parameter", + } + break + } + + if (!data.targetUrl) { + response.result = 400 + response.response = { + error: "INVALID_REQUEST", + message: 
"Missing targetUrl parameter", + } + break + } + + // Validate URL is HTTPS + if (!data.targetUrl.startsWith("https://")) { + response.result = 400 + response.response = { + error: ProxyError.INVALID_URL, + message: "Only HTTPS URLs are supported for TLS attestation", + } + break + } + + // Validate the token + const validation = validateToken(data.tokenId, data.owner, data.targetUrl) + if (!validation.valid) { + response.result = validation.error === "TOKEN_NOT_FOUND" ? 404 : 403 + response.response = { + error: validation.error, + message: `Token validation failed: ${validation.error}`, + domain: validation.token?.domain, // Show expected domain on mismatch + } + break + } + + // Request the proxy (this spawns wstcp if needed) + const result = await requestProxy(data.targetUrl, data.requestOrigin) + + if ("error" in result) { + // Map proxy errors to appropriate HTTP status codes + switch (result.error) { + case ProxyError.INVALID_URL: + response.result = 400 // Bad Request - client error + break + case ProxyError.PORT_EXHAUSTED: + response.result = 503 // Service Unavailable - temporary + break + case ProxyError.WSTCP_NOT_AVAILABLE: + case ProxyError.PROXY_SPAWN_FAILED: + default: + response.result = 500 // Internal Server Error + break + } + response.response = result + } else { + // Success - consume a retry and link proxyId to token + const updatedToken = consumeRetry(data.tokenId, result.proxyId) + if (updatedToken) { + log.info(`[TLSNotary] Proxy spawned for token ${data.tokenId}, retries left: ${updatedToken.retriesLeft}`) + } + + // Add token info to response + response.response = { + ...result, + tokenId: data.tokenId, + retriesLeft: updatedToken?.retriesLeft ?? 
0, + } + } + } catch (error) { + log.error("[manageNodeCall] requestTLSNproxy error: " + error) + response.result = 500 + response.response = { + error: "INTERNAL_ERROR", + message: "Failed to request TLSNotary proxy", + } + } + break + } + + // REVIEW: TLSNotary discovery endpoint for SDK auto-configuration + case "tlsnotary.getInfo": { + // Dynamic import to avoid circular dependencies and check if enabled + try { + const { getTLSNotaryService } = await import("@/features/tlsnotary") + const service = getTLSNotaryService() + + if (!service || !service.isRunning()) { + response.result = 503 + response.response = { + success: false, + error: "TLSNotary service is not enabled or not running", + } + break + } + + const publicKey = service.getPublicKeyHex() + const port = service.getPort() + + const proxyPort = process.env.TLSNOTARY_PROXY_PORT ?? "55688" + + // Extract host and determine WebSocket scheme from exposedUrl + // The node's host is used - SDK connects to the same host it's already connected to + let nodeHost = "localhost" + const wsScheme = (() => { + try { + const exposedUrl = getSharedState.exposedUrl + if (exposedUrl) { + const url = new URL(exposedUrl) + nodeHost = url.hostname + return url.protocol === "https:" ? 
"wss" : "ws" + } + } catch { + // Fall back to localhost and ws if URL parsing fails + } + return "ws" + })() + + // Build the notary WebSocket URL - Port is the TLSNotary WebSocket port + const notaryUrl = `${wsScheme}://${nodeHost}:${port}` + + // WebSocket proxy URL for TCP tunneling + const proxyUrl = `${wsScheme}://${nodeHost}:${proxyPort}` + + response.response = { + notaryUrl, + proxyUrl, + publicKey, + version: "0.1.0", // TLSNotary integration version + } + } catch (error) { + log.error("[manageNodeCall] tlsnotary.getInfo error: " + error) + response.result = 500 + response.response = { + success: false, + error: "Failed to get TLSNotary info", + } + } + break + } + + // REVIEW: TLSNotary token lookup by transaction hash + case "tlsnotary.getToken": { + try { + const { getTokenByTxHash, getToken } = await import("@/features/tlsnotary/tokenManager") + + // Support lookup by either tokenId or txHash + const { tokenId, txHash } = data as { tokenId?: string; txHash?: string } + + let token + if (tokenId) { + token = getToken(tokenId) + } else if (txHash) { + token = getTokenByTxHash(txHash) + } else { + response.result = 400 + response.response = { + error: "INVALID_REQUEST", + message: "Either tokenId or txHash is required", + } + break + } + + if (!token) { + response.result = 404 + response.response = { + error: "TOKEN_NOT_FOUND", + message: "No token found for the provided identifier", + } + } else { + response.response = { + token: { + id: token.id, + owner: token.owner, + domain: token.domain, + status: token.status, + expiresAt: token.expiresAt, + retriesLeft: token.retriesLeft, + }, + } + } + } catch (error) { + log.error("[manageNodeCall] tlsnotary.getToken error: " + error) + response.result = 500 + response.response = { + error: "INTERNAL_ERROR", + message: "Failed to get token", + } + } + break + } + + // REVIEW: TLSNotary token stats for monitoring + case "tlsnotary.getTokenStats": { + try { + const { getTokenStats } = await 
import("@/features/tlsnotary/tokenManager") + const stats = getTokenStats() + response.response = { stats } + } catch (error) { + log.error("[manageNodeCall] tlsnotary.getTokenStats error: " + error) + response.result = 500 + response.response = { + error: "INTERNAL_ERROR", + message: "Failed to get token stats", + } + } + break + } + // NOTE Don't look past here, go away // INFO For real, nothing here to be seen case "hots": diff --git a/src/libs/network/server_rpc.ts b/src/libs/network/server_rpc.ts index 22cf71ef..175fac2f 100644 --- a/src/libs/network/server_rpc.ts +++ b/src/libs/network/server_rpc.ts @@ -448,6 +448,16 @@ export async function serverRpcBun() { } }) + // REVIEW: Register TLSNotary routes if enabled + if (process.env.TLSNOTARY_ENABLED?.toLowerCase() === "true") { + try { + const { registerTLSNotaryRoutes } = await import("@/features/tlsnotary/routes") + registerTLSNotaryRoutes(server) + } catch (error) { + log.warning("[RPC] Failed to register TLSNotary routes: " + error) + } + } + log.info("[RPC Call] Server is running on 0.0.0.0:" + port, true) return server.start() } diff --git a/src/libs/omniprotocol/auth/verifier.ts b/src/libs/omniprotocol/auth/verifier.ts index 87a21a9e..31a4d70c 100644 --- a/src/libs/omniprotocol/auth/verifier.ts +++ b/src/libs/omniprotocol/auth/verifier.ts @@ -1,4 +1,4 @@ -import * as ed25519 from "@noble/ed25519" +import forge from "node-forge" import { keccak_256 } from "@noble/hashes/sha3.js" import { AuthBlock, SignatureAlgorithm, SignatureMode, VerificationResult } from "./types" import type { OmniMessageHeader } from "../types/message" @@ -182,8 +182,12 @@ export class SignatureVerifier { return false } - // Verify using noble/ed25519 - const valid = await ed25519.verify(signature, data, publicKey) + // Verify using node-forge ed25519 (same as SDK) + const valid = forge.pki.ed25519.verify({ + message: data, + signature: signature as forge.pki.ed25519.NativeBuffer, + publicKey: publicKey as 
forge.pki.ed25519.NativeBuffer, + }) return valid } catch (error) { log.error("[SignatureVerifier] Ed25519 verification error: " + error) diff --git a/src/libs/omniprotocol/protocol/handlers/control.ts b/src/libs/omniprotocol/protocol/handlers/control.ts index 7d39570c..b575a938 100644 --- a/src/libs/omniprotocol/protocol/handlers/control.ts +++ b/src/libs/omniprotocol/protocol/handlers/control.ts @@ -82,7 +82,8 @@ export const handleNodeCall: OmniHandler = async ({ message, context }) log.info(`[handleNodeCall] mempool merge request from peer: "${context.peerIdentity}"`) // ServerHandlers.handleMempool expects content with .data property - const content = request.params[0] ?? { data: [] } + const mempoolParams = Array.isArray(request.params) ? request.params : [] + const content = mempoolParams[0] ?? { data: [] } const response = await serverHandlers.handleMempool(content) return encodeNodeCallResponse({ @@ -93,6 +94,35 @@ export const handleNodeCall: OmniHandler = async ({ message, context }) }) } + // REVIEW: Handle hello_peer - peer handshake/discovery + // Format: { method: "hello_peer", params: [{ url, publicKey, signature, syncData }] } + if (request.method === "hello_peer") { + const { manageHelloPeer } = await import("src/libs/network/manageHelloPeer") + + log.debug(`[handleNodeCall] hello_peer from peer: "${context.peerIdentity}"`) + + const params = Array.isArray(request.params) ? request.params : [] + const helloPeerRequest = params[0] + if (!helloPeerRequest || typeof helloPeerRequest !== "object") { + return encodeNodeCallResponse({ + status: 400, + value: "Invalid hello_peer payload", + requireReply: false, + extra: null, + }) + } + + // Call manageHelloPeer with sender identity from OmniProtocol auth + const response = await manageHelloPeer(helloPeerRequest, context.peerIdentity ?? "") + + return encodeNodeCallResponse({ + status: response.result, + value: response.response, + requireReply: response.require_reply ?? false, + extra: response.extra ?? 
null, + }) + } + // REVIEW: Handle consensus_routine envelope format // Format: { method: "consensus_routine", params: [{ method: "setValidatorPhase", params: [...] }] } if (request.method === "consensus_routine") { @@ -101,7 +131,8 @@ export const handleNodeCall: OmniHandler = async ({ message, context }) ) // Extract the inner consensus method from params[0] - const consensusPayload = request.params[0] + const consensusParams = Array.isArray(request.params) ? request.params : [] + const consensusPayload = consensusParams[0] if (!consensusPayload || typeof consensusPayload !== "object") { return encodeNodeCallResponse({ status: 400, diff --git a/src/libs/omniprotocol/serialization/control.ts b/src/libs/omniprotocol/serialization/control.ts index 2bd8055a..35d83167 100644 --- a/src/libs/omniprotocol/serialization/control.ts +++ b/src/libs/omniprotocol/serialization/control.ts @@ -94,7 +94,12 @@ function deserializePeerEntry(buffer: Buffer, offset: number): { entry: Peerlist let metadata: Record | undefined if (metadataBytes.value.length > 0) { - metadata = JSON.parse(metadataBytes.value.toString("utf8")) as Record + try { + metadata = JSON.parse(metadataBytes.value.toString("utf8")) as Record + } catch { + // Malformed metadata, leave as undefined + metadata = undefined + } } return { diff --git a/src/libs/omniprotocol/transport/MessageFramer.ts b/src/libs/omniprotocol/transport/MessageFramer.ts index 4d788cef..a675818c 100644 --- a/src/libs/omniprotocol/transport/MessageFramer.ts +++ b/src/libs/omniprotocol/transport/MessageFramer.ts @@ -33,6 +33,8 @@ export class MessageFramer { /** Minimum complete message size */ private static readonly MIN_MESSAGE_SIZE = MessageFramer.HEADER_SIZE + MessageFramer.CHECKSUM_SIZE + /** Maximum payload size (16MB) to prevent DoS attacks */ + private static readonly MAX_PAYLOAD_SIZE = 16 * 1024 * 1024 /** * Add data received from TCP socket @@ -197,6 +199,15 @@ export class MessageFramer { PrimitiveDecoder.decodeUInt32(this.buffer, 
offset) offset += lengthBytes + // Validate payload size to prevent DoS attacks + if (payloadLength > MessageFramer.MAX_PAYLOAD_SIZE) { + // Drop buffered data so we don't retain attacker-controlled bytes in memory + this.buffer = Buffer.alloc(0) + throw new Error( + `Payload size ${payloadLength} exceeds maximum ${MessageFramer.MAX_PAYLOAD_SIZE}`, + ) + } + // Sequence/Message ID (4 bytes) const { value: sequence, bytesRead: sequenceBytes } = PrimitiveDecoder.decodeUInt32(this.buffer, offset) @@ -270,6 +281,11 @@ export class MessageFramer { auth?: AuthBlock | null, flags?: number, ): Buffer { + // Validate payload size before encoding + if (payload.length > MessageFramer.MAX_PAYLOAD_SIZE) { + throw new Error(`Payload size ${payload.length} exceeds maximum ${MessageFramer.MAX_PAYLOAD_SIZE}`) + } + // Determine flags const flagsByte = flags !== undefined ? flags : (auth ? 0x01 : 0x00) diff --git a/src/libs/omniprotocol/transport/PeerConnection.ts b/src/libs/omniprotocol/transport/PeerConnection.ts index 8a57a8a9..bf16ff33 100644 --- a/src/libs/omniprotocol/transport/PeerConnection.ts +++ b/src/libs/omniprotocol/transport/PeerConnection.ts @@ -1,7 +1,7 @@ // REVIEW: PeerConnection - TCP socket wrapper for single peer connection with state management import log from "src/utilities/logger" import { Socket } from "net" -import * as ed25519 from "@noble/ed25519" +import forge from "node-forge" import { keccak_256 } from "@noble/hashes/sha3.js" import { MessageFramer } from "./MessageFramer" import type { OmniMessageHeader } from "../types/message" @@ -212,10 +212,15 @@ export class PeerConnection { const payloadHash = Buffer.from(keccak_256(payload)) const dataToSign = Buffer.concat([msgIdBuf, payloadHash]) - // Sign with Ed25519 + // Sign with Ed25519 using node-forge (same as SDK) let signature: Uint8Array try { - signature = await ed25519.sign(dataToSign, privateKey) + // node-forge expects the message as a string and privateKey as NativeBuffer + const 
signatureBuffer = forge.pki.ed25519.sign({ + message: dataToSign, + privateKey: privateKey as forge.pki.ed25519.NativeBuffer, + }) + signature = new Uint8Array(signatureBuffer) } catch (error) { throw new SigningError( `Ed25519 signing failed (privateKey length: ${privateKey.length} bytes): ${error instanceof Error ? error.message : error}`, diff --git a/src/model/datasource.ts b/src/model/datasource.ts index 2e2dc5e0..d644b022 100644 --- a/src/model/datasource.ts +++ b/src/model/datasource.ts @@ -21,6 +21,7 @@ import { GlobalChangeRegistry } from "./entities/GCR/GlobalChangeRegistry.js" import { GCRHashes } from "./entities/GCRv2/GCRHashes.js" import { GCRSubnetsTxs } from "./entities/GCRv2/GCRSubnetsTxs.js" import { GCRMain } from "./entities/GCRv2/GCR_Main.js" +import { GCRTLSNotary } from "./entities/GCRv2/GCR_TLSNotary.js" import { GCRTracker } from "./entities/GCR/GCRTracker.js" export const dataSource = new DataSource({ @@ -43,6 +44,7 @@ export const dataSource = new DataSource({ GlobalChangeRegistry, GCRTracker, GCRMain, + GCRTLSNotary, ], synchronize: true, logging: false, diff --git a/src/model/entities/GCRv2/GCR_TLSNotary.ts b/src/model/entities/GCRv2/GCR_TLSNotary.ts new file mode 100644 index 00000000..bdef07dc --- /dev/null +++ b/src/model/entities/GCRv2/GCR_TLSNotary.ts @@ -0,0 +1,49 @@ +import { + Column, + CreateDateColumn, + Entity, + Index, + PrimaryColumn, +} from "typeorm" + +// REVIEW: TLSNotary proof storage entity for on-chain attestation data +/** + * GCR_TLSNotary stores TLSNotary attestation proofs. + * Each proof is linked to a token and domain, stored via the tlsn_store native operation. 
+ */ +@Entity("gcr_tlsnotary") +@Index("idx_gcr_tlsnotary_owner", ["owner"]) +@Index("idx_gcr_tlsnotary_domain", ["domain"]) +@Index("idx_gcr_tlsnotary_txhash", ["txhash"]) +export class GCRTLSNotary { + @PrimaryColumn({ type: "text", name: "tokenId" }) + tokenId: string + + @Column({ type: "text", name: "owner" }) + owner: string + + @Column({ type: "text", name: "domain" }) + domain: string + + @Column({ type: "text", name: "proof" }) + proof: string + + @Column({ type: "text", name: "storageType" }) + storageType: "onchain" | "ipfs" + + @Column({ type: "text", name: "txhash" }) + txhash: string + + @Column({ + type: "bigint", + name: "proofTimestamp", + transformer: { + to: (v: string) => v, + from: (v: string | number) => String(v), + }, + }) + proofTimestamp: string + + @CreateDateColumn({ type: "timestamp", name: "createdAt" }) + createdAt: Date +} diff --git a/src/utilities/sharedState.ts b/src/utilities/sharedState.ts index 5a9732d4..38ab2e49 100644 --- a/src/utilities/sharedState.ts +++ b/src/utilities/sharedState.ts @@ -13,6 +13,8 @@ import { uint8ArrayToHex } from "@kynesyslabs/demosdk/encryption" import { PeerOmniAdapter } from "src/libs/omniprotocol/integration/peerAdapter" import type { MigrationMode } from "src/libs/omniprotocol/types/config" import log from "@/utilities/logger" +import type { TLSNotaryState } from "@/features/tlsnotary/proxyManager" +import type { TokenStoreState } from "@/features/tlsnotary/tokenManager" dotenv.config() @@ -53,6 +55,14 @@ export default class SharedState { // OmniProtocol adapter for peer communication private _omniAdapter: PeerOmniAdapter | null = null + // SECTION TLSNotary Proxy Manager State + // Stores wstcp proxy processes and port pool for TLS attestation + tlsnotary: TLSNotaryState | null = null + + // SECTION TLSNotary Token Store + // In-memory token store for paid attestation access + tlsnTokenStore: TokenStoreState | null = null + // Running as a node (is false when running specific modules like the 
signaling server) runningAsNode = true diff --git a/src/utilities/tui/CategorizedLogger.ts b/src/utilities/tui/CategorizedLogger.ts index ced59b80..ea452606 100644 --- a/src/utilities/tui/CategorizedLogger.ts +++ b/src/utilities/tui/CategorizedLogger.ts @@ -33,6 +33,7 @@ export type LogCategory = | "MCP" // MCP server operations | "MULTICHAIN" // Cross-chain/XM operations | "DAHR" // DAHR-specific operations + | "TLSN" // TLSNotary HTTPS attestation operations | "CMD" // Command execution and TUI commands /** @@ -198,6 +199,8 @@ const ALL_CATEGORIES: LogCategory[] = [ "MCP", "MULTICHAIN", "DAHR", + "TLSN", + "CMD", ] /** @@ -919,18 +922,7 @@ export class CategorizedLogger extends EventEmitter { * Get all available categories */ static getCategories(): LogCategory[] { - return [ - "CORE", - "NETWORK", - "PEER", - "CHAIN", - "SYNC", - "CONSENSUS", - "IDENTITY", - "MCP", - "MULTICHAIN", - "DAHR", - ] + return [...ALL_CATEGORIES] } /** diff --git a/src/utilities/tui/TUIManager.ts b/src/utilities/tui/TUIManager.ts index 59310d19..a627e012 100644 --- a/src/utilities/tui/TUIManager.ts +++ b/src/utilities/tui/TUIManager.ts @@ -24,6 +24,12 @@ export interface NodeInfo { peersCount: number blockNumber: number isSynced: boolean + // TLSNotary service info (optional) + tlsnotary?: { + enabled: boolean + port: number + running: boolean + } } export interface TUIConfig { @@ -99,7 +105,8 @@ const TABS: Tab[] = [ { key: "8", label: "MCP", category: "MCP" }, { key: "9", label: "XM", category: "MULTICHAIN" }, { key: "-", label: "DAHR", category: "DAHR" }, - { key: "=", label: "CMD", category: "CMD" }, + { key: "=", label: "TLSN", category: "TLSN" }, + { key: "\\", label: "CMD", category: "CMD" }, ] // SECTION Command definitions for CMD tab @@ -513,9 +520,17 @@ export class TUIManager extends EventEmitter { this.setActiveTab(10) // DAHR tab break - case "=": - this.setActiveTab(11) // CMD tab + case "=": { + const idx = TABS.findIndex(t => t.category === "TLSN") + if (idx >= 0) 
this.setActiveTab(idx) break + } + + case "\\": { + const idx = TABS.findIndex(t => t.category === "CMD") + if (idx >= 0) this.setActiveTab(idx) + break + } // Tab navigation case "TAB": @@ -1069,8 +1084,18 @@ export class TUIManager extends EventEmitter { } term.brightWhite(keyDisplay) - // Line 5: Empty separator + // Line 5: TLSNotary status (if enabled) term.moveTo(infoStartX, 5) + term.eraseLine() + if (this.nodeInfo.tlsnotary?.enabled) { + term.yellow("🔐 ") + term.gray("TLSN: ") + if (this.nodeInfo.tlsnotary.running) { + term.bgGreen.black(` ✓ :${this.nodeInfo.tlsnotary.port} `) + } else { + term.bgRed.white(" ✗ STOPPED ") + } + } // Line 6: Port term.moveTo(infoStartX, 6) diff --git a/src/utilities/tui/tagCategories.ts b/src/utilities/tui/tagCategories.ts index 68630793..e6abcb32 100644 --- a/src/utilities/tui/tagCategories.ts +++ b/src/utilities/tui/tagCategories.ts @@ -108,6 +108,13 @@ export const TAG_TO_CATEGORY: Record = { "DEMOS FOLLOW": "DAHR", "PAYLOAD FOR WEB2": "DAHR", "REQUEST FOR WEB2": "DAHR", + + // TLSN - TLSNotary HTTPS attestation operations + TLSNOTARY: "TLSN", + TLSNotary: "TLSN", + TLSN: "TLSN", + NOTARY: "TLSN", + ATTESTATION: "TLSN", } // Re-export LogCategory for convenience diff --git a/tlsnotary/docker-compose.yml b/tlsnotary/docker-compose.yml new file mode 100644 index 00000000..c5976d8f --- /dev/null +++ b/tlsnotary/docker-compose.yml @@ -0,0 +1,34 @@ +# TLSNotary Docker Notary Server +# Uses the official tlsn-js compatible notary server image +# +# This provides the full HTTP API + WebSocket interface that tlsn-js expects: +# - GET /info - Get notary public key +# - POST /session - Create session, returns sessionId +# - WS /notarize?sessionId=xxx - WebSocket MPC-TLS session +# +# Environment variables: +# - TLSNOTARY_PORT: Port to expose (default: 7047) + +services: + notary: + container_name: tlsn-notary-${TLSNOTARY_PORT:-7047} + image: ghcr.io/tlsnotary/tlsn/notary-server:v0.1.0-alpha.12 + environment: + 
NS_NOTARIZATION__MAX_SENT_DATA: 32768 + platform: linux/amd64 + ports: + - "${TLSNOTARY_PORT:-7047}:7047" + restart: unless-stopped + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:7047/info"] + interval: 10s + timeout: 5s + retries: 3 + start_period: 10s + # Note: The Docker notary-server uses its own internal signing key + # Attestations are cryptographically bound to this notary's public key + # which can be retrieved via GET /info endpoint + +networks: + default: + driver: bridge