diff --git a/POST_MVP_AUDIT.md b/POST_MVP_AUDIT.md new file mode 100644 index 00000000..c508e597 --- /dev/null +++ b/POST_MVP_AUDIT.md @@ -0,0 +1,769 @@ +# Cortex Linux Post-MVP Audit Report + +**Generated:** 2025-11-28 +**Target:** February 2025 Seed Funding ($2-3M) +**Repository:** https://github.com/cortexlinux/cortex + +--- + +## Executive Summary Dashboard + +| Category | Current State | Target State | Priority | +|----------|--------------|--------------|----------| +| **MVP Completion** | 89% (25/28 issues closed) | 100% | 🔴 Critical | +| **Branch Protection** | ❌ None | ✅ Required reviews + CI | 🔴 Critical | +| **Security Scanning** | ❌ Disabled | ✅ All enabled | 🔴 Critical | +| **Open PRs** | 5 with conflicts | 0 conflicts | 🟡 High | +| **Marketing Site** | ❌ None | ✅ Investor-ready | 🔴 Critical | +| **Documentation** | ✅ Good (recent overhaul) | ✅ Complete | 🟢 Done | +| **CI/CD** | ✅ Working | ✅ Enhanced | 🟢 Done | + +--- + +## Part 1: Closed Issues Audit + +### Summary Statistics +- **Total Closed Issues:** 169 +- **Completed (COMPLETED):** ~15 +- **Deferred (NOT_PLANNED):** ~154 +- **Reopen Candidates:** 28 + +### Issues to REOPEN NOW (Post-MVP Priority) + +| # | Title | Original Bounty | New Bounty | Milestone | Rationale | +|---|-------|-----------------|------------|-----------|-----------| +| **42** | Package Conflict Resolution UI | $25 | $100 | v0.2 | PR #203 exists, core UX feature | +| **43** | Smart Retry Logic with Exponential Backoff | $25 | $75 | v0.2 | Reliability feature | +| **44** | Installation Templates for Common Stacks | $25 | $75 | v0.2 | PR #201 exists, high demand | +| **45** | System Snapshot and Rollback Points | $25 | $150 | v0.2 | Enterprise requirement | +| **103** | Installation Simulation Mode | $25 | $75 | v0.2 | Safety feature, demo-worthy | +| **112** | Alternative Package Suggestions | $25 | $50 | v0.3 | AI-powered UX enhancement | +| **117** | Smart Package Search with Fuzzy Matching | $25 | $75 | v0.2 | Core search improvement | +| **119** | Package Recommendation Based on System Role | $25 | $100 | v0.3 | AI differentiator | +| **125** | Smart Cleanup and Disk Space Optimizer | $25 | $50 | v0.3 | Utility feature | +| **126** | Package Import from Requirements Files | $25 | $75 | v0.2 | Developer workflow | +| **128** | System Health Score and Recommendations | $25 | $100 | v0.3 | Dashboard feature | +| **170** | Package Performance Profiling | $25 | $100 | v1.0 | Enterprise feature | +| **171** | Immutable Infrastructure Mode | $25 | $150 | v1.0 | Enterprise/DevOps | +| **172** | Package Certification and Attestation | $25 | $200 | v1.0 | Security feature | +| **178** | Chaos Engineering Integration | $25 | $100 | v1.0 | Enterprise testing | +| **177** | AI-Powered Capacity Planning | $25 | $150 | v1.0 | Enterprise feature | + +### Issues to REOPEN LATER (Post-Funding) + +| # | Title | Bounty | Milestone | Notes | +|---|-------|--------|-----------|-------| +| 131 | AI-Powered Installation Tutor | $50 | v1.0 | Nice-to-have AI feature | +| 135 | Desktop Notification System | $50 | v1.0 | UX enhancement | +| 144 | Package Installation Profiles | $75 | v0.3 | User personalization | +| 175 | Time-Travel Debugging | $100 | v1.0 | Advanced debugging | +| 182 | Automated Technical Debt Detection | $75 | v1.0 | Code quality | +| 185 | Self-Healing System Architecture | $200 | v1.0+ | Ambitious AI feature | + +### Issues to KEEP CLOSED (Not Relevant) + +| # | Title | Reason | +|---|-------|--------| +| 173 | Energy Efficiency Optimization | Too 
niche, low demand | +| 174 | Federated Learning for Package Intelligence | Over-engineered for current stage | +| 176 | Package Dependency Marketplace | Requires ecosystem, premature | +| 179 | Package DNA and Genetic Lineage | Experimental, low value | +| 180 | Smart Contract Integration | Web3 hype, not core value | +| 181 | Package Sentiment Analysis | Scope creep | +| 183 | Package Installation Gamification | Distracting from core value | +| 184 | Quantum Computing Package Support | Too early | +| 186 | Package Installation Streaming | Not core feature | + +### CLI Commands to Reopen Issues + +```bash +# Reopen high-priority issues for v0.2 +gh issue reopen 42 43 44 45 103 117 126 --repo cortexlinux/cortex + +# Add labels and milestone +for issue in 42 43 44 45 103 117 126; do + gh issue edit $issue --repo cortexlinux/cortex \ + --add-label "priority: high,bounty,post-mvp" \ + --milestone "Post-MVP - Enhancements" +done + +# Reopen medium-priority issues for v0.3 +gh issue reopen 112 119 125 128 144 --repo cortexlinux/cortex + +for issue in 112 119 125 128 144; do + gh issue edit $issue --repo cortexlinux/cortex \ + --add-label "priority: medium,bounty" \ + --milestone "Post-MVP - Enhancements" +done +``` + +--- + +## Part 2: Repository Settings Audit + +### 🔴 CRITICAL GAPS (Fix This Week) + +| Setting | Current | Recommended | CLI Command | +|---------|---------|-------------|-------------| +| **Branch Protection** | ❌ None | Required reviews + CI | See below | +| **Secret Scanning** | ❌ Disabled | ✅ Enabled | GitHub UI | +| **Push Protection** | ❌ Disabled | ✅ Enabled | GitHub UI | +| **Dependabot Security** | ❌ Disabled | ✅ Enabled | GitHub UI | +| **Code Scanning** | ❌ None | ✅ CodeQL | Add workflow | +| **SECURITY.md** | ❌ Missing | ✅ Present | Create file | +| **CODEOWNERS** | ❌ Missing | ✅ Present | Create file | + +### Enable Branch Protection + +```bash +gh api repos/cortexlinux/cortex/branches/main/protection -X PUT \ + -H "Accept: application/vnd.github+json" \ + -f required_status_checks='{"strict":true,"contexts":["test (3.10)","test (3.11)","test (3.12)","lint","security"]}' \ + -f enforce_admins=false \ + -f required_pull_request_reviews='{"required_approving_review_count":1,"dismiss_stale_reviews":true}' \ + -f restrictions=null \ + -f allow_force_pushes=false \ + -f allow_deletions=false +``` + +### Create SECURITY.md + +```bash +cat > SECURITY.md << 'EOF' +# Security Policy + +## Supported Versions + +| Version | Supported | +| ------- | ------------------ | +| 0.1.x | :white_check_mark: | + +## Reporting a Vulnerability + +Please report security vulnerabilities to: security@cortexlinux.com + +**Do NOT open public issues for security vulnerabilities.** + +We will acknowledge receipt within 48 hours and provide a detailed response within 7 days. 
+ +## Security Measures + +- All commands are validated against dangerous patterns before execution +- Firejail sandboxing for untrusted command execution +- No execution of piped curl/wget to shell +- Regular dependency scanning via Dependabot +EOF +``` + +### Create CODEOWNERS + +```bash +mkdir -p .github +cat > .github/CODEOWNERS << 'EOF' +# Cortex Linux Code Owners +* @mikejmorgan-ai + +# Security-sensitive files +cortex/coordinator.py @mikejmorgan-ai +cortex/utils/commands.py @mikejmorgan-ai +src/sandbox_executor.py @mikejmorgan-ai + +# CI/CD +.github/ @mikejmorgan-ai +EOF +``` + +### Add CodeQL Workflow + +```bash +cat > .github/workflows/codeql.yml << 'EOF' +name: "CodeQL" + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + schedule: + - cron: '0 6 * * 1' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + steps: + - uses: actions/checkout@v4 + - uses: github/codeql-action/init@v3 + with: + languages: python + - uses: github/codeql-action/analyze@v3 +EOF +``` + +### 🟢 GOOD STATUS + +| Setting | Status | +|---------|--------| +| Visibility | ✅ Public | +| Issues | ✅ Enabled | +| Discussions | ✅ Enabled | +| Wiki | ✅ Enabled | +| Discord Webhook | ✅ Active | +| Topics | ✅ ai, automation, linux, package-manager | + +### 🟡 RECOMMENDED IMPROVEMENTS + +| Setting | Current | Recommended | +|---------|---------|-------------| +| Auto-delete branches | ❌ | ✅ Enable | +| Auto-merge | ❌ | ✅ Enable | +| GitHub Pages | ❌ | ✅ Enable for docs | +| Environments | ❌ None | staging, production | +| Homepage | ❌ null | cortexlinux.com | + +```bash +# Enable auto-delete and auto-merge +gh repo edit cortexlinux/cortex --delete-branch-on-merge --enable-auto-merge + +# Add homepage +gh repo edit cortexlinux/cortex --homepage "https://cortexlinux.com" +``` + +--- + +## Part 3: Web Interface Roadmap + +### A. Marketing Site (cortexlinux.com) - MUST HAVE FOR FUNDING + +**Recommended Stack:** Astro + Tailwind CSS on Vercel + +| Option | Pros | Cons | Time | Cost/mo | +|--------|------|------|------|---------| +| **Astro + Tailwind** ✅ | Fast, SEO-friendly, modern | Learning curve | 2-3 weeks | $0 (Vercel free) | +| Next.js | Full-stack capable | Overkill for marketing | 3-4 weeks | $0-20 | +| GitHub Pages + Jekyll | Free, simple | Limited design | 1-2 weeks | $0 | + +**Recommended:** Astro + Tailwind on Vercel for investor-ready quality with minimal cost. + +#### Marketing Site Requirements + +``` +cortexlinux.com/ +├── / (Landing) +│ ├── Hero with terminal animation "cortex install docker" +│ ├── Value proposition (3 bullets) +│ ├── Live GitHub stats widget +│ └── CTA: "Get Started" → GitHub +├── /features +│ ├── AI-Powered Installation +│ ├── Conflict Resolution +│ ├── Rollback & Recovery +│ └── Security Sandboxing +├── /pricing +│ ├── Community (Free) +│ └── Enterprise (Contact us) +├── /docs → Link to GitHub wiki or separate docs site +└── /about + ├── Team + └── Investors/Advisors +``` + +#### Implementation Timeline + +| Week | Deliverable | +|------|-------------| +| 1 | Design mockups + Astro project setup | +| 2 | Landing page + features page | +| 3 | Pricing + about + polish | +| 4 | Testing + launch | + +### B. 
Product Dashboard (app.cortexlinux.com) - NICE TO HAVE + +**Recommended Stack:** Streamlit (fastest to MVP) or React + Vite + +| Option | Pros | Cons | Time | Cost/mo | +|--------|------|------|------|---------| +| **Streamlit** ✅ | Python-native, fast | Limited customization | 1-2 weeks | $0-50 | +| React + Vite | Full control | More development time | 4-6 weeks | $0-20 | +| Electron | Desktop app | Distribution complexity | 6-8 weeks | $0 | +| Textual TUI | Terminal users love it | Niche audience | 2-3 weeks | $0 | + +**Recommended:** Start with Streamlit for quick dashboard MVP, migrate to React later if needed. + +#### Dashboard Features (MVP) + +1. Installation History Viewer +2. Rollback Interface +3. Package Search +4. System Health Score +5. Settings Management + +### C. Domain Setup + +```bash +# Purchase domains (if not already owned) +# cortexlinux.com - Marketing site +# app.cortexlinux.com - Dashboard (subdomain) +# docs.cortexlinux.com - Documentation (subdomain) +``` + +--- + +## Part 4: Open PR Triage + +### PR Status Summary + +| PR | Title | Author | CI | Conflicts | Verdict | +|----|-------|--------|----|-----------|---------| +| **#199** | Self-update version mgmt | @dhvll | ✅ Pass | ⚠️ Yes | REQUEST CHANGES | +| **#201** | Installation Templates | @aliraza556 | ✅ Pass | ⚠️ Yes | REQUEST CHANGES | +| **#203** | Conflict Resolution | @Sahilbhatane | ✅ Pass | ⚠️ Yes | REQUEST CHANGES | +| **#38** | Pre-flight Checker | @AlexanderLuzDH | ❌ Fail | ⚠️ Yes | REQUEST CHANGES | +| **#21** | Config Templates | @aliraza556 | ❌ Fail | ⚠️ Yes | CLOSE (Superseded) | + +### PR #199 - Self Update Version Management +**Author:** @dhvll | **Additions:** 802 | **Files:** 9 + +**Code Review:** +- ✅ Good: Adds update channel support (stable/beta) +- ✅ Good: Checksum verification +- ✅ Good: Automatic rollback on failure +- ⚠️ Issue: Merge conflicts with main +- ⚠️ Issue: Removes some README content + +**Verdict:** REQUEST CHANGES - Rebase needed + +```bash +gh pr comment 199 --repo cortexlinux/cortex --body "$(cat <<'EOF' +## Code Review + +Thanks for implementing the self-update system! The update channel support and rollback mechanism look solid. + +### Required Changes +1. **Rebase required** - This PR has merge conflicts with main. Please run: + ```bash + git fetch origin main + git rebase origin/main + git push --force-with-lease + ``` + +2. **README changes** - Please preserve the existing README content while adding the update documentation. + +Once rebased, this is ready to merge. 🚀 +EOF +)" +``` + +### PR #201 - Installation Templates System +**Author:** @aliraza556 | **Additions:** 2,418 | **Files:** 11 + +**Code Review:** +- ✅ Good: Comprehensive template system (LAMP, MEAN, ML, etc.) +- ✅ Good: YAML template format +- ✅ Good: Hardware compatibility checks +- ✅ Good: Template validation +- ⚠️ Issue: Merge conflicts with main + +**Verdict:** REQUEST CHANGES - Rebase needed + +```bash +gh pr comment 201 --repo cortexlinux/cortex --body "$(cat <<'EOF' +## Code Review + +Excellent work on the installation templates system! The template format is well-designed and the hardware compatibility checking is a great addition. + +### Required Changes +1. **Rebase required** - This PR has merge conflicts. Please run: + ```bash + git fetch origin main + git rebase origin/main + git push --force-with-lease + ``` + +### After Rebase +This PR is approved and ready to merge once conflicts are resolved. Great contribution! 
🎉 +EOF +)" +``` + +### PR #203 - Interactive Package Conflict Resolution +**Author:** @Sahilbhatane | **Additions:** 1,677 | **Files:** 5 + +**Code Review:** +- ✅ Good: Interactive conflict UI +- ✅ Good: Saved preferences system +- ✅ Good: Integration with PreferencesManager +- ⚠️ Issue: Merge conflicts + +**Verdict:** REQUEST CHANGES - Rebase needed + +```bash +gh pr comment 203 --repo cortexlinux/cortex --body "$(cat <<'EOF' +## Code Review + +Great implementation of the conflict resolution system! The saved preferences feature is particularly useful for repeat installations. + +### Required Changes +1. **Rebase required** - Please resolve merge conflicts: + ```bash + git fetch origin main + git rebase origin/main + git push --force-with-lease + ``` + +Ready to merge after rebase! 🚀 +EOF +)" +``` + +### PR #38 - System Requirements Pre-flight Checker +**Author:** @AlexanderLuzDH | **Additions:** 628 | **Deletions:** 2,815 | **Files:** 18 + +**Code Review:** +- ⚠️ Concern: Large number of deletions (2,815 lines) +- ⚠️ Concern: SonarCloud analysis failed +- ⚠️ Concern: Old PR (Nov 12) +- ⚠️ Issue: Merge conflicts + +**Verdict:** REQUEST CHANGES - Needs significant work + +```bash +gh pr comment 38 --repo cortexlinux/cortex --body "$(cat <<'EOF' +## Code Review + +Thanks for working on the pre-flight checker! However, there are some concerns: + +### Required Changes +1. **Large deletions** - This PR removes 2,815 lines. Please ensure no critical code is being removed unintentionally. + +2. **CI Failure** - SonarCloud analysis is failing. Please investigate and fix. + +3. **Rebase required** - Please resolve merge conflicts. + +4. **Scope review** - Please provide a summary of what files/features are being removed and why. + +Once these issues are addressed, we can proceed with the review. +EOF +)" +``` + +### PR #21 - Configuration File Template System +**Author:** @aliraza556 | **Additions:** 3,642 | **Files:** 19 + +**Code Review:** +- ⚠️ Already approved but never merged +- ⚠️ Very old (Nov 8) +- ⚠️ May be superseded by PR #201 + +**Verdict:** CLOSE - Superseded by newer implementation + +```bash +gh pr close 21 --repo cortexlinux/cortex --comment "$(cat <<'EOF' +Closing this PR as the configuration template functionality has been implemented differently in the codebase. + +@aliraza556 - Thank you for your contribution! Your work on PR #201 (Installation Templates) is the preferred implementation path. Please focus on getting that PR rebased and merged. 
+EOF +)" +``` + +--- + +## Part 5: Contributor Pipeline + +### Outstanding Bounties (Merged PRs) + +| PR | Title | Author | Bounty | Status | +|----|-------|--------|--------|--------| +| #198 | Installation history tracking | @aliraza556 | $75 | **UNPAID** | +| #195 | Package manager wrapper | @dhvll | $50 | **UNPAID** | +| #190 | Installation coordinator | @Sahilbhatane | $50 | **UNPAID** | +| #37 | Progress notifications | @AlexanderLuzDH | $25 | **UNPAID** | +| #6 | Sandbox executor | @dhvll | $50 | **UNPAID** | +| #5 | LLM integration | @Sahilbhatane | $100 | **UNPAID** | +| #4 | Hardware profiling | @dhvll | $50 | **UNPAID** | +| #200 | User Preferences | @Sahilbhatane | $50 | **UNPAID** | +| #202 | Config export/import | @danishirfan21 | $50 | **UNPAID** | + +**Total Outstanding:** ~$500 + +### Contributor Summary + +| Contributor | Merged PRs | Total Bounty Owed | +|-------------|------------|-------------------| +| @Sahilbhatane | 3 | $200 | +| @dhvll | 3 | $150 | +| @aliraza556 | 1 | $75 | +| @AlexanderLuzDH | 1 | $25 | +| @danishirfan21 | 1 | $50 | + +### New Bounty Issues to Create + +```bash +# Issue 1: Marketing Website +gh issue create --repo cortexlinux/cortex \ + --title "Build Marketing Website (cortexlinux.com)" \ + --body "$(cat <<'EOF' +## Description +Create an investor-ready marketing website for Cortex Linux. + +## Requirements +- Astro + Tailwind CSS +- Landing page with terminal demo animation +- Features page +- Pricing page (Community free / Enterprise contact) +- Mobile responsive +- < 2s load time +- Deploy on Vercel + +## Acceptance Criteria +- [ ] Landing page with hero animation +- [ ] Features overview +- [ ] Pricing table +- [ ] Mobile responsive +- [ ] Lighthouse score > 90 +- [ ] Deployed to cortexlinux.com + +**Skills:** Astro, Tailwind CSS, Web Design +**Bounty:** $500 upon merge +**Priority:** Critical +**Deadline:** January 15, 2025 +EOF +)" --label "bounty,priority: critical,help wanted" + +# Issue 2: Streamlit Dashboard MVP +gh issue create --repo cortexlinux/cortex \ + --title "Build Streamlit Dashboard MVP" \ + --body "$(cat <<'EOF' +## Description +Create a web dashboard for Cortex using Streamlit. + +## Features +- Installation history viewer +- Package search +- System health score display +- Settings management + +## Acceptance Criteria +- [ ] View installation history +- [ ] Search packages +- [ ] Display system health +- [ ] Basic settings UI +- [ ] Deploy instructions + +**Skills:** Python, Streamlit, UI/UX +**Bounty:** $200 upon merge +**Priority:** High +EOF +)" --label "bounty,priority: high" + +# Issue 3: Test Coverage Improvement +gh issue create --repo cortexlinux/cortex \ + --title "Increase Test Coverage to 80%" \ + --body "$(cat <<'EOF' +## Description +Improve test coverage across the codebase to 80%+. + +## Current State +- Test directory: test/ +- Framework: pytest +- Current coverage: ~40% + +## Requirements +- Add unit tests for cortex/coordinator.py +- Add unit tests for cortex/packages.py +- Add unit tests for LLM/interpreter.py +- Add integration tests + +## Acceptance Criteria +- [ ] Coverage >= 80% +- [ ] All tests pass +- [ ] Coverage report in CI + +**Skills:** Python, pytest, testing +**Bounty:** $150 upon merge +**Priority:** High +EOF +)" --label "bounty,testing,priority: high" + +# Issue 4: Documentation Improvements +gh issue create --repo cortexlinux/cortex \ + --title "API Documentation with Sphinx" \ + --body "$(cat <<'EOF' +## Description +Generate API documentation using Sphinx. 
+ +## Requirements +- Sphinx setup +- Auto-generated from docstrings +- Published to GitHub Pages or docs.cortexlinux.com + +## Acceptance Criteria +- [ ] Sphinx configuration +- [ ] API reference generated +- [ ] Hosted documentation +- [ ] CI workflow for doc generation + +**Skills:** Python, Sphinx, Documentation +**Bounty:** $100 upon merge +**Priority:** Medium +EOF +)" --label "bounty,documentation" + +# Issue 5: Multi-Distro Support +gh issue create --repo cortexlinux/cortex \ + --title "Add Fedora/RHEL Support" \ + --body "$(cat <<'EOF' +## Description +Extend package manager support to Fedora/RHEL (dnf/yum). + +## Requirements +- Detect distro family +- Map apt commands to dnf equivalents +- Test on Fedora 39+ + +## Acceptance Criteria +- [ ] Distro detection +- [ ] dnf/yum command mapping +- [ ] Tests for RHEL family +- [ ] Documentation update + +**Skills:** Python, Linux, Package Management +**Bounty:** $150 upon merge +**Priority:** Medium +EOF +)" --label "bounty,enhancement" +``` + +--- + +## Immediate Actions (Run Now) + +### Security Settings (GitHub UI) +1. Go to Settings → Code security and analysis +2. Enable: Dependabot alerts ✅ +3. Enable: Dependabot security updates ✅ +4. Enable: Secret scanning ✅ +5. Enable: Push protection ✅ + +### CLI Commands to Execute + +```bash +# 1. Post PR review comments +gh pr comment 199 --repo cortexlinux/cortex --body "Please rebase: git fetch origin main && git rebase origin/main && git push --force-with-lease" +gh pr comment 201 --repo cortexlinux/cortex --body "Please rebase: git fetch origin main && git rebase origin/main && git push --force-with-lease" +gh pr comment 203 --repo cortexlinux/cortex --body "Please rebase: git fetch origin main && git rebase origin/main && git push --force-with-lease" +gh pr comment 38 --repo cortexlinux/cortex --body "Large deletions need review. Please explain the 2,815 lines removed." + +# 2. Close superseded PR +gh pr close 21 --repo cortexlinux/cortex --comment "Superseded by newer implementation" + +# 3. Reopen high-priority issues +gh issue reopen 42 43 44 45 103 117 126 --repo cortexlinux/cortex 2>/dev/null || echo "Some issues may already be open" + +# 4. Update repository settings +gh repo edit cortexlinux/cortex --delete-branch-on-merge --enable-auto-merge + +# 5. Create SECURITY.md and CODEOWNERS (run in repo directory) +cd /Users/allbots/cortex-review +echo '# Security Policy...' 
> SECURITY.md +mkdir -p .github +echo '* @mikejmorgan-ai' > .github/CODEOWNERS +``` + +--- + +## This Week Actions + +| Day | Task | Owner | +|-----|------|-------| +| Mon | Enable all security settings in GitHub UI | Admin | +| Mon | Add branch protection rules | Admin | +| Mon | Post PR review comments | Admin | +| Tue | Create SECURITY.md and CODEOWNERS | Admin | +| Tue | Add CodeQL workflow | Admin | +| Wed | Reopen priority issues with new bounties | Admin | +| Wed | Create new bounty issues | Admin | +| Thu | Follow up with contributors on PR rebases | Admin | +| Fri | Pay outstanding bounties ($500) | Admin | + +--- + +## Pre-Funding Actions (Before February 2025) + +### Critical Path + +``` +Week 1-2: Security & Infrastructure +├── Enable all security features +├── Add branch protection +├── Create SECURITY.md, CODEOWNERS +└── Merge pending PRs (after rebase) + +Week 3-4: Marketing Website +├── Design mockups +├── Build landing page +├── Build features page +└── Deploy to Vercel + +Week 5-6: Polish & Demo +├── Streamlit dashboard MVP +├── Demo video recording +├── Documentation polish +└── GitHub profile optimization + +Week 7-8: Investor Prep +├── Pitch deck finalization +├── Demo environment stable +├── Metrics dashboard +└── Launch marketing site +``` + +### Milestone Targets + +| Milestone | Target Date | Issues | +|-----------|-------------|--------| +| MVP Complete | Dec 15, 2024 | Close remaining 3 issues | +| Security Hardened | Dec 20, 2024 | All security settings enabled | +| Marketing Site Live | Jan 15, 2025 | cortexlinux.com deployed | +| Demo Ready | Jan 31, 2025 | Streamlit dashboard + video | +| Funding Ready | Feb 10, 2025 | All materials complete | + +--- + +## Budget Summary + +| Category | Amount | +|----------|--------| +| Outstanding Bounties | $500 | +| New Bounty Issues | $1,100 | +| Marketing Site Bounty | $500 | +| Domain (if needed) | $50/yr | +| **Total Pre-Funding** | ~$2,150 | + +--- + +## Risk Assessment + +| Risk | Likelihood | Impact | Mitigation | +|------|------------|--------|------------| +| PRs not rebased | Medium | Medium | Direct contributor outreach | +| Marketing site delay | Medium | High | Start immediately, hire if needed | +| Security incident | Low | Critical | Enable all security features NOW | +| Contributor burnout | Medium | Medium | Pay bounties promptly | + +--- + +## Contact Information + +**Repository:** https://github.com/cortexlinux/cortex +**Discord:** https://discord.gg/uCqHvxjU83 +**Issues:** https://github.com/cortexlinux/cortex/issues + +--- + +*Generated by Claude Code audit on 2025-11-28* diff --git a/README.md b/README.md index 19948549..3d684348 100644 --- a/README.md +++ b/README.md @@ -426,4 +426,4 @@ Apache 2.0 - See [LICENSE](LICENSE) for details.

Built with love by the Cortex team and contributors worldwide. -

+

\ No newline at end of file diff --git a/README_backup_20251202_033440.md b/README_backup_20251202_033440.md new file mode 100644 index 00000000..6784517c --- /dev/null +++ b/README_backup_20251202_033440.md @@ -0,0 +1,476 @@ +# Cortex Linux + +> **The AI-Native Operating System** - Linux that understands you. No documentation required. + +[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](LICENSE) +[![Python](https://img.shields.io/badge/Python-3.10%2B-blue.svg)](https://python.org) +[![Status](https://img.shields.io/badge/Status-Alpha-orange.svg)]() +[![Discord](https://img.shields.io/discord/1234567890?color=7289da&label=Discord)](https://discord.gg/uCqHvxjU83) + +```bash +$ cortex install oracle-23-ai --optimize-gpu + Analyzing system: NVIDIA RTX 4090 detected + Installing CUDA 12.3 + dependencies + Configuring Oracle for GPU acceleration + Running validation tests + Oracle 23 AI ready at localhost:1521 (4m 23s) +``` + +--- + +## Table of Contents + +- [The Problem](#the-problem) +- [The Solution](#the-solution) +- [Features](#features) +- [Quick Start](#quick-start) +- [Installation](#installation) +- [Usage](#usage) +- [Configuration](#configuration) +- [Architecture](#architecture) +- [Development](#development) +- [Contributing](#contributing) +- [Roadmap](#roadmap) +- [FAQ](#faq) +- [Community](#community) +- [License](#license) + +--- + +## The Problem + +Installing complex software on Linux is broken: + +- **47 Stack Overflow tabs** to install CUDA drivers +- **Dependency hell** that wastes days +- **Configuration files** written in ancient runes +- **"Works on my machine"** syndrome + +**Developers spend 30% of their time fighting the OS instead of building.** + +## The Solution + +Cortex Linux embeds AI at the operating system level. 
Tell it what you need in plain English - it handles everything: + +| Feature | Description | +|---------|-------------| +| **Natural Language Commands** | System understands intent, not syntax | +| **Hardware-Aware Optimization** | Automatically configures for your GPU/CPU | +| **Self-Healing Configuration** | Fixes broken dependencies automatically | +| **Enterprise-Grade Security** | AI actions are sandboxed and validated | +| **Installation History** | Track and rollback any installation | + +--- + +## Features + +### Core Capabilities + +- **Natural Language Parsing** - "Install Python for machine learning" just works +- **Multi-Provider LLM Support** - Claude (Anthropic) and OpenAI GPT-4 +- **Intelligent Package Management** - Wraps apt/yum/dnf with semantic understanding +- **Hardware Detection** - Automatic GPU, CPU, RAM, storage profiling +- **Sandboxed Execution** - Firejail-based isolation for all commands +- **Installation Rollback** - Undo any installation with one command +- **Error Analysis** - AI-powered error diagnosis and fix suggestions + +### Supported Software (32+ Categories) + +| Category | Examples | +|----------|----------| +| Languages | Python, Node.js, Go, Rust | +| Databases | PostgreSQL, MySQL, MongoDB, Redis | +| Web Servers | Nginx, Apache | +| Containers | Docker, Kubernetes | +| DevOps | Terraform, Ansible | +| ML/AI | CUDA, TensorFlow, PyTorch | + +--- + +## Quick Start + +```bash +# Install cortex +pip install cortex-linux + +# Set your API key (choose one) +export ANTHROPIC_API_KEY="your-key-here" +# or +export OPENAI_API_KEY="your-key-here" + +# Install software with natural language +cortex install docker +cortex install "python for data science" +cortex install "web development environment" + +# Execute the installation +cortex install docker --execute + +# Preview without executing +cortex install nginx --dry-run +``` + +--- + +## Installation + +### Prerequisites + +| Requirement | Version | Notes | +|-------------|---------|-------| +| **OS** | Ubuntu 24.04 LTS | Other Debian-based coming soon | +| **Python** | 3.10+ | Required | +| **Firejail** | Latest | Recommended for sandboxing | +| **API Key** | - | Anthropic or OpenAI | + +### Step-by-Step Installation + +```bash +# 1. Install system dependencies +sudo apt update +sudo apt install -y python3 python3-pip python3-venv firejail + +# 2. Create virtual environment (recommended) +python3 -m venv ~/.cortex-venv +source ~/.cortex-venv/bin/activate + +# 3. Install Cortex +pip install cortex-linux + +# 4. Configure API key +echo 'export ANTHROPIC_API_KEY="your-key"' >> ~/.bashrc +source ~/.bashrc + +# 5. Verify installation +cortex --help +``` + +### From Source + +```bash +git clone https://github.com/cortexlinux/cortex.git +cd cortex +pip install -e . 
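+
+# Verify the editable install (same check as step 5 of the step-by-step install above)
+cortex --help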
+``` + +--- + +## Usage + +### Basic Commands + +```bash +# Install software +cortex install # Show commands only +cortex install --execute # Execute installation +cortex install --dry-run # Preview mode + +# Installation history +cortex history # List recent installations +cortex history show # Show installation details + +# Rollback +cortex rollback # Undo an installation +cortex rollback --dry-run # Preview rollback +``` + +### Examples + +```bash +# Simple installations +cortex install docker --execute +cortex install postgresql --execute +cortex install nginx --execute + +# Natural language requests +cortex install "python with machine learning libraries" --execute +cortex install "web development stack with nodejs and npm" --execute +cortex install "database tools for postgresql" --execute + +# Complex requests +cortex install "cuda drivers for nvidia gpu" --execute +cortex install "complete devops toolchain" --execute +``` + +### Environment Variables + +| Variable | Description | Required | +|----------|-------------|----------| +| `ANTHROPIC_API_KEY` | Anthropic Claude API key | One of these | +| `OPENAI_API_KEY` | OpenAI GPT-4 API key | required | +| `MOONSHOT_API_KEY` | Kimi K2 API key | Optional | +| `CORTEX_LOG_LEVEL` | Logging level (DEBUG, INFO, WARNING) | No | +| `CORTEX_DATA_DIR` | Data directory path | No | + +--- + +## Configuration + +### Configuration File + +Create `~/.config/cortex/config.yaml`: + +```yaml +# LLM Provider Settings +llm: + default_provider: claude # claude, openai, kimi + temperature: 0.3 + max_tokens: 1000 + +# Security Settings +security: + enable_sandbox: true + require_confirmation: true + allowed_directories: + - /tmp + - ~/.local + +# Logging +logging: + level: INFO + file: ~/.local/share/cortex/cortex.log +``` + +--- + +## Architecture + +``` + User Input + + Natural Language + + Cortex CLI + + +--------+--------+ + | | + LLM Router Hardware + | Profiler + | + +-------+-------+ + | | | +Claude GPT-4 Kimi K2 + | + Command Generator + | + Security Validator + | + Sandbox Executor + | + +-------+-------+ + | | +apt/yum/dnf Verifier + | + Installation + History +``` + +### Key Components + +| Component | File | Purpose | +|-----------|------|---------| +| CLI | `cortex/cli.py` | Command-line interface | +| Coordinator | `cortex/coordinator.py` | Installation orchestration | +| LLM Interpreter | `LLM/interpreter.py` | Natural language to commands | +| Package Manager | `cortex/packages.py` | Package manager abstraction | +| Sandbox | `src/sandbox_executor.py` | Secure command execution | +| Hardware Profiler | `src/hwprofiler.py` | System hardware detection | +| History | `installation_history.py` | Installation tracking | +| Error Parser | `error_parser.py` | Error analysis and fixes | + +--- + +## Development + +### Setup Development Environment + +```bash +# Clone repository +git clone https://github.com/cortexlinux/cortex.git +cd cortex + +# Create virtual environment +python3 -m venv venv +source venv/bin/activate + +# Install dependencies +pip install -r requirements.txt +pip install -r requirements-dev.txt + +# Install in development mode +pip install -e . 
+ +# Run tests +pytest test/ -v + +# Run with coverage +pytest test/ --cov=cortex --cov-report=html +``` + +### Code Style + +```bash +# Format code +black cortex/ + +# Lint +pylint cortex/ + +# Type checking +mypy cortex/ +``` + +### Project Structure + +``` +cortex/ + cortex/ # Core Python package + __init__.py + cli.py # CLI entry point + coordinator.py # Installation coordinator + packages.py # Package manager wrapper + LLM/ # LLM integration + interpreter.py # Command interpreter + requirements.txt + src/ # Additional modules + sandbox_executor.py + hwprofiler.py + progress_tracker.py + test/ # Unit tests + docs/ # Documentation + examples/ # Usage examples + .github/ # CI/CD workflows + requirements.txt # Dependencies + setup.py # Package config +``` + +--- + +## Contributing + +We welcome contributions! See [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines. + +### Quick Contribution Guide + +1. **Fork** the repository +2. **Create** a feature branch (`git checkout -b feature/amazing-feature`) +3. **Commit** your changes (`git commit -m 'Add amazing feature'`) +4. **Push** to the branch (`git push origin feature/amazing-feature`) +5. **Open** a Pull Request + +### Bounty Program + +Cash bounties on merge: + +| Tier | Amount | Examples | +|------|--------|----------| +| Critical | $150-200 | Security fixes, core features | +| Standard | $75-150 | New features, integrations | +| Testing | $25-75 | Tests, documentation | + +**Payment methods:** Bitcoin, USDC, PayPal + +See [Bounties.md](Bounties.md) for available bounties. + +--- + +## Roadmap + +### Current Status: Alpha (Phase 1) + +- LLM integration layer +- Safe command execution sandbox +- Hardware detection +- Installation history & rollback +- Error parsing & suggestions +- Multi-provider LLM support + +### Coming Soon (Phase 2) + +- Advanced dependency resolution +- Configuration file generation +- Multi-step installation orchestration +- Plugin architecture + +### Future (Phase 3) + +- Enterprise deployment tools +- Security hardening & audit logging +- Role-based access control +- Air-gapped deployment support + +See [ROADMAP.md](ROADMAP.md) for detailed plans. + +--- + +## FAQ + +
+<details>
+<summary>What operating systems are supported?</summary>
+
+Currently Ubuntu 24.04 LTS. Other Debian-based distributions coming soon.
+</details>
+
+<details>
+<summary>Is it free?</summary>
+
+Yes! Community edition is free and open source (Apache 2.0). Enterprise subscriptions will be available for advanced features.
+</details>
+
+<details>
+<summary>Is it secure?</summary>
+
+Yes. All commands are validated and executed in a Firejail sandbox with AppArmor policies. AI-generated commands are checked against a security allowlist.
+</details>
+
+<details>
+<summary>Can I use my own LLM?</summary>
+
+Currently supports Claude (Anthropic) and OpenAI. Local LLM support is planned for future releases.
+</details>
+
+<details>
+<summary>What if something goes wrong?</summary>
+
+Every installation is tracked and can be rolled back with `cortex rollback <id>`.
+</details>
+ +See [FAQ.md](FAQ.md) for more questions. + +--- + +## Community + +### Get Help + +- **Discord:** [Join our server](https://discord.gg/uCqHvxjU83) +- **GitHub Issues:** [Report bugs](https://github.com/cortexlinux/cortex/issues) +- **Discussions:** [Ask questions](https://github.com/cortexlinux/cortex/discussions) + +### Stay Updated + +- Star this repository +- Follow [@cortexlinux](https://twitter.com/cortexlinux) on Twitter +- Subscribe to our [newsletter](https://cortexlinux.com) + +--- + +## License + +This project is licensed under the Apache License 2.0 - see the [LICENSE](LICENSE) file for details. + +--- + +## Acknowledgments + +- Built with [Claude](https://anthropic.com) and [OpenAI](https://openai.com) +- Sandbox powered by [Firejail](https://firejail.wordpress.com/) +- Inspired by the pain of every developer who spent hours on Stack Overflow + +--- + +

+ Star this repo to follow development +

+ Built with ❤️ by the Cortex Linux community

diff --git a/cortex/__init__.py b/cortex/__init__.py index dcf98a77..1352d82a 100644 --- a/cortex/__init__.py +++ b/cortex/__init__.py @@ -1,7 +1,13 @@ +from importlib import metadata + from .cli import main from .env_loader import load_env from .packages import PackageManager, PackageManagerType -__version__ = "0.1.0" +try: + __version__ = metadata.version("cortex-linux") +except metadata.PackageNotFoundError: + __version__ = "0.1.0" +__all__ = ["__version__"] __all__ = ["main", "load_env", "PackageManager", "PackageManagerType"] diff --git a/cortex/update_manifest.py b/cortex/update_manifest.py new file mode 100644 index 00000000..e44e487c --- /dev/null +++ b/cortex/update_manifest.py @@ -0,0 +1,178 @@ +""" +Structures and helpers for Cortex update manifests. +""" + +from __future__ import annotations + +import platform +from collections.abc import Iterable +from dataclasses import dataclass, field +from enum import Enum +from typing import Any, Optional + +from packaging.specifiers import InvalidSpecifier, SpecifierSet +from packaging.version import Version + +from cortex.versioning import CortexVersion, is_newer_version + + +class UpdateChannel(str, Enum): + STABLE = "stable" + BETA = "beta" + + @classmethod + def from_string(cls, raw: str) -> UpdateChannel: + try: + return cls(raw.lower()) + except ValueError as exc: + valid = ", ".join(c.value for c in cls) + raise ValueError(f"Unknown update channel '{raw}'. Valid options: {valid}") from exc + + +@dataclass +class SystemInfo: + python_version: Version + os_name: str + architecture: str + distro: str | None = None + + @classmethod + def current(cls) -> SystemInfo: + return cls( + python_version=Version(platform.python_version()), + os_name=platform.system().lower(), + architecture=platform.machine().lower(), + distro=_detect_distro(), + ) + + +def _detect_distro() -> str | None: + try: + import distro # type: ignore + + return distro.id() + except Exception: + return None + + +@dataclass +class CompatibilityRule: + python_spec: SpecifierSet | None = None + os_names: list[str] = field(default_factory=list) + architectures: list[str] = field(default_factory=list) + distros: list[str] = field(default_factory=list) + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> CompatibilityRule: + specifier_value = data.get("python") + specifier = None + if specifier_value: + try: + specifier = SpecifierSet(specifier_value) + except InvalidSpecifier as exc: + raise ValueError(f"Invalid python specifier '{specifier_value}'") from exc + + return cls( + python_spec=specifier, + os_names=[name.lower() for name in data.get("os", [])], + architectures=[arch.lower() for arch in data.get("arch", [])], + distros=[dist.lower() for dist in data.get("distro", [])], + ) + + def is_compatible(self, system: SystemInfo) -> bool: + if self.python_spec and system.python_version not in self.python_spec: + return False + + if self.os_names and system.os_name not in self.os_names: + return False + + if self.architectures and system.architecture not in self.architectures: + return False + + if self.distros and system.distro not in self.distros: + return False + + return True + + +@dataclass +class ReleaseEntry: + version: CortexVersion + channel: UpdateChannel + download_url: str + sha256: str + release_notes: str + published_at: str | None = None + compatibility: list[CompatibilityRule] = field(default_factory=list) + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> ReleaseEntry: + compatibility_data = data.get("compatibility", []) + 
compatibility = [CompatibilityRule.from_dict(entry) for entry in compatibility_data] + + return cls( + version=CortexVersion.from_string(data["version"]), + channel=UpdateChannel.from_string(data.get("channel", UpdateChannel.STABLE.value)), + download_url=data["download_url"], + sha256=data["sha256"], + release_notes=data.get("release_notes", ""), + published_at=data.get("published_at"), + compatibility=compatibility, + ) + + def is_compatible(self, system: SystemInfo) -> bool: + if not self.compatibility: + return True + + return any(rule.is_compatible(system) for rule in self.compatibility) + + +@dataclass +class UpdateManifest: + releases: list[ReleaseEntry] + signature: str | None = None + generated_at: str | None = None + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> UpdateManifest: + releases_data = data.get("releases", []) + releases = [ReleaseEntry.from_dict(entry) for entry in releases_data] + return cls( + releases=releases, + signature=data.get("signature"), + generated_at=data.get("generated_at"), + ) + + def iter_releases( + self, + *, + channel: UpdateChannel | None = None, + system: SystemInfo | None = None, + ) -> Iterable[ReleaseEntry]: + for release in self.releases: + if channel and release.channel != channel: + continue + if system and not release.is_compatible(system): + continue + yield release + + def find_latest( + self, + *, + current_version: CortexVersion, + channel: UpdateChannel, + system: SystemInfo | None = None, + ) -> ReleaseEntry | None: + system_info = system or SystemInfo.current() + + eligible = [ + release + for release in self.iter_releases(channel=channel, system=system_info) + if is_newer_version(current_version, release.version) + ] + + if not eligible: + return None + + eligible.sort(key=lambda release: release.version.parsed, reverse=True) + return eligible[0] diff --git a/cortex/updater.py b/cortex/updater.py new file mode 100644 index 00000000..e8ed865e --- /dev/null +++ b/cortex/updater.py @@ -0,0 +1,326 @@ +""" +Update checking and coordination for Cortex. 
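+
+Illustrative usage (a minimal sketch; network access to the update manifest is assumed):
+
+    from cortex.updater import UpdateService
+
+    service = UpdateService()
+    result = service.check_for_updates(force=True)
+    if result.update_available and result.release:
+        service.perform_update(dry_run=True)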
+""" + +from __future__ import annotations + +import hashlib +import json +import os +import shutil +import subprocess +import sys +import tempfile +from dataclasses import dataclass +from datetime import datetime, timedelta, timezone +from pathlib import Path +from typing import Any, Optional + +import requests + +from cortex.update_manifest import ( + ReleaseEntry, + SystemInfo, + UpdateChannel, + UpdateManifest, +) +from cortex.versioning import PACKAGE_NAME, CortexVersion, get_installed_version + +DEFAULT_MANIFEST_URL = "https://updates.cortexlinux.com/manifest.json" +STATE_DIR = Path.home() / ".config" / "cortex" / "updater" +STATE_FILE = STATE_DIR / "state.json" +DEFAULT_LOG_FILE = STATE_DIR / "update.log" +CACHE_TTL = timedelta(hours=6) + + +@dataclass +class UpdateCheckResult: + update_available: bool + release: ReleaseEntry | None + channel: UpdateChannel + last_checked: datetime + from_cache: bool = False + + +@dataclass +class UpdatePerformResult: + success: bool + updated: bool + release: ReleaseEntry | None + previous_version: CortexVersion + current_version: CortexVersion + log_path: Path + message: str | None = None + + +class UpdateError(Exception): + """Generic update failure.""" + + +class ChecksumMismatch(UpdateError): + """Raised when downloaded artifacts do not match expected checksum.""" + + +class InstallError(UpdateError): + """Raised when pip install fails.""" + + +class UpdateService: + def __init__( + self, + *, + manifest_url: str | None = None, + state_file: Path | None = None, + system_info: SystemInfo | None = None, + log_file: Path | None = None, + ) -> None: + self.manifest_url = manifest_url or os.environ.get( + "CORTEX_UPDATE_MANIFEST_URL", DEFAULT_MANIFEST_URL + ) + self.state_file = state_file or STATE_FILE + self.system_info = system_info or SystemInfo.current() + self.log_file = log_file or DEFAULT_LOG_FILE + self.state_file.parent.mkdir(parents=True, exist_ok=True) + self.log_file.parent.mkdir(parents=True, exist_ok=True) + + # ------------------------------------------------------------------ State + def _load_state(self) -> dict[str, Any]: + if not self.state_file.exists(): + return {} + try: + with self.state_file.open("r", encoding="utf-8") as fh: + return json.load(fh) + except Exception: + return {} + + def _save_state(self, state: dict[str, Any]) -> None: + tmp_path = self.state_file.with_suffix(".tmp") + with tmp_path.open("w", encoding="utf-8") as fh: + json.dump(state, fh, indent=2) + tmp_path.replace(self.state_file) + + # ---------------------------------------------------------------- Channels + def get_channel(self) -> UpdateChannel: + state = self._load_state() + channel_raw = state.get("channel", UpdateChannel.STABLE.value) + try: + return UpdateChannel.from_string(channel_raw) + except ValueError: + return UpdateChannel.STABLE + + def set_channel(self, channel: UpdateChannel) -> None: + state = self._load_state() + state["channel"] = channel.value + self._save_state(state) + + # --------------------------------------------------------------- Manifest + def _fetch_manifest(self) -> UpdateManifest: + response = requests.get(self.manifest_url, timeout=10) + response.raise_for_status() + payload = response.json() + return UpdateManifest.from_dict(payload) + + def _should_use_cache(self, last_checked: str | None) -> bool: + if not last_checked: + return False + try: + last_dt = datetime.fromisoformat(last_checked) + except ValueError: + return False + return datetime.now(timezone.utc) - last_dt < CACHE_TTL + + # 
--------------------------------------------------------------- Checking + def check_for_updates( + self, + *, + force: bool = False, + channel: UpdateChannel | None = None, + current_version: CortexVersion | None = None, + ) -> UpdateCheckResult: + state = self._load_state() + resolved_channel = channel or self.get_channel() + current = current_version or get_installed_version() + + if not force and self._should_use_cache(state.get("last_checked")): + cached_release = state.get("cached_release") + release = ReleaseEntry.from_dict(cached_release) if cached_release else None + last_checked = datetime.fromisoformat(state.get("last_checked")).astimezone( + timezone.utc + ) + return UpdateCheckResult( + update_available=bool(release), + release=release, + channel=resolved_channel, + last_checked=last_checked, + from_cache=True, + ) + + manifest = self._fetch_manifest() + release = manifest.find_latest( + current_version=current, + channel=resolved_channel, + system=self.system_info, + ) + + last_checked = datetime.now(timezone.utc) + state["last_checked"] = last_checked.isoformat() + state["cached_release"] = _release_to_dict(release) if release else None + state["channel"] = resolved_channel.value + self._save_state(state) + + return UpdateCheckResult( + update_available=release is not None, + release=release, + channel=resolved_channel, + last_checked=last_checked, + from_cache=False, + ) + + # --------------------------------------------------------------- Upgrades + def perform_update( + self, + *, + force: bool = False, + channel: UpdateChannel | None = None, + dry_run: bool = False, + ) -> UpdatePerformResult: + current_version = get_installed_version() + check_result = self.check_for_updates( + force=force, channel=channel, current_version=current_version + ) + + if not check_result.update_available or not check_result.release: + return UpdatePerformResult( + success=True, + updated=False, + release=None, + previous_version=current_version, + current_version=current_version, + log_path=self.log_file, + message="Already up to date.", + ) + + release = check_result.release + + if dry_run: + return UpdatePerformResult( + success=True, + updated=False, + release=release, + previous_version=current_version, + current_version=current_version, + log_path=self.log_file, + message=f"Update available (dry run): {release.version.raw}", + ) + + temp_dir: Path | None = None + try: + artifact_path, temp_dir = self._download_release(release) + self._log(f"Installing Cortex {release.version.raw} from {artifact_path}") + self._install_artifact(artifact_path) + self._record_last_upgrade(previous=current_version, new_version=release.version) + + return UpdatePerformResult( + success=True, + updated=True, + release=release, + previous_version=current_version, + current_version=release.version, + log_path=self.log_file, + message=f"Updated to {release.version.raw}", + ) + except UpdateError as exc: + self._log(f"Update error: {exc}. 
Rolling back to {current_version.raw}.") + self._rollback(previous=current_version) + raise + finally: + if temp_dir: + shutil.rmtree(temp_dir, ignore_errors=True) + + # ----------------------------------------------------------- Implementation + def _download_release(self, release: ReleaseEntry) -> tuple[Path, Path]: + temp_dir = Path(tempfile.mkdtemp(prefix="cortex-update-")) + artifact_name = release.download_url.split("/")[-1] or f"cortex-{release.version.raw}.whl" + artifact_path = temp_dir / artifact_name + + with requests.get(release.download_url, stream=True, timeout=60) as response: + response.raise_for_status() + with artifact_path.open("wb") as fh: + for chunk in response.iter_content(chunk_size=1024 * 1024): + if chunk: + fh.write(chunk) + + self._log(f"Downloaded release to {artifact_path}") + self._verify_checksum(artifact_path, release.sha256) + return artifact_path, temp_dir + + def _verify_checksum(self, path: Path, expected_sha256: str) -> None: + sha256 = hashlib.sha256() + with path.open("rb") as fh: + for chunk in iter(lambda: fh.read(1024 * 1024), b""): + sha256.update(chunk) + computed = sha256.hexdigest() + if computed.lower() != expected_sha256.lower(): + raise ChecksumMismatch( + f"Checksum mismatch for {path.name}: expected {expected_sha256}, got {computed}" + ) + self._log(f"Checksum verified for {path.name}") + + def _install_artifact(self, artifact_path: Path) -> None: + self._log(f"Running pip install for {artifact_path}") + self._run_pip(["install", str(artifact_path)]) + + def _rollback(self, previous: CortexVersion) -> None: + self._log(f"Rolling back to Cortex {previous.raw}") + self._run_pip(["install", f"{PACKAGE_NAME}=={previous.raw}"]) + + def _run_pip(self, args: list[str]) -> None: + cmd = [sys.executable, "-m", "pip"] + args + self._log(f"Executing command: {' '.join(cmd)}") + try: + result = subprocess.run( + cmd, + check=True, + capture_output=True, + text=True, + ) + self._log(f"Pip output: {result.stdout.strip()}") + except subprocess.CalledProcessError as exc: + self._log(f"Pip failed: {exc.stderr}") + raise InstallError(f"pip exited with code {exc.returncode}") from exc + + def _record_last_upgrade(self, *, previous: CortexVersion, new_version: CortexVersion) -> None: + state = self._load_state() + state["last_success_version"] = new_version.raw + state["previous_version"] = previous.raw + state["last_upgrade_at"] = datetime.now(timezone.utc).isoformat() + self._save_state(state) + + def _log(self, message: str) -> None: + timestamp = datetime.now(timezone.utc).isoformat() + log_line = f"[{timestamp}] {message}\n" + with self.log_file.open("a", encoding="utf-8") as fh: + fh.write(log_line) + + +def _release_to_dict(release: ReleaseEntry | None) -> dict[str, Any] | None: + if not release: + return None + + return { + "version": release.version.raw, + "channel": release.channel.value, + "download_url": release.download_url, + "sha256": release.sha256, + "release_notes": release.release_notes, + "published_at": release.published_at, + "compatibility": [ + { + "python": str(rule.python_spec) if rule.python_spec else None, + "os": rule.os_names, + "arch": rule.architectures, + "distro": rule.distros, + } + for rule in release.compatibility + ], + } diff --git a/cortex/versioning.py b/cortex/versioning.py new file mode 100644 index 00000000..33f2ea97 --- /dev/null +++ b/cortex/versioning.py @@ -0,0 +1,66 @@ +""" +Utilities for working with Cortex package versions. 
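+
+Illustrative usage (a minimal sketch; the candidate version string is hypothetical):
+
+    current = get_installed_version()
+    candidate = CortexVersion.from_string("0.2.0")
+    if is_newer_version(current, candidate):
+        print(f"Update available: {candidate}")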
+""" + +from __future__ import annotations + +from dataclasses import dataclass +from importlib import metadata +from typing import Optional + +from packaging.version import InvalidVersion, Version + +PACKAGE_NAME = "cortex-linux" +__all__ = [ + "PACKAGE_NAME", + "CortexVersion", + "get_installed_version", + "is_newer_version", +] + + +@dataclass(frozen=True) +class CortexVersion: + """Wrapper that keeps both raw and parsed versions.""" + + raw: str + parsed: Version + + @classmethod + def from_string(cls, raw_version: str) -> CortexVersion: + try: + parsed = Version(raw_version) + except InvalidVersion as exc: + raise ValueError(f"Invalid Cortex version string: {raw_version}") from exc + return cls(raw=raw_version, parsed=parsed) + + def __str__(self) -> str: + return self.raw + + +def get_installed_version() -> CortexVersion: + """ + Return the version of Cortex that is currently installed. + + Falls back to the package's __version__ attribute when metadata is unavailable. + """ + + raw_version: str | None = None + + try: + raw_version = metadata.version(PACKAGE_NAME) + except metadata.PackageNotFoundError: + try: + from cortex import __version__ as package_version # type: ignore + + raw_version = package_version + except Exception: + raw_version = "0.0.0" + + return CortexVersion.from_string(raw_version) + + +def is_newer_version(current: CortexVersion, candidate: CortexVersion) -> bool: + """Return True when ``candidate`` is newer than ``current``.""" + + return candidate.parsed > current.parsed diff --git a/docs/guides/Home.md b/docs/guides/Home.md index fb6e933d..261cdf87 100644 --- a/docs/guides/Home.md +++ b/docs/guides/Home.md @@ -40,4 +40,4 @@ cortex install "python for machine learning" - **Discord:** https://discord.gg/uCqHvxjU83 - **GitHub:** https://github.com/cortexlinux/cortex -- **Discussions:** https://github.com/cortexlinux/cortex/discussions +- **Discussions:** https://github.com/cortexlinux/cortex/discussions \ No newline at end of file diff --git a/examples/parallel_llm_demo.py b/examples/parallel_llm_demo.py index d1930d2e..801822fe 100644 --- a/examples/parallel_llm_demo.py +++ b/examples/parallel_llm_demo.py @@ -252,3 +252,4 @@ async def main(): if __name__ == "__main__": asyncio.run(main()) + asyncio.run(main()) diff --git a/manage_cortex_prs.sh b/manage_cortex_prs.sh new file mode 100755 index 00000000..ee3d3d74 --- /dev/null +++ b/manage_cortex_prs.sh @@ -0,0 +1,435 @@ +#!/bin/bash +# Cortex Linux - Master PR Control & Team Coordination +# Complete automation: reviews, assignments, Discord, payments, everything + +set -e + +echo "🧠 CORTEX LINUX - MASTER PR CONTROL SYSTEM" +echo "==========================================" +echo "" + +# Configuration +REPO="cortexlinux/cortex" +REPO_DIR="$HOME/cortex" +DISCORD_INVITE="https://discord.gg/uCqHvxjU83" +GITHUB_TOKEN=$(grep GITHUB_TOKEN ~/.zshrc | cut -d'=' -f2 | tr -d '"' | tr -d "'") +BOUNTY_CSV="$REPO_DIR/bounties_paid.csv" + +# Ensure we're in the repo +cd "$REPO_DIR" || { echo "❌ Repo not found at $REPO_DIR"; exit 1; } + +# Create bounty tracking CSV if it doesn't exist +if [ ! 
-f "$BOUNTY_CSV" ]; then + echo "PR_Number,Author,Amount,Status,Payment_Status,Date" > "$BOUNTY_CSV" +fi + +echo "📊 STEP 1: FETCHING ALL OPEN PRS" +echo "=================================" +echo "" + +# Get all open PRs +prs=$(gh pr list --repo "$REPO" --state open --json number,title,author,createdAt,reviews,isDraft,mergeable --limit 50) +total_prs=$(echo "$prs" | jq length) + +echo "Found $total_prs open PR(s)" +echo "" + +if [ "$total_prs" -eq 0 ]; then + echo "✅ No PRs to process!" + exit 0 +fi + +# Display all PRs +echo "$prs" | jq -r '.[] | "PR #\(.number): \(.title) by @\(.author.login) - Draft: \(.isDraft)"' +echo "" + +echo "🎯 STEP 2: CATEGORIZING PRS" +echo "===========================" +echo "" + +# Arrays for different PR categories +critical_prs=() +ready_to_merge=() +needs_review=() +draft_prs=() +stale_prs=() + +# Categorize each PR +while IFS= read -r pr_num; do + pr_data=$(echo "$prs" | jq -r ".[] | select(.number == $pr_num)") + author=$(echo "$pr_data" | jq -r '.author.login') + title=$(echo "$pr_data" | jq -r '.title') + is_draft=$(echo "$pr_data" | jq -r '.isDraft') + created=$(echo "$pr_data" | jq -r '.createdAt') + mergeable=$(echo "$pr_data" | jq -r '.mergeable') + review_count=$(echo "$pr_data" | jq -r '.reviews | length') + + # Calculate age + created_ts=$(date -j -f "%Y-%m-%dT%H:%M:%SZ" "$created" +%s 2>/dev/null || echo 0) + now_ts=$(date +%s) + age_days=$(( (now_ts - created_ts) / 86400 )) + + # Skip drafts + if [ "$is_draft" = "true" ]; then + draft_prs+=($pr_num) + continue + fi + + # Check if it's the critical package manager PR + if [[ "$title" == *"package"* ]] || [[ "$title" == *"Package"* ]] || [ "$pr_num" -eq 195 ]; then + critical_prs+=($pr_num) + echo "🔥 CRITICAL: PR #$pr_num - $title (Age: $age_days days)" + elif [ "$mergeable" = "MERGEABLE" ] && [ "$review_count" -gt 0 ]; then + ready_to_merge+=($pr_num) + echo "✅ READY TO MERGE: PR #$pr_num - $title" + elif [ "$review_count" -eq 0 ]; then + needs_review+=($pr_num) + echo "📋 NEEDS REVIEW: PR #$pr_num - $title (Age: $age_days days)" + fi + + # Check if stale (>5 days) + if [ "$age_days" -gt 5 ]; then + stale_prs+=($pr_num) + fi +done < <(echo "$prs" | jq -r '.[].number') + +echo "" +echo "Summary:" +echo " 🔥 Critical PRs: ${#critical_prs[@]}" +echo " ✅ Ready to merge: ${#ready_to_merge[@]}" +echo " 📋 Need review: ${#needs_review[@]}" +echo " 📝 Drafts: ${#draft_prs[@]}" +echo " ⏰ Stale (>5 days): ${#stale_prs[@]}" +echo "" + +read -p "Continue with automated processing? (y/n): " -n 1 -r +echo +if [[ ! $REPLY =~ ^[Yy]$ ]]; then + echo "Aborted." + exit 0 +fi + +echo "" +echo "🎯 STEP 3: PROCESSING CRITICAL PRS" +echo "==================================" +echo "" + +for pr_num in "${critical_prs[@]}"; do + pr_data=$(echo "$prs" | jq -r ".[] | select(.number == $pr_num)") + author=$(echo "$pr_data" | jq -r '.author.login') + title=$(echo "$pr_data" | jq -r '.title') + + echo "Processing CRITICAL PR #$pr_num: $title" + echo "Author: @$author" + echo "" + + # Assign reviewers if not already assigned + echo " Assigning reviewers: dhvil, mikejmorgan-ai" + gh pr edit $pr_num --add-reviewer dhvil,mikejmorgan-ai 2>/dev/null || echo " (Reviewers already assigned)" + + # Post urgent review comment + comment="🔥 **CRITICAL PATH REVIEW** + +Hi @$author! This PR is blocking our MVP completion. 
+
+**Urgent Review In Progress:**
+- ✅ Technical review by @dhvil
+- ✅ Final approval by @mikejmorgan-ai
+- ⏱️ Target decision: Within 24 hours
+
+**Payment Ready:**
+💰 Bounty will be paid via Discord crypto (BTC/USDC) within 24 hours of merge
+
+**Join Discord for payment coordination:**
+👉 $DISCORD_INVITE
+
+We're prioritizing this merge! Thanks for the critical work. 🚀"
+
+    gh pr comment $pr_num --body "$comment" 2>/dev/null || echo " (Comment already exists)"
+
+    echo " ✅ Critical PR tagged and reviewers notified"
+    echo ""
+    sleep 1
+done
+
+echo ""
+echo "✅ STEP 4: AUTO-MERGING READY PRS"
+echo "================================="
+echo ""
+
+merged_count=0
+for pr_num in "${ready_to_merge[@]}"; do
+    pr_data=$(echo "$prs" | jq -r ".[] | select(.number == $pr_num)")
+    author=$(echo "$pr_data" | jq -r '.author.login')
+    title=$(echo "$pr_data" | jq -r '.title')
+
+    echo "PR #$pr_num: $title by @$author"
+    echo " Status: Mergeable with approvals"
+
+    # Determine bounty amount based on issue
+    bounty_amount="TBD"
+    if [[ "$title" == *"context"* ]] || [[ "$title" == *"Context"* ]]; then
+        bounty_amount="150"
+    elif [[ "$title" == *"logging"* ]] || [[ "$title" == *"Logging"* ]]; then
+        bounty_amount="100"
+    fi
+
+    read -p " Merge PR #$pr_num? (y/n): " -n 1 -r
+    echo
+    if [[ $REPLY =~ ^[Yy]$ ]]; then
+        # Merge the PR
+        gh pr merge $pr_num --squash --delete-branch
+        echo " ✅ Merged!"
+
+        # Post payment comment
+        payment_comment="🎉 **PR MERGED!**
+
+Thanks @$author! Your contribution has been merged into main.
+
+**💰 Payment Details:**
+- Bounty: \$$bounty_amount (as specified in issue)
+- Method: Crypto (Bitcoin or USDC)
+- Timeline: Within 24 hours
+
+**Next Steps:**
+1. Join Discord: $DISCORD_INVITE
+2. DM @mikejmorgan with your wallet address
+3. Receive payment confirmation
+
+Great work! Looking forward to your next contribution. 🚀"
+
+        gh pr comment $pr_num --body "$payment_comment"
+
+        # Track in CSV
+        echo "$pr_num,$author,$bounty_amount,Merged,Pending Payment,$(date +%Y-%m-%d)" >> "$BOUNTY_CSV"
+
+        # Arithmetic assignment avoids tripping set -e when the count starts at zero
+        merged_count=$((merged_count + 1))
+        echo ""
+    else
+        echo " ⏭️ Skipped"
+        echo ""
+    fi
+    sleep 1
+done
+
+echo "Merged $merged_count PR(s)"
+echo ""
+
+echo "📋 STEP 5: ASSIGNING REVIEWERS TO PENDING PRS"
+echo "=============================================="
+echo ""
+
+for pr_num in "${needs_review[@]}"; do
+    pr_data=$(echo "$prs" | jq -r ".[] | select(.number == $pr_num)")
+    author=$(echo "$pr_data" | jq -r '.author.login')
+    title=$(echo "$pr_data" | jq -r '.title')
+
+    echo "PR #$pr_num: $title by @$author"
+
+    # Assign reviewers
+    if [ "$author" != "dhvil" ] && [ "$author" != "mikejmorgan-ai" ]; then
+        gh pr edit $pr_num --add-reviewer dhvil,mikejmorgan-ai 2>/dev/null || true
+        echo " ✅ Assigned reviewers: dhvil, mikejmorgan-ai"
+    else
+        gh pr edit $pr_num --add-reviewer mikejmorgan-ai 2>/dev/null || true
+        echo " ✅ Assigned reviewer: mikejmorgan-ai"
+    fi
+
+    # Post welcome comment
+    welcome_comment="Thanks @$author for this contribution! 🎉
+
+**Review Process:**
+1. ✅ Reviewers assigned - expect feedback within 24-48 hours
+2. 💬 **Join Discord**: $DISCORD_INVITE
+3. 💰 **Bounty Payment**: Crypto (BTC/USDC) via Discord after merge
+
+**Important:**
+- All bounties tracked and paid through Discord
+- Please join to coordinate payment details
+- Typical merge → payment time: 24-48 hours
+
+Looking forward to reviewing this! 🚀"
🚀" + + # Check if we already commented + existing=$(gh pr view $pr_num --json comments --jq '[.comments[] | select(.author.login == "mikejmorgan-ai")] | length') + if [ "$existing" -eq 0 ]; then + gh pr comment $pr_num --body "$welcome_comment" + echo " ✅ Posted welcome comment" + else + echo " (Welcome comment already exists)" + fi + + echo "" + sleep 1 +done + +echo "" +echo "⏰ STEP 6: SENDING STALE PR REMINDERS" +echo "=====================================" +echo "" + +for pr_num in "${stale_prs[@]}"; do + # Skip if it's in draft or critical (already handled) + if [[ " ${draft_prs[@]} " =~ " ${pr_num} " ]] || [[ " ${critical_prs[@]} " =~ " ${pr_num} " ]]; then + continue + fi + + pr_data=$(echo "$prs" | jq -r ".[] | select(.number == $pr_num)") + author=$(echo "$pr_data" | jq -r '.author.login') + title=$(echo "$pr_data" | jq -r '.title') + created=$(echo "$pr_data" | jq -r '.createdAt') + + created_ts=$(date -j -f "%Y-%m-%dT%H:%M:%SZ" "$created" +%s 2>/dev/null || echo 0) + now_ts=$(date +%s) + age_days=$(( (now_ts - created_ts) / 86400 )) + + echo "PR #$pr_num: $title by @$author ($age_days days old)" + + stale_comment="Hi @$author! 👋 + +This PR has been open for $age_days days. Quick status check: + +📋 **Checklist:** +- [ ] Joined Discord? ($DISCORD_INVITE) +- [ ] All tests passing? +- [ ] Addressed review feedback? + +💰 **Payment Reminder:** +- Bounties paid via crypto (Bitcoin/USDC) +- Processed through Discord DMs +- Sent within 24 hours of merge + +Need help? Let us know in Discord! We want to get this merged and pay you ASAP. 🚀" + + gh pr comment $pr_num --body "$stale_comment" + echo " ✅ Sent reminder" + echo "" + sleep 1 +done + +echo "" +echo "💬 STEP 7: GENERATING DISCORD ANNOUNCEMENT" +echo "==========================================" +echo "" + +cat << DISCORD_EOF > /tmp/discord_announcement.txt +🚀 **PR STATUS UPDATE - $(date +"%B %d, %Y")** + +Just completed automated PR processing! Here's where we stand: + +**📊 Statistics:** +- Total Open PRs: $total_prs +- 🔥 Critical (Package Manager): ${#critical_prs[@]} +- ✅ Merged Today: $merged_count +- 📋 Under Review: ${#needs_review[@]} +- ⏰ Stale Reminders Sent: ${#stale_prs[@]} + +**🎯 Focus Areas:** +DISCORD_EOF + +if [ ${#critical_prs[@]} -gt 0 ]; then + echo "• 🔥 PR #${critical_prs[0]} (Package Manager) - CRITICAL PATH - Under urgent review" >> /tmp/discord_announcement.txt +fi + +cat << DISCORD_EOF2 >> /tmp/discord_announcement.txt + +**💰 Payment Process:** +1. PR gets merged ✅ +2. I DM you for wallet address 💬 +3. Crypto sent within 24 hours 💸 +4. You confirm receipt ✅ + +**All contributors:** Join Discord for bounty coordination! +👉 $DISCORD_INVITE + +Let's keep the momentum going! 
+
+- Mike
+DISCORD_EOF2
+
+echo "Discord announcement generated:"
+echo "==============================="
+cat /tmp/discord_announcement.txt
+echo "==============================="
+echo ""
+echo "📋 Copy the above to Discord #announcements"
+echo ""
+
+echo ""
+echo "📊 STEP 8: PAYMENT TRACKING SUMMARY"
+echo "==================================="
+echo ""
+
+if [ -f "$BOUNTY_CSV" ]; then
+    echo "Payments Pending:"
+    # Capture matches first so the "no pending payments" branch actually runs
+    pending=$(tail -n +2 "$BOUNTY_CSV" | grep "Pending" || true)
+    if [ -n "$pending" ]; then
+        echo "$pending" | while IFS=, read -r pr author amount status payment date; do
+            echo " PR #$pr - @$author - \$$amount - $date"
+        done
+    else
+        echo " No pending payments"
+    fi
+    echo ""
+    echo "Full tracking: $BOUNTY_CSV"
+fi
+
+echo ""
+echo "📧 STEP 9: CONTRIBUTOR DM TEMPLATES"
+echo "==================================="
+echo ""
+
+# Generate DM templates for unique contributors
+contributors=$(echo "$prs" | jq -r '.[].author.login' | sort -u)
+
+echo "Send these DMs on Discord:"
+echo ""
+
+for contributor in $contributors; do
+    pr_count=$(echo "$prs" | jq -r --arg author "$contributor" '[.[] | select(.author.login == $author)] | length')
+
+    if [ "$pr_count" -gt 0 ]; then
+        echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
+        echo "To: @$contributor ($pr_count open PR)"
+        echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
+        cat << DM_EOF
+
+Hey! Just processed your Cortex PR(s) - great work! 🎉
+
+**Quick Check:**
+1. Have you joined Discord? ($DISCORD_INVITE)
+2. What's your crypto wallet address? (BTC or USDC)
+3. Any blockers I can help with?
+
+**Payment Timeline:**
+- PR review: 24-48 hours
+- Merge decision: Clear feedback either way
+- Payment: Within 24 hours of merge
+
+Looking forward to merging your work!
+
+- Mike
+
+DM_EOF
+    fi
+done
+
+echo ""
+echo "=============================================="
+echo "✅ MASTER PR CONTROL COMPLETE"
+echo "=============================================="
+echo ""
+
+echo "📊 Summary of Actions:"
+echo " • Reviewed $total_prs PRs"
+echo " • Assigned reviewers to ${#needs_review[@]} PRs"
+echo " • Merged $merged_count PRs"
+echo " • Flagged ${#critical_prs[@]} critical PR(s)"
+echo " • Sent ${#stale_prs[@]} stale reminders"
+echo ""
+
+echo "📋 Next Manual Steps:"
+echo " 1. Copy Discord announcement to #announcements"
+echo " 2. Send DMs to contributors (templates above)"
+echo " 3. Review critical PR #${critical_prs[0]:-N/A} urgently"
+echo " 4. Process $merged_count payment(s) via crypto"
+echo ""
+
+echo "🔄 Run this script daily to maintain PR velocity!"
+echo ""
+echo "✅ All done!"
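The script above assumes the GitHub CLI (`gh`) and `jq` are already installed and that `gh` is authenticated; if either is missing, a run will fail partway through after it has already posted some comments. A minimal pre-flight check along these lines (an illustrative sketch, not part of the patch) can catch that before anything is posted:

```bash
# Illustrative pre-flight check before running manage_cortex_prs.sh.
# Verifies the external tools the script relies on and the gh login state.
for tool in gh jq; do
    command -v "$tool" >/dev/null 2>&1 || { echo "❌ Missing dependency: $tool"; exit 1; }
done

# gh auth status exits non-zero when no GitHub login is configured.
gh auth status || { echo "❌ Not authenticated - run 'gh auth login' first"; exit 1; }
```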
diff --git a/requirements.txt b/requirements.txt
index 166a777e..c2e26975 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -22,4 +22,4 @@
-pyyaml>=6.0.0
+PyYAML==6.0.3
 
 # Type hints for older Python versions
 typing-extensions>=4.0.0
diff --git a/tests/test_update_service.py b/tests/test_update_service.py
new file mode 100644
index 00000000..600cef20
--- /dev/null
+++ b/tests/test_update_service.py
@@ -0,0 +1,115 @@
+import json
+
+from packaging.version import Version
+
+from cortex.update_manifest import SystemInfo, UpdateChannel, UpdateManifest
+from cortex.updater import UpdateService
+from cortex.versioning import CortexVersion
+
+
+def make_manifest(version: str = "0.2.0", channel: str = "stable"):
+    return UpdateManifest.from_dict(
+        {
+            "releases": [
+                {
+                    "version": version,
+                    "channel": channel,
+                    "download_url": "https://example.com/cortex.whl",
+                    "sha256": "0" * 64,
+                    "release_notes": "Test release",
+                    "compatibility": [
+                        {
+                            "python": ">=3.8",
+                            "os": ["linux"],
+                            "arch": ["x86_64"],
+                        }
+                    ],
+                }
+            ]
+        }
+    )
+
+
+def current_system():
+    return SystemInfo(
+        python_version=Version("3.10.0"),
+        os_name="linux",
+        architecture="x86_64",
+        distro="ubuntu",
+    )
+
+
+def test_manifest_selects_newer_release():
+    manifest = UpdateManifest.from_dict(
+        {
+            "releases": [
+                {
+                    "version": "0.1.5",
+                    "channel": "stable",
+                    "download_url": "https://example.com/old.whl",
+                    "sha256": "1" * 64,
+                },
+                {
+                    "version": "0.2.0",
+                    "channel": "stable",
+                    "download_url": "https://example.com/new.whl",
+                    "sha256": "2" * 64,
+                },
+            ]
+        }
+    )
+    current = CortexVersion.from_string("0.1.0")
+    latest = manifest.find_latest(
+        current_version=current, channel=UpdateChannel.STABLE, system=current_system()
+    )
+
+    assert latest is not None
+    assert latest.version.raw == "0.2.0"
+
+
+def test_update_service_persists_channel_choice(tmp_path):
+    state_file = tmp_path / "state.json"
+    log_file = tmp_path / "update.log"
+
+    service = UpdateService(
+        manifest_url="https://invalid.local",
+        state_file=state_file,
+        log_file=log_file,
+        system_info=current_system(),
+    )
+
+    service.set_channel(UpdateChannel.BETA)
+    assert service.get_channel() == UpdateChannel.BETA
+
+    service.set_channel(UpdateChannel.STABLE)
+    assert service.get_channel() == UpdateChannel.STABLE
+
+    with state_file.open() as fh:
+        data = json.load(fh)
+    assert data["channel"] == "stable"
+
+
+def test_perform_update_dry_run(monkeypatch, tmp_path):
+    state_file = tmp_path / "state.json"
+    log_file = tmp_path / "update.log"
+
+    service = UpdateService(
+        manifest_url="https://invalid.local",
+        state_file=state_file,
+        log_file=log_file,
+        system_info=current_system(),
+    )
+
+    manifest = make_manifest()
+
+    monkeypatch.setattr(
+        "cortex.updater.get_installed_version", lambda: CortexVersion.from_string("0.1.0")
+    )
+    monkeypatch.setattr(UpdateService, "_fetch_manifest", lambda self: manifest)
+
+    result = service.perform_update(dry_run=True)
+
+    assert result.release is not None
+    assert result.updated is False
+    assert result.release.version.raw == "0.2.0"
+    assert "dry run" in (result.message or "").lower()
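Assuming `pytest` and the project's dependencies are installed in the development environment, the new update-service tests can be exercised on their own (the path comes from the diff above; the pytest flags are the usual ones, not anything Cortex-specific):

```bash
# Run only the new update-service tests with verbose output.
python -m pytest tests/test_update_service.py -v
```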