diff --git a/.github/workflows/squad-repo-health.yml b/.github/workflows/squad-repo-health.yml new file mode 100644 index 000000000..d6fe1fea1 --- /dev/null +++ b/.github/workflows/squad-repo-health.yml @@ -0,0 +1,159 @@ +name: Repo Health + +on: + pull_request_target: + branches: [dev] + types: [opened, synchronize, reopened] + +# pull_request_target gives write token even for fork PRs. +# SAFETY: We check out the BASE branch (trusted scripts) and fetch the PR +# head only as a git ref for analysis — no PR-supplied code is executed. +permissions: + contents: read + pull-requests: write + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number }} + cancel-in-progress: true + +jobs: + # ─── Bootstrap Protection (BLOCKING) ──────────────────────────────── + bootstrap-protection: + name: Bootstrap Protection + runs-on: ubuntu-latest + timeout-minutes: 3 + if: github.actor != 'dependabot[bot]' + steps: + - uses: actions/checkout@v4 + with: + sparse-checkout: | + scripts/check-bootstrap-deps.mjs + sparse-checkout-cone-mode: false + - name: Fetch PR head (data only — not executed) + run: git fetch origin ${{ github.event.pull_request.head.sha }} + - uses: actions/setup-node@v4 + with: + node-version: '22' + - name: Check bootstrap dependencies + id: bootstrap + run: | + set +e + OUTPUT=$(node scripts/check-bootstrap-deps.mjs --ref ${{ github.event.pull_request.head.sha }} 2>&1) + EXIT_CODE=$? 
+ echo "$OUTPUT" + echo "result<<EOF" >> $GITHUB_OUTPUT + echo "$OUTPUT" >> $GITHUB_OUTPUT + echo "EOF" >> $GITHUB_OUTPUT + echo "exit_code=$EXIT_CODE" >> $GITHUB_OUTPUT + exit $EXIT_CODE + + # ─── Squad File Leakage (WARNING) ─────────────────────────────────── + squad-leakage: + name: Squad File Leakage + runs-on: ubuntu-latest + timeout-minutes: 3 + if: github.actor != 'dependabot[bot]' + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Fetch PR head (data only — not executed) + run: git fetch origin ${{ github.event.pull_request.head.sha }} + - uses: actions/setup-node@v4 + with: + node-version: '22' + - name: Detect .squad/ leakage + id: leakage + run: | + git fetch origin dev --quiet + OUTPUT=$(node scripts/check-squad-leakage.mjs origin/dev ${{ github.event.pull_request.head.sha }} 2>&1) + echo "$OUTPUT" + echo "result<<EOF" >> $GITHUB_OUTPUT + echo "$OUTPUT" >> $GITHUB_OUTPUT + echo "EOF" >> $GITHUB_OUTPUT + - name: Comment on leakage + if: always() + uses: actions/github-script@v7 + with: + script: | + const { run } = await import(`${process.env.GITHUB_WORKSPACE}/scripts/repo-health-comment.mjs`); + await run({ + github, + context, + output: `${{ steps.leakage.outputs.result }}`, + job: 'leakage', + }); + + # ─── Architectural Review (INFORMATIONAL) ─────────────────────────── + architectural-review: + name: Architectural Review + runs-on: ubuntu-latest + timeout-minutes: 5 + if: github.actor != 'dependabot[bot]' + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Fetch PR head (data only — not executed) + run: git fetch origin ${{ github.event.pull_request.head.sha }} + - uses: actions/setup-node@v4 + with: + node-version: '22' + - name: Run architectural review + id: arch + run: | + git fetch origin dev --quiet + OUTPUT=$(node scripts/architectural-review.mjs origin/dev ${{ github.event.pull_request.head.sha }} 2>&1) + echo "$OUTPUT" + echo "result<<EOF" >> $GITHUB_OUTPUT + echo "$OUTPUT" >> $GITHUB_OUTPUT + echo "EOF" >>
$GITHUB_OUTPUT + - name: Comment on findings + if: always() + uses: actions/github-script@v7 + with: + script: | + const { run } = await import(`${process.env.GITHUB_WORKSPACE}/scripts/repo-health-comment.mjs`); + await run({ + github, + context, + output: `${{ steps.arch.outputs.result }}`, + job: 'architectural', + }); + + # ─── Security Review (INFORMATIONAL) ──────────────────────────────── + security-review: + name: Security Review + runs-on: ubuntu-latest + timeout-minutes: 5 + if: github.actor != 'dependabot[bot]' + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Fetch PR head (data only — not executed) + run: git fetch origin ${{ github.event.pull_request.head.sha }} + - uses: actions/setup-node@v4 + with: + node-version: '22' + - name: Run security review + id: security + run: | + git fetch origin dev --quiet + OUTPUT=$(node scripts/security-review.mjs origin/dev ${{ github.event.pull_request.head.sha }} 2>&1) + echo "$OUTPUT" + echo "result<<EOF" >> $GITHUB_OUTPUT + echo "$OUTPUT" >> $GITHUB_OUTPUT + echo "EOF" >> $GITHUB_OUTPUT + - name: Comment on findings + if: always() + uses: actions/github-script@v7 + with: + script: | + const { run } = await import(`${process.env.GITHUB_WORKSPACE}/scripts/repo-health-comment.mjs`); + await run({ + github, + context, + output: `${{ steps.security.outputs.result }}`, + job: 'security', + }); diff --git a/scripts/architectural-review.mjs b/scripts/architectural-review.mjs new file mode 100644 index 000000000..a8a6158df --- /dev/null +++ b/scripts/architectural-review.mjs @@ -0,0 +1,251 @@ +/** + * Architectural Review Check — detects structural concerns in PRs.
+ * + * Checks for: + * - Bootstrap area modifications (packages/squad-cli/src/cli/core/) + * - New/modified exports in package entry points + * - Cross-package import violations (CLI ↔ SDK direct paths) + * - Template file sync (changes in one template dir without others) + * - Sweeping refactors (>20 files changed) + * - File deletions (potential breakage) + * + * Usage: node scripts/architectural-review.mjs [base-ref] + * Default base-ref: origin/dev + * + * Exit code: always 0 (informational) + * Output: JSON { findings: [{category, severity, message, files}], summary } + * + * Uses only node:* built-ins (runs in CI before npm install). + */ + +import { execFileSync } from 'node:child_process'; +import { readFileSync } from 'node:fs'; +import { resolve } from 'node:path'; + +const baseRef = process.argv[2] || 'origin/dev'; +const headRef = process.argv[3] || 'HEAD'; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function gitDiffNames(filter) { + try { + const args = ['diff', `${baseRef}...${headRef}`, '--name-only']; + if (filter) args.push(`--diff-filter=${filter}`); + const output = execFileSync('git', args, { + encoding: 'utf-8', + stdio: ['pipe', 'pipe', 'pipe'], + }); + return output + .split('\n') + .map((f) => f.trim()) + .filter(Boolean); + } catch { + return []; + } +} + +function gitDiffContent() { + try { + return execFileSync('git', ['diff', `${baseRef}...${headRef}`, '-U3'], { + encoding: 'utf-8', + stdio: ['pipe', 'pipe', 'pipe'], + maxBuffer: 10 * 1024 * 1024, + }); + } catch { + return ''; + } +} + +function readFileSafe(filePath) { + try { + if (headRef !== 'HEAD') { + return execFileSync('git', ['show', `${headRef}:${filePath}`], { encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] }); + } + return readFileSync(resolve(filePath), 'utf-8'); + } catch { + return null; + } +} + +// 
--------------------------------------------------------------------------- +// Checks +// --------------------------------------------------------------------------- + +const findings = []; + +const allChanged = gitDiffNames('ACMRT'); +const deletedFiles = gitDiffNames('D'); +const diff = gitDiffContent(); + +// 1. Bootstrap area modifications +const bootstrapFiles = allChanged.filter((f) => + f.startsWith('packages/squad-cli/src/cli/core/'), +); +if (bootstrapFiles.length > 0) { + findings.push({ + category: 'bootstrap-area', + severity: 'warning', + message: + `${bootstrapFiles.length} file(s) in the bootstrap area (packages/squad-cli/src/cli/core/) were modified. ` + + 'These files must maintain zero external dependencies. Review carefully.', + files: bootstrapFiles, + }); +} + +// 2. Entry point export changes +const entryPoints = [ + 'packages/squad-sdk/src/index.ts', + 'packages/squad-cli/src/index.ts', +]; +const changedEntryPoints = allChanged.filter((f) => entryPoints.includes(f)); +if (changedEntryPoints.length > 0) { + // Check for added export lines in the diff + const exportLines = diff + .split('\n') + .filter( + (line) => + line.startsWith('+') && + !line.startsWith('+++') && + /\bexport\b/.test(line), + ); + if (exportLines.length > 0) { + findings.push({ + category: 'export-surface', + severity: 'warning', + message: + `Package entry point(s) modified with ${exportLines.length} new/changed export(s). ` + + 'New public API surface requires careful review for backward compatibility.', + files: changedEntryPoints, + }); + } +} + +// 3. 
Cross-package imports +const cliFiles = allChanged.filter((f) => + f.startsWith('packages/squad-cli/'), +); +const sdkFiles = allChanged.filter((f) => + f.startsWith('packages/squad-sdk/'), +); + +const crossImportViolations = []; +for (const file of cliFiles) { + const content = readFileSafe(file); + if (!content) continue; + const lines = content.split('\n'); + for (let i = 0; i < lines.length; i++) { + if ( + /from\s+['"].*squad-sdk\/src\//.test(lines[i]) || + /require\(['"].*squad-sdk\/src\//.test(lines[i]) + ) { + crossImportViolations.push({ file, line: i + 1, direction: 'CLI → SDK src' }); + } + } +} +for (const file of sdkFiles) { + const content = readFileSafe(file); + if (!content) continue; + const lines = content.split('\n'); + for (let i = 0; i < lines.length; i++) { + if ( + /from\s+['"].*squad-cli\/src\//.test(lines[i]) || + /require\(['"].*squad-cli\/src\//.test(lines[i]) + ) { + crossImportViolations.push({ file, line: i + 1, direction: 'SDK → CLI src' }); + } + } +} +if (crossImportViolations.length > 0) { + findings.push({ + category: 'cross-package-import', + severity: 'error', + message: + `${crossImportViolations.length} cross-package import(s) detected. ` + + 'Packages should import via the published package name, not direct src/ paths.', + files: crossImportViolations.map( + (v) => `${v.file}:${v.line} (${v.direction})`, + ), + }); +} + +// 4. Template sync check +const TEMPLATE_DIRS = [ + 'templates/', + '.squad-templates/', + 'packages/squad-cli/templates/', + '.github/workflows/', +]; +const touchedTemplateDirs = TEMPLATE_DIRS.filter((dir) => + allChanged.some((f) => f.startsWith(dir)), +); +if (touchedTemplateDirs.length === 1) { + const untouched = TEMPLATE_DIRS.filter((d) => !touchedTemplateDirs.includes(d)); + findings.push({ + category: 'template-sync', + severity: 'info', + message: + `Template files changed in ${touchedTemplateDirs[0]} but not in other template locations. 
` + + 'If these templates should stay in sync, consider updating the others too.', + files: [ + `Changed: ${touchedTemplateDirs.join(', ')}`, + `Unchanged: ${untouched.join(', ')}`, + ], + }); +} + +// 5. Sweeping refactor signal +const totalChanged = allChanged.length + deletedFiles.length; +if (totalChanged > 20) { + findings.push({ + category: 'sweeping-refactor', + severity: 'warning', + message: + `This PR touches ${totalChanged} files (${allChanged.length} modified/added, ${deletedFiles.length} deleted). ` + + 'Large PRs are harder to review — consider splitting if possible.', + files: [], + }); +} + +// 6. File deletions +if (deletedFiles.length > 0) { + const publicDeletions = deletedFiles.filter( + (f) => + f.startsWith('packages/') && + (f.endsWith('/index.ts') || f.includes('/src/')), + ); + if (publicDeletions.length > 0) { + findings.push({ + category: 'file-deletion', + severity: 'warning', + message: + `${publicDeletions.length} source file(s) deleted from packages/. ` + + 'Verify no public API or imports are broken.', + files: publicDeletions, + }); + } +} + +// --------------------------------------------------------------------------- +// Output +// --------------------------------------------------------------------------- + +const errorCount = findings.filter((f) => f.severity === 'error').length; +const warnCount = findings.filter((f) => f.severity === 'warning').length; +const infoCount = findings.filter((f) => f.severity === 'info').length; + +let summary; +if (findings.length === 0) { + summary = '✅ No architectural concerns found.'; +} else { + const parts = []; + if (errorCount) parts.push(`${errorCount} error(s)`); + if (warnCount) parts.push(`${warnCount} warning(s)`); + if (infoCount) parts.push(`${infoCount} info`); + summary = `⚠️ Architectural review: ${parts.join(', ')}.`; +} + +const result = { findings, summary }; +console.log(JSON.stringify(result, null, 2)); +console.log(`\n${summary}`); diff --git 
a/scripts/check-bootstrap-deps.mjs b/scripts/check-bootstrap-deps.mjs new file mode 100644 index 000000000..cc89b1da8 --- /dev/null +++ b/scripts/check-bootstrap-deps.mjs @@ -0,0 +1,155 @@ +/** + * Bootstrap Protection Gate — validates that protected bootstrap files + * use only node:* built-in module imports. No npm or workspace deps allowed. + * + * Exit code: 0 = pass, 1 = violations found + * Output: JSON { pass, violations: [{file, import, line}] } + * + * Uses only node:* built-ins (runs in CI before npm install). + */ + +import { readFileSync } from 'node:fs'; +import { execFileSync } from 'node:child_process'; +import { resolve } from 'node:path'; + +// --------------------------------------------------------------------------- +// Protected bootstrap files — these MUST have zero non-node:* dependencies. +// --------------------------------------------------------------------------- + +const PROTECTED_FILES = [ + 'packages/squad-cli/src/cli/core/detect-squad-dir.ts', + 'packages/squad-cli/src/cli/core/errors.ts', + 'packages/squad-cli/src/cli/core/gh-cli.ts', + 'packages/squad-cli/src/cli/core/output.ts', + 'packages/squad-cli/src/cli/core/history-split.ts', +]; + +const refIndex = process.argv.indexOf('--ref'); +const gitRef = refIndex !== -1 ? process.argv[refIndex + 1] : null; + +// Node.js built-in modules (with and without node: prefix) +const NODE_BUILTINS = new Set([ + 'assert', 'async_hooks', 'buffer', 'child_process', 'cluster', + 'console', 'constants', 'crypto', 'dgram', 'diagnostics_channel', + 'dns', 'domain', 'events', 'fs', 'http', 'http2', 'https', + 'inspector', 'module', 'net', 'os', 'path', 'perf_hooks', + 'process', 'punycode', 'querystring', 'readline', 'repl', + 'stream', 'string_decoder', 'sys', 'test', 'timers', 'tls', + 'trace_events', 'tty', 'url', 'util', 'v8', 'vm', 'wasi', + 'worker_threads', 'zlib', +]); + +/** + * Check whether an import specifier is a node built-in. 
+ * Accepts both `node:fs` and `fs` forms, as well as subpaths like `node:fs/promises`. + */ +function isNodeBuiltin(specifier) { + if (specifier.startsWith('node:')) return true; + const base = specifier.split('/')[0]; + return NODE_BUILTINS.has(base); +} + +/** + * Check whether an import is a relative path (sibling bootstrap file). + * Relative imports within the same directory are allowed. + */ +function isRelativeImport(specifier) { + return specifier.startsWith('./') || specifier.startsWith('../'); +} + +// Patterns that capture import/require specifiers in TS/JS +const IMPORT_PATTERNS = [ + // ES import — import ... from 'specifier' + /(?:^|\s)import\s+(?:[\s\S]*?\s+from\s+)?['"]([^'"]+)['"]/g, + // Dynamic import — import('specifier') + /import\(\s*['"]([^'"]+)['"]\s*\)/g, + // require — require('specifier') + /require\(\s*['"]([^'"]+)['"]\s*\)/g, +]; + +/** + * Scan a file for non-node:* imports. + * @param {string} filePath + * @returns {{ file: string, import: string, line: number }[]} + */ +function scanFile(filePath) { + const violations = []; + let content; + try { + if (gitRef) { + content = execFileSync('git', ['show', `${gitRef}:${filePath}`], { encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] }); + } else { + content = readFileSync(resolve(filePath), 'utf-8'); + } + } catch (err) { + // File might not exist in sparse checkout — skip silently + const errorMessage = err instanceof Error ? 
err.message : String(err); + console.error(`Warning: could not read ${filePath}: ${errorMessage}`); + return violations; + } + + const lines = content.split('\n'); + let inBlockComment = false; + for (let i = 0; i < lines.length; i++) { + const line = lines[i]; + const trimmed = line.trim(); + + // Track block comment state + if (inBlockComment) { + if (trimmed.includes('*/')) inBlockComment = false; + continue; + } + if (trimmed.startsWith('/*')) { + if (!trimmed.includes('*/')) inBlockComment = true; + continue; + } + // Skip single-line comments + if (trimmed.startsWith('//') || trimmed.startsWith('*')) continue; + + for (const pattern of IMPORT_PATTERNS) { + // Reset regex state + pattern.lastIndex = 0; + let match; + while ((match = pattern.exec(line)) !== null) { + const specifier = match[1]; + if (!isNodeBuiltin(specifier) && !isRelativeImport(specifier)) { + violations.push({ + file: filePath, + import: specifier, + line: i + 1, + }); + } + } + } + } + return violations; +} + +// --------------------------------------------------------------------------- +// Main +// --------------------------------------------------------------------------- + +const allViolations = []; +for (const file of PROTECTED_FILES) { + allViolations.push(...scanFile(file)); +} + +const result = { + pass: allViolations.length === 0, + violations: allViolations, +}; + +console.log(JSON.stringify(result, null, 2)); + +if (!result.pass) { + console.error( + `\n❌ Bootstrap protection: ${allViolations.length} violation(s) found.`, + ); + console.error('Protected bootstrap files must only import node:* built-in modules.'); + for (const v of allViolations) { + console.error(` ${v.file}:${v.line} — imports "${v.import}"`); + } + process.exitCode = 1; +} else { + console.log('\n✅ Bootstrap protection: all protected files use only node:* imports.'); +} diff --git a/scripts/check-squad-leakage.mjs b/scripts/check-squad-leakage.mjs new file mode 100644 index 000000000..0606de8ff --- /dev/null 
+++ b/scripts/check-squad-leakage.mjs @@ -0,0 +1,56 @@ +/** + * .squad/ Leakage Detector — warns if .squad/ files are included in a PR. + * + * Feature branches should not typically modify .squad/ files (team config, + * agent charters, routing). This script detects accidental leakage. + * + * Usage: node scripts/check-squad-leakage.mjs [base-ref] + * Default base-ref: origin/dev + * + * Exit code: always 0 (informational only — does not block merge) + * Output: JSON { leaked: boolean, files: string[] } + * + * Uses only node:* built-ins (runs in CI before npm install). + */ + +import { execFileSync } from 'node:child_process'; + +const baseRef = process.argv[2] || 'origin/dev'; +const headRef = process.argv[3] || 'HEAD'; + +let changedFiles = []; +try { + const output = execFileSync( + 'git', + ['diff', `${baseRef}...${headRef}`, '--name-only', '--diff-filter=ACMRT', '--', '.squad/'], + { encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] }, + ); + changedFiles = output + .split('\n') + .map((f) => f.trim()) + .filter(Boolean); +} catch (err) { + // git diff can fail if base ref is missing — treat as no leakage + const errorMessage = err instanceof Error ? err.message : String(err); + console.error(`Warning: git diff failed: ${errorMessage}`); +} + +const result = { + leaked: changedFiles.length > 0, + files: changedFiles, +}; + +console.log(JSON.stringify(result, null, 2)); + +if (result.leaked) { + console.warn(`\n⚠️ Squad file leakage: ${changedFiles.length} .squad/ file(s) modified in this PR:`); + for (const f of changedFiles) { + console.warn(` - ${f}`); + } + console.warn( + '\nThis is usually unintentional. If these changes are deliberate, ensure they are ' + + 'approved by the team lead. 
.squad/ files affect team routing, agent charters, and decisions.', + ); +} else { + console.log('\n✅ No .squad/ file leakage detected.'); +} diff --git a/scripts/repo-health-comment.mjs b/scripts/repo-health-comment.mjs new file mode 100644 index 000000000..4ebe1570a --- /dev/null +++ b/scripts/repo-health-comment.mjs @@ -0,0 +1,151 @@ +// scripts/repo-health-comment.mjs — zero dependencies +// Shared utility for posting/upserting repo health PR comments. +// DI pattern: run({ github, context, output, job }) for testability. + +const JOBS = { + leakage: { + marker: '<!-- repo-health:leakage -->', + parse(output) { + try { + const jsonMatch = output.match(/\{[\s\S]*?\}/); + const parsed = jsonMatch ? JSON.parse(jsonMatch[0]) : { leaked: false, files: [] }; + return parsed.leaked ? parsed : null; + } catch { + return null; + } + }, + format(parsed) { + const fileList = parsed.files.map(f => `- \`${f}\``).join('\n'); + return [ + '## ⚠️ Squad File Leakage Detected', + '', + 'The following `.squad/` files were modified in this PR:', + '', + fileList, + '', + 'These files affect team routing, agent charters, and decisions.', + 'If intentional, ensure approval from the team lead.', + ].join('\n'); + }, + }, + architectural: { + marker: '<!-- repo-health:architectural -->', + parse(output) { + try { + const jsonMatch = output.match(/\{[\s\S]*"findings"[\s\S]*\}/); + const parsed = jsonMatch ? JSON.parse(jsonMatch[0]) : null; + return parsed && parsed.findings.length > 0 ? parsed : null; + } catch { + return null; + } + }, + format(parsed) { + const severityIcon = { error: '🔴', warning: '🟡', info: 'ℹ️' }; + const rows = parsed.findings.map(f => { + const icon = severityIcon[f.severity] || '❓'; + const files = f.files.length > 0 + ?
f.files.map(fi => `\`${fi}\``).join(', ') + : '—'; + return `| ${icon} ${f.severity} | **${f.category}** | ${f.message} | ${files} |`; + }); + return [ + '## 🏗️ Architectural Review', + '', + parsed.summary, + '', + '| Severity | Category | Finding | Files |', + '|----------|----------|---------|-------|', + ...rows, + '', + '---', + '*Automated architectural review — informational only.*', + ].join('\n'); + }, + }, + security: { + marker: '<!-- repo-health:security -->', + parse(output) { + try { + const jsonMatch = output.match(/\{[\s\S]*"findings"[\s\S]*\}/); + const parsed = jsonMatch ? JSON.parse(jsonMatch[0]) : null; + return parsed && parsed.findings.length > 0 ? parsed : null; + } catch { + return null; + } + }, + format(parsed) { + const severityIcon = { error: '🔴', warning: '🟡', info: 'ℹ️' }; + const rows = parsed.findings.map(f => { + const icon = severityIcon[f.severity] || '❓'; + const loc = f.line ? `\`${f.file}:${f.line}\`` : (f.file ? `\`${f.file}\`` : '—'); + return `| ${icon} ${f.severity} | **${f.category}** | ${f.message} | ${loc} |`; + }); + return [ + '## 🔒 Security Review', + '', + parsed.summary, + '', + '| Severity | Category | Finding | Location |', + '|----------|----------|---------|----------|', + ...rows, + '', + '---', + '*Automated security review — informational only.*', + ].join('\n'); + }, + }, +}; + +/** + * Post or update a repo health comment on a PR.
+ * @param {object} opts + * @param {object} opts.github Octokit instance (DI) + * @param {object} opts.context GitHub Actions context (DI) + * @param {string} opts.output Raw output from the check step + * @param {string} opts.job 'leakage' | 'architectural' | 'security' + */ +export async function run({ github, context, output, job }) { + const config = JOBS[job]; + if (!config) throw new Error(`Unknown repo-health job type: ${job}`); + + const parsed = config.parse(output); + + // Fetch all comments (paginated) to find existing marker + const comments = await github.paginate(github.rest.issues.listComments, { + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + per_page: 100, + }); + const existing = comments.find(c => c.body && c.body.includes(config.marker)); + + // No findings — clean up stale marker comment if one exists + if (!parsed) { + if (existing) { + await github.rest.issues.deleteComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: existing.id, + }); + } + return; + } + + const body = `${config.marker}\n${config.format(parsed)}`; + + if (existing) { + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: existing.id, + body, + }); + } else { + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + body, + }); + } +} diff --git a/scripts/security-review.mjs b/scripts/security-review.mjs new file mode 100644 index 000000000..eef502ed8 --- /dev/null +++ b/scripts/security-review.mjs @@ -0,0 +1,318 @@ +/** + * Security Review Check — detects potential security concerns in PR diffs. 
+ * + * Checks for: + * - secrets.* references in workflow files + * - eval() usage in JS/TS + * - child_process.exec with template literals (injection risk) + * - Unsafe git operations (git add ., git add -A, git commit -a, git push --force) + * - New npm dependencies + * - PII-related environment variable patterns + * - Workflow files with write permissions + * - pull_request_target + actions/checkout combination (token exposure) + * + * Usage: node scripts/security-review.mjs [base-ref] + * Default base-ref: origin/dev + * + * Exit code: always 0 (informational) + * Output: JSON { findings: [{category, severity, message, file, line}], summary } + * + * Uses only node:* built-ins (runs in CI before npm install). + */ + +import { execFileSync } from 'node:child_process'; +import { readFileSync } from 'node:fs'; +import { resolve } from 'node:path'; + +const baseRef = process.argv[2] || 'origin/dev'; +const headRef = process.argv[3] || 'HEAD'; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +function gitDiffNames() { + try { + const output = execFileSync( + 'git', + ['diff', `${baseRef}...${headRef}`, '--name-only', '--diff-filter=ACMRT'], + { encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] }, + ); + return output + .split('\n') + .map((f) => f.trim()) + .filter(Boolean); + } catch { + return []; + } +} + +function gitDiffPatch() { + try { + return execFileSync('git', ['diff', `${baseRef}...${headRef}`, '-U0'], { + encoding: 'utf-8', + stdio: ['pipe', 'pipe', 'pipe'], + maxBuffer: 10 * 1024 * 1024, + }); + } catch { + return ''; + } +} + +function readFileSafe(filePath) { + try { + if (headRef !== 'HEAD') { + return execFileSync('git', ['show', `${headRef}:${filePath}`], { encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] }); + } + return readFileSync(resolve(filePath), 'utf-8'); + } catch { + return null; + } +} + +/** + * Parse 
unified diff into per-file added lines with line numbers. + * Returns Map> + */ +function parseAddedLines(patch) { + const result = new Map(); + let currentFile = null; + let hunkLine = 0; + + for (const rawLine of patch.split('\n')) { + // New file header + const fileMatch = rawLine.match(/^\+\+\+ b\/(.+)/); + if (fileMatch) { + currentFile = fileMatch[1]; + if (!result.has(currentFile)) result.set(currentFile, []); + continue; + } + // Hunk header — extract new file line number + const hunkMatch = rawLine.match(/^@@ -\d+(?:,\d+)? \+(\d+)/); + if (hunkMatch) { + hunkLine = parseInt(hunkMatch[1], 10); + continue; + } + // Added line + if (rawLine.startsWith('+') && !rawLine.startsWith('+++') && currentFile) { + result.get(currentFile).push({ line: hunkLine, text: rawLine.slice(1) }); + hunkLine++; + } else if (!rawLine.startsWith('-')) { + // Context line — increment line counter + hunkLine++; + } + } + return result; +} + +// --------------------------------------------------------------------------- +// Security checks +// --------------------------------------------------------------------------- + +const findings = []; +const changedFiles = gitDiffNames(); +const patch = gitDiffPatch(); +const addedByFile = parseAddedLines(patch); + +const workflowFiles = changedFiles.filter((f) => + f.startsWith('.github/workflows/') && (f.endsWith('.yml') || f.endsWith('.yaml')), +); +const jstsFiles = changedFiles.filter((f) => + /\.(js|ts|mjs|mts|cjs|cts)$/.test(f), +); +const pkgJsonFiles = changedFiles.filter((f) => f.endsWith('package.json')); + +// 1. 
secrets.* references in workflow files +for (const file of workflowFiles) { + const added = addedByFile.get(file) || []; + for (const { line, text } of added) { + // Exclude standard GITHUB_TOKEN and common safe patterns + if (/secrets\./.test(text) && !/secrets\.GITHUB_TOKEN/.test(text)) { + findings.push({ + category: 'secrets-reference', + severity: 'warning', + message: 'Non-standard secret reference in workflow — verify this secret is necessary and scoped correctly.', + file, + line, + }); + } + } +} + +// 2. eval() usage +for (const file of jstsFiles) { + const added = addedByFile.get(file) || []; + for (const { line, text } of added) { + if (/\beval\s*\(/.test(text)) { + findings.push({ + category: 'eval-usage', + severity: 'error', + message: 'eval() detected — this is a code injection risk. Use safer alternatives.', + file, + line, + }); + } + } +} + +// 3. child_process.exec with template literals +for (const file of jstsFiles) { + const added = addedByFile.get(file) || []; + for (const { line, text } of added) { + if (/exec\s*\(\s*`/.test(text) || /exec\s*\(\s*['"].*\$\{/.test(text)) { + findings.push({ + category: 'command-injection', + severity: 'error', + message: + 'exec() with template literal/interpolation detected — risk of command injection. ' + + 'Use execFile() with array arguments instead.', + file, + line, + }); + } + } +} + +// 4. Unsafe git operations +const GIT_UNSAFE_PATTERNS = [ + { pattern: /git\s+add\s+\./, label: 'git add .' 
}, + { pattern: /git\s+add\s+-A/, label: 'git add -A' }, + { pattern: /git\s+commit\s+-a/, label: 'git commit -a' }, + { pattern: /git\s+push\s+--force/, label: 'git push --force' }, + { pattern: /--force-with-lease/, label: 'git push --force-with-lease' }, +]; + +for (const file of changedFiles) { + const added = addedByFile.get(file) || []; + for (const { line, text } of added) { + for (const { pattern, label } of GIT_UNSAFE_PATTERNS) { + if (pattern.test(text)) { + findings.push({ + category: 'unsafe-git', + severity: 'error', + message: `Unsafe git operation: \`${label}\` — this can stage unintended files or force-push shared branches.`, + file, + line, + }); + } + } + } +} + +// 5. New npm dependencies +for (const file of pkgJsonFiles) { + const added = addedByFile.get(file) || []; + // Look for lines adding new dependencies + const depLines = added.filter(({ text }) => + /^\s*"[^"]+"\s*:\s*"[~^]?\d/.test(text) || /^\s*"[^"]+"\s*:\s*"(workspace|npm):/.test(text), + ); + if (depLines.length > 0) { + findings.push({ + category: 'new-dependency', + severity: 'info', + message: + `${depLines.length} new/changed dependency version(s) in ${file}. ` + + 'Verify these packages are trusted and necessary.', + file, + line: depLines[0].line, + }); + } +} + +// 6. PII-related environment variable patterns +const PII_PATTERNS = [ + /PASSWORD/i, + /SECRET_KEY/i, + /PRIVATE_KEY/i, + /API_KEY/i, + /ACCESS_TOKEN/i, + /CREDENTIALS/i, + /AUTH_TOKEN/i, +]; + +for (const file of workflowFiles) { + const added = addedByFile.get(file) || []; + for (const { line, text } of added) { + for (const pattern of PII_PATTERNS) { + if (pattern.test(text) && !/secrets\./.test(text)) { + findings.push({ + category: 'pii-env-var', + severity: 'warning', + message: `Environment variable with sensitive name pattern (${pattern.source}) — ensure this isn't hardcoded.`, + file, + line, + }); + break; // one finding per line + } + } + } +} + +// 7. 
Workflow write permissions +for (const file of workflowFiles) { + const content = readFileSafe(file); + if (!content) continue; + const lines = content.split('\n'); + for (let i = 0; i < lines.length; i++) { + if (/:\s*write\b/.test(lines[i]) && /permissions/i.test(lines.slice(Math.max(0, i - 5), i + 1).join('\n'))) { + // Only flag if this line was added in the diff + const added = addedByFile.get(file) || []; + if (added.some((a) => a.line === i + 1)) { + findings.push({ + category: 'workflow-permissions', + severity: 'info', + message: 'Workflow grants write permission — verify this is the minimum required scope.', + file, + line: i + 1, + }); + } + } + } +} + +// 8. pull_request_target + actions/checkout combination +for (const file of workflowFiles) { + const content = readFileSafe(file); + if (!content) continue; + const hasPRTarget = /pull_request_target/.test(content); + const hasCheckout = /actions\/checkout/.test(content); + const checksOutHead = + /ref:\s*.*pull_request\.head/.test(content) || + /ref:\s*.*github\.event\.pull_request\.head\.sha/.test(content); + + if (hasPRTarget && hasCheckout && checksOutHead) { + findings.push({ + category: 'pr-target-checkout', + severity: 'warning', + message: + 'This workflow uses pull_request_target AND checks out the PR head. 
' + + 'This grants write token to untrusted code — ensure no scripts from the PR are executed ' + + 'or use sparse-checkout to limit exposure.', + file, + line: 0, + }); + } +} + +// --------------------------------------------------------------------------- +// Output +// --------------------------------------------------------------------------- + +const errorCount = findings.filter((f) => f.severity === 'error').length; +const warnCount = findings.filter((f) => f.severity === 'warning').length; +const infoCount = findings.filter((f) => f.severity === 'info').length; + +let summary; +if (findings.length === 0) { + summary = '✅ No security concerns found.'; +} else { + const parts = []; + if (errorCount) parts.push(`${errorCount} error(s)`); + if (warnCount) parts.push(`${warnCount} warning(s)`); + if (infoCount) parts.push(`${infoCount} info`); + summary = `🔒 Security review: ${parts.join(', ')}.`; +} + +const result = { findings, summary }; +console.log(JSON.stringify(result, null, 2)); +console.log(`\n${summary}`); diff --git a/test/scripts/architectural-review.test.ts b/test/scripts/architectural-review.test.ts new file mode 100644 index 000000000..079cdfd91 --- /dev/null +++ b/test/scripts/architectural-review.test.ts @@ -0,0 +1,215 @@ +/** + * Tests for scripts/architectural-review.mjs + * + * Validates that the architectural review check: + * - Produces valid JSON with findings array and summary string + * - Each finding has the correct shape (category, severity, message, files) + * - Reports zero findings when diff is empty (HEAD vs HEAD) + * - Detects cross-package import patterns correctly + * - Detects bootstrap area modifications + * - Always exits with code 0 (informational only) + */ + +import { describe, it, expect } from 'vitest'; +import { extractJson, runScript } from './helpers'; + +// --------------------------------------------------------------------------- +// Cross-package import patterns (replicated from the script for unit testing) +// 
--------------------------------------------------------------------------- + +const CLI_TO_SDK_SRC = /from\s+['"].*squad-sdk\/src\//; +const SDK_TO_CLI_SRC = /from\s+['"].*squad-cli\/src\//; +const CLI_TO_SDK_REQUIRE = /require\(['"].*squad-sdk\/src\//; +const SDK_TO_CLI_REQUIRE = /require\(['"].*squad-cli\/src\//; + +function detectCrossPackageImport( + line: string, + direction: 'cli-to-sdk' | 'sdk-to-cli', +): boolean { + if (direction === 'cli-to-sdk') { + return CLI_TO_SDK_SRC.test(line) || CLI_TO_SDK_REQUIRE.test(line); + } + return SDK_TO_CLI_SRC.test(line) || SDK_TO_CLI_REQUIRE.test(line); +} + +// --------------------------------------------------------------------------- +// Integration tests +// --------------------------------------------------------------------------- + +describe('architectural-review script', () => { + describe('integration: no-diff baseline (HEAD as base ref)', () => { + it('produces valid JSON with findings and summary', () => { + const result = runScript('architectural-review.mjs', ['HEAD']); + const json = extractJson(result.stdout); + + expect(json).toHaveProperty('findings'); + expect(Array.isArray(json.findings)).toBe(true); + expect(json).toHaveProperty('summary'); + expect(typeof json.summary).toBe('string'); + }); + + it('reports zero findings when diff is empty', () => { + const result = runScript('architectural-review.mjs', ['HEAD']); + const json = extractJson(result.stdout); + + expect(json.findings).toEqual([]); + expect(json.summary).toContain('No architectural concerns'); + }); + + it('always exits with code 0', () => { + const result = runScript('architectural-review.mjs', ['HEAD']); + expect(result.status).toBe(0); + }); + }); + + describe('integration: default base ref', () => { + it('exits with code 0 regardless of findings', () => { + const result = runScript('architectural-review.mjs'); + expect(result.status).toBe(0); + }); + + it('produces valid JSON with correct schema', () => { + const result = 
runScript('architectural-review.mjs'); + const json = extractJson(result.stdout); + + expect(json).toHaveProperty('findings'); + expect(json).toHaveProperty('summary'); + expect(Array.isArray(json.findings)).toBe(true); + expect(typeof json.summary).toBe('string'); + }); + + it('each finding has category, severity, message, and files', () => { + const result = runScript('architectural-review.mjs'); + const json = extractJson(result.stdout); + const findings = json.findings as Array>; + + for (const f of findings) { + expect(f).toHaveProperty('category'); + expect(f).toHaveProperty('severity'); + expect(f).toHaveProperty('message'); + expect(f).toHaveProperty('files'); + expect(typeof f.category).toBe('string'); + expect(typeof f.severity).toBe('string'); + expect(typeof f.message).toBe('string'); + expect(Array.isArray(f.files)).toBe(true); + } + }); + + it('severity values are from the allowed set', () => { + const result = runScript('architectural-review.mjs'); + const json = extractJson(result.stdout); + const findings = json.findings as Array>; + const ALLOWED = new Set(['error', 'warning', 'info']); + + for (const f of findings) { + expect(ALLOWED.has(f.severity as string)).toBe(true); + } + }); + + it('category values match known check types', () => { + const result = runScript('architectural-review.mjs'); + const json = extractJson(result.stdout); + const findings = json.findings as Array>; + const KNOWN_CATEGORIES = new Set([ + 'bootstrap-area', + 'export-surface', + 'cross-package-import', + 'template-sync', + 'sweeping-refactor', + 'file-deletion', + ]); + + for (const f of findings) { + expect(KNOWN_CATEGORIES.has(f.category as string)).toBe(true); + } + }); + }); + + // --------------------------------------------------------------------------- + // Unit tests — cross-package import detection + // --------------------------------------------------------------------------- + + describe('cross-package import pattern detection', () => { + it('detects CLI 
importing from SDK src path', () => { + expect( + detectCrossPackageImport( + "import { Foo } from '@bradygaster/squad-sdk/src/foo'", + 'cli-to-sdk', + ), + ).toBe(true); + }); + + it('detects SDK importing from CLI src path', () => { + expect( + detectCrossPackageImport( + "import { Bar } from '../squad-cli/src/bar'", + 'sdk-to-cli', + ), + ).toBe(true); + }); + + it('detects require-style cross-package imports', () => { + expect( + detectCrossPackageImport( + "const x = require('@bradygaster/squad-sdk/src/foo')", + 'cli-to-sdk', + ), + ).toBe(true); + }); + + it('does not flag imports via published package name', () => { + expect( + detectCrossPackageImport( + "import { Foo } from '@bradygaster/squad-sdk'", + 'cli-to-sdk', + ), + ).toBe(false); + }); + + it('does not flag unrelated imports', () => { + expect( + detectCrossPackageImport("import { readFile } from 'node:fs'", 'cli-to-sdk'), + ).toBe(false); + expect( + detectCrossPackageImport("import express from 'express'", 'sdk-to-cli'), + ).toBe(false); + }); + }); + + // --------------------------------------------------------------------------- + // Unit tests — bootstrap area detection + // --------------------------------------------------------------------------- + + describe('bootstrap area detection', () => { + const BOOTSTRAP_PREFIX = 'packages/squad-cli/src/cli/core/'; + + it('recognizes bootstrap area files', () => { + expect('packages/squad-cli/src/cli/core/detect-squad-dir.ts'.startsWith(BOOTSTRAP_PREFIX)).toBe(true); + expect('packages/squad-cli/src/cli/core/errors.ts'.startsWith(BOOTSTRAP_PREFIX)).toBe(true); + expect('packages/squad-cli/src/cli/core/output.ts'.startsWith(BOOTSTRAP_PREFIX)).toBe(true); + }); + + it('does not flag files outside bootstrap area', () => { + expect('packages/squad-cli/src/commands/init.ts'.startsWith(BOOTSTRAP_PREFIX)).toBe(false); + expect('packages/squad-sdk/src/index.ts'.startsWith(BOOTSTRAP_PREFIX)).toBe(false); + 
expect('test/scripts/helpers.ts'.startsWith(BOOTSTRAP_PREFIX)).toBe(false); + }); + }); + + // --------------------------------------------------------------------------- + // Edge cases + // --------------------------------------------------------------------------- + + describe('edge cases', () => { + it('handles invalid base ref gracefully', () => { + const result = runScript('architectural-review.mjs', [ + 'refs/heads/nonexistent-branch-xyz-99999', + ]); + const json = extractJson(result.stdout); + + // Should produce valid JSON with zero findings (git diff returns empty) + expect(json.findings).toEqual([]); + expect(result.status).toBe(0); + }); + }); +}); diff --git a/test/scripts/check-bootstrap-deps.test.ts b/test/scripts/check-bootstrap-deps.test.ts new file mode 100644 index 000000000..837f84b54 --- /dev/null +++ b/test/scripts/check-bootstrap-deps.test.ts @@ -0,0 +1,245 @@ +/** + * Tests for scripts/check-bootstrap-deps.mjs + * + * Validates that the bootstrap protection gate: + * - Produces valid JSON output with the correct schema + * - Correctly identifies node built-in vs external imports + * - Detects various import/require syntax patterns + * - Passes for the actual protected files in this repo + */ + +import { describe, it, expect } from 'vitest'; +import { extractJson, runScript } from './helpers'; + +// --------------------------------------------------------------------------- +// Replicated logic from check-bootstrap-deps.mjs for unit testing +// (Script does not export functions, so we mirror the core detection logic.) 
+// --------------------------------------------------------------------------- + +const NODE_BUILTINS = new Set([ + 'assert', 'async_hooks', 'buffer', 'child_process', 'cluster', + 'console', 'constants', 'crypto', 'dgram', 'diagnostics_channel', + 'dns', 'domain', 'events', 'fs', 'http', 'http2', 'https', + 'inspector', 'module', 'net', 'os', 'path', 'perf_hooks', + 'process', 'punycode', 'querystring', 'readline', 'repl', + 'stream', 'string_decoder', 'sys', 'test', 'timers', 'tls', + 'trace_events', 'tty', 'url', 'util', 'v8', 'vm', 'wasi', + 'worker_threads', 'zlib', +]); + +function isNodeBuiltin(specifier: string): boolean { + if (specifier.startsWith('node:')) return true; + const base = specifier.split('/')[0]; + return NODE_BUILTINS.has(base); +} + +function isRelativeImport(specifier: string): boolean { + return specifier.startsWith('./') || specifier.startsWith('../'); +} + +const IMPORT_PATTERNS = [ + /(?:^|\s)import\s+(?:[\s\S]*?\s+from\s+)?['"]([^'"]+)['"]/g, + /import\(\s*['"]([^'"]+)['"]\s*\)/g, + /require\(\s*['"]([^'"]+)['"]\s*\)/g, +]; + +function findImports(line: string): string[] { + const imports: string[] = []; + for (const pattern of IMPORT_PATTERNS) { + pattern.lastIndex = 0; + let match; + while ((match = pattern.exec(line)) !== null) { + imports.push(match[1]); + } + } + return imports; +} + +function isViolation(specifier: string): boolean { + return !isNodeBuiltin(specifier) && !isRelativeImport(specifier); +} + +// --------------------------------------------------------------------------- +// Integration tests — run the actual script +// --------------------------------------------------------------------------- + +describe('check-bootstrap-deps script', () => { + describe('integration: script execution', () => { + it('produces valid JSON with pass and violations fields', () => { + const result = runScript('check-bootstrap-deps.mjs'); + const json = extractJson(result.stdout); + + expect(json).toHaveProperty('pass'); + expect(typeof 
json.pass).toBe('boolean'); + expect(json).toHaveProperty('violations'); + expect(Array.isArray(json.violations)).toBe(true); + }); + + it('passes for the current repo (protected files use only node:* imports)', () => { + const result = runScript('check-bootstrap-deps.mjs'); + const json = extractJson(result.stdout); + + expect(json.pass).toBe(true); + expect(json.violations).toEqual([]); + expect(result.status).toBe(0); + }); + + it('includes success message in stdout when passing', () => { + const result = runScript('check-bootstrap-deps.mjs'); + expect(result.stdout).toContain('Bootstrap protection'); + expect(result.stdout).toContain('node:*'); + }); + }); + + // --------------------------------------------------------------------------- + // Unit tests — import pattern detection + // --------------------------------------------------------------------------- + + describe('import pattern detection', () => { + it('detects ES static imports', () => { + expect(findImports("import { foo } from 'bar'")).toContain('bar'); + expect(findImports("import foo from 'bar'")).toContain('bar'); + expect(findImports("import 'side-effect-pkg'")).toContain('side-effect-pkg'); + }); + + it('detects ES imports with double quotes', () => { + expect(findImports('import { foo } from "bar"')).toContain('bar'); + }); + + it('detects dynamic import()', () => { + expect(findImports("const m = import('bar')")).toContain('bar'); + expect(findImports("await import('bar')")).toContain('bar'); + expect(findImports('import("dynamic-pkg")')).toContain('dynamic-pkg'); + }); + + it('detects require() calls', () => { + expect(findImports("const m = require('bar')")).toContain('bar'); + expect(findImports("require('bar')")).toContain('bar'); + expect(findImports('require("double-quoted")')).toContain('double-quoted'); + }); + + it('captures node: prefixed specifiers', () => { + expect(findImports("import { readFileSync } from 'node:fs'")).toContain('node:fs'); + expect(findImports("import { resolve 
} from 'node:path'")).toContain('node:path'); + }); + + it('returns empty array for non-import lines', () => { + expect(findImports('const x = 42;')).toEqual([]); + expect(findImports('')).toEqual([]); + }); + + it('regex still matches imports inside comments (comment filtering is separate)', () => { + // The script filters comment lines BEFORE running import patterns. + // The patterns themselves don't distinguish comments from code. + expect(findImports('// import { foo } from "bar"')).toContain('bar'); + }); + }); + + // --------------------------------------------------------------------------- + // Unit tests — built-in detection + // --------------------------------------------------------------------------- + + describe('isNodeBuiltin', () => { + it('accepts node: prefix imports', () => { + expect(isNodeBuiltin('node:fs')).toBe(true); + expect(isNodeBuiltin('node:path')).toBe(true); + expect(isNodeBuiltin('node:child_process')).toBe(true); + expect(isNodeBuiltin('node:util')).toBe(true); + }); + + it('accepts node: prefix with subpath', () => { + expect(isNodeBuiltin('node:fs/promises')).toBe(true); + expect(isNodeBuiltin('node:stream/web')).toBe(true); + }); + + it('accepts bare built-in module names', () => { + expect(isNodeBuiltin('fs')).toBe(true); + expect(isNodeBuiltin('path')).toBe(true); + expect(isNodeBuiltin('util')).toBe(true); + expect(isNodeBuiltin('child_process')).toBe(true); + expect(isNodeBuiltin('crypto')).toBe(true); + }); + + it('accepts built-in subpaths (e.g. 
fs/promises)', () => { + expect(isNodeBuiltin('fs/promises')).toBe(true); + expect(isNodeBuiltin('stream/web')).toBe(true); + }); + + it('rejects npm packages', () => { + expect(isNodeBuiltin('express')).toBe(false); + expect(isNodeBuiltin('lodash')).toBe(false); + expect(isNodeBuiltin('vitest')).toBe(false); + }); + + it('rejects scoped packages', () => { + expect(isNodeBuiltin('@bradygaster/squad-sdk')).toBe(false); + expect(isNodeBuiltin('@types/node')).toBe(false); + }); + }); + + describe('isRelativeImport', () => { + it('allows ./ imports', () => { + expect(isRelativeImport('./sibling')).toBe(true); + expect(isRelativeImport('./nested/deep')).toBe(true); + }); + + it('allows ../ imports', () => { + expect(isRelativeImport('../parent')).toBe(true); + expect(isRelativeImport('../../grandparent')).toBe(true); + }); + + it('rejects bare specifiers', () => { + expect(isRelativeImport('express')).toBe(false); + expect(isRelativeImport('@scope/pkg')).toBe(false); + expect(isRelativeImport('node:fs')).toBe(false); + }); + }); + + // --------------------------------------------------------------------------- + // Unit tests — violation classification + // --------------------------------------------------------------------------- + + describe('violation classification', () => { + it('flags npm packages as violations', () => { + expect(isViolation('express')).toBe(true); + expect(isViolation('@bradygaster/squad-sdk')).toBe(true); + expect(isViolation('lodash/merge')).toBe(true); + }); + + it('does not flag node builtins', () => { + expect(isViolation('node:fs')).toBe(false); + expect(isViolation('fs')).toBe(false); + expect(isViolation('node:path')).toBe(false); + }); + + it('does not flag relative imports', () => { + expect(isViolation('./sibling')).toBe(false); + expect(isViolation('../parent')).toBe(false); + }); + }); + + // --------------------------------------------------------------------------- + // Edge cases + // 
--------------------------------------------------------------------------- + + describe('edge cases', () => { + it('comment lines are skipped by the script logic', () => { + // The script skips lines starting with // or * + const commentLines = [ + '// import { bad } from "evil-pkg"', + '* import { bad } from "evil-pkg"', + ]; + for (const line of commentLines) { + const trimmed = line.trim(); + const isComment = trimmed.startsWith('//') || trimmed.startsWith('*'); + expect(isComment).toBe(true); + } + }); + + it('handles empty specifier edge case', () => { + // Empty string is not a node builtin and not relative + expect(isNodeBuiltin('')).toBe(false); + expect(isRelativeImport('')).toBe(false); + }); + }); +}); diff --git a/test/scripts/check-squad-leakage.test.ts b/test/scripts/check-squad-leakage.test.ts new file mode 100644 index 000000000..b90d7e9bf --- /dev/null +++ b/test/scripts/check-squad-leakage.test.ts @@ -0,0 +1,95 @@ +/** + * Tests for scripts/check-squad-leakage.mjs + * + * Validates that the .squad/ leakage detector: + * - Produces valid JSON with the correct schema { leaked, files } + * - Reports no leakage when base ref equals HEAD (empty diff) + * - Always exits with code 0 (informational only) + * - Handles missing base ref gracefully + */ + +import { describe, it, expect } from 'vitest'; +import { extractJson, runScript } from './helpers'; + +describe('check-squad-leakage script', () => { + describe('integration: no-diff baseline (HEAD as base ref)', () => { + it('produces valid JSON with leaked and files fields', () => { + const result = runScript('check-squad-leakage.mjs', ['HEAD']); + const json = extractJson(result.stdout); + + expect(json).toHaveProperty('leaked'); + expect(typeof json.leaked).toBe('boolean'); + expect(json).toHaveProperty('files'); + expect(Array.isArray(json.files)).toBe(true); + }); + + it('reports no leakage when diff is empty (HEAD vs HEAD)', () => { + const result = runScript('check-squad-leakage.mjs', ['HEAD']); + 
const json = extractJson(result.stdout); + + expect(json.leaked).toBe(false); + expect(json.files).toEqual([]); + }); + + it('includes success message when no leakage', () => { + const result = runScript('check-squad-leakage.mjs', ['HEAD']); + expect(result.stdout).toContain('No .squad/ file leakage detected'); + }); + + it('always exits with code 0 (informational only)', () => { + const result = runScript('check-squad-leakage.mjs', ['HEAD']); + expect(result.status).toBe(0); + }); + }); + + describe('integration: default base ref', () => { + it('exits with code 0 regardless of findings', () => { + // Default base ref is origin/dev — may or may not exist + const result = runScript('check-squad-leakage.mjs'); + expect(result.status).toBe(0); + }); + + it('always produces valid JSON output', () => { + const result = runScript('check-squad-leakage.mjs'); + const json = extractJson(result.stdout); + + expect(json).toHaveProperty('leaked'); + expect(json).toHaveProperty('files'); + expect(typeof json.leaked).toBe('boolean'); + expect(Array.isArray(json.files)).toBe(true); + }); + + it('leaked files are strings when present', () => { + const result = runScript('check-squad-leakage.mjs'); + const json = extractJson(result.stdout); + const files = json.files as string[]; + + for (const file of files) { + expect(typeof file).toBe('string'); + } + }); + + it('all leaked files start with .squad/ prefix', () => { + const result = runScript('check-squad-leakage.mjs'); + const json = extractJson(result.stdout); + const files = json.files as string[]; + + for (const file of files) { + expect(file).toMatch(/^\.squad\//); + } + }); + }); + + describe('edge case: invalid base ref', () => { + it('handles a non-existent ref gracefully with no leakage', () => { + const result = runScript('check-squad-leakage.mjs', [ + 'refs/heads/this-branch-does-not-exist-ever-12345', + ]); + const json = extractJson(result.stdout); + + expect(json.leaked).toBe(false); + 
expect(json.files).toEqual([]); + expect(result.status).toBe(0); + }); + }); +}); diff --git a/test/scripts/helpers.ts b/test/scripts/helpers.ts new file mode 100644 index 000000000..2037294f4 --- /dev/null +++ b/test/scripts/helpers.ts @@ -0,0 +1,48 @@ +/** + * Shared test helpers for repo health check script tests. + */ +import { spawnSync, type SpawnSyncReturns } from 'node:child_process'; +import { resolve } from 'node:path'; + +export const ROOT = resolve(__dirname, '..', '..'); + +/** + * Extract the first top-level JSON object from mixed stdout output. + * Scripts emit pretty-printed JSON followed by human-readable summary lines. + */ +export function extractJson(output: string): Record { + const lines = output.split('\n'); + const jsonLines: string[] = []; + let inJson = false; + let depth = 0; + for (const line of lines) { + if (!inJson && line.trimStart().startsWith('{')) { + inJson = true; + } + if (inJson) { + jsonLines.push(line); + for (const ch of line) { + if (ch === '{') depth++; + if (ch === '}') depth--; + } + if (depth === 0) break; + } + } + if (jsonLines.length === 0) throw new Error('No JSON object found in output'); + return JSON.parse(jsonLines.join('\n')); +} + +/** + * Run a health-check script as a subprocess and return the result. 
+ */ +export function runScript( + scriptName: string, + args: string[] = [], +): SpawnSyncReturns { + const script = resolve(ROOT, 'scripts', scriptName); + return spawnSync('node', [script, ...args], { + cwd: ROOT, + encoding: 'utf-8', + timeout: 30_000, + }); +} diff --git a/test/scripts/security-review.test.ts b/test/scripts/security-review.test.ts new file mode 100644 index 000000000..41ca26271 --- /dev/null +++ b/test/scripts/security-review.test.ts @@ -0,0 +1,423 @@ +/** + * Tests for scripts/security-review.mjs + * + * Validates that the security review check: + * - Produces valid JSON with findings array and summary string + * - Each finding has the correct shape (category, severity, message, file, line) + * - Reports zero findings when diff is empty (HEAD vs HEAD) + * - Correctly parses unified diff patches (parseAddedLines logic) + * - Detects eval(), command injection, unsafe git ops, secrets, PII patterns + * - Always exits with code 0 (informational only) + */ + +import { describe, it, expect } from 'vitest'; +import { extractJson, runScript } from './helpers'; + +// --------------------------------------------------------------------------- +// Replicated parseAddedLines from security-review.mjs for unit testing +// --------------------------------------------------------------------------- + +interface AddedLine { + line: number; + text: string; +} + +function parseAddedLines(patch: string): Map { + const result = new Map(); + let currentFile: string | null = null; + let hunkLine = 0; + + for (const rawLine of patch.split('\n')) { + const fileMatch = rawLine.match(/^\+\+\+ b\/(.+)/); + if (fileMatch) { + currentFile = fileMatch[1]; + if (!result.has(currentFile)) result.set(currentFile, []); + continue; + } + const hunkMatch = rawLine.match(/^@@ -\d+(?:,\d+)? 
\+(\d+)/); + if (hunkMatch) { + hunkLine = parseInt(hunkMatch[1], 10); + continue; + } + if (rawLine.startsWith('+') && !rawLine.startsWith('+++') && currentFile) { + result.get(currentFile)!.push({ line: hunkLine, text: rawLine.slice(1) }); + hunkLine++; + } else if (!rawLine.startsWith('-')) { + hunkLine++; + } + } + return result; +} + +// --------------------------------------------------------------------------- +// Replicated security patterns from the script +// --------------------------------------------------------------------------- + +const EVAL_PATTERN = /\beval\s*\(/; +const EXEC_TEMPLATE = /exec\s*\(\s*`/; +const EXEC_INTERPOLATION = /exec\s*\(\s*['"].*\$\{/; + +const GIT_UNSAFE_PATTERNS = [ + { pattern: /git\s+add\s+\./, label: 'git add .' }, + { pattern: /git\s+add\s+-A/, label: 'git add -A' }, + { pattern: /git\s+commit\s+-a/, label: 'git commit -a' }, + { pattern: /git\s+push\s+--force/, label: 'git push --force' }, + { pattern: /--force-with-lease/, label: 'git push --force-with-lease' }, +]; + +const SECRETS_PATTERN = /secrets\./; +const SECRETS_GITHUB_TOKEN = /secrets\.GITHUB_TOKEN/; + +const PII_PATTERNS = [ + /PASSWORD/i, + /SECRET_KEY/i, + /PRIVATE_KEY/i, + /API_KEY/i, + /ACCESS_TOKEN/i, + /CREDENTIALS/i, + /AUTH_TOKEN/i, +]; + +// --------------------------------------------------------------------------- +// Integration tests +// --------------------------------------------------------------------------- + +describe('security-review script', () => { + describe('integration: no-diff baseline (HEAD as base ref)', () => { + it('produces valid JSON with findings and summary', () => { + const result = runScript('security-review.mjs', ['HEAD']); + const json = extractJson(result.stdout); + + expect(json).toHaveProperty('findings'); + expect(Array.isArray(json.findings)).toBe(true); + expect(json).toHaveProperty('summary'); + expect(typeof json.summary).toBe('string'); + }); + + it('reports zero findings when diff is empty', () => { + const 
result = runScript('security-review.mjs', ['HEAD']); + const json = extractJson(result.stdout); + + expect(json.findings).toEqual([]); + expect(json.summary).toContain('No security concerns'); + }); + + it('always exits with code 0', () => { + const result = runScript('security-review.mjs', ['HEAD']); + expect(result.status).toBe(0); + }); + }); + + describe('integration: default base ref', () => { + it('exits with code 0 regardless of findings', () => { + const result = runScript('security-review.mjs'); + expect(result.status).toBe(0); + }); + + it('produces valid JSON with correct schema', () => { + const result = runScript('security-review.mjs'); + const json = extractJson(result.stdout); + + expect(json).toHaveProperty('findings'); + expect(json).toHaveProperty('summary'); + expect(Array.isArray(json.findings)).toBe(true); + }); + + it('each finding has category, severity, message, file, and line', () => { + const result = runScript('security-review.mjs'); + const json = extractJson(result.stdout); + const findings = json.findings as Array>; + + for (const f of findings) { + expect(f).toHaveProperty('category'); + expect(f).toHaveProperty('severity'); + expect(f).toHaveProperty('message'); + expect(f).toHaveProperty('file'); + expect(f).toHaveProperty('line'); + expect(typeof f.category).toBe('string'); + expect(typeof f.severity).toBe('string'); + expect(typeof f.message).toBe('string'); + expect(typeof f.file).toBe('string'); + expect(typeof f.line).toBe('number'); + } + }); + + it('severity values are from the allowed set', () => { + const result = runScript('security-review.mjs'); + const json = extractJson(result.stdout); + const findings = json.findings as Array>; + const ALLOWED = new Set(['error', 'warning', 'info']); + + for (const f of findings) { + expect(ALLOWED.has(f.severity as string)).toBe(true); + } + }); + + it('category values match known check types', () => { + const result = runScript('security-review.mjs'); + const json = 
extractJson(result.stdout); + const findings = json.findings as Array>; + const KNOWN_CATEGORIES = new Set([ + 'secrets-reference', + 'eval-usage', + 'command-injection', + 'unsafe-git', + 'new-dependency', + 'pii-env-var', + 'workflow-permissions', + 'pr-target-checkout', + ]); + + for (const f of findings) { + expect(KNOWN_CATEGORIES.has(f.category as string)).toBe(true); + } + }); + }); + + // --------------------------------------------------------------------------- + // Unit tests — parseAddedLines + // --------------------------------------------------------------------------- + + describe('parseAddedLines', () => { + it('parses a simple single-file patch', () => { + const patch = [ + 'diff --git a/file.ts b/file.ts', + '--- a/file.ts', + '+++ b/file.ts', + '@@ -1,3 +1,4 @@', + ' const a = 1;', + '+const b = 2;', + ' const c = 3;', + ].join('\n'); + + const result = parseAddedLines(patch); + expect(result.has('file.ts')).toBe(true); + const lines = result.get('file.ts')!; + expect(lines).toHaveLength(1); + expect(lines[0].text).toBe('const b = 2;'); + expect(lines[0].line).toBe(2); + }); + + it('parses multiple files in a single patch', () => { + const patch = [ + 'diff --git a/a.ts b/a.ts', + '--- a/a.ts', + '+++ b/a.ts', + '@@ -1,2 +1,3 @@', + ' line1', + '+added-a', + ' line2', + 'diff --git a/b.ts b/b.ts', + '--- a/b.ts', + '+++ b/b.ts', + '@@ -5,2 +5,3 @@', + ' old-line', + '+added-b', + ' next-line', + ].join('\n'); + + const result = parseAddedLines(patch); + expect(result.has('a.ts')).toBe(true); + expect(result.has('b.ts')).toBe(true); + expect(result.get('a.ts')![0].text).toBe('added-a'); + expect(result.get('a.ts')![0].line).toBe(2); + expect(result.get('b.ts')![0].text).toBe('added-b'); + expect(result.get('b.ts')![0].line).toBe(6); + }); + + it('handles multiple hunks in one file', () => { + const patch = [ + '+++ b/file.ts', + '@@ -1,3 +1,4 @@', + ' line1', + '+hunk1-add', + ' line3', + '@@ -10,2 +11,3 @@', + ' line10', + '+hunk2-add', + ' 
line12', + ].join('\n'); + + const result = parseAddedLines(patch); + const lines = result.get('file.ts')!; + expect(lines).toHaveLength(2); + expect(lines[0]).toEqual({ line: 2, text: 'hunk1-add' }); + expect(lines[1]).toEqual({ line: 12, text: 'hunk2-add' }); + }); + + it('handles empty patch', () => { + const result = parseAddedLines(''); + expect(result.size).toBe(0); + }); + + it('ignores removed lines (lines starting with -)', () => { + const patch = [ + '+++ b/file.ts', + '@@ -1,3 +1,3 @@', + ' context', + '-removed', + '+added', + ' context2', + ].join('\n'); + + const result = parseAddedLines(patch); + const lines = result.get('file.ts')!; + expect(lines).toHaveLength(1); + expect(lines[0].text).toBe('added'); + }); + + it('strips the leading + from added line text', () => { + const patch = [ + '+++ b/file.ts', + '@@ -1,1 +1,2 @@', + ' existing', + '+ indented code', + ].join('\n'); + + const result = parseAddedLines(patch); + expect(result.get('file.ts')![0].text).toBe(' indented code'); + }); + }); + + // --------------------------------------------------------------------------- + // Unit tests — security pattern detection + // --------------------------------------------------------------------------- + + describe('eval() detection', () => { + it('detects eval with parentheses', () => { + expect(EVAL_PATTERN.test('const x = eval("code")')).toBe(true); + expect(EVAL_PATTERN.test('eval(userInput)')).toBe(true); + expect(EVAL_PATTERN.test(' eval (')).toBe(true); + }); + + it('does not false-positive on similar function names', () => { + expect(EVAL_PATTERN.test('evaluate(x)')).toBe(false); + expect(EVAL_PATTERN.test('myeval(x)')).toBe(false); + }); + + it('does not match eval in comments or strings when not called', () => { + expect(EVAL_PATTERN.test('// eval is bad')).toBe(false); + expect(EVAL_PATTERN.test('const name = "eval"')).toBe(false); + }); + }); + + describe('command injection detection', () => { + it('detects exec with template literal', () => 
{
+      expect(EXEC_TEMPLATE.test('exec(`ls ${dir}`)')).toBe(true);
+      expect(EXEC_TEMPLATE.test("child_process.exec(`cmd`)")).toBe(true);
+    });
+
+    it('detects exec with string interpolation', () => {
+      expect(EXEC_INTERPOLATION.test('exec("ls ${dir}")')).toBe(true);
+    });
+
+    it('does not flag execFile with array args', () => {
+      expect(EXEC_TEMPLATE.test("execFile('ls', ['-la'])")).toBe(false);
+      expect(EXEC_INTERPOLATION.test("execFile('ls', ['-la'])")).toBe(false);
+    });
+  });
+
+  describe('unsafe git operation detection', () => {
+    it('detects git add .', () => {
+      expect(GIT_UNSAFE_PATTERNS[0].pattern.test('git add .')).toBe(true);
+      expect(GIT_UNSAFE_PATTERNS[0].pattern.test(' git add . && git commit')).toBe(true);
+    });
+
+    it('detects git add -A', () => {
+      expect(GIT_UNSAFE_PATTERNS[1].pattern.test('git add -A')).toBe(true);
+    });
+
+    it('detects git commit -a', () => {
+      expect(GIT_UNSAFE_PATTERNS[2].pattern.test('git commit -a -m "msg"')).toBe(true);
+    });
+
+    it('detects git push --force', () => {
+      expect(GIT_UNSAFE_PATTERNS[3].pattern.test('git push --force origin main')).toBe(true);
+    });
+
+    it('detects --force-with-lease', () => {
+      expect(GIT_UNSAFE_PATTERNS[4].pattern.test('git push --force-with-lease')).toBe(true);
+    });
+
+    it('does not flag safe git operations', () => {
+      const safe = 'git add path/to/specific-file.ts';
+      for (const { pattern } of GIT_UNSAFE_PATTERNS) {
+        expect(pattern.test(safe)).toBe(false);
+      }
+    });
+
+    it('does not flag git commit without -a', () => {
+      expect(GIT_UNSAFE_PATTERNS[2].pattern.test('git commit -m "message"')).toBe(false);
+    });
+  });
+
+  describe('secrets reference detection', () => {
+    it('detects non-standard secret references', () => {
+      expect(SECRETS_PATTERN.test('${{ secrets.MY_CUSTOM_TOKEN }}')).toBe(true);
+      expect(SECRETS_GITHUB_TOKEN.test('${{ secrets.MY_CUSTOM_TOKEN }}')).toBe(false);
+    });
+
+    it('allows secrets.GITHUB_TOKEN', () => {
+      const line = '${{ secrets.GITHUB_TOKEN }}';
+      // GITHUB_TOKEN
is excluded from findings
+      expect(SECRETS_GITHUB_TOKEN.test(line)).toBe(true);
+    });
+
+    it('does not flag lines without secrets reference', () => {
+      expect(SECRETS_PATTERN.test('const token = process.env.TOKEN')).toBe(false);
+    });
+  });
+
+  describe('PII environment variable detection', () => {
+    it('detects PASSWORD patterns', () => {
+      expect(PII_PATTERNS.some((p) => p.test('DB_PASSWORD=foo'))).toBe(true);
+    });
+
+    it('detects API_KEY patterns', () => {
+      expect(PII_PATTERNS.some((p) => p.test('OPENAI_API_KEY'))).toBe(true);
+    });
+
+    it('detects ACCESS_TOKEN patterns', () => {
+      expect(PII_PATTERNS.some((p) => p.test('MY_ACCESS_TOKEN'))).toBe(true);
+    });
+
+    it('detects PRIVATE_KEY patterns', () => {
+      expect(PII_PATTERNS.some((p) => p.test('SSH_PRIVATE_KEY'))).toBe(true);
+    });
+
+    it('does not flag safe environment variables', () => {
+      const safeVars = ['NODE_ENV', 'CI', 'HOME', 'PATH', 'PORT'];
+      for (const v of safeVars) {
+        expect(PII_PATTERNS.some((p) => p.test(v))).toBe(false);
+      }
+    });
+  });
+
+  // ---------------------------------------------------------------------------
+  // Edge cases
+  // ---------------------------------------------------------------------------
+
+  describe('edge cases', () => {
+    it('handles invalid base ref gracefully', () => {
+      const result = runScript('security-review.mjs', [
+        'refs/heads/nonexistent-branch-xyz-99999',
+      ]);
+      const json = extractJson(result.stdout);
+
+      expect(json.findings).toEqual([]);
+      expect(result.status).toBe(0);
+    });
+
+    it('summary uses security emoji when findings exist', () => {
+      // When there are findings, summary starts with 🔒
+      // When empty, it starts with ✅
+      const result = runScript('security-review.mjs', ['HEAD']);
+      const json = extractJson(result.stdout);
+      if ((json.findings as unknown[]).length === 0) {
+        expect(json.summary).toContain('✅');
+      } else {
+        expect(json.summary).toContain('🔒');
+      }
+    });
+  });
+});