diff --git a/pkg/cli/codemod_permissions_read.go b/pkg/cli/codemod_permissions_read.go new file mode 100644 index 00000000000..304c7984476 --- /dev/null +++ b/pkg/cli/codemod_permissions_read.go @@ -0,0 +1,75 @@ +package cli + +import ( + "strings" + + "github.com/github/gh-aw/pkg/logger" +) + +var permissionsReadCodemodLog = logger.New("cli:codemod_permissions_read") + +// getPermissionsReadCodemod creates a codemod for converting invalid "read" and "write" shorthands +func getPermissionsReadCodemod() Codemod { + return Codemod{ + ID: "permissions-read-to-read-all", + Name: "Convert invalid permissions shorthand", + Description: "Converts 'permissions: read' to 'permissions: read-all' and 'permissions: write' to 'permissions: write-all' as per GitHub Actions spec", + IntroducedIn: "0.5.0", + Apply: func(content string, frontmatter map[string]any) (string, bool, error) { + // Check if permissions exist + permissionsValue, hasPermissions := frontmatter["permissions"] + if !hasPermissions { + return content, false, nil + } + + // Check if permissions uses invalid shorthand (read or write) + hasInvalidShorthand := false + if strValue, ok := permissionsValue.(string); ok { + if strValue == "read" || strValue == "write" { + hasInvalidShorthand = true + } + } + + if !hasInvalidShorthand { + return content, false, nil + } + + newContent, applied, err := applyFrontmatterLineTransform(content, func(lines []string) ([]string, bool) { + var modified bool + result := make([]string, len(lines)) + for i, line := range lines { + trimmedLine := strings.TrimSpace(line) + + // Check for permissions line with shorthand + if strings.HasPrefix(trimmedLine, "permissions:") { + // Handle shorthand on same line: "permissions: read" or "permissions: write" + if strings.Contains(trimmedLine, ": read") && !strings.Contains(trimmedLine, "read-all") { + // Make sure it's "permissions: read" and not "contents: read" + if 
strings.TrimSpace(strings.Split(line, ":")[0]) == "permissions" { + result[i] = strings.Replace(line, ": read", ": read-all", 1) + modified = true + permissionsReadCodemodLog.Printf("Replaced 'permissions: read' with 'permissions: read-all' on line %d", i+1) + continue + } + } else if strings.Contains(trimmedLine, ": write") && !strings.Contains(trimmedLine, "write-all") { + // Make sure it's "permissions: write" and not "contents: write" + if strings.TrimSpace(strings.Split(line, ":")[0]) == "permissions" { + result[i] = strings.Replace(line, ": write", ": write-all", 1) + modified = true + permissionsReadCodemodLog.Printf("Replaced 'permissions: write' with 'permissions: write-all' on line %d", i+1) + continue + } + } + } + + result[i] = line + } + return result, modified + }) + if applied { + permissionsReadCodemodLog.Print("Applied permissions read/write to read-all/write-all migration") + } + return newContent, applied, err + }, + } +} diff --git a/pkg/cli/codemod_permissions.go b/pkg/cli/codemod_permissions_write.go similarity index 57% rename from pkg/cli/codemod_permissions.go rename to pkg/cli/codemod_permissions_write.go index f4da4722f7f..259570dd3ec 100644 --- a/pkg/cli/codemod_permissions.go +++ b/pkg/cli/codemod_permissions_write.go @@ -7,74 +7,6 @@ import ( "github.com/github/gh-aw/pkg/logger" ) -var permissionsReadCodemodLog = logger.New("cli:codemod_permissions_read") - -// getPermissionsReadCodemod creates a codemod for converting invalid "read" and "write" shorthands -func getPermissionsReadCodemod() Codemod { - return Codemod{ - ID: "permissions-read-to-read-all", - Name: "Convert invalid permissions shorthand", - Description: "Converts 'permissions: read' to 'permissions: read-all' and 'permissions: write' to 'permissions: write-all' as per GitHub Actions spec", - IntroducedIn: "0.5.0", - Apply: func(content string, frontmatter map[string]any) (string, bool, error) { - // Check if permissions exist - permissionsValue, hasPermissions := 
frontmatter["permissions"] - if !hasPermissions { - return content, false, nil - } - - // Check if permissions uses invalid shorthand (read or write) - hasInvalidShorthand := false - if strValue, ok := permissionsValue.(string); ok { - if strValue == "read" || strValue == "write" { - hasInvalidShorthand = true - } - } - - if !hasInvalidShorthand { - return content, false, nil - } - - newContent, applied, err := applyFrontmatterLineTransform(content, func(lines []string) ([]string, bool) { - var modified bool - result := make([]string, len(lines)) - for i, line := range lines { - trimmedLine := strings.TrimSpace(line) - - // Check for permissions line with shorthand - if strings.HasPrefix(trimmedLine, "permissions:") { - // Handle shorthand on same line: "permissions: read" or "permissions: write" - if strings.Contains(trimmedLine, ": read") && !strings.Contains(trimmedLine, "read-all") && !strings.Contains(trimmedLine, ": read\n") { - // Make sure it's "permissions: read" and not "contents: read" - if strings.TrimSpace(strings.Split(line, ":")[0]) == "permissions" { - result[i] = strings.Replace(line, ": read", ": read-all", 1) - modified = true - permissionsReadCodemodLog.Printf("Replaced 'permissions: read' with 'permissions: read-all' on line %d", i+1) - continue - } - } else if strings.Contains(trimmedLine, ": write") && !strings.Contains(trimmedLine, "write-all") { - // Make sure it's "permissions: write" and not "contents: write" - if strings.TrimSpace(strings.Split(line, ":")[0]) == "permissions" { - result[i] = strings.Replace(line, ": write", ": write-all", 1) - modified = true - permissionsReadCodemodLog.Printf("Replaced 'permissions: write' with 'permissions: write-all' on line %d", i+1) - continue - } - } - } - - result[i] = line - } - return result, modified - }) - if applied { - permissionsReadCodemodLog.Print("Applied permissions read/write to read-all/write-all migration") - } - return newContent, applied, err - }, - } -} - var 
writePermissionsCodemodLog = logger.New("cli:codemod_permissions") // getWritePermissionsCodemod creates a codemod for converting write permissions to read diff --git a/pkg/cli/trial_command.go b/pkg/cli/trial_command.go index 37f5c85b963..79da3a37b77 100644 --- a/pkg/cli/trial_command.go +++ b/pkg/cli/trial_command.go @@ -17,8 +17,8 @@ import ( "github.com/github/gh-aw/pkg/constants" "github.com/github/gh-aw/pkg/fileutil" "github.com/github/gh-aw/pkg/logger" - "github.com/github/gh-aw/pkg/repoutil" "github.com/github/gh-aw/pkg/sliceutil" + "github.com/github/gh-aw/pkg/stringutil" "github.com/github/gh-aw/pkg/workflow" "github.com/spf13/cobra" ) @@ -503,7 +503,7 @@ func RunWorkflowTrials(ctx context.Context, workflowSpecs []string, opts TrialOp workflowResults = append(workflowResults, result) // Save individual trial file - sanitizedTargetRepo := repoutil.SanitizeForFilename(targetRepoForFilename) + sanitizedTargetRepo := stringutil.SanitizeForFilename(targetRepoForFilename) individualFilename := fmt.Sprintf("trials/%s-%s.%s.json", parsedSpec.WorkflowName, sanitizedTargetRepo, dateTimeID) if err := saveTrialResult(individualFilename, result, opts.Verbose); err != nil { fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to save individual trial result: %v", err))) @@ -537,7 +537,7 @@ func RunWorkflowTrials(ctx context.Context, workflowSpecs []string, opts TrialOp if len(parsedSpecs) > 1 { workflowNames := sliceutil.Map(parsedSpecs, func(spec *WorkflowSpec) string { return spec.WorkflowName }) workflowNamesStr := strings.Join(workflowNames, "-") - sanitizedTargetRepo := repoutil.SanitizeForFilename(targetRepoForFilename) + sanitizedTargetRepo := stringutil.SanitizeForFilename(targetRepoForFilename) combinedFilename := fmt.Sprintf("trials/%s-%s.%s.json", workflowNamesStr, sanitizedTargetRepo, dateTimeID) combinedResult := CombinedTrialResult{ WorkflowNames: workflowNames, @@ -906,7 +906,7 @@ func copyTrialResultsToHostRepo(tempDir, dateTimeID 
string, workflowNames []stri } // Copy individual workflow result files - sanitizedTargetRepo := repoutil.SanitizeForFilename(targetRepoSlug) + sanitizedTargetRepo := stringutil.SanitizeForFilename(targetRepoSlug) for _, workflowName := range workflowNames { sourceFile := fmt.Sprintf("trials/%s-%s.%s.json", workflowName, sanitizedTargetRepo, dateTimeID) destFile := filepath.Join(trialsDir, fmt.Sprintf("%s-%s.%s.json", workflowName, sanitizedTargetRepo, dateTimeID)) diff --git a/pkg/console/doc.go b/pkg/console/doc.go new file mode 100644 index 00000000000..45a5101151c --- /dev/null +++ b/pkg/console/doc.go @@ -0,0 +1,25 @@ +// Package console provides terminal UI components and formatting utilities for +// the gh-aw CLI. +// +// # Naming Convention: Format* vs Render* +// +// Functions in this package follow a consistent naming convention: +// +// - Format* functions return a formatted string for a single item or message. +// They are pure string transformations with no side effects. +// Examples: FormatSuccessMessage, FormatErrorMessage, FormatFileSize, +// FormatCommandMessage, FormatProgressMessage. +// +// - Render* functions produce multi-element or structured output (tables, boxes, +// trees, structs). They may return strings, slices of strings, or write +// directly to output. They are used when the output requires layout or +// structural composition. +// Examples: RenderTable, RenderStruct, RenderTitleBox, RenderErrorBox, +// RenderInfoSection, RenderTree, RenderComposedSections. +// +// # Output Routing +// +// All diagnostic output (messages, warnings, errors) should be written to stderr. +// Structured data output (JSON, hashes, graphs) should be written to stdout. +// Use fmt.Fprintln(os.Stderr, ...) with the Format* helpers for diagnostic output. 
+package console diff --git a/pkg/constants/constants.go b/pkg/constants/constants.go index 3fdc63aa531..0d59b25032c 100644 --- a/pkg/constants/constants.go +++ b/pkg/constants/constants.go @@ -21,6 +21,14 @@ const CLIExtensionPrefix CommandPrefix = "gh aw" // - Easy refactoring: Can change implementation without affecting API // // See scratchpad/go-type-patterns.md for detailed guidance on type patterns. +// +// # Intentional Method Duplication +// +// Several string-based types below define identical String() and IsValid() method bodies. +// This duplication is intentional: Go does not allow shared method sets for distinct named +// types, so each type must define its own methods. The bodies are deliberately simple and +// unlikely to diverge. Code-generation approaches (go:generate) were considered but add +// more complexity than the duplication itself. // LineLength represents a line length in characters for expression formatting. // This semantic type distinguishes line lengths from arbitrary integers, diff --git a/pkg/repoutil/repoutil.go b/pkg/repoutil/repoutil.go index 304743f6268..ddd7ed882e0 100644 --- a/pkg/repoutil/repoutil.go +++ b/pkg/repoutil/repoutil.go @@ -22,12 +22,3 @@ func SplitRepoSlug(slug string) (owner, repo string, err error) { log.Printf("Split result: owner=%s, repo=%s", parts[0], parts[1]) return parts[0], parts[1], nil } - -// SanitizeForFilename converts a repository slug (owner/repo) to a filename-safe string. -// Replaces "/" with "-". Returns "clone-mode" if the slug is empty. 
-func SanitizeForFilename(slug string) string { - if slug == "" { - return "clone-mode" - } - return strings.ReplaceAll(slug, "/", "-") -} diff --git a/pkg/repoutil/repoutil_test.go b/pkg/repoutil/repoutil_test.go index 97b949d1f17..ecd663c5bad 100644 --- a/pkg/repoutil/repoutil_test.go +++ b/pkg/repoutil/repoutil_test.go @@ -70,44 +70,6 @@ func TestSplitRepoSlug(t *testing.T) { } } -func TestSanitizeForFilename(t *testing.T) { - tests := []struct { - name string - slug string - expected string - }{ - { - name: "normal slug", - slug: "github/gh-aw", - expected: "github-gh-aw", - }, - { - name: "empty slug", - slug: "", - expected: "clone-mode", - }, - { - name: "slug with multiple slashes", - slug: "owner/repo/extra", - expected: "owner-repo-extra", - }, - { - name: "slug with hyphen", - slug: "owner/my-repo", - expected: "owner-my-repo", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := SanitizeForFilename(tt.slug) - if result != tt.expected { - t.Errorf("SanitizeForFilename(%q) = %q; want %q", tt.slug, result, tt.expected) - } - }) - } -} - func BenchmarkSplitRepoSlug(b *testing.B) { slug := "github/gh-aw" for b.Loop() { @@ -115,13 +77,6 @@ func BenchmarkSplitRepoSlug(b *testing.B) { } } -func BenchmarkSanitizeForFilename(b *testing.B) { - slug := "github/gh-aw" - for b.Loop() { - _ = SanitizeForFilename(slug) - } -} - // Additional edge case tests func TestSplitRepoSlug_Whitespace(t *testing.T) { @@ -230,49 +185,6 @@ func TestSplitRepoSlug_SpecialCharacters(t *testing.T) { } } -func TestSanitizeForFilename_SpecialCases(t *testing.T) { - tests := []struct { - name string - slug string - expected string - }{ - { - name: "multiple slashes", - slug: "owner/repo/extra", - expected: "owner-repo-extra", - }, - { - name: "leading slash", - slug: "/owner/repo", - expected: "-owner-repo", - }, - { - name: "trailing slash", - slug: "owner/repo/", - expected: "owner-repo-", - }, - { - name: "only slashes", - slug: "///", - expected: 
"---", - }, - { - name: "single character owner and repo", - slug: "a/b", - expected: "a-b", - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := SanitizeForFilename(tt.slug) - if result != tt.expected { - t.Errorf("SanitizeForFilename(%q) = %q; want %q", tt.slug, result, tt.expected) - } - }) - } -} - func TestSplitRepoSlug_Idempotent(t *testing.T) { // Test that splitting and rejoining gives the same result slugs := []string{ diff --git a/pkg/stringutil/sanitize.go b/pkg/stringutil/sanitize.go index c09d9f59a31..8d4f1fd042d 100644 --- a/pkg/stringutil/sanitize.go +++ b/pkg/stringutil/sanitize.go @@ -179,3 +179,17 @@ func SanitizeToolID(toolID string) string { return cleaned } + +// SanitizeForFilename converts a repository slug (owner/repo) to a filename-safe string. +// Replaces "/" with "-". Returns "clone-mode" if the slug is empty. +// +// Examples: +// +// SanitizeForFilename("owner/repo") // returns "owner-repo" +// SanitizeForFilename("") // returns "clone-mode" +func SanitizeForFilename(slug string) string { + if slug == "" { + return "clone-mode" + } + return strings.ReplaceAll(slug, "/", "-") +} diff --git a/pkg/stringutil/sanitize_test.go b/pkg/stringutil/sanitize_test.go index 93ad1db4d89..c469f6fedf5 100644 --- a/pkg/stringutil/sanitize_test.go +++ b/pkg/stringutil/sanitize_test.go @@ -548,3 +548,73 @@ func BenchmarkSanitizeToolID(b *testing.B) { SanitizeToolID(toolID) } } + +func TestSanitizeForFilename(t *testing.T) { + tests := []struct { + name string + slug string + expected string + }{ + { + name: "normal slug", + slug: "github/gh-aw", + expected: "github-gh-aw", + }, + { + name: "empty slug", + slug: "", + expected: "clone-mode", + }, + { + name: "slug with multiple slashes", + slug: "owner/repo/extra", + expected: "owner-repo-extra", + }, + { + name: "slug with hyphen", + slug: "owner/my-repo", + expected: "owner-my-repo", + }, + { + name: "multiple slashes", + slug: "owner/repo/extra", + expected: 
"owner-repo-extra", + }, + { + name: "leading slash", + slug: "/owner/repo", + expected: "-owner-repo", + }, + { + name: "trailing slash", + slug: "owner/repo/", + expected: "owner-repo-", + }, + { + name: "only slashes", + slug: "///", + expected: "---", + }, + { + name: "single character owner and repo", + slug: "a/b", + expected: "a-b", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := SanitizeForFilename(tt.slug) + if result != tt.expected { + t.Errorf("SanitizeForFilename(%q) = %q; want %q", tt.slug, result, tt.expected) + } + }) + } +} + +func BenchmarkSanitizeForFilename(b *testing.B) { + slug := "github/gh-aw" + for b.Loop() { + SanitizeForFilename(slug) + } +} diff --git a/pkg/workflow/agent_validation.go b/pkg/workflow/agent_validation.go index 6f78af5be58..cfd2eed6298 100644 --- a/pkg/workflow/agent_validation.go +++ b/pkg/workflow/agent_validation.go @@ -49,11 +49,10 @@ import ( "path/filepath" "github.com/github/gh-aw/pkg/console" - "github.com/github/gh-aw/pkg/logger" "github.com/goccy/go-yaml" ) -var agentValidationLog = logger.New("workflow:agent_validation") +var agentValidationLog = newValidationLogger("agent") // validateAgentFile validates that the custom agent file specified in imports exists func (c *Compiler) validateAgentFile(workflowData *WorkflowData, markdownPath string) error { diff --git a/pkg/workflow/compiler_filters_validation.go b/pkg/workflow/compiler_filters_validation.go index 9bf85a22b4a..74af2a64e14 100644 --- a/pkg/workflow/compiler_filters_validation.go +++ b/pkg/workflow/compiler_filters_validation.go @@ -34,11 +34,9 @@ package workflow import ( "fmt" - - "github.com/github/gh-aw/pkg/logger" ) -var filterValidationLog = logger.New("workflow:filter_validation") +var filterValidationLog = newValidationLogger("filter") // ValidateEventFilters checks for GitHub Actions filter mutual exclusivity rules func ValidateEventFilters(frontmatter map[string]any) error { diff --git 
a/pkg/workflow/concurrency_validation.go b/pkg/workflow/concurrency_validation.go index 10d6602c806..423307e38aa 100644 --- a/pkg/workflow/concurrency_validation.go +++ b/pkg/workflow/concurrency_validation.go @@ -31,11 +31,9 @@ import ( "fmt" "regexp" "strings" - - "github.com/github/gh-aw/pkg/logger" ) -var concurrencyValidationLog = logger.New("workflow:concurrency_validation") +var concurrencyValidationLog = newValidationLogger("concurrency") // validateConcurrencyGroupExpression validates the syntax of a custom concurrency group expression. // It checks for common syntactic errors that would cause runtime failures: diff --git a/pkg/workflow/dangerous_permissions_validation.go b/pkg/workflow/dangerous_permissions_validation.go index ac456ce9df5..d21981c580a 100644 --- a/pkg/workflow/dangerous_permissions_validation.go +++ b/pkg/workflow/dangerous_permissions_validation.go @@ -3,11 +3,9 @@ package workflow import ( "fmt" "strings" - - "github.com/github/gh-aw/pkg/logger" ) -var dangerousPermissionsLog = logger.New("workflow:dangerous_permissions_validation") +var dangerousPermissionsLog = newValidationLogger("dangerous_permissions") // validateDangerousPermissions validates that write permissions are not used. 
// diff --git a/pkg/workflow/dispatch_workflow_validation.go b/pkg/workflow/dispatch_workflow_validation.go index 1eed55365d5..a68b98c6d05 100644 --- a/pkg/workflow/dispatch_workflow_validation.go +++ b/pkg/workflow/dispatch_workflow_validation.go @@ -8,11 +8,10 @@ import ( "strings" "github.com/github/gh-aw/pkg/fileutil" - "github.com/github/gh-aw/pkg/logger" "github.com/goccy/go-yaml" ) -var dispatchWorkflowValidationLog = logger.New("workflow:dispatch_workflow_validation") +var dispatchWorkflowValidationLog = newValidationLogger("dispatch_workflow") // validateDispatchWorkflow validates that the dispatch-workflow configuration is correct func (c *Compiler) validateDispatchWorkflow(data *WorkflowData, workflowPath string) error { diff --git a/pkg/workflow/docker_validation.go b/pkg/workflow/docker_validation.go index 09bddf6677d..17673110687 100644 --- a/pkg/workflow/docker_validation.go +++ b/pkg/workflow/docker_validation.go @@ -44,10 +44,9 @@ import ( "time" "github.com/github/gh-aw/pkg/constants" - "github.com/github/gh-aw/pkg/logger" ) -var dockerValidationLog = logger.New("workflow:docker_validation") +var dockerValidationLog = newValidationLogger("docker") // dockerDaemonCheckTimeout is how long to wait for `docker info` to respond. // If the daemon isn't running, this prevents long hangs on every docker command. 
diff --git a/pkg/workflow/engine_validation.go b/pkg/workflow/engine_validation.go index c210779f474..005c13905dc 100644 --- a/pkg/workflow/engine_validation.go +++ b/pkg/workflow/engine_validation.go @@ -39,11 +39,10 @@ import ( "strings" "github.com/github/gh-aw/pkg/constants" - "github.com/github/gh-aw/pkg/logger" "github.com/github/gh-aw/pkg/parser" ) -var engineValidationLog = logger.New("workflow:engine_validation") +var engineValidationLog = newValidationLogger("engine") // validateEngine validates that the given engine ID is supported func (c *Compiler) validateEngine(engineID string) error { diff --git a/pkg/workflow/error_aggregation.go b/pkg/workflow/error_aggregation.go deleted file mode 100644 index 576af8c3038..00000000000 --- a/pkg/workflow/error_aggregation.go +++ /dev/null @@ -1,140 +0,0 @@ -// This file provides error aggregation utilities for validation. -// -// # Error Aggregation -// -// This file implements error collection and aggregation for validation -// functions, allowing users to see all validation errors in a single run -// instead of discovering them one at a time. 
-// -// # Error Aggregation Functions -// -// - NewErrorCollector() - Creates a new error collector -// - ErrorCollector.Add() - Adds an error to the collection -// - ErrorCollector.HasErrors() - Checks if any errors were collected -// - ErrorCollector.Error() - Returns aggregated error using errors.Join -// - ErrorCollector.Count() - Returns the number of collected errors -// -// # Usage Pattern -// -// Use error collectors in validation functions to collect multiple errors: -// -// func validateMultipleThings(config Config, failFast bool) error { -// collector := NewErrorCollector(failFast) -// -// if err := validateThing1(config); err != nil { -// if returnErr := collector.Add(err); returnErr != nil { -// return returnErr // Fail-fast mode -// } -// } -// -// if err := validateThing2(config); err != nil { -// if returnErr := collector.Add(err); returnErr != nil { -// return returnErr // Fail-fast mode -// } -// } -// -// return collector.Error() -// } -// -// # Fail-Fast Mode -// -// When failFast is true, the collector returns immediately on the first error. -// When false, it collects all errors and returns them joined with errors.Join. 
- -package workflow - -import ( - "errors" - "fmt" - "strings" - - "github.com/github/gh-aw/pkg/logger" -) - -var errorAggregationLog = logger.New("workflow:error_aggregation") - -// ErrorCollector collects multiple validation errors -type ErrorCollector struct { - errors []error - failFast bool -} - -// NewErrorCollector creates a new error collector -// If failFast is true, the collector will stop at the first error -func NewErrorCollector(failFast bool) *ErrorCollector { - errorAggregationLog.Printf("Creating error collector: fail_fast=%v", failFast) - return &ErrorCollector{ - errors: make([]error, 0), - failFast: failFast, - } -} - -// Add adds an error to the collector -// If failFast is enabled, returns the error immediately -// Otherwise, adds it to the collection and returns nil -func (c *ErrorCollector) Add(err error) error { - if err == nil { - return nil - } - - errorAggregationLog.Printf("Adding error to collector: %v", err) - - if c.failFast { - errorAggregationLog.Print("Fail-fast enabled, returning error immediately") - return err - } - - c.errors = append(c.errors, err) - return nil -} - -// HasErrors returns true if any errors have been collected -func (c *ErrorCollector) HasErrors() bool { - return len(c.errors) > 0 -} - -// Count returns the number of errors collected -func (c *ErrorCollector) Count() int { - return len(c.errors) -} - -// Error returns the aggregated error using errors.Join -// Returns nil if no errors were collected -func (c *ErrorCollector) Error() error { - if len(c.errors) == 0 { - return nil - } - - errorAggregationLog.Printf("Aggregating %d errors", len(c.errors)) - - if len(c.errors) == 1 { - return c.errors[0] - } - - return errors.Join(c.errors...) 
-} - -// FormattedError returns the aggregated error with a formatted header showing the count -// Returns nil if no errors were collected -// This method is preferred over Error() + FormatAggregatedError for better accuracy -func (c *ErrorCollector) FormattedError(category string) error { - if len(c.errors) == 0 { - return nil - } - - errorAggregationLog.Printf("Formatting %d errors for category: %s", len(c.errors), category) - - if len(c.errors) == 1 { - return c.errors[0] - } - - // Build formatted error with count header - var sb strings.Builder - fmt.Fprintf(&sb, "Found %d %s errors:", len(c.errors), category) - for _, err := range c.errors { - sb.WriteString("\n • ") - sb.WriteString(err.Error()) - } - - return fmt.Errorf("%s", sb.String()) -} diff --git a/pkg/workflow/error_helpers.go b/pkg/workflow/error_helpers.go deleted file mode 100644 index f124753dde2..00000000000 --- a/pkg/workflow/error_helpers.go +++ /dev/null @@ -1,162 +0,0 @@ -package workflow - -import ( - "fmt" - "strings" - "time" - - "github.com/github/gh-aw/pkg/logger" -) - -var errorHelpersLog = logger.New("workflow:error_helpers") - -// WorkflowValidationError represents an error that occurred during input validation -type WorkflowValidationError struct { - Field string - Value string - Reason string - Suggestion string - Timestamp time.Time -} - -// Error implements the error interface -func (e *WorkflowValidationError) Error() string { - var b strings.Builder - - fmt.Fprintf(&b, "[%s] Validation failed for field '%s'", - e.Timestamp.Format(time.RFC3339), e.Field) - - if e.Value != "" { - // Truncate long values - truncatedValue := e.Value - if len(truncatedValue) > 100 { - truncatedValue = truncatedValue[:97] + "..." 
- } - fmt.Fprintf(&b, "\n\nValue: %s", truncatedValue) - } - - fmt.Fprintf(&b, "\nReason: %s", e.Reason) - - if e.Suggestion != "" { - fmt.Fprintf(&b, "\nSuggestion: %s", e.Suggestion) - } - - return b.String() -} - -// NewValidationError creates a new validation error with context -func NewValidationError(field, value, reason, suggestion string) *WorkflowValidationError { - errorHelpersLog.Printf("Creating validation error: field=%s, reason=%s", field, reason) - return &WorkflowValidationError{ - Field: field, - Value: value, - Reason: reason, - Suggestion: suggestion, - Timestamp: time.Now(), - } -} - -// OperationError represents an error that occurred during an operation -type OperationError struct { - Operation string - EntityType string - EntityID string - Cause error - Suggestion string - Timestamp time.Time -} - -// Error implements the error interface -func (e *OperationError) Error() string { - var b strings.Builder - - fmt.Fprintf(&b, "[%s] Failed to %s %s", - e.Timestamp.Format(time.RFC3339), e.Operation, e.EntityType) - - if e.EntityID != "" { - fmt.Fprintf(&b, " #%s", e.EntityID) - } - - if e.Cause != nil { - fmt.Fprintf(&b, "\n\nUnderlying error: %v", e.Cause) - } - - if e.Suggestion != "" { - fmt.Fprintf(&b, "\nSuggestion: %s", e.Suggestion) - } else { - // Provide default suggestion - fmt.Fprintf(&b, "\nSuggestion: Check that the %s exists and you have the necessary permissions.", e.EntityType) - } - - return b.String() -} - -// Unwrap returns the underlying error -func (e *OperationError) Unwrap() error { - return e.Cause -} - -// NewOperationError creates a new operation error with context -func NewOperationError(operation, entityType, entityID string, cause error, suggestion string) *OperationError { - if errorHelpersLog.Enabled() { - errorHelpersLog.Printf("Creating operation error: operation=%s, entityType=%s, entityID=%s, cause=%v", - operation, entityType, entityID, cause) - } - return &OperationError{ - Operation: operation, - EntityType: 
entityType, - EntityID: entityID, - Cause: cause, - Suggestion: suggestion, - Timestamp: time.Now(), - } -} - -// ConfigurationError represents an error in safe-outputs configuration -type ConfigurationError struct { - ConfigKey string - Value string - Reason string - Suggestion string - Timestamp time.Time -} - -// Error implements the error interface -func (e *ConfigurationError) Error() string { - var b strings.Builder - - fmt.Fprintf(&b, "[%s] Configuration error in '%s'", - e.Timestamp.Format(time.RFC3339), e.ConfigKey) - - if e.Value != "" { - // Truncate long values - truncatedValue := e.Value - if len(truncatedValue) > 100 { - truncatedValue = truncatedValue[:97] + "..." - } - fmt.Fprintf(&b, "\n\nValue: %s", truncatedValue) - } - - fmt.Fprintf(&b, "\nReason: %s", e.Reason) - - if e.Suggestion != "" { - fmt.Fprintf(&b, "\nSuggestion: %s", e.Suggestion) - } else { - // Provide default suggestion - fmt.Fprintf(&b, "\nSuggestion: Check the safe-outputs configuration in your workflow frontmatter and ensure '%s' is correctly specified.", e.ConfigKey) - } - - return b.String() -} - -// NewConfigurationError creates a new configuration error with context -func NewConfigurationError(configKey, value, reason, suggestion string) *ConfigurationError { - errorHelpersLog.Printf("Creating configuration error: configKey=%s, reason=%s", configKey, reason) - return &ConfigurationError{ - ConfigKey: configKey, - Value: value, - Reason: reason, - Suggestion: suggestion, - Timestamp: time.Now(), - } -} diff --git a/pkg/workflow/expression_validation.go b/pkg/workflow/expression_validation.go index 3827b2f6e5c..590ca8379ad 100644 --- a/pkg/workflow/expression_validation.go +++ b/pkg/workflow/expression_validation.go @@ -52,11 +52,10 @@ import ( "strings" "github.com/github/gh-aw/pkg/constants" - "github.com/github/gh-aw/pkg/logger" "github.com/github/gh-aw/pkg/parser" ) -var expressionValidationLog = logger.New("workflow:expression_validation") +var expressionValidationLog = 
newValidationLogger("expression") // maxFuzzyMatchSuggestions is the maximum number of similar expressions to suggest // when an unauthorized expression is found diff --git a/pkg/workflow/features_validation.go b/pkg/workflow/features_validation.go index 9f074dbe133..f82bfb06e3f 100644 --- a/pkg/workflow/features_validation.go +++ b/pkg/workflow/features_validation.go @@ -25,11 +25,9 @@ package workflow import ( "fmt" "regexp" - - "github.com/github/gh-aw/pkg/logger" ) -var featuresValidationLog = logger.New("workflow:features_validation") +var featuresValidationLog = newValidationLogger("features") var shaRegex = regexp.MustCompile("^[0-9a-f]{40}$") diff --git a/pkg/workflow/firewall_validation.go b/pkg/workflow/firewall_validation.go index d41e78c8416..333146a9055 100644 --- a/pkg/workflow/firewall_validation.go +++ b/pkg/workflow/firewall_validation.go @@ -13,11 +13,9 @@ package workflow import ( "fmt" "slices" - - "github.com/github/gh-aw/pkg/logger" ) -var firewallValidationLog = logger.New("workflow:firewall_validation") +var firewallValidationLog = newValidationLogger("firewall") // validateFirewallConfig validates firewall configuration including log-level func (c *Compiler) validateFirewallConfig(workflowData *WorkflowData) error { diff --git a/pkg/workflow/imported_steps_validation.go b/pkg/workflow/imported_steps_validation.go index 1677a94c46d..dc1acd73992 100644 --- a/pkg/workflow/imported_steps_validation.go +++ b/pkg/workflow/imported_steps_validation.go @@ -31,10 +31,9 @@ import ( "github.com/goccy/go-yaml" "github.com/github/gh-aw/pkg/console" - "github.com/github/gh-aw/pkg/logger" ) -var importedStepsValidationLog = logger.New("workflow:imported_steps_validation") +var importedStepsValidationLog = newValidationLogger("imported_steps") // validateImportedStepsNoAgenticSecrets validates that engine steps don't use agentic engine secrets // This validation is now a no-op since custom engine support has been removed. 
diff --git a/pkg/workflow/labels_validation.go b/pkg/workflow/labels_validation.go index 9331d73087e..682dc94eae3 100644 --- a/pkg/workflow/labels_validation.go +++ b/pkg/workflow/labels_validation.go @@ -3,11 +3,9 @@ package workflow import ( "fmt" "strings" - - "github.com/github/gh-aw/pkg/logger" ) -var labelsValidationLog = logger.New("workflow:labels_validation") +var labelsValidationLog = newValidationLogger("labels") // validateLabels validates the labels field in the workflow frontmatter. // It checks that: diff --git a/pkg/workflow/mcp_config_validation.go b/pkg/workflow/mcp_config_validation.go index 04f375bcaca..833b85e35e2 100644 --- a/pkg/workflow/mcp_config_validation.go +++ b/pkg/workflow/mcp_config_validation.go @@ -50,11 +50,10 @@ import ( "strings" "github.com/github/gh-aw/pkg/constants" - "github.com/github/gh-aw/pkg/logger" "github.com/github/gh-aw/pkg/parser" ) -var mcpValidationLog = logger.New("workflow:mcp_config_validation") +var mcpValidationLog = newValidationLogger("mcp_config") // ValidateMCPConfigs validates all MCP configurations in the tools section using JSON schema func ValidateMCPConfigs(tools map[string]any) error { diff --git a/pkg/workflow/network_firewall_validation.go b/pkg/workflow/network_firewall_validation.go index 0ba6453fb98..f4a67a6d6be 100644 --- a/pkg/workflow/network_firewall_validation.go +++ b/pkg/workflow/network_firewall_validation.go @@ -11,10 +11,9 @@ package workflow import ( "github.com/github/gh-aw/pkg/constants" - "github.com/github/gh-aw/pkg/logger" ) -var networkFirewallValidationLog = logger.New("workflow:network_firewall_validation") +var networkFirewallValidationLog = newValidationLogger("network_firewall") // validateNetworkFirewallConfig validates network firewall configuration dependencies // Returns an error if the configuration is invalid diff --git a/pkg/workflow/npm_validation.go b/pkg/workflow/npm_validation.go index 59ba7e31b00..c5559842f15 100644 --- a/pkg/workflow/npm_validation.go +++ 
b/pkg/workflow/npm_validation.go @@ -40,10 +40,9 @@ import ( "strings" "github.com/github/gh-aw/pkg/console" - "github.com/github/gh-aw/pkg/logger" ) -var npmValidationLog = logger.New("workflow:npm_validation") +var npmValidationLog = newValidationLogger("npm") // validateNpxPackages validates that npx packages are available on npm registry func (c *Compiler) validateNpxPackages(workflowData *WorkflowData) error { diff --git a/pkg/workflow/permissions_validation.go b/pkg/workflow/permissions_validation.go index b62d39e49c8..6a02d3f7eb0 100644 --- a/pkg/workflow/permissions_validation.go +++ b/pkg/workflow/permissions_validation.go @@ -9,10 +9,9 @@ import ( "strings" "github.com/github/gh-aw/pkg/constants" - "github.com/github/gh-aw/pkg/logger" ) -var permissionsValidationLog = logger.New("workflow:permissions_validation") +var permissionsValidationLog = newValidationLogger("permissions") //go:embed data/github_toolsets_permissions.json var githubToolsetsPermissionsJSON []byte diff --git a/pkg/workflow/pip_validation.go b/pkg/workflow/pip_validation.go index 1003e9f6fd6..f33f37e56a9 100644 --- a/pkg/workflow/pip_validation.go +++ b/pkg/workflow/pip_validation.go @@ -43,10 +43,9 @@ import ( "strings" "github.com/github/gh-aw/pkg/console" - "github.com/github/gh-aw/pkg/logger" ) -var pipValidationLog = logger.New("workflow:pip_validation") +var pipValidationLog = newValidationLogger("pip") // validatePythonPackagesWithPip is a generic helper that validates Python packages using pip index. // It accepts a package list, package type name for error messaging, and pip command to use. 
diff --git a/pkg/workflow/repository_features_validation.go b/pkg/workflow/repository_features_validation.go index 8798e2cc2cf..f361cb089d5 100644 --- a/pkg/workflow/repository_features_validation.go +++ b/pkg/workflow/repository_features_validation.go @@ -49,10 +49,9 @@ import ( "github.com/cli/go-gh/v2/pkg/api" "github.com/cli/go-gh/v2/pkg/repository" "github.com/github/gh-aw/pkg/console" - "github.com/github/gh-aw/pkg/logger" ) -var repositoryFeaturesLog = logger.New("workflow:repository_features_validation") +var repositoryFeaturesLog = newValidationLogger("repository_features") // RepositoryFeatures holds cached information about repository capabilities type RepositoryFeatures struct { diff --git a/pkg/workflow/runs_on_validation.go b/pkg/workflow/runs_on_validation.go index f3369d8dd1f..2b972e102ec 100644 --- a/pkg/workflow/runs_on_validation.go +++ b/pkg/workflow/runs_on_validation.go @@ -25,11 +25,9 @@ package workflow import ( "fmt" "strings" - - "github.com/github/gh-aw/pkg/logger" ) -var runsOnValidationLog = logger.New("workflow:runs_on_validation") +var runsOnValidationLog = newValidationLogger("runs_on") // macOSRunnerFAQURL is the URL to the FAQ entry explaining why macOS runners are not supported. 
const macOSRunnerFAQURL = "https://github.github.com/gh-aw/reference/faq/#why-are-macos-runners-not-supported" diff --git a/pkg/workflow/runtime_validation.go b/pkg/workflow/runtime_validation.go index 6106a52c6b3..95c3507d5d1 100644 --- a/pkg/workflow/runtime_validation.go +++ b/pkg/workflow/runtime_validation.go @@ -46,10 +46,9 @@ import ( "strings" "github.com/github/gh-aw/pkg/console" - "github.com/github/gh-aw/pkg/logger" ) -var runtimeValidationLog = logger.New("workflow:runtime_validation") +var runtimeValidationLog = newValidationLogger("runtime") // validateExpressionSizes validates that no expression values in the generated YAML exceed GitHub Actions limits func (c *Compiler) validateExpressionSizes(yamlContent string) error { diff --git a/pkg/workflow/safe_outputs.go b/pkg/workflow/safe_outputs.go deleted file mode 100644 index 60c69f70596..00000000000 --- a/pkg/workflow/safe_outputs.go +++ /dev/null @@ -1,8 +0,0 @@ -package workflow - -// This file serves as documentation for the safe_outputs_* module organization. 
-// The safe_outputs functionality has been split into multiple focused files: -// - safe_outputs_config.go: Configuration parsing and validation -// - safe_outputs_steps.go: Step builders for GitHub Script and custom actions -// - safe_outputs_env.go: Environment variable helpers -// - safe_outputs_jobs.go: Job assembly and orchestration diff --git a/pkg/workflow/safe_outputs_app.go b/pkg/workflow/safe_outputs_app.go deleted file mode 100644 index c3272272380..00000000000 --- a/pkg/workflow/safe_outputs_app.go +++ /dev/null @@ -1,333 +0,0 @@ -package workflow - -import ( - "encoding/json" - "fmt" - "sort" - - "github.com/github/gh-aw/pkg/logger" -) - -var safeOutputsAppLog = logger.New("workflow:safe_outputs_app") - -// ======================================== -// GitHub App Configuration -// ======================================== - -// GitHubAppConfig holds configuration for GitHub App-based token minting -type GitHubAppConfig struct { - AppID string `yaml:"app-id,omitempty"` // GitHub App ID (e.g., "${{ vars.APP_ID }}") - PrivateKey string `yaml:"private-key,omitempty"` // GitHub App private key (e.g., "${{ secrets.APP_PRIVATE_KEY }}") - Owner string `yaml:"owner,omitempty"` // Optional: owner of the GitHub App installation (defaults to current repository owner) - Repositories []string `yaml:"repositories,omitempty"` // Optional: comma or newline-separated list of repositories to grant access to -} - -// ======================================== -// App Configuration Parsing -// ======================================== - -// parseAppConfig parses the app configuration from a map -func parseAppConfig(appMap map[string]any) *GitHubAppConfig { - safeOutputsAppLog.Print("Parsing GitHub App configuration") - appConfig := &GitHubAppConfig{} - - // Parse app-id (required) - if appID, exists := appMap["app-id"]; exists { - if appIDStr, ok := appID.(string); ok { - appConfig.AppID = appIDStr - } - } - - // Parse private-key (required) - if privateKey, exists := 
appMap["private-key"]; exists { - if privateKeyStr, ok := privateKey.(string); ok { - appConfig.PrivateKey = privateKeyStr - } - } - - // Parse owner (optional) - if owner, exists := appMap["owner"]; exists { - if ownerStr, ok := owner.(string); ok { - appConfig.Owner = ownerStr - } - } - - // Parse repositories (optional) - if repos, exists := appMap["repositories"]; exists { - if reposArray, ok := repos.([]any); ok { - var repoStrings []string - for _, repo := range reposArray { - if repoStr, ok := repo.(string); ok { - repoStrings = append(repoStrings, repoStr) - } - } - appConfig.Repositories = repoStrings - } - } - - return appConfig -} - -// ======================================== -// App Configuration Merging -// ======================================== - -// mergeAppFromIncludedConfigs merges app configuration from included safe-outputs configurations -// If the top-level workflow has an app configured, it takes precedence -// Otherwise, the first app configuration found in included configs is used -func (c *Compiler) mergeAppFromIncludedConfigs(topSafeOutputs *SafeOutputsConfig, includedConfigs []string) (*GitHubAppConfig, error) { - safeOutputsAppLog.Printf("Merging app configuration: included_configs=%d", len(includedConfigs)) - // If top-level workflow already has app configured, use it (no merge needed) - if topSafeOutputs != nil && topSafeOutputs.GitHubApp != nil { - safeOutputsAppLog.Print("Using top-level app configuration") - return topSafeOutputs.GitHubApp, nil - } - - // Otherwise, find the first app configuration in included configs - for _, configJSON := range includedConfigs { - if configJSON == "" || configJSON == "{}" { - continue - } - - // Parse the safe-outputs configuration - var safeOutputsConfig map[string]any - if err := json.Unmarshal([]byte(configJSON), &safeOutputsConfig); err != nil { - continue // Skip invalid JSON - } - - // Extract app from the safe-outputs.github-app field - if appData, exists := 
safeOutputsConfig["github-app"]; exists { - if appMap, ok := appData.(map[string]any); ok { - appConfig := parseAppConfig(appMap) - - // Return first valid app configuration found - if appConfig.AppID != "" && appConfig.PrivateKey != "" { - safeOutputsAppLog.Print("Found valid app configuration in included config") - return appConfig, nil - } - } - } - } - - safeOutputsAppLog.Print("No app configuration found in included configs") - return nil, nil -} - -// ======================================== -// GitHub App Token Steps Generation -// ======================================== - -// buildGitHubAppTokenMintStep generates the step to mint a GitHub App installation access token -// Permissions are automatically computed from the safe output job requirements -func (c *Compiler) buildGitHubAppTokenMintStep(app *GitHubAppConfig, permissions *Permissions) []string { - safeOutputsAppLog.Printf("Building GitHub App token mint step: owner=%s, repos=%d", app.Owner, len(app.Repositories)) - var steps []string - - steps = append(steps, " - name: Generate GitHub App token\n") - steps = append(steps, " id: safe-outputs-app-token\n") - steps = append(steps, fmt.Sprintf(" uses: %s\n", GetActionPin("actions/create-github-app-token"))) - steps = append(steps, " with:\n") - steps = append(steps, fmt.Sprintf(" app-id: %s\n", app.AppID)) - steps = append(steps, fmt.Sprintf(" private-key: %s\n", app.PrivateKey)) - - // Add owner - default to current repository owner if not specified - owner := app.Owner - if owner == "" { - owner = "${{ github.repository_owner }}" - } - steps = append(steps, fmt.Sprintf(" owner: %s\n", owner)) - - // Add repositories - behavior depends on configuration: - // - If repositories is ["*"], omit the field to allow org-wide access - // - If repositories is a single value, use inline format - // - If repositories has multiple values, use block scalar format (newline-separated) - // to ensure clarity and proper parsing by actions/create-github-app-token - // - 
If repositories is empty/not specified, default to current repository - if len(app.Repositories) == 1 && app.Repositories[0] == "*" { - // Org-wide access: omit repositories field entirely - safeOutputsAppLog.Print("Using org-wide GitHub App token (repositories: *)") - } else if len(app.Repositories) == 1 { - // Single repository: use inline format for clarity - steps = append(steps, fmt.Sprintf(" repositories: %s\n", app.Repositories[0])) - } else if len(app.Repositories) > 1 { - // Multiple repositories: use block scalar format (newline-separated) - // This format is more readable and avoids potential issues with comma-separated parsing - steps = append(steps, " repositories: |-\n") - for _, repo := range app.Repositories { - steps = append(steps, fmt.Sprintf(" %s\n", repo)) - } - } else { - // Extract repo name from github.repository (which is "owner/repo") - // Using GitHub Actions expression: split(github.repository, '/')[1] - steps = append(steps, " repositories: ${{ github.event.repository.name }}\n") - } - - // Always add github-api-url from environment variable - steps = append(steps, " github-api-url: ${{ github.api_url }}\n") - - // Add permission-* fields automatically computed from job permissions - // Sort keys to ensure deterministic compilation order - if permissions != nil { - permissionFields := convertPermissionsToAppTokenFields(permissions) - - // Extract and sort keys for deterministic ordering - keys := make([]string, 0, len(permissionFields)) - for key := range permissionFields { - keys = append(keys, key) - } - sort.Strings(keys) - - // Add permissions in sorted order - for _, key := range keys { - steps = append(steps, fmt.Sprintf(" %s: %s\n", key, permissionFields[key])) - } - } - - return steps -} - -// convertPermissionsToAppTokenFields converts job Permissions to permission-* action inputs -// This follows GitHub's recommendation for explicit permission control -// Note: This only includes permissions that are valid for GitHub App 
tokens. -// Some GitHub Actions permissions (like 'discussions', 'models') don't have -// corresponding GitHub App permissions and are skipped. -func convertPermissionsToAppTokenFields(permissions *Permissions) map[string]string { - fields := make(map[string]string) - - // Map GitHub Actions permissions to GitHub App permissions - // Only include permissions that exist in the actions/create-github-app-token action - // See: https://github.com/actions/create-github-app-token#permissions - - // Repository permissions that map directly - if level, ok := permissions.Get(PermissionActions); ok { - fields["permission-actions"] = string(level) - } - if level, ok := permissions.Get(PermissionChecks); ok { - fields["permission-checks"] = string(level) - } - if level, ok := permissions.Get(PermissionContents); ok { - fields["permission-contents"] = string(level) - } - if level, ok := permissions.Get(PermissionDeployments); ok { - fields["permission-deployments"] = string(level) - } - if level, ok := permissions.Get(PermissionIssues); ok { - fields["permission-issues"] = string(level) - } - if level, ok := permissions.Get(PermissionPackages); ok { - fields["permission-packages"] = string(level) - } - if level, ok := permissions.Get(PermissionPages); ok { - fields["permission-pages"] = string(level) - } - if level, ok := permissions.Get(PermissionPullRequests); ok { - fields["permission-pull-requests"] = string(level) - } - if level, ok := permissions.Get(PermissionSecurityEvents); ok { - fields["permission-security-events"] = string(level) - } - if level, ok := permissions.Get(PermissionStatuses); ok { - fields["permission-statuses"] = string(level) - } - if level, ok := permissions.Get(PermissionOrganizationProj); ok { - fields["permission-organization-projects"] = string(level) - } - if level, ok := permissions.Get(PermissionDiscussions); ok { - fields["permission-discussions"] = string(level) - } - - // Note: The following GitHub Actions permissions do NOT have GitHub App 
equivalents: - // - models (no GitHub App permission for this) - // - id-token (not applicable to GitHub Apps) - // - attestations (no GitHub App permission for this) - // - repository-projects (removed - classic projects are sunset; use organization-projects for Projects v2 via PAT/GitHub App) - - return fields -} - -// buildGitHubAppTokenInvalidationStep generates the step to invalidate the GitHub App token -// This step always runs (even on failure) to ensure tokens are properly cleaned up -// Only runs if a token was successfully minted -func (c *Compiler) buildGitHubAppTokenInvalidationStep() []string { - var steps []string - - steps = append(steps, " - name: Invalidate GitHub App token\n") - steps = append(steps, " if: always() && steps.safe-outputs-app-token.outputs.token != ''\n") - steps = append(steps, " env:\n") - steps = append(steps, " TOKEN: ${{ steps.safe-outputs-app-token.outputs.token }}\n") - steps = append(steps, " run: |\n") - steps = append(steps, " echo \"Revoking GitHub App installation token...\"\n") - steps = append(steps, " # GitHub CLI will auth with the token being revoked.\n") - steps = append(steps, " gh api \\\n") - steps = append(steps, " --method DELETE \\\n") - steps = append(steps, " -H \"Authorization: token $TOKEN\" \\\n") - steps = append(steps, " /installation/token || echo \"Token revoke may already be expired.\"\n") - steps = append(steps, " \n") - steps = append(steps, " echo \"Token invalidation step complete.\"\n") - - return steps -} - -// ======================================== -// Activation Token Steps Generation -// ======================================== - -// buildActivationAppTokenMintStep generates the step to mint a GitHub App installation access token -// for use in the pre-activation (reaction) and activation (status comment) jobs. 
-func (c *Compiler) buildActivationAppTokenMintStep(app *GitHubAppConfig, permissions *Permissions) []string { - safeOutputsAppLog.Printf("Building activation GitHub App token mint step: owner=%s", app.Owner) - var steps []string - - steps = append(steps, " - name: Generate GitHub App token for activation\n") - steps = append(steps, " id: activation-app-token\n") - steps = append(steps, fmt.Sprintf(" uses: %s\n", GetActionPin("actions/create-github-app-token"))) - steps = append(steps, " with:\n") - steps = append(steps, fmt.Sprintf(" app-id: %s\n", app.AppID)) - steps = append(steps, fmt.Sprintf(" private-key: %s\n", app.PrivateKey)) - - // Add owner - default to current repository owner if not specified - owner := app.Owner - if owner == "" { - owner = "${{ github.repository_owner }}" - } - steps = append(steps, fmt.Sprintf(" owner: %s\n", owner)) - - // Default to current repository - steps = append(steps, " repositories: ${{ github.event.repository.name }}\n") - - // Always add github-api-url from environment variable - steps = append(steps, " github-api-url: ${{ github.api_url }}\n") - - // Add permission-* fields automatically computed from job permissions - if permissions != nil { - permissionFields := convertPermissionsToAppTokenFields(permissions) - - keys := make([]string, 0, len(permissionFields)) - for key := range permissionFields { - keys = append(keys, key) - } - sort.Strings(keys) - - for _, key := range keys { - steps = append(steps, fmt.Sprintf(" %s: %s\n", key, permissionFields[key])) - } - } - - return steps -} - -// resolveActivationToken returns the GitHub token to use for activation steps (reactions, status comments). 
-// Priority: GitHub App minted token > custom github-token > GITHUB_TOKEN (default) -// -// When returning the app token reference, callers MUST ensure that buildActivationAppTokenMintStep -// has already been called to generate the 'activation-app-token' step, since this function returns -// a reference to that step's output (${{ steps.activation-app-token.outputs.token }}). -func (c *Compiler) resolveActivationToken(data *WorkflowData) string { - if data.ActivationGitHubApp != nil { - return "${{ steps.activation-app-token.outputs.token }}" - } - if data.ActivationGitHubToken != "" { - return data.ActivationGitHubToken - } - return "${{ secrets.GITHUB_TOKEN }}" -} diff --git a/pkg/workflow/safe_outputs_config.go b/pkg/workflow/safe_outputs_config.go index a044372e3c6..d4a9007989f 100644 --- a/pkg/workflow/safe_outputs_config.go +++ b/pkg/workflow/safe_outputs_config.go @@ -1,6 +1,9 @@ package workflow import ( + "encoding/json" + "fmt" + "sort" "strings" "github.com/github/gh-aw/pkg/logger" @@ -581,3 +584,472 @@ func (c *Compiler) parseBaseSafeOutputConfig(configMap map[string]any, config *B } } } + +var safeOutputsAppLog = logger.New("workflow:safe_outputs_app") + +// ======================================== +// GitHub App Configuration +// ======================================== + +// GitHubAppConfig holds configuration for GitHub App-based token minting +type GitHubAppConfig struct { + AppID string `yaml:"app-id,omitempty"` // GitHub App ID (e.g., "${{ vars.APP_ID }}") + PrivateKey string `yaml:"private-key,omitempty"` // GitHub App private key (e.g., "${{ secrets.APP_PRIVATE_KEY }}") + Owner string `yaml:"owner,omitempty"` // Optional: owner of the GitHub App installation (defaults to current repository owner) + Repositories []string `yaml:"repositories,omitempty"` // Optional: comma or newline-separated list of repositories to grant access to +} + +// ======================================== +// App Configuration Parsing +// 
======================================== + +// parseAppConfig parses the app configuration from a map +func parseAppConfig(appMap map[string]any) *GitHubAppConfig { + safeOutputsAppLog.Print("Parsing GitHub App configuration") + appConfig := &GitHubAppConfig{} + + // Parse app-id (required) + if appID, exists := appMap["app-id"]; exists { + if appIDStr, ok := appID.(string); ok { + appConfig.AppID = appIDStr + } + } + + // Parse private-key (required) + if privateKey, exists := appMap["private-key"]; exists { + if privateKeyStr, ok := privateKey.(string); ok { + appConfig.PrivateKey = privateKeyStr + } + } + + // Parse owner (optional) + if owner, exists := appMap["owner"]; exists { + if ownerStr, ok := owner.(string); ok { + appConfig.Owner = ownerStr + } + } + + // Parse repositories (optional) + if repos, exists := appMap["repositories"]; exists { + if reposArray, ok := repos.([]any); ok { + var repoStrings []string + for _, repo := range reposArray { + if repoStr, ok := repo.(string); ok { + repoStrings = append(repoStrings, repoStr) + } + } + appConfig.Repositories = repoStrings + } + } + + return appConfig +} + +// ======================================== +// App Configuration Merging +// ======================================== + +// mergeAppFromIncludedConfigs merges app configuration from included safe-outputs configurations +// If the top-level workflow has an app configured, it takes precedence +// Otherwise, the first app configuration found in included configs is used +func (c *Compiler) mergeAppFromIncludedConfigs(topSafeOutputs *SafeOutputsConfig, includedConfigs []string) (*GitHubAppConfig, error) { + safeOutputsAppLog.Printf("Merging app configuration: included_configs=%d", len(includedConfigs)) + // If top-level workflow already has app configured, use it (no merge needed) + if topSafeOutputs != nil && topSafeOutputs.GitHubApp != nil { + safeOutputsAppLog.Print("Using top-level app configuration") + return topSafeOutputs.GitHubApp, nil + } + + // 
Otherwise, find the first app configuration in included configs + for _, configJSON := range includedConfigs { + if configJSON == "" || configJSON == "{}" { + continue + } + + // Parse the safe-outputs configuration + var safeOutputsConfig map[string]any + if err := json.Unmarshal([]byte(configJSON), &safeOutputsConfig); err != nil { + continue // Skip invalid JSON + } + + // Extract app from the safe-outputs.github-app field + if appData, exists := safeOutputsConfig["github-app"]; exists { + if appMap, ok := appData.(map[string]any); ok { + appConfig := parseAppConfig(appMap) + + // Return first valid app configuration found + if appConfig.AppID != "" && appConfig.PrivateKey != "" { + safeOutputsAppLog.Print("Found valid app configuration in included config") + return appConfig, nil + } + } + } + } + + safeOutputsAppLog.Print("No app configuration found in included configs") + return nil, nil +} + +// ======================================== +// GitHub App Token Steps Generation +// ======================================== + +// buildGitHubAppTokenMintStep generates the step to mint a GitHub App installation access token +// Permissions are automatically computed from the safe output job requirements +func (c *Compiler) buildGitHubAppTokenMintStep(app *GitHubAppConfig, permissions *Permissions) []string { + safeOutputsAppLog.Printf("Building GitHub App token mint step: owner=%s, repos=%d", app.Owner, len(app.Repositories)) + var steps []string + + steps = append(steps, " - name: Generate GitHub App token\n") + steps = append(steps, " id: safe-outputs-app-token\n") + steps = append(steps, fmt.Sprintf(" uses: %s\n", GetActionPin("actions/create-github-app-token"))) + steps = append(steps, " with:\n") + steps = append(steps, fmt.Sprintf(" app-id: %s\n", app.AppID)) + steps = append(steps, fmt.Sprintf(" private-key: %s\n", app.PrivateKey)) + + // Add owner - default to current repository owner if not specified + owner := app.Owner + if owner == "" { + owner = "${{ 
github.repository_owner }}" + } + steps = append(steps, fmt.Sprintf(" owner: %s\n", owner)) + + // Add repositories - behavior depends on configuration: + // - If repositories is ["*"], omit the field to allow org-wide access + // - If repositories is a single value, use inline format + // - If repositories has multiple values, use block scalar format (newline-separated) + // to ensure clarity and proper parsing by actions/create-github-app-token + // - If repositories is empty/not specified, default to current repository + if len(app.Repositories) == 1 && app.Repositories[0] == "*" { + // Org-wide access: omit repositories field entirely + safeOutputsAppLog.Print("Using org-wide GitHub App token (repositories: *)") + } else if len(app.Repositories) == 1 { + // Single repository: use inline format for clarity + steps = append(steps, fmt.Sprintf(" repositories: %s\n", app.Repositories[0])) + } else if len(app.Repositories) > 1 { + // Multiple repositories: use block scalar format (newline-separated) + // This format is more readable and avoids potential issues with comma-separated parsing + steps = append(steps, " repositories: |-\n") + for _, repo := range app.Repositories { + steps = append(steps, fmt.Sprintf(" %s\n", repo)) + } + } else { + // Extract repo name from github.repository (which is "owner/repo") + // Using GitHub Actions expression: split(github.repository, '/')[1] + steps = append(steps, " repositories: ${{ github.event.repository.name }}\n") + } + + // Always add github-api-url from environment variable + steps = append(steps, " github-api-url: ${{ github.api_url }}\n") + + // Add permission-* fields automatically computed from job permissions + // Sort keys to ensure deterministic compilation order + if permissions != nil { + permissionFields := convertPermissionsToAppTokenFields(permissions) + + // Extract and sort keys for deterministic ordering + keys := make([]string, 0, len(permissionFields)) + for key := range permissionFields { + keys = 
append(keys, key) + } + sort.Strings(keys) + + // Add permissions in sorted order + for _, key := range keys { + steps = append(steps, fmt.Sprintf(" %s: %s\n", key, permissionFields[key])) + } + } + + return steps +} + +// convertPermissionsToAppTokenFields converts job Permissions to permission-* action inputs +// This follows GitHub's recommendation for explicit permission control +// Note: This only includes permissions that are valid for GitHub App tokens. +// Some GitHub Actions permissions (like 'discussions', 'models') don't have +// corresponding GitHub App permissions and are skipped. +func convertPermissionsToAppTokenFields(permissions *Permissions) map[string]string { + fields := make(map[string]string) + + // Map GitHub Actions permissions to GitHub App permissions + // Only include permissions that exist in the actions/create-github-app-token action + // See: https://github.com/actions/create-github-app-token#permissions + + // Repository permissions that map directly + if level, ok := permissions.Get(PermissionActions); ok { + fields["permission-actions"] = string(level) + } + if level, ok := permissions.Get(PermissionChecks); ok { + fields["permission-checks"] = string(level) + } + if level, ok := permissions.Get(PermissionContents); ok { + fields["permission-contents"] = string(level) + } + if level, ok := permissions.Get(PermissionDeployments); ok { + fields["permission-deployments"] = string(level) + } + if level, ok := permissions.Get(PermissionIssues); ok { + fields["permission-issues"] = string(level) + } + if level, ok := permissions.Get(PermissionPackages); ok { + fields["permission-packages"] = string(level) + } + if level, ok := permissions.Get(PermissionPages); ok { + fields["permission-pages"] = string(level) + } + if level, ok := permissions.Get(PermissionPullRequests); ok { + fields["permission-pull-requests"] = string(level) + } + if level, ok := permissions.Get(PermissionSecurityEvents); ok { + fields["permission-security-events"] = 
string(level) + } + if level, ok := permissions.Get(PermissionStatuses); ok { + fields["permission-statuses"] = string(level) + } + if level, ok := permissions.Get(PermissionOrganizationProj); ok { + fields["permission-organization-projects"] = string(level) + } + if level, ok := permissions.Get(PermissionDiscussions); ok { + fields["permission-discussions"] = string(level) + } + + // Note: The following GitHub Actions permissions do NOT have GitHub App equivalents: + // - models (no GitHub App permission for this) + // - id-token (not applicable to GitHub Apps) + // - attestations (no GitHub App permission for this) + // - repository-projects (removed - classic projects are sunset; use organization-projects for Projects v2 via PAT/GitHub App) + + return fields +} + +// buildGitHubAppTokenInvalidationStep generates the step to invalidate the GitHub App token +// This step always runs (even on failure) to ensure tokens are properly cleaned up +// Only runs if a token was successfully minted +func (c *Compiler) buildGitHubAppTokenInvalidationStep() []string { + var steps []string + + steps = append(steps, " - name: Invalidate GitHub App token\n") + steps = append(steps, " if: always() && steps.safe-outputs-app-token.outputs.token != ''\n") + steps = append(steps, " env:\n") + steps = append(steps, " TOKEN: ${{ steps.safe-outputs-app-token.outputs.token }}\n") + steps = append(steps, " run: |\n") + steps = append(steps, " echo \"Revoking GitHub App installation token...\"\n") + steps = append(steps, " # GitHub CLI will auth with the token being revoked.\n") + steps = append(steps, " gh api \\\n") + steps = append(steps, " --method DELETE \\\n") + steps = append(steps, " -H \"Authorization: token $TOKEN\" \\\n") + steps = append(steps, " /installation/token || echo \"Token revoke may already be expired.\"\n") + steps = append(steps, " \n") + steps = append(steps, " echo \"Token invalidation step complete.\"\n") + + return steps +} + +// 
======================================== +// Activation Token Steps Generation +// ======================================== + +// buildActivationAppTokenMintStep generates the step to mint a GitHub App installation access token +// for use in the pre-activation (reaction) and activation (status comment) jobs. +func (c *Compiler) buildActivationAppTokenMintStep(app *GitHubAppConfig, permissions *Permissions) []string { + safeOutputsAppLog.Printf("Building activation GitHub App token mint step: owner=%s", app.Owner) + var steps []string + + steps = append(steps, " - name: Generate GitHub App token for activation\n") + steps = append(steps, " id: activation-app-token\n") + steps = append(steps, fmt.Sprintf(" uses: %s\n", GetActionPin("actions/create-github-app-token"))) + steps = append(steps, " with:\n") + steps = append(steps, fmt.Sprintf(" app-id: %s\n", app.AppID)) + steps = append(steps, fmt.Sprintf(" private-key: %s\n", app.PrivateKey)) + + // Add owner - default to current repository owner if not specified + owner := app.Owner + if owner == "" { + owner = "${{ github.repository_owner }}" + } + steps = append(steps, fmt.Sprintf(" owner: %s\n", owner)) + + // Default to current repository + steps = append(steps, " repositories: ${{ github.event.repository.name }}\n") + + // Always add github-api-url from environment variable + steps = append(steps, " github-api-url: ${{ github.api_url }}\n") + + // Add permission-* fields automatically computed from job permissions + if permissions != nil { + permissionFields := convertPermissionsToAppTokenFields(permissions) + + keys := make([]string, 0, len(permissionFields)) + for key := range permissionFields { + keys = append(keys, key) + } + sort.Strings(keys) + + for _, key := range keys { + steps = append(steps, fmt.Sprintf(" %s: %s\n", key, permissionFields[key])) + } + } + + return steps +} + +// resolveActivationToken returns the GitHub token to use for activation steps (reactions, status comments). 
+// Priority: GitHub App minted token > custom github-token > GITHUB_TOKEN (default) +// +// When returning the app token reference, callers MUST ensure that buildActivationAppTokenMintStep +// has already been called to generate the 'activation-app-token' step, since this function returns +// a reference to that step's output (${{ steps.activation-app-token.outputs.token }}). +func (c *Compiler) resolveActivationToken(data *WorkflowData) string { + if data.ActivationGitHubApp != nil { + return "${{ steps.activation-app-token.outputs.token }}" + } + if data.ActivationGitHubToken != "" { + return data.ActivationGitHubToken + } + return "${{ secrets.GITHUB_TOKEN }}" +} + +var safeOutputMessagesLog = logger.New("workflow:safe_outputs_config_messages") + +// ======================================== +// Safe Output Messages Configuration +// ======================================== + +// setStringFromMap reads m[key] and assigns its string value to *dest if found. +func setStringFromMap(m map[string]any, key string, dest *string) { + if val, exists := m[key]; exists { + if str, ok := val.(string); ok { + *dest = str + } + } +} + +// parseMessagesConfig parses the messages configuration from safe-outputs frontmatter +func parseMessagesConfig(messagesMap map[string]any) *SafeOutputMessagesConfig { + safeOutputMessagesLog.Printf("Parsing messages configuration with %d fields", len(messagesMap)) + config := &SafeOutputMessagesConfig{} + + if appendOnly, exists := messagesMap["append-only-comments"]; exists { + if appendOnlyBool, ok := appendOnly.(bool); ok { + config.AppendOnlyComments = appendOnlyBool + safeOutputMessagesLog.Printf("Set append-only-comments: %t", appendOnlyBool) + } + } + + setStringFromMap(messagesMap, "footer", &config.Footer) + setStringFromMap(messagesMap, "footer-install", &config.FooterInstall) + setStringFromMap(messagesMap, "footer-workflow-recompile", &config.FooterWorkflowRecompile) + setStringFromMap(messagesMap, 
"footer-workflow-recompile-comment", &config.FooterWorkflowRecompileComment) + setStringFromMap(messagesMap, "staged-title", &config.StagedTitle) + setStringFromMap(messagesMap, "staged-description", &config.StagedDescription) + setStringFromMap(messagesMap, "run-started", &config.RunStarted) + setStringFromMap(messagesMap, "run-success", &config.RunSuccess) + setStringFromMap(messagesMap, "run-failure", &config.RunFailure) + setStringFromMap(messagesMap, "detection-failure", &config.DetectionFailure) + setStringFromMap(messagesMap, "pull-request-created", &config.PullRequestCreated) + setStringFromMap(messagesMap, "issue-created", &config.IssueCreated) + setStringFromMap(messagesMap, "commit-pushed", &config.CommitPushed) + setStringFromMap(messagesMap, "agent-failure-issue", &config.AgentFailureIssue) + setStringFromMap(messagesMap, "agent-failure-comment", &config.AgentFailureComment) + + return config +} + +// parseMentionsConfig parses the mentions configuration from safe-outputs frontmatter +// Mentions can be: +// - false: always escapes mentions +// - true: always allows mentions (error in strict mode) +// - object: detailed configuration with allow-team-members, allow-context, allowed, max +func parseMentionsConfig(mentions any) *MentionsConfig { + safeOutputMessagesLog.Printf("Parsing mentions configuration: type=%T", mentions) + config := &MentionsConfig{} + + // Handle boolean value + if boolVal, ok := mentions.(bool); ok { + config.Enabled = &boolVal + safeOutputMessagesLog.Printf("Mentions configured as boolean: %t", boolVal) + return config + } + + // Handle object configuration + if mentionsMap, ok := mentions.(map[string]any); ok { + // Parse allow-team-members + if allowTeamMembers, exists := mentionsMap["allow-team-members"]; exists { + if val, ok := allowTeamMembers.(bool); ok { + config.AllowTeamMembers = &val + } + } + + // Parse allow-context + if allowContext, exists := mentionsMap["allow-context"]; exists { + if val, ok := 
allowContext.(bool); ok { + config.AllowContext = &val + } + } + + // Parse allowed list + if allowed, exists := mentionsMap["allowed"]; exists { + if allowedArray, ok := allowed.([]any); ok { + var allowedStrings []string + for _, item := range allowedArray { + if str, ok := item.(string); ok { + // Normalize username by removing '@' prefix if present + normalized := str + if len(str) > 0 && str[0] == '@' { + normalized = str[1:] + safeOutputMessagesLog.Printf("Normalized mention '%s' to '%s'", str, normalized) + } + allowedStrings = append(allowedStrings, normalized) + } + } + config.Allowed = allowedStrings + } + } + + // Parse max + if maxVal, exists := mentionsMap["max"]; exists { + switch v := maxVal.(type) { + case int: + if v >= 1 { + config.Max = &v + } + case int64: + intVal := int(v) + if intVal >= 1 { + config.Max = &intVal + } + case uint64: + intVal := int(v) + if intVal >= 1 { + config.Max = &intVal + } + case float64: + intVal := int(v) + // Warn if truncation occurs + if v != float64(intVal) { + safeOutputsConfigLog.Printf("mentions.max: float value %.2f truncated to integer %d", v, intVal) + } + if intVal >= 1 { + config.Max = &intVal + } + } + } + } + + return config +} + +// serializeMessagesConfig converts SafeOutputMessagesConfig to JSON for passing as environment variable +func serializeMessagesConfig(messages *SafeOutputMessagesConfig) (string, error) { + if messages == nil { + return "", nil + } + safeOutputMessagesLog.Print("Serializing messages configuration to JSON") + jsonBytes, err := json.Marshal(messages) + if err != nil { + safeOutputMessagesLog.Printf("Failed to serialize messages config: %v", err) + return "", fmt.Errorf("failed to serialize messages config: %w", err) + } + safeOutputMessagesLog.Printf("Serialized messages config: %d bytes", len(jsonBytes)) + return string(jsonBytes), nil +} diff --git a/pkg/workflow/safe_outputs_config_generation.go b/pkg/workflow/safe_outputs_config_generation.go deleted file mode 100644 index 
f0eadfd7b5b..00000000000 --- a/pkg/workflow/safe_outputs_config_generation.go +++ /dev/null @@ -1,599 +0,0 @@ -package workflow - -import ( - "encoding/json" - "fmt" - "sort" - "strconv" - "strings" -) - -// ======================================== -// Safe Output Configuration Generation -// ======================================== - -// generateCustomJobToolDefinition creates an MCP tool definition for a custom safe-output job -// Returns a map representing the tool definition in MCP format with name, description, and inputSchema -func generateCustomJobToolDefinition(jobName string, jobConfig *SafeJobConfig) map[string]any { - safeOutputsConfigLog.Printf("Generating tool definition for custom job: %s", jobName) - - // Build the tool definition - tool := map[string]any{ - "name": jobName, - } - - // Add description if present - if jobConfig.Description != "" { - tool["description"] = jobConfig.Description - } else { - // Provide a default description if none is specified - tool["description"] = fmt.Sprintf("Execute the %s custom job", jobName) - } - - // Build the input schema - inputSchema := map[string]any{ - "type": "object", - "properties": make(map[string]any), - } - - // Track required fields - var requiredFields []string - - // Add each input to the schema - if len(jobConfig.Inputs) > 0 { - properties := inputSchema["properties"].(map[string]any) - - for inputName, inputDef := range jobConfig.Inputs { - property := map[string]any{} - - // Add description - if inputDef.Description != "" { - property["description"] = inputDef.Description - } - - // Convert type to JSON Schema type - switch inputDef.Type { - case "choice": - // Choice inputs are strings with enum constraints - property["type"] = "string" - if len(inputDef.Options) > 0 { - property["enum"] = inputDef.Options - } - case "boolean": - property["type"] = "boolean" - case "number": - property["type"] = "number" - case "string", "": - // Default to string if type is not specified - property["type"] = 
"string" - default: - // For any unknown type, default to string - property["type"] = "string" - } - - // Add default value if present - if inputDef.Default != nil { - property["default"] = inputDef.Default - } - - // Track required fields - if inputDef.Required { - requiredFields = append(requiredFields, inputName) - } - - properties[inputName] = property - } - } - - // Add required fields array if any inputs are required - if len(requiredFields) > 0 { - sort.Strings(requiredFields) - inputSchema["required"] = requiredFields - } - - // Prevent additional properties to maintain schema strictness - inputSchema["additionalProperties"] = false - - tool["inputSchema"] = inputSchema - - safeOutputsConfigLog.Printf("Generated tool definition for %s with %d inputs, %d required", - jobName, len(jobConfig.Inputs), len(requiredFields)) - - return tool -} - -func populateDispatchWorkflowFiles(data *WorkflowData, markdownPath string) { - if data.SafeOutputs == nil || data.SafeOutputs.DispatchWorkflow == nil { - return - } - - if len(data.SafeOutputs.DispatchWorkflow.Workflows) == 0 { - return - } - - safeOutputsConfigLog.Printf("Populating workflow files for %d dispatch workflows", len(data.SafeOutputs.DispatchWorkflow.Workflows)) - - // Initialize WorkflowFiles map if not already initialized - if data.SafeOutputs.DispatchWorkflow.WorkflowFiles == nil { - data.SafeOutputs.DispatchWorkflow.WorkflowFiles = make(map[string]string) - } - - for _, workflowName := range data.SafeOutputs.DispatchWorkflow.Workflows { - // Find the workflow file - fileResult, err := findWorkflowFile(workflowName, markdownPath) - if err != nil { - safeOutputsConfigLog.Printf("Warning: error finding workflow %s: %v", workflowName, err) - continue - } - - // Determine which file to use - priority: .lock.yml > .yml - var extension string - if fileResult.lockExists { - extension = ".lock.yml" - } else if fileResult.ymlExists { - extension = ".yml" - } else { - safeOutputsConfigLog.Printf("Warning: workflow 
file not found for %s (only .md exists, needs compilation)", workflowName) - continue - } - - // Store the file extension for runtime use - data.SafeOutputs.DispatchWorkflow.WorkflowFiles[workflowName] = extension - safeOutputsConfigLog.Printf("Mapped workflow %s to extension %s", workflowName, extension) - } -} - -func generateSafeOutputsConfig(data *WorkflowData) string { - // Pass the safe-outputs configuration for validation - if data.SafeOutputs == nil { - safeOutputsConfigLog.Print("No safe outputs configuration found, returning empty config") - return "" - } - safeOutputsConfigLog.Print("Generating safe outputs configuration for workflow") - // Create a simplified config object for validation - safeOutputsConfig := make(map[string]any) - - // Handle safe-outputs configuration if present - if data.SafeOutputs != nil { - if data.SafeOutputs.CreateIssues != nil { - config := generateMaxWithAllowedLabelsConfig( - data.SafeOutputs.CreateIssues.Max, - 1, // default max - data.SafeOutputs.CreateIssues.AllowedLabels, - ) - // Add group flag if enabled - if data.SafeOutputs.CreateIssues.Group != nil && *data.SafeOutputs.CreateIssues.Group == "true" { - config["group"] = true - } - // Add expires value if set (0 means explicitly disabled or not set) - if data.SafeOutputs.CreateIssues.Expires > 0 { - config["expires"] = data.SafeOutputs.CreateIssues.Expires - } - safeOutputsConfig["create_issue"] = config - } - if data.SafeOutputs.CreateAgentSessions != nil { - safeOutputsConfig["create_agent_session"] = generateMaxConfig( - data.SafeOutputs.CreateAgentSessions.Max, - 1, // default max - ) - } - if data.SafeOutputs.AddComments != nil { - additionalFields := make(map[string]any) - // Note: AddCommentsConfig has Target, TargetRepoSlug, AllowedRepos but not embedded SafeOutputTargetConfig - // So we need to construct the target config manually - targetConfig := SafeOutputTargetConfig{ - Target: data.SafeOutputs.AddComments.Target, - TargetRepoSlug: 
data.SafeOutputs.AddComments.TargetRepoSlug, - AllowedRepos: data.SafeOutputs.AddComments.AllowedRepos, - } - safeOutputsConfig["add_comment"] = generateTargetConfigWithRepos( - targetConfig, - data.SafeOutputs.AddComments.Max, - 1, // default max - additionalFields, - ) - } - if data.SafeOutputs.CreateDiscussions != nil { - config := generateMaxWithAllowedLabelsConfig( - data.SafeOutputs.CreateDiscussions.Max, - 1, // default max - data.SafeOutputs.CreateDiscussions.AllowedLabels, - ) - // Add expires value if set (0 means explicitly disabled or not set) - if data.SafeOutputs.CreateDiscussions.Expires > 0 { - config["expires"] = data.SafeOutputs.CreateDiscussions.Expires - } - safeOutputsConfig["create_discussion"] = config - } - if data.SafeOutputs.CloseDiscussions != nil { - safeOutputsConfig["close_discussion"] = generateMaxWithDiscussionFieldsConfig( - data.SafeOutputs.CloseDiscussions.Max, - 1, // default max - data.SafeOutputs.CloseDiscussions.RequiredCategory, - data.SafeOutputs.CloseDiscussions.RequiredLabels, - data.SafeOutputs.CloseDiscussions.RequiredTitlePrefix, - ) - } - if data.SafeOutputs.CloseIssues != nil { - additionalFields := make(map[string]any) - if len(data.SafeOutputs.CloseIssues.RequiredLabels) > 0 { - additionalFields["required_labels"] = data.SafeOutputs.CloseIssues.RequiredLabels - } - if data.SafeOutputs.CloseIssues.RequiredTitlePrefix != "" { - additionalFields["required_title_prefix"] = data.SafeOutputs.CloseIssues.RequiredTitlePrefix - } - safeOutputsConfig["close_issue"] = generateTargetConfigWithRepos( - data.SafeOutputs.CloseIssues.SafeOutputTargetConfig, - data.SafeOutputs.CloseIssues.Max, - 1, // default max - additionalFields, - ) - } - if data.SafeOutputs.CreatePullRequests != nil { - safeOutputsConfig["create_pull_request"] = generatePullRequestConfig( - data.SafeOutputs.CreatePullRequests, - 1, // default max - ) - } - if data.SafeOutputs.CreatePullRequestReviewComments != nil { - 
safeOutputsConfig["create_pull_request_review_comment"] = generateMaxConfig( - data.SafeOutputs.CreatePullRequestReviewComments.Max, - 10, // default max - ) - } - if data.SafeOutputs.SubmitPullRequestReview != nil { - safeOutputsConfig["submit_pull_request_review"] = generateMaxConfig( - data.SafeOutputs.SubmitPullRequestReview.Max, - 1, // default max - ) - } - if data.SafeOutputs.ResolvePullRequestReviewThread != nil { - safeOutputsConfig["resolve_pull_request_review_thread"] = generateMaxConfig( - data.SafeOutputs.ResolvePullRequestReviewThread.Max, - 10, // default max - ) - } - if data.SafeOutputs.CreateCodeScanningAlerts != nil { - safeOutputsConfig["create_code_scanning_alert"] = generateMaxConfig( - data.SafeOutputs.CreateCodeScanningAlerts.Max, - 0, // default: unlimited - ) - } - if data.SafeOutputs.AutofixCodeScanningAlert != nil { - safeOutputsConfig["autofix_code_scanning_alert"] = generateMaxConfig( - data.SafeOutputs.AutofixCodeScanningAlert.Max, - 10, // default max - ) - } - if data.SafeOutputs.AddLabels != nil { - additionalFields := make(map[string]any) - if len(data.SafeOutputs.AddLabels.Allowed) > 0 { - additionalFields["allowed"] = data.SafeOutputs.AddLabels.Allowed - } - if len(data.SafeOutputs.AddLabels.Blocked) > 0 { - additionalFields["blocked"] = data.SafeOutputs.AddLabels.Blocked - } - safeOutputsConfig["add_labels"] = generateTargetConfigWithRepos( - data.SafeOutputs.AddLabels.SafeOutputTargetConfig, - data.SafeOutputs.AddLabels.Max, - 3, // default max - additionalFields, - ) - } - if data.SafeOutputs.RemoveLabels != nil { - safeOutputsConfig["remove_labels"] = generateMaxWithAllowedConfig( - data.SafeOutputs.RemoveLabels.Max, - 3, // default max - data.SafeOutputs.RemoveLabels.Allowed, - ) - } - if data.SafeOutputs.AddReviewer != nil { - safeOutputsConfig["add_reviewer"] = generateMaxWithReviewersConfig( - data.SafeOutputs.AddReviewer.Max, - 3, // default max - data.SafeOutputs.AddReviewer.Reviewers, - ) - } - if 
data.SafeOutputs.AssignMilestone != nil { - safeOutputsConfig["assign_milestone"] = generateMaxWithAllowedConfig( - data.SafeOutputs.AssignMilestone.Max, - 1, // default max - data.SafeOutputs.AssignMilestone.Allowed, - ) - } - if data.SafeOutputs.AssignToAgent != nil { - safeOutputsConfig["assign_to_agent"] = generateAssignToAgentConfig( - data.SafeOutputs.AssignToAgent.Max, - 1, // default max - data.SafeOutputs.AssignToAgent.DefaultAgent, - data.SafeOutputs.AssignToAgent.Target, - data.SafeOutputs.AssignToAgent.Allowed, - ) - } - if data.SafeOutputs.AssignToUser != nil { - safeOutputsConfig["assign_to_user"] = generateMaxWithAllowedAndBlockedConfig( - data.SafeOutputs.AssignToUser.Max, - 1, // default max - data.SafeOutputs.AssignToUser.Allowed, - data.SafeOutputs.AssignToUser.Blocked, - ) - } - if data.SafeOutputs.UnassignFromUser != nil { - safeOutputsConfig["unassign_from_user"] = generateMaxWithAllowedAndBlockedConfig( - data.SafeOutputs.UnassignFromUser.Max, - 1, // default max - data.SafeOutputs.UnassignFromUser.Allowed, - data.SafeOutputs.UnassignFromUser.Blocked, - ) - } - if data.SafeOutputs.UpdateIssues != nil { - safeOutputsConfig["update_issue"] = generateMaxConfig( - data.SafeOutputs.UpdateIssues.Max, - 1, // default max - ) - } - if data.SafeOutputs.UpdateDiscussions != nil { - safeOutputsConfig["update_discussion"] = generateMaxWithAllowedLabelsConfig( - data.SafeOutputs.UpdateDiscussions.Max, - 1, // default max - data.SafeOutputs.UpdateDiscussions.AllowedLabels, - ) - } - if data.SafeOutputs.UpdatePullRequests != nil { - safeOutputsConfig["update_pull_request"] = generateMaxConfig( - data.SafeOutputs.UpdatePullRequests.Max, - 1, // default max - ) - } - if data.SafeOutputs.MarkPullRequestAsReadyForReview != nil { - safeOutputsConfig["mark_pull_request_as_ready_for_review"] = generateMaxConfig( - data.SafeOutputs.MarkPullRequestAsReadyForReview.Max, - 10, // default max - ) - } - if data.SafeOutputs.PushToPullRequestBranch != nil { - 
safeOutputsConfig["push_to_pull_request_branch"] = generateMaxWithTargetConfig( - data.SafeOutputs.PushToPullRequestBranch.Max, - 0, // default: unlimited - data.SafeOutputs.PushToPullRequestBranch.Target, - ) - } - if data.SafeOutputs.UploadAssets != nil { - safeOutputsConfig["upload_asset"] = generateMaxConfig( - data.SafeOutputs.UploadAssets.Max, - 0, // default: unlimited - ) - } - if data.SafeOutputs.MissingTool != nil { - // Generate config for missing_tool with issue creation support - missingToolConfig := make(map[string]any) - - // Add max if set - if data.SafeOutputs.MissingTool.Max != nil { - missingToolConfig["max"] = resolveMaxForConfig(data.SafeOutputs.MissingTool.Max, 0) - } - - // Add issue creation config if enabled - if data.SafeOutputs.MissingTool.CreateIssue { - createIssueConfig := make(map[string]any) - createIssueConfig["max"] = 1 // Only create one issue per workflow run - - if data.SafeOutputs.MissingTool.TitlePrefix != "" { - createIssueConfig["title_prefix"] = data.SafeOutputs.MissingTool.TitlePrefix - } - - if len(data.SafeOutputs.MissingTool.Labels) > 0 { - createIssueConfig["labels"] = data.SafeOutputs.MissingTool.Labels - } - - safeOutputsConfig["create_missing_tool_issue"] = createIssueConfig - } - - safeOutputsConfig["missing_tool"] = missingToolConfig - } - if data.SafeOutputs.MissingData != nil { - // Generate config for missing_data with issue creation support - missingDataConfig := make(map[string]any) - - // Add max if set - if data.SafeOutputs.MissingData.Max != nil { - missingDataConfig["max"] = resolveMaxForConfig(data.SafeOutputs.MissingData.Max, 0) - } - - // Add issue creation config if enabled - if data.SafeOutputs.MissingData.CreateIssue { - createIssueConfig := make(map[string]any) - createIssueConfig["max"] = 1 // Only create one issue per workflow run - - if data.SafeOutputs.MissingData.TitlePrefix != "" { - createIssueConfig["title_prefix"] = data.SafeOutputs.MissingData.TitlePrefix - } - - if 
len(data.SafeOutputs.MissingData.Labels) > 0 { - createIssueConfig["labels"] = data.SafeOutputs.MissingData.Labels - } - - safeOutputsConfig["create_missing_data_issue"] = createIssueConfig - } - - safeOutputsConfig["missing_data"] = missingDataConfig - } - if data.SafeOutputs.UpdateProjects != nil { - safeOutputsConfig["update_project"] = generateMaxConfig( - data.SafeOutputs.UpdateProjects.Max, - 10, // default max - ) - } - if data.SafeOutputs.CreateProjectStatusUpdates != nil { - safeOutputsConfig["create_project_status_update"] = generateMaxConfig( - data.SafeOutputs.CreateProjectStatusUpdates.Max, - 10, // default max - ) - } - if data.SafeOutputs.CreateProjects != nil { - config := generateMaxConfig( - data.SafeOutputs.CreateProjects.Max, - 1, // default max - ) - // Add target-owner if specified - if data.SafeOutputs.CreateProjects.TargetOwner != "" { - config["target_owner"] = data.SafeOutputs.CreateProjects.TargetOwner - } - // Add title-prefix if specified - if data.SafeOutputs.CreateProjects.TitlePrefix != "" { - config["title_prefix"] = data.SafeOutputs.CreateProjects.TitlePrefix - } - safeOutputsConfig["create_project"] = config - } - if data.SafeOutputs.UpdateRelease != nil { - safeOutputsConfig["update_release"] = generateMaxConfig( - data.SafeOutputs.UpdateRelease.Max, - 1, // default max - ) - } - if data.SafeOutputs.LinkSubIssue != nil { - safeOutputsConfig["link_sub_issue"] = generateMaxConfig( - data.SafeOutputs.LinkSubIssue.Max, - 5, // default max - ) - } - if data.SafeOutputs.NoOp != nil { - safeOutputsConfig["noop"] = generateMaxConfig( - data.SafeOutputs.NoOp.Max, - 1, // default max - ) - } - if data.SafeOutputs.HideComment != nil { - safeOutputsConfig["hide_comment"] = generateHideCommentConfig( - data.SafeOutputs.HideComment.Max, - 5, // default max - data.SafeOutputs.HideComment.AllowedReasons, - ) - } - if data.SafeOutputs.SetIssueType != nil { - additionalFields := make(map[string]any) - if len(data.SafeOutputs.SetIssueType.Allowed) 
> 0 { - additionalFields["allowed"] = data.SafeOutputs.SetIssueType.Allowed - } - safeOutputsConfig["set_issue_type"] = generateTargetConfigWithRepos( - data.SafeOutputs.SetIssueType.SafeOutputTargetConfig, - data.SafeOutputs.SetIssueType.Max, - 5, // default max - additionalFields, - ) - } - } - - // Add safe-jobs configuration from SafeOutputs.Jobs - if len(data.SafeOutputs.Jobs) > 0 { - safeOutputsConfigLog.Printf("Processing %d safe job configurations", len(data.SafeOutputs.Jobs)) - for jobName, jobConfig := range data.SafeOutputs.Jobs { - safeOutputsConfigLog.Printf("Generating config for safe job: %s", jobName) - safeJobConfig := map[string]any{} - - // Add description if present - if jobConfig.Description != "" { - safeJobConfig["description"] = jobConfig.Description - } - - // Add output if present - if jobConfig.Output != "" { - safeJobConfig["output"] = jobConfig.Output - } - - // Add inputs information - if len(jobConfig.Inputs) > 0 { - inputsConfig := make(map[string]any) - for inputName, inputDef := range jobConfig.Inputs { - inputConfig := map[string]any{ - "type": inputDef.Type, - "description": inputDef.Description, - "required": inputDef.Required, - } - if inputDef.Default != "" { - inputConfig["default"] = inputDef.Default - } - if len(inputDef.Options) > 0 { - inputConfig["options"] = inputDef.Options - } - inputsConfig[inputName] = inputConfig - } - safeJobConfig["inputs"] = inputsConfig - } - - safeOutputsConfig[jobName] = safeJobConfig - } - } - - // Add mentions configuration - if data.SafeOutputs.Mentions != nil { - mentionsConfig := make(map[string]any) - - // Handle enabled flag (simple boolean mode) - if data.SafeOutputs.Mentions.Enabled != nil { - mentionsConfig["enabled"] = *data.SafeOutputs.Mentions.Enabled - } - - // Handle allow-team-members - if data.SafeOutputs.Mentions.AllowTeamMembers != nil { - mentionsConfig["allowTeamMembers"] = *data.SafeOutputs.Mentions.AllowTeamMembers - } - - // Handle allow-context - if 
data.SafeOutputs.Mentions.AllowContext != nil { - mentionsConfig["allowContext"] = *data.SafeOutputs.Mentions.AllowContext - } - - // Handle allowed list - if len(data.SafeOutputs.Mentions.Allowed) > 0 { - mentionsConfig["allowed"] = data.SafeOutputs.Mentions.Allowed - } - - // Handle max - if data.SafeOutputs.Mentions.Max != nil { - mentionsConfig["max"] = *data.SafeOutputs.Mentions.Max - } - - // Only add mentions config if it has any fields - if len(mentionsConfig) > 0 { - safeOutputsConfig["mentions"] = mentionsConfig - } - } - - // Add dispatch-workflow configuration - if data.SafeOutputs.DispatchWorkflow != nil { - dispatchWorkflowConfig := map[string]any{} - - // Include workflows list - if len(data.SafeOutputs.DispatchWorkflow.Workflows) > 0 { - dispatchWorkflowConfig["workflows"] = data.SafeOutputs.DispatchWorkflow.Workflows - } - - // Include workflow files mapping (file extension for each workflow) - if len(data.SafeOutputs.DispatchWorkflow.WorkflowFiles) > 0 { - dispatchWorkflowConfig["workflow_files"] = data.SafeOutputs.DispatchWorkflow.WorkflowFiles - } - - // Include max count - dispatchWorkflowConfig["max"] = resolveMaxForConfig(data.SafeOutputs.DispatchWorkflow.Max, 1) - - // Only add if it has fields - if len(dispatchWorkflowConfig) > 0 { - safeOutputsConfig["dispatch_workflow"] = dispatchWorkflowConfig - } - } - - // Add max-bot-mentions if set (templatable integer) - if data.SafeOutputs.MaxBotMentions != nil { - v := *data.SafeOutputs.MaxBotMentions - if n, err := strconv.Atoi(v); err == nil && n > 0 { - safeOutputsConfig["max_bot_mentions"] = n - } else if strings.HasPrefix(v, "${{") { - safeOutputsConfig["max_bot_mentions"] = v - } - } - - configJSON, _ := json.Marshal(safeOutputsConfig) - safeOutputsConfigLog.Printf("Safe outputs config generation complete: %d tool types configured", len(safeOutputsConfig)) - return string(configJSON) -} diff --git a/pkg/workflow/safe_outputs_config_generation_helpers.go 
b/pkg/workflow/safe_outputs_config_generation_helpers.go deleted file mode 100644 index 143259d614f..00000000000 --- a/pkg/workflow/safe_outputs_config_generation_helpers.go +++ /dev/null @@ -1,221 +0,0 @@ -package workflow - -import ( - "maps" - "strings" - - "github.com/github/gh-aw/pkg/logger" -) - -var safeOutputsConfigGenLog = logger.New("workflow:safe_outputs_config_generation_helpers") - -// ======================================== -// Safe Output Configuration Generation Helpers -// ======================================== -// -// This file contains helper functions to reduce duplication in safe output -// configuration generation. These helpers extract common patterns for: -// - Generating max value configs with defaults -// - Generating configs with allowed fields (labels, repos, etc.) -// - Generating configs with optional target fields -// -// The goal is to make generateSafeOutputsConfig more maintainable by -// extracting repetitive code patterns into reusable functions. - -// resolveMaxForConfig resolves a templatable max *string to a config value. -// For expression strings (e.g. "${{ inputs.max }}"), the expression is stored -// as-is so GitHub Actions can resolve it at runtime. -// For literal numeric strings, the parsed integer is used. -// Falls back to defaultMax if max is nil or zero. 
-func resolveMaxForConfig(max *string, defaultMax int) any { - if max != nil { - v := *max - if strings.HasPrefix(v, "${{") { - return v // expression: evaluated at runtime by GitHub Actions - } - if n := templatableIntValue(max); n > 0 { - return n - } - } - return defaultMax -} - -// generateMaxConfig creates a simple config map with just a max value -func generateMaxConfig(max *string, defaultMax int) map[string]any { - config := make(map[string]any) - config["max"] = resolveMaxForConfig(max, defaultMax) - return config -} - -// generateMaxWithAllowedLabelsConfig creates a config with max and optional allowed_labels -func generateMaxWithAllowedLabelsConfig(max *string, defaultMax int, allowedLabels []string) map[string]any { - config := generateMaxConfig(max, defaultMax) - if len(allowedLabels) > 0 { - config["allowed_labels"] = allowedLabels - } - return config -} - -// generateMaxWithTargetConfig creates a config with max and optional target field -func generateMaxWithTargetConfig(max *string, defaultMax int, target string) map[string]any { - config := make(map[string]any) - if target != "" { - config["target"] = target - } - config["max"] = resolveMaxForConfig(max, defaultMax) - return config -} - -// generateMaxWithAllowedConfig creates a config with max and optional allowed list -func generateMaxWithAllowedConfig(max *string, defaultMax int, allowed []string) map[string]any { - config := generateMaxConfig(max, defaultMax) - if len(allowed) > 0 { - config["allowed"] = allowed - } - return config -} - -// generateMaxWithAllowedAndBlockedConfig creates a config with max, optional allowed list, and optional blocked list -func generateMaxWithAllowedAndBlockedConfig(max *string, defaultMax int, allowed []string, blocked []string) map[string]any { - config := generateMaxConfig(max, defaultMax) - if len(allowed) > 0 { - config["allowed"] = allowed - } - if len(blocked) > 0 { - config["blocked"] = blocked - } - return config -} - -// 
generateMaxWithDiscussionFieldsConfig creates a config with discussion-specific filter fields -func generateMaxWithDiscussionFieldsConfig(max *string, defaultMax int, requiredCategory string, requiredLabels []string, requiredTitlePrefix string) map[string]any { - config := generateMaxConfig(max, defaultMax) - if requiredCategory != "" { - config["required_category"] = requiredCategory - } - if len(requiredLabels) > 0 { - config["required_labels"] = requiredLabels - } - if requiredTitlePrefix != "" { - config["required_title_prefix"] = requiredTitlePrefix - } - return config -} - -// generateMaxWithReviewersConfig creates a config with max and optional reviewers list -func generateMaxWithReviewersConfig(max *string, defaultMax int, reviewers []string) map[string]any { - config := generateMaxConfig(max, defaultMax) - if len(reviewers) > 0 { - config["reviewers"] = reviewers - } - return config -} - -// generateAssignToAgentConfig creates a config with optional max, default_agent, target, and allowed -func generateAssignToAgentConfig(max *string, defaultMax int, defaultAgent string, target string, allowed []string) map[string]any { - if safeOutputsConfigGenLog.Enabled() { - safeOutputsConfigGenLog.Printf("Generating assign-to-agent config: max=%v, defaultMax=%d, defaultAgent=%s, target=%s, allowed_count=%d", - max, defaultMax, defaultAgent, target, len(allowed)) - } - config := make(map[string]any) - config["max"] = resolveMaxForConfig(max, defaultMax) - if defaultAgent != "" { - config["default_agent"] = defaultAgent - } - if target != "" { - config["target"] = target - } - if len(allowed) > 0 { - config["allowed"] = allowed - } - return config -} - -// generatePullRequestConfig creates a config with all pull request fields including target-repo, -// allowed_repos, base_branch, draft, reviewers, title_prefix, fallback_as_issue, and more. 
-func generatePullRequestConfig(prConfig *CreatePullRequestsConfig, defaultMax int) map[string]any { - safeOutputsConfigGenLog.Printf("Generating pull request config: max=%v, allowEmpty=%v, autoMerge=%v, expires=%d, labels_count=%d, targetRepo=%s", - prConfig.Max, prConfig.AllowEmpty, prConfig.AutoMerge, prConfig.Expires, len(prConfig.AllowedLabels), prConfig.TargetRepoSlug) - - additionalFields := make(map[string]any) - if len(prConfig.AllowedLabels) > 0 { - additionalFields["allowed_labels"] = prConfig.AllowedLabels - } - // Pass allow_empty flag to MCP server so it can skip patch generation - if prConfig.AllowEmpty != nil && *prConfig.AllowEmpty == "true" { - additionalFields["allow_empty"] = true - } - // Pass auto_merge flag to enable auto-merge for the pull request - if prConfig.AutoMerge != nil && *prConfig.AutoMerge == "true" { - additionalFields["auto_merge"] = true - } - // Pass expires to configure pull request expiration - if prConfig.Expires > 0 { - additionalFields["expires"] = prConfig.Expires - } - // Pass base_branch to configure the base branch for the pull request - if prConfig.BaseBranch != "" { - additionalFields["base_branch"] = prConfig.BaseBranch - } - // Pass draft flag to create the pull request as a draft - if prConfig.Draft != nil && *prConfig.Draft == "true" { - additionalFields["draft"] = true - } - // Pass reviewers to assign reviewers to the pull request - if len(prConfig.Reviewers) > 0 { - additionalFields["reviewers"] = prConfig.Reviewers - } - // Pass title_prefix to prepend to pull request titles - if prConfig.TitlePrefix != "" { - additionalFields["title_prefix"] = prConfig.TitlePrefix - } - // Pass fallback_as_issue if explicitly configured - if prConfig.FallbackAsIssue != nil { - additionalFields["fallback_as_issue"] = *prConfig.FallbackAsIssue - } - - // Use generateTargetConfigWithRepos to include target-repo and allowed_repos - targetConfig := SafeOutputTargetConfig{ - TargetRepoSlug: prConfig.TargetRepoSlug, - 
AllowedRepos: prConfig.AllowedRepos, - } - return generateTargetConfigWithRepos(targetConfig, prConfig.Max, defaultMax, additionalFields) -} - -// generateHideCommentConfig creates a config with max and optional allowed_reasons -func generateHideCommentConfig(max *string, defaultMax int, allowedReasons []string) map[string]any { - config := generateMaxConfig(max, defaultMax) - if len(allowedReasons) > 0 { - config["allowed_reasons"] = allowedReasons - } - return config -} - -// generateTargetConfigWithRepos creates a config with target, target-repo, allowed_repos, and optional fields. -// Note on naming conventions: -// - "target-repo" uses hyphen to match frontmatter YAML format (key in config.json) -// - "allowed_repos" uses underscore to match JavaScript handler expectations (see repo_helpers.cjs) -// This inconsistency is intentional to maintain compatibility with existing handler code. -func generateTargetConfigWithRepos(targetConfig SafeOutputTargetConfig, max *string, defaultMax int, additionalFields map[string]any) map[string]any { - config := generateMaxConfig(max, defaultMax) - - // Add target if specified - if targetConfig.Target != "" { - config["target"] = targetConfig.Target - } - - // Add target-repo if specified (use hyphenated key for consistency with frontmatter) - if targetConfig.TargetRepoSlug != "" { - config["target-repo"] = targetConfig.TargetRepoSlug - } - - // Add allowed_repos if specified (use underscore for consistency with handler code) - if len(targetConfig.AllowedRepos) > 0 { - config["allowed_repos"] = targetConfig.AllowedRepos - } - - // Add any additional fields - maps.Copy(config, additionalFields) - - return config -} diff --git a/pkg/workflow/safe_outputs_config_helpers.go b/pkg/workflow/safe_outputs_config_helpers.go deleted file mode 100644 index d21d83fca6c..00000000000 --- a/pkg/workflow/safe_outputs_config_helpers.go +++ /dev/null @@ -1,233 +0,0 @@ -package workflow - -import ( - "fmt" - "reflect" - "sort" - - 
"github.com/github/gh-aw/pkg/constants" - "github.com/github/gh-aw/pkg/logger" -) - -// ======================================== -// Safe Output Configuration Helpers -// ======================================== - -var safeOutputReflectionLog = logger.New("workflow:safe_outputs_config_helpers_reflection") - -// safeOutputFieldMapping maps struct field names to their tool names -var safeOutputFieldMapping = map[string]string{ - "CreateIssues": "create_issue", - "CreateAgentSessions": "create_agent_session", - "CreateDiscussions": "create_discussion", - "UpdateDiscussions": "update_discussion", - "CloseDiscussions": "close_discussion", - "CloseIssues": "close_issue", - "ClosePullRequests": "close_pull_request", - "AddComments": "add_comment", - "CreatePullRequests": "create_pull_request", - "CreatePullRequestReviewComments": "create_pull_request_review_comment", - "SubmitPullRequestReview": "submit_pull_request_review", - "ReplyToPullRequestReviewComment": "reply_to_pull_request_review_comment", - "ResolvePullRequestReviewThread": "resolve_pull_request_review_thread", - "CreateCodeScanningAlerts": "create_code_scanning_alert", - "AddLabels": "add_labels", - "RemoveLabels": "remove_labels", - "AddReviewer": "add_reviewer", - "AssignMilestone": "assign_milestone", - "AssignToAgent": "assign_to_agent", - "AssignToUser": "assign_to_user", - "UpdateIssues": "update_issue", - "UpdatePullRequests": "update_pull_request", - "PushToPullRequestBranch": "push_to_pull_request_branch", - "UploadAssets": "upload_asset", - "UpdateRelease": "update_release", - "UpdateProjects": "update_project", - "CreateProjects": "create_project", - "CreateProjectStatusUpdates": "create_project_status_update", - "LinkSubIssue": "link_sub_issue", - "HideComment": "hide_comment", - "DispatchWorkflow": "dispatch_workflow", - "MissingTool": "missing_tool", - "NoOp": "noop", - "MarkPullRequestAsReadyForReview": "mark_pull_request_as_ready_for_review", -} - -// hasAnySafeOutputEnabled uses reflection to 
check if any safe output field is non-nil -func hasAnySafeOutputEnabled(safeOutputs *SafeOutputsConfig) bool { - if safeOutputs == nil { - return false - } - - safeOutputReflectionLog.Print("Checking if any safe outputs are enabled using reflection") - - // Check Jobs separately as it's a map - if len(safeOutputs.Jobs) > 0 { - safeOutputReflectionLog.Printf("Found %d custom jobs enabled", len(safeOutputs.Jobs)) - return true - } - - // Use reflection to check all pointer fields - val := reflect.ValueOf(safeOutputs).Elem() - for fieldName := range safeOutputFieldMapping { - field := val.FieldByName(fieldName) - if field.IsValid() && !field.IsNil() { - safeOutputReflectionLog.Printf("Found enabled safe output field: %s", fieldName) - return true - } - } - - safeOutputReflectionLog.Print("No safe outputs enabled") - return false -} - -// getEnabledSafeOutputToolNamesReflection uses reflection to get enabled tool names -func getEnabledSafeOutputToolNamesReflection(safeOutputs *SafeOutputsConfig) []string { - if safeOutputs == nil { - return nil - } - - safeOutputReflectionLog.Print("Getting enabled safe output tool names using reflection") - var tools []string - - // Use reflection to check all pointer fields - val := reflect.ValueOf(safeOutputs).Elem() - for fieldName, toolName := range safeOutputFieldMapping { - field := val.FieldByName(fieldName) - if field.IsValid() && !field.IsNil() { - tools = append(tools, toolName) - } - } - - // Add custom job tools - for jobName := range safeOutputs.Jobs { - tools = append(tools, jobName) - safeOutputReflectionLog.Printf("Added custom job tool: %s", jobName) - } - - // Sort tools to ensure deterministic compilation - sort.Strings(tools) - - safeOutputReflectionLog.Printf("Found %d enabled safe output tools", len(tools)) - return tools -} - -// formatSafeOutputsRunsOn formats the runs-on value from SafeOutputsConfig for job output -func (c *Compiler) formatSafeOutputsRunsOn(safeOutputs *SafeOutputsConfig) string { - if 
safeOutputs == nil || safeOutputs.RunsOn == "" { - return "runs-on: " + constants.DefaultActivationJobRunnerImage - } - - return "runs-on: " + safeOutputs.RunsOn -} - -// formatDetectionRunsOn resolves the runner for the detection job using the following priority: -// 1. safe-outputs.detection.runs-on (detection-specific override) -// 2. agentRunsOn (the agent job's runner, passed by the caller) -func (c *Compiler) formatDetectionRunsOn(safeOutputs *SafeOutputsConfig, agentRunsOn string) string { - if safeOutputs != nil && safeOutputs.ThreatDetection != nil && safeOutputs.ThreatDetection.RunsOn != "" { - return "runs-on: " + safeOutputs.ThreatDetection.RunsOn - } - return agentRunsOn -} - -// builtinSafeOutputFields contains the struct field names for the built-in safe output types -// that are excluded from the "non-builtin" check. These are: noop, missing-data, missing-tool. -var builtinSafeOutputFields = map[string]bool{ - "NoOp": true, - "MissingData": true, - "MissingTool": true, -} - -// nonBuiltinSafeOutputFieldNames is a pre-computed list of field names from safeOutputFieldMapping -// that are not builtins, used by hasNonBuiltinSafeOutputsEnabled to avoid repeated map iterations. -var nonBuiltinSafeOutputFieldNames = func() []string { - var fields []string - for fieldName := range safeOutputFieldMapping { - if !builtinSafeOutputFields[fieldName] { - fields = append(fields, fieldName) - } - } - return fields -}() - -// hasNonBuiltinSafeOutputsEnabled checks if any non-builtin safe outputs are configured. -// The builtin types (noop, missing-data, missing-tool) are excluded from this check -// because they are always auto-enabled and do not represent a meaningful output action. 
-func hasNonBuiltinSafeOutputsEnabled(safeOutputs *SafeOutputsConfig) bool { - if safeOutputs == nil { - return false - } - - // Custom safe-jobs are always non-builtin - if len(safeOutputs.Jobs) > 0 { - return true - } - - // Check non-builtin pointer fields using the pre-computed list - val := reflect.ValueOf(safeOutputs).Elem() - for _, fieldName := range nonBuiltinSafeOutputFieldNames { - field := val.FieldByName(fieldName) - if field.IsValid() && !field.IsNil() { - return true - } - } - - return false -} - -// HasSafeOutputsEnabled checks if any safe-outputs are enabled -func HasSafeOutputsEnabled(safeOutputs *SafeOutputsConfig) bool { - enabled := hasAnySafeOutputEnabled(safeOutputs) - - if safeOutputsConfigLog.Enabled() { - safeOutputsConfigLog.Printf("Safe outputs enabled check: %v", enabled) - } - - return enabled -} - -// applyDefaultCreateIssue injects a default create-issues safe output when safe-outputs is configured -// but has no non-builtin output types. The injected config uses the workflow ID as the label -// and [workflowID] as the title prefix. The AutoInjectedCreateIssue flag is set so the prompt -// generator can add a specific instruction for the agent. -func applyDefaultCreateIssue(workflowData *WorkflowData) { - if workflowData.SafeOutputs == nil { - return - } - if hasNonBuiltinSafeOutputsEnabled(workflowData.SafeOutputs) { - return - } - - workflowID := workflowData.WorkflowID - safeOutputsConfigLog.Printf("Auto-injecting create-issues for workflow %q (no non-builtin safe outputs configured)", workflowID) - workflowData.SafeOutputs.CreateIssues = &CreateIssuesConfig{ - BaseSafeOutputConfig: BaseSafeOutputConfig{Max: defaultIntStr(1)}, - Labels: []string{workflowID}, - TitlePrefix: fmt.Sprintf("[%s]", workflowID), - } - workflowData.SafeOutputs.AutoInjectedCreateIssue = true -} - -// GetEnabledSafeOutputToolNames returns a list of enabled safe output tool names. -// NOTE: Tool names should NOT be included in agent prompts. 
The agent should query -// the MCP server to discover available tools. This function is used for generating -// the tools.json file that the MCP server provides, and for diagnostic logging. -func GetEnabledSafeOutputToolNames(safeOutputs *SafeOutputsConfig) []string { - tools := getEnabledSafeOutputToolNamesReflection(safeOutputs) - - if safeOutputsConfigLog.Enabled() { - safeOutputsConfigLog.Printf("Enabled safe output tools: %v", tools) - } - - return tools -} - -// usesPatchesAndCheckouts checks if the workflow uses safe outputs that require -// git patches and checkouts (create-pull-request or push-to-pull-request-branch) -func usesPatchesAndCheckouts(safeOutputs *SafeOutputsConfig) bool { - if safeOutputs == nil { - return false - } - return safeOutputs.CreatePullRequests != nil || safeOutputs.PushToPullRequestBranch != nil -} diff --git a/pkg/workflow/safe_outputs_config_messages.go b/pkg/workflow/safe_outputs_config_messages.go deleted file mode 100644 index f11818ea2b2..00000000000 --- a/pkg/workflow/safe_outputs_config_messages.go +++ /dev/null @@ -1,153 +0,0 @@ -package workflow - -import ( - "encoding/json" - "fmt" - - "github.com/github/gh-aw/pkg/logger" -) - -var safeOutputMessagesLog = logger.New("workflow:safe_outputs_config_messages") - -// ======================================== -// Safe Output Messages Configuration -// ======================================== - -// setStringFromMap reads m[key] and assigns its string value to *dest if found. 
-func setStringFromMap(m map[string]any, key string, dest *string) { - if val, exists := m[key]; exists { - if str, ok := val.(string); ok { - *dest = str - } - } -} - -// parseMessagesConfig parses the messages configuration from safe-outputs frontmatter -func parseMessagesConfig(messagesMap map[string]any) *SafeOutputMessagesConfig { - safeOutputMessagesLog.Printf("Parsing messages configuration with %d fields", len(messagesMap)) - config := &SafeOutputMessagesConfig{} - - if appendOnly, exists := messagesMap["append-only-comments"]; exists { - if appendOnlyBool, ok := appendOnly.(bool); ok { - config.AppendOnlyComments = appendOnlyBool - safeOutputMessagesLog.Printf("Set append-only-comments: %t", appendOnlyBool) - } - } - - setStringFromMap(messagesMap, "footer", &config.Footer) - setStringFromMap(messagesMap, "footer-install", &config.FooterInstall) - setStringFromMap(messagesMap, "footer-workflow-recompile", &config.FooterWorkflowRecompile) - setStringFromMap(messagesMap, "footer-workflow-recompile-comment", &config.FooterWorkflowRecompileComment) - setStringFromMap(messagesMap, "staged-title", &config.StagedTitle) - setStringFromMap(messagesMap, "staged-description", &config.StagedDescription) - setStringFromMap(messagesMap, "run-started", &config.RunStarted) - setStringFromMap(messagesMap, "run-success", &config.RunSuccess) - setStringFromMap(messagesMap, "run-failure", &config.RunFailure) - setStringFromMap(messagesMap, "detection-failure", &config.DetectionFailure) - setStringFromMap(messagesMap, "pull-request-created", &config.PullRequestCreated) - setStringFromMap(messagesMap, "issue-created", &config.IssueCreated) - setStringFromMap(messagesMap, "commit-pushed", &config.CommitPushed) - setStringFromMap(messagesMap, "agent-failure-issue", &config.AgentFailureIssue) - setStringFromMap(messagesMap, "agent-failure-comment", &config.AgentFailureComment) - - return config -} - -// parseMentionsConfig parses the mentions configuration from safe-outputs 
frontmatter -// Mentions can be: -// - false: always escapes mentions -// - true: always allows mentions (error in strict mode) -// - object: detailed configuration with allow-team-members, allow-context, allowed, max -func parseMentionsConfig(mentions any) *MentionsConfig { - safeOutputMessagesLog.Printf("Parsing mentions configuration: type=%T", mentions) - config := &MentionsConfig{} - - // Handle boolean value - if boolVal, ok := mentions.(bool); ok { - config.Enabled = &boolVal - safeOutputMessagesLog.Printf("Mentions configured as boolean: %t", boolVal) - return config - } - - // Handle object configuration - if mentionsMap, ok := mentions.(map[string]any); ok { - // Parse allow-team-members - if allowTeamMembers, exists := mentionsMap["allow-team-members"]; exists { - if val, ok := allowTeamMembers.(bool); ok { - config.AllowTeamMembers = &val - } - } - - // Parse allow-context - if allowContext, exists := mentionsMap["allow-context"]; exists { - if val, ok := allowContext.(bool); ok { - config.AllowContext = &val - } - } - - // Parse allowed list - if allowed, exists := mentionsMap["allowed"]; exists { - if allowedArray, ok := allowed.([]any); ok { - var allowedStrings []string - for _, item := range allowedArray { - if str, ok := item.(string); ok { - // Normalize username by removing '@' prefix if present - normalized := str - if len(str) > 0 && str[0] == '@' { - normalized = str[1:] - safeOutputMessagesLog.Printf("Normalized mention '%s' to '%s'", str, normalized) - } - allowedStrings = append(allowedStrings, normalized) - } - } - config.Allowed = allowedStrings - } - } - - // Parse max - if maxVal, exists := mentionsMap["max"]; exists { - switch v := maxVal.(type) { - case int: - if v >= 1 { - config.Max = &v - } - case int64: - intVal := int(v) - if intVal >= 1 { - config.Max = &intVal - } - case uint64: - intVal := int(v) - if intVal >= 1 { - config.Max = &intVal - } - case float64: - intVal := int(v) - // Warn if truncation occurs - if v != 
float64(intVal) { - safeOutputsConfigLog.Printf("mentions.max: float value %.2f truncated to integer %d", v, intVal) - } - if intVal >= 1 { - config.Max = &intVal - } - } - } - } - - return config -} - -// serializeMessagesConfig converts SafeOutputMessagesConfig to JSON for passing as environment variable -func serializeMessagesConfig(messages *SafeOutputMessagesConfig) (string, error) { - if messages == nil { - return "", nil - } - safeOutputMessagesLog.Print("Serializing messages configuration to JSON") - jsonBytes, err := json.Marshal(messages) - if err != nil { - safeOutputMessagesLog.Printf("Failed to serialize messages config: %v", err) - return "", fmt.Errorf("failed to serialize messages config: %w", err) - } - safeOutputMessagesLog.Printf("Serialized messages config: %d bytes", len(jsonBytes)) - return string(jsonBytes), nil -} diff --git a/pkg/workflow/safe_outputs_env.go b/pkg/workflow/safe_outputs_env.go deleted file mode 100644 index 11f8d7c3844..00000000000 --- a/pkg/workflow/safe_outputs_env.go +++ /dev/null @@ -1,348 +0,0 @@ -package workflow - -import ( - "fmt" - "strconv" - "strings" - - "github.com/github/gh-aw/pkg/logger" -) - -var safeOutputsEnvLog = logger.New("workflow:safe_outputs_env") - -// ======================================== -// Safe Output Environment Variables -// ======================================== - -// applySafeOutputEnvToMap adds safe-output related environment variables to an env map -// This extracts the duplicated safe-output env setup logic across all engines (copilot, codex, claude, custom) -func applySafeOutputEnvToMap(env map[string]string, data *WorkflowData) { - if data.SafeOutputs == nil { - return - } - - safeOutputsEnvLog.Printf("Applying safe output env vars: trial_mode=%t, staged=%t", data.TrialMode, data.SafeOutputs.Staged) - - env["GH_AW_SAFE_OUTPUTS"] = "${{ env.GH_AW_SAFE_OUTPUTS }}" - - // Add staged flag if specified - if data.TrialMode || data.SafeOutputs.Staged { - env["GH_AW_SAFE_OUTPUTS_STAGED"] = 
"true" - } - if data.TrialMode && data.TrialLogicalRepo != "" { - env["GH_AW_TARGET_REPO_SLUG"] = data.TrialLogicalRepo - } - - // Add branch name if upload assets is configured - if data.SafeOutputs.UploadAssets != nil { - safeOutputsEnvLog.Printf("Adding upload assets env vars: branch=%s", data.SafeOutputs.UploadAssets.BranchName) - env["GH_AW_ASSETS_BRANCH"] = fmt.Sprintf("%q", data.SafeOutputs.UploadAssets.BranchName) - env["GH_AW_ASSETS_MAX_SIZE_KB"] = strconv.Itoa(data.SafeOutputs.UploadAssets.MaxSizeKB) - env["GH_AW_ASSETS_ALLOWED_EXTS"] = fmt.Sprintf("%q", strings.Join(data.SafeOutputs.UploadAssets.AllowedExts, ",")) - } -} - -// applySafeOutputEnvToSlice adds safe-output related environment variables to a YAML string slice -// This is for engines that build YAML line-by-line (like Claude) -func applySafeOutputEnvToSlice(stepLines *[]string, workflowData *WorkflowData) { - if workflowData.SafeOutputs == nil { - return - } - - *stepLines = append(*stepLines, " GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}") - - // Add staged flag if specified - if workflowData.TrialMode || workflowData.SafeOutputs.Staged { - *stepLines = append(*stepLines, " GH_AW_SAFE_OUTPUTS_STAGED: \"true\"") - } - if workflowData.TrialMode && workflowData.TrialLogicalRepo != "" { - *stepLines = append(*stepLines, fmt.Sprintf(" GH_AW_TARGET_REPO_SLUG: %q", workflowData.TrialLogicalRepo)) - } - - // Add branch name if upload assets is configured - if workflowData.SafeOutputs.UploadAssets != nil { - *stepLines = append(*stepLines, fmt.Sprintf(" GH_AW_ASSETS_BRANCH: %q", workflowData.SafeOutputs.UploadAssets.BranchName)) - *stepLines = append(*stepLines, fmt.Sprintf(" GH_AW_ASSETS_MAX_SIZE_KB: %d", workflowData.SafeOutputs.UploadAssets.MaxSizeKB)) - *stepLines = append(*stepLines, fmt.Sprintf(" GH_AW_ASSETS_ALLOWED_EXTS: %q", strings.Join(workflowData.SafeOutputs.UploadAssets.AllowedExts, ","))) - } -} - -// buildWorkflowMetadataEnvVars builds workflow name and source environment 
variables -// This extracts the duplicated workflow metadata setup logic from safe-output job builders -func buildWorkflowMetadataEnvVars(workflowName string, workflowSource string) []string { - var customEnvVars []string - - // Add workflow name - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_WORKFLOW_NAME: %q\n", workflowName)) - - // Add workflow source and source URL if present - if workflowSource != "" { - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_WORKFLOW_SOURCE: %q\n", workflowSource)) - sourceURL := buildSourceURL(workflowSource) - if sourceURL != "" { - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_WORKFLOW_SOURCE_URL: %q\n", sourceURL)) - } - } - - return customEnvVars -} - -// buildWorkflowMetadataEnvVarsWithTrackerID builds workflow metadata env vars including tracker-id -func buildWorkflowMetadataEnvVarsWithTrackerID(workflowName string, workflowSource string, trackerID string) []string { - customEnvVars := buildWorkflowMetadataEnvVars(workflowName, workflowSource) - - // Add tracker-id if present - if trackerID != "" { - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_TRACKER_ID: %q\n", trackerID)) - } - - return customEnvVars -} - -// buildSafeOutputJobEnvVars builds environment variables for safe-output jobs with staged/target repo handling -// This extracts the duplicated env setup logic in safe-output job builders (create_issue, add_comment, etc.) 
-func buildSafeOutputJobEnvVars(trialMode bool, trialLogicalRepoSlug string, staged bool, targetRepoSlug string) []string { - var customEnvVars []string - - // Pass the staged flag if it's set to true - if trialMode || staged { - safeOutputsEnvLog.Printf("Setting staged flag: trial_mode=%t, staged=%t", trialMode, staged) - customEnvVars = append(customEnvVars, " GH_AW_SAFE_OUTPUTS_STAGED: \"true\"\n") - } - - // Set GH_AW_TARGET_REPO_SLUG - prefer target-repo config over trial target repo - if targetRepoSlug != "" { - safeOutputsEnvLog.Printf("Setting target repo slug from config: %s", targetRepoSlug) - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_TARGET_REPO_SLUG: %q\n", targetRepoSlug)) - } else if trialMode && trialLogicalRepoSlug != "" { - safeOutputsEnvLog.Printf("Setting target repo slug from trial mode: %s", trialLogicalRepoSlug) - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_TARGET_REPO_SLUG: %q\n", trialLogicalRepoSlug)) - } - - return customEnvVars -} - -// buildStandardSafeOutputEnvVars builds the standard set of environment variables -// that all safe-output job builders need: metadata + staged/target repo handling -// This reduces duplication in safe-output job builders -func (c *Compiler) buildStandardSafeOutputEnvVars(data *WorkflowData, targetRepoSlug string) []string { - var customEnvVars []string - - // Add workflow metadata (name, source, and tracker-id) - customEnvVars = append(customEnvVars, buildWorkflowMetadataEnvVarsWithTrackerID(data.Name, data.Source, data.TrackerID)...) - - // Add engine metadata (id, version, model) for XML comment marker - customEnvVars = append(customEnvVars, buildEngineMetadataEnvVars(data.EngineConfig)...) - - // Add common safe output job environment variables (staged/target repo) - customEnvVars = append(customEnvVars, buildSafeOutputJobEnvVars( - c.trialMode, - c.trialLogicalRepoSlug, - data.SafeOutputs.Staged, - targetRepoSlug, - )...) 
- - // Add messages config if present - if data.SafeOutputs.Messages != nil { - messagesJSON, err := serializeMessagesConfig(data.SafeOutputs.Messages) - if err != nil { - safeOutputsEnvLog.Printf("Warning: failed to serialize messages config: %v", err) - } else if messagesJSON != "" { - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_SAFE_OUTPUT_MESSAGES: %q\n", messagesJSON)) - } - } - - return customEnvVars -} - -// buildStepLevelSafeOutputEnvVars builds environment variables for consolidated safe output steps -// This excludes variables that are already set at the job level in consolidated jobs -func (c *Compiler) buildStepLevelSafeOutputEnvVars(data *WorkflowData, targetRepoSlug string) []string { - var customEnvVars []string - - // Only add target repo slug if it's different from the job-level setting - // (i.e., this step has a specific target-repo config that overrides the global trial mode target) - if targetRepoSlug != "" { - // Step-specific target repo overrides job-level setting - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_TARGET_REPO_SLUG: %q\n", targetRepoSlug)) - } else if !c.trialMode && data.SafeOutputs.Staged { - // Step needs staged flag but there's no job-level target repo (not in trial mode) - // Job level only sets this if trialMode is true - customEnvVars = append(customEnvVars, " GH_AW_SAFE_OUTPUTS_STAGED: \"true\"\n") - } - - // Note: The following are now set at job level and should NOT be included here: - // - GH_AW_WORKFLOW_NAME - // - GH_AW_WORKFLOW_SOURCE - // - GH_AW_WORKFLOW_SOURCE_URL - // - GH_AW_TRACKER_ID - // - GH_AW_ENGINE_ID - // - GH_AW_ENGINE_VERSION - // - GH_AW_ENGINE_MODEL - // - GH_AW_SAFE_OUTPUTS_STAGED (if in trial mode) - // - GH_AW_TARGET_REPO_SLUG (if in trial mode and no step override) - // - GH_AW_SAFE_OUTPUT_MESSAGES - - return customEnvVars -} - -// buildEngineMetadataEnvVars builds engine metadata environment variables (id, version, model) -// These are used by the JavaScript footer 
generation to create XML comment markers for traceability -func buildEngineMetadataEnvVars(engineConfig *EngineConfig) []string { - var customEnvVars []string - - if engineConfig == nil { - return customEnvVars - } - - safeOutputsEnvLog.Printf("Building engine metadata env vars: id=%s, version=%s", engineConfig.ID, engineConfig.Version) - - // Add engine ID if present - if engineConfig.ID != "" { - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ENGINE_ID: %q\n", engineConfig.ID)) - } - - // Add engine version if present - if engineConfig.Version != "" { - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ENGINE_VERSION: %q\n", engineConfig.Version)) - } - - // Add engine model if present - if engineConfig.Model != "" { - customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ENGINE_MODEL: %q\n", engineConfig.Model)) - } - - return customEnvVars -} - -// ======================================== -// Safe Output Environment Helpers -// ======================================== - -// addCustomSafeOutputEnvVars adds custom environment variables to safe output job steps -func (c *Compiler) addCustomSafeOutputEnvVars(steps *[]string, data *WorkflowData) { - if data.SafeOutputs != nil && len(data.SafeOutputs.Env) > 0 { - for key, value := range data.SafeOutputs.Env { - *steps = append(*steps, fmt.Sprintf(" %s: %s\n", key, value)) - } - } -} - -// addSafeOutputGitHubTokenForConfig adds github-token to the with section, preferring per-config token over global -// Uses precedence: config token > safe-outputs global github-token > GH_AW_GITHUB_TOKEN || GITHUB_TOKEN -func (c *Compiler) addSafeOutputGitHubTokenForConfig(steps *[]string, data *WorkflowData, configToken string) { - var safeOutputsToken string - if data.SafeOutputs != nil { - safeOutputsToken = data.SafeOutputs.GitHubToken - } - - // If app is configured, use app token - if data.SafeOutputs != nil && data.SafeOutputs.GitHubApp != nil { - *steps = append(*steps, " github-token: ${{ 
steps.safe-outputs-app-token.outputs.token }}\n") - return - } - - // Choose the first non-empty custom token for precedence - effectiveCustomToken := configToken - if effectiveCustomToken == "" { - effectiveCustomToken = safeOutputsToken - } - - // Get effective token - effectiveToken := getEffectiveSafeOutputGitHubToken(effectiveCustomToken) - *steps = append(*steps, fmt.Sprintf(" github-token: %s\n", effectiveToken)) -} - -// addSafeOutputCopilotGitHubTokenForConfig adds github-token to the with section for Copilot-related operations -// Uses precedence: config token > safe-outputs global github-token > COPILOT_GITHUB_TOKEN -func (c *Compiler) addSafeOutputCopilotGitHubTokenForConfig(steps *[]string, data *WorkflowData, configToken string) { - var safeOutputsToken string - if data.SafeOutputs != nil { - safeOutputsToken = data.SafeOutputs.GitHubToken - } - - // If app is configured, use app token - if data.SafeOutputs != nil && data.SafeOutputs.GitHubApp != nil { - *steps = append(*steps, " github-token: ${{ steps.safe-outputs-app-token.outputs.token }}\n") - return - } - - // Choose the first non-empty custom token for precedence - effectiveCustomToken := configToken - if effectiveCustomToken == "" { - effectiveCustomToken = safeOutputsToken - } - - // Get effective token - effectiveToken := getEffectiveCopilotRequestsToken(effectiveCustomToken) - *steps = append(*steps, fmt.Sprintf(" github-token: %s\n", effectiveToken)) -} - -// addSafeOutputAgentGitHubTokenForConfig adds github-token to the with section for agent assignment operations -// Uses precedence: config token > safe-outputs token > GH_AW_AGENT_TOKEN || GH_AW_GITHUB_TOKEN || GITHUB_TOKEN -// This is specifically for assign-to-agent operations which require elevated permissions. -// -// Note: GitHub App tokens are intentionally NOT used here, even when github-app: is configured. -// The Copilot assignment API only accepts PATs (fine-grained or classic), not GitHub App -// installation tokens. 
Callers must provide an explicit github-token or rely on GH_AW_AGENT_TOKEN. -func (c *Compiler) addSafeOutputAgentGitHubTokenForConfig(steps *[]string, data *WorkflowData, configToken string) { - // Get safe-outputs level token - var safeOutputsToken string - if data.SafeOutputs != nil { - safeOutputsToken = data.SafeOutputs.GitHubToken - } - - // Choose the first non-empty custom token for precedence - effectiveCustomToken := configToken - if effectiveCustomToken == "" { - effectiveCustomToken = safeOutputsToken - } - - // Get effective token - falls back to ${{ secrets.GH_AW_AGENT_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} - // when no explicit token is provided. GitHub App tokens are never used here because the - // Copilot assignment API rejects them. - effectiveToken := getEffectiveCopilotCodingAgentGitHubToken(effectiveCustomToken) - *steps = append(*steps, fmt.Sprintf(" github-token: %s\n", effectiveToken)) -} - -// buildTitlePrefixEnvVar builds a title-prefix environment variable line for safe-output jobs. -// envVarName should be the full env var name like "GH_AW_ISSUE_TITLE_PREFIX" or "GH_AW_DISCUSSION_TITLE_PREFIX". -// Returns an empty slice if titlePrefix is empty. -func buildTitlePrefixEnvVar(envVarName string, titlePrefix string) []string { - if titlePrefix == "" { - return nil - } - return []string{fmt.Sprintf(" %s: %q\n", envVarName, titlePrefix)} -} - -// buildLabelsEnvVar builds a labels environment variable line for safe-output jobs. -// envVarName should be the full env var name like "GH_AW_ISSUE_LABELS" or "GH_AW_PR_LABELS". -// Returns an empty slice if labels is empty. -func buildLabelsEnvVar(envVarName string, labels []string) []string { - if len(labels) == 0 { - return nil - } - labelsStr := strings.Join(labels, ",") - return []string{fmt.Sprintf(" %s: %q\n", envVarName, labelsStr)} -} - -// buildCategoryEnvVar builds a category environment variable line for discussion safe-output jobs. 
package workflow

import (
	"encoding/json"
	"fmt"
	"maps"
	"reflect"
	"sort"
	"strconv"
	"strings"

	"github.com/github/gh-aw/pkg/constants"
	"github.com/github/gh-aw/pkg/logger"
	"github.com/github/gh-aw/pkg/stringutil"
)

// ========================================
// Safe Output Configuration Generation
// ========================================

// generateCustomJobToolDefinition creates an MCP tool definition for a custom safe-output job
// Returns a map representing the tool definition in MCP format with name, description, and inputSchema
func generateCustomJobToolDefinition(jobName string, jobConfig *SafeJobConfig) map[string]any {
	safeOutputsConfigLog.Printf("Generating tool definition for custom job: %s", jobName)

	// Build the tool definition
	tool := map[string]any{
		"name": jobName,
	}

	// Add description if present
	if jobConfig.Description != "" {
		tool["description"] = jobConfig.Description
	} else {
		// Provide a default description if none is specified
		tool["description"] = fmt.Sprintf("Execute the %s custom job", jobName)
	}

	// Build the input schema (JSON Schema "object" with one property per job input)
	inputSchema := map[string]any{
		"type":       "object",
		"properties": make(map[string]any),
	}

	// Track required fields
	var requiredFields []string

	// Add each input to the schema
	if len(jobConfig.Inputs) > 0 {
		properties := inputSchema["properties"].(map[string]any)

		for inputName, inputDef := range jobConfig.Inputs {
			property := map[string]any{}

			// Add description
			if inputDef.Description != "" {
				property["description"] = inputDef.Description
			}

			// Convert type to JSON Schema type
			switch inputDef.Type {
			case "choice":
				// Choice inputs are strings with enum constraints
				property["type"] = "string"
				if len(inputDef.Options) > 0 {
					property["enum"] = inputDef.Options
				}
			case "boolean":
				property["type"] = "boolean"
			case "number":
				property["type"] = "number"
			case "string", "":
				// Default to string if type is not specified
				property["type"] = "string"
			default:
				// For any unknown type, default to string
				property["type"] = "string"
			}

			// Add default value if present
			// NOTE(review): Default is compared against nil here but against "" in the
			// safe-jobs loop of generateSafeOutputsConfig — confirm both are intended.
			if inputDef.Default != nil {
				property["default"] = inputDef.Default
			}

			// Track required fields
			if inputDef.Required {
				requiredFields = append(requiredFields, inputName)
			}

			properties[inputName] = property
		}
	}

	// Add required fields array if any inputs are required (sorted for determinism)
	if len(requiredFields) > 0 {
		sort.Strings(requiredFields)
		inputSchema["required"] = requiredFields
	}

	// Prevent additional properties to maintain schema strictness
	inputSchema["additionalProperties"] = false

	tool["inputSchema"] = inputSchema

	safeOutputsConfigLog.Printf("Generated tool definition for %s with %d inputs, %d required",
		jobName, len(jobConfig.Inputs), len(requiredFields))

	return tool
}

// populateDispatchWorkflowFiles resolves each configured dispatch-workflow name to the
// on-disk file extension it should be invoked with (".lock.yml" preferred over ".yml")
// and records the mapping in DispatchWorkflow.WorkflowFiles for runtime use.
// Workflows whose files cannot be found are logged and skipped.
func populateDispatchWorkflowFiles(data *WorkflowData, markdownPath string) {
	if data.SafeOutputs == nil || data.SafeOutputs.DispatchWorkflow == nil {
		return
	}

	if len(data.SafeOutputs.DispatchWorkflow.Workflows) == 0 {
		return
	}

	safeOutputsConfigLog.Printf("Populating workflow files for %d dispatch workflows", len(data.SafeOutputs.DispatchWorkflow.Workflows))

	// Initialize WorkflowFiles map if not already initialized
	if data.SafeOutputs.DispatchWorkflow.WorkflowFiles == nil {
		data.SafeOutputs.DispatchWorkflow.WorkflowFiles = make(map[string]string)
	}

	for _, workflowName := range data.SafeOutputs.DispatchWorkflow.Workflows {
		// Find the workflow file
		fileResult, err := findWorkflowFile(workflowName, markdownPath)
		if err != nil {
			safeOutputsConfigLog.Printf("Warning: error finding workflow %s: %v", workflowName, err)
			continue
		}

		// Determine which file to use - priority: .lock.yml > .yml
		var extension string
		if fileResult.lockExists {
			extension = ".lock.yml"
		} else if fileResult.ymlExists {
			extension = ".yml"
		} else {
			safeOutputsConfigLog.Printf("Warning: workflow file not found for %s (only .md exists, needs compilation)", workflowName)
			continue
		}

		// Store the file extension for runtime use
		data.SafeOutputs.DispatchWorkflow.WorkflowFiles[workflowName] = extension
		safeOutputsConfigLog.Printf("Mapped workflow %s to extension %s", workflowName, extension)
	}
}

// generateSafeOutputsConfig serializes the workflow's safe-outputs settings into the
// JSON configuration consumed at runtime: one key per enabled safe-output type,
// plus entries for custom safe-jobs, mentions, dispatch-workflow, and max-bot-mentions.
// Returns "" when no safe-outputs are configured.
func generateSafeOutputsConfig(data *WorkflowData) string {
	// Pass the safe-outputs configuration for validation
	if data.SafeOutputs == nil {
		safeOutputsConfigLog.Print("No safe outputs configuration found, returning empty config")
		return ""
	}
	safeOutputsConfigLog.Print("Generating safe outputs configuration for workflow")
	// Create a simplified config object for validation
	safeOutputsConfig := make(map[string]any)

	// Handle safe-outputs configuration if present
	// NOTE(review): data.SafeOutputs is already guaranteed non-nil by the guard above,
	// so this wrapper condition is redundant.
	if data.SafeOutputs != nil {
		if data.SafeOutputs.CreateIssues != nil {
			config := generateMaxWithAllowedLabelsConfig(
				data.SafeOutputs.CreateIssues.Max,
				1, // default max
				data.SafeOutputs.CreateIssues.AllowedLabels,
			)
			// Add group flag if enabled
			if data.SafeOutputs.CreateIssues.Group != nil && *data.SafeOutputs.CreateIssues.Group == "true" {
				config["group"] = true
			}
			// Add expires value if set (0 means explicitly disabled or not set)
			if data.SafeOutputs.CreateIssues.Expires > 0 {
				config["expires"] = data.SafeOutputs.CreateIssues.Expires
			}
			safeOutputsConfig["create_issue"] = config
		}
		if data.SafeOutputs.CreateAgentSessions != nil {
			safeOutputsConfig["create_agent_session"] = generateMaxConfig(
				data.SafeOutputs.CreateAgentSessions.Max,
				1, // default max
			)
		}
		if data.SafeOutputs.AddComments != nil {
			additionalFields := make(map[string]any)
			// Note: AddCommentsConfig has Target, TargetRepoSlug, AllowedRepos but not embedded SafeOutputTargetConfig
			// So we need to construct the target config manually
			targetConfig := SafeOutputTargetConfig{
				Target:         data.SafeOutputs.AddComments.Target,
				TargetRepoSlug: data.SafeOutputs.AddComments.TargetRepoSlug,
				AllowedRepos:   data.SafeOutputs.AddComments.AllowedRepos,
			}
			safeOutputsConfig["add_comment"] = generateTargetConfigWithRepos(
				targetConfig,
				data.SafeOutputs.AddComments.Max,
				1, // default max
				additionalFields,
			)
		}
		if data.SafeOutputs.CreateDiscussions != nil {
			config := generateMaxWithAllowedLabelsConfig(
				data.SafeOutputs.CreateDiscussions.Max,
				1, // default max
				data.SafeOutputs.CreateDiscussions.AllowedLabels,
			)
			// Add expires value if set (0 means explicitly disabled or not set)
			if data.SafeOutputs.CreateDiscussions.Expires > 0 {
				config["expires"] = data.SafeOutputs.CreateDiscussions.Expires
			}
			safeOutputsConfig["create_discussion"] = config
		}
		if data.SafeOutputs.CloseDiscussions != nil {
			safeOutputsConfig["close_discussion"] = generateMaxWithDiscussionFieldsConfig(
				data.SafeOutputs.CloseDiscussions.Max,
				1, // default max
				data.SafeOutputs.CloseDiscussions.RequiredCategory,
				data.SafeOutputs.CloseDiscussions.RequiredLabels,
				data.SafeOutputs.CloseDiscussions.RequiredTitlePrefix,
			)
		}
		if data.SafeOutputs.CloseIssues != nil {
			additionalFields := make(map[string]any)
			if len(data.SafeOutputs.CloseIssues.RequiredLabels) > 0 {
				additionalFields["required_labels"] = data.SafeOutputs.CloseIssues.RequiredLabels
			}
			if data.SafeOutputs.CloseIssues.RequiredTitlePrefix != "" {
				additionalFields["required_title_prefix"] = data.SafeOutputs.CloseIssues.RequiredTitlePrefix
			}
			safeOutputsConfig["close_issue"] = generateTargetConfigWithRepos(
				data.SafeOutputs.CloseIssues.SafeOutputTargetConfig,
				data.SafeOutputs.CloseIssues.Max,
				1, // default max
				additionalFields,
			)
		}
		if data.SafeOutputs.CreatePullRequests != nil {
			safeOutputsConfig["create_pull_request"] = generatePullRequestConfig(
				data.SafeOutputs.CreatePullRequests,
				1, // default max
			)
		}
		if data.SafeOutputs.CreatePullRequestReviewComments != nil {
			safeOutputsConfig["create_pull_request_review_comment"] = generateMaxConfig(
				data.SafeOutputs.CreatePullRequestReviewComments.Max,
				10, // default max
			)
		}
		if data.SafeOutputs.SubmitPullRequestReview != nil {
			safeOutputsConfig["submit_pull_request_review"] = generateMaxConfig(
				data.SafeOutputs.SubmitPullRequestReview.Max,
				1, // default max
			)
		}
		if data.SafeOutputs.ResolvePullRequestReviewThread != nil {
			safeOutputsConfig["resolve_pull_request_review_thread"] = generateMaxConfig(
				data.SafeOutputs.ResolvePullRequestReviewThread.Max,
				10, // default max
			)
		}
		if data.SafeOutputs.CreateCodeScanningAlerts != nil {
			safeOutputsConfig["create_code_scanning_alert"] = generateMaxConfig(
				data.SafeOutputs.CreateCodeScanningAlerts.Max,
				0, // default: unlimited
			)
		}
		if data.SafeOutputs.AutofixCodeScanningAlert != nil {
			safeOutputsConfig["autofix_code_scanning_alert"] = generateMaxConfig(
				data.SafeOutputs.AutofixCodeScanningAlert.Max,
				10, // default max
			)
		}
		if data.SafeOutputs.AddLabels != nil {
			additionalFields := make(map[string]any)
			if len(data.SafeOutputs.AddLabels.Allowed) > 0 {
				additionalFields["allowed"] = data.SafeOutputs.AddLabels.Allowed
			}
			if len(data.SafeOutputs.AddLabels.Blocked) > 0 {
				additionalFields["blocked"] = data.SafeOutputs.AddLabels.Blocked
			}
			safeOutputsConfig["add_labels"] = generateTargetConfigWithRepos(
				data.SafeOutputs.AddLabels.SafeOutputTargetConfig,
				data.SafeOutputs.AddLabels.Max,
				3, // default max
				additionalFields,
			)
		}
		if data.SafeOutputs.RemoveLabels != nil {
			safeOutputsConfig["remove_labels"] = generateMaxWithAllowedConfig(
				data.SafeOutputs.RemoveLabels.Max,
				3, // default max
				data.SafeOutputs.RemoveLabels.Allowed,
			)
		}
		if data.SafeOutputs.AddReviewer != nil {
			safeOutputsConfig["add_reviewer"] = generateMaxWithReviewersConfig(
				data.SafeOutputs.AddReviewer.Max,
				3, // default max
				data.SafeOutputs.AddReviewer.Reviewers,
			)
		}
		if data.SafeOutputs.AssignMilestone != nil {
			safeOutputsConfig["assign_milestone"] = generateMaxWithAllowedConfig(
				data.SafeOutputs.AssignMilestone.Max,
				1, // default max
				data.SafeOutputs.AssignMilestone.Allowed,
			)
		}
		if data.SafeOutputs.AssignToAgent != nil {
			safeOutputsConfig["assign_to_agent"] = generateAssignToAgentConfig(
				data.SafeOutputs.AssignToAgent.Max,
				1, // default max
				data.SafeOutputs.AssignToAgent.DefaultAgent,
				data.SafeOutputs.AssignToAgent.Target,
				data.SafeOutputs.AssignToAgent.Allowed,
			)
		}
		if data.SafeOutputs.AssignToUser != nil {
			safeOutputsConfig["assign_to_user"] = generateMaxWithAllowedAndBlockedConfig(
				data.SafeOutputs.AssignToUser.Max,
				1, // default max
				data.SafeOutputs.AssignToUser.Allowed,
				data.SafeOutputs.AssignToUser.Blocked,
			)
		}
		if data.SafeOutputs.UnassignFromUser != nil {
			safeOutputsConfig["unassign_from_user"] = generateMaxWithAllowedAndBlockedConfig(
				data.SafeOutputs.UnassignFromUser.Max,
				1, // default max
				data.SafeOutputs.UnassignFromUser.Allowed,
				data.SafeOutputs.UnassignFromUser.Blocked,
			)
		}
		if data.SafeOutputs.UpdateIssues != nil {
			safeOutputsConfig["update_issue"] = generateMaxConfig(
				data.SafeOutputs.UpdateIssues.Max,
				1, // default max
			)
		}
		if data.SafeOutputs.UpdateDiscussions != nil {
			safeOutputsConfig["update_discussion"] = generateMaxWithAllowedLabelsConfig(
				data.SafeOutputs.UpdateDiscussions.Max,
				1, // default max
				data.SafeOutputs.UpdateDiscussions.AllowedLabels,
			)
		}
		if data.SafeOutputs.UpdatePullRequests != nil {
			safeOutputsConfig["update_pull_request"] = generateMaxConfig(
				data.SafeOutputs.UpdatePullRequests.Max,
				1, // default max
			)
		}
		if data.SafeOutputs.MarkPullRequestAsReadyForReview != nil {
			safeOutputsConfig["mark_pull_request_as_ready_for_review"] = generateMaxConfig(
				data.SafeOutputs.MarkPullRequestAsReadyForReview.Max,
				10, // default max
			)
		}
		if data.SafeOutputs.PushToPullRequestBranch != nil {
			safeOutputsConfig["push_to_pull_request_branch"] = generateMaxWithTargetConfig(
				data.SafeOutputs.PushToPullRequestBranch.Max,
				0, // default: unlimited
				data.SafeOutputs.PushToPullRequestBranch.Target,
			)
		}
		if data.SafeOutputs.UploadAssets != nil {
			safeOutputsConfig["upload_asset"] = generateMaxConfig(
				data.SafeOutputs.UploadAssets.Max,
				0, // default: unlimited
			)
		}
		if data.SafeOutputs.MissingTool != nil {
			// Generate config for missing_tool with issue creation support
			missingToolConfig := make(map[string]any)

			// Add max if set
			if data.SafeOutputs.MissingTool.Max != nil {
				missingToolConfig["max"] = resolveMaxForConfig(data.SafeOutputs.MissingTool.Max, 0)
			}

			// Add issue creation config if enabled
			if data.SafeOutputs.MissingTool.CreateIssue {
				createIssueConfig := make(map[string]any)
				createIssueConfig["max"] = 1 // Only create one issue per workflow run

				if data.SafeOutputs.MissingTool.TitlePrefix != "" {
					createIssueConfig["title_prefix"] = data.SafeOutputs.MissingTool.TitlePrefix
				}

				if len(data.SafeOutputs.MissingTool.Labels) > 0 {
					createIssueConfig["labels"] = data.SafeOutputs.MissingTool.Labels
				}

				safeOutputsConfig["create_missing_tool_issue"] = createIssueConfig
			}

			safeOutputsConfig["missing_tool"] = missingToolConfig
		}
		if data.SafeOutputs.MissingData != nil {
			// Generate config for missing_data with issue creation support
			// (mirrors the missing_tool handling above)
			missingDataConfig := make(map[string]any)

			// Add max if set
			if data.SafeOutputs.MissingData.Max != nil {
				missingDataConfig["max"] = resolveMaxForConfig(data.SafeOutputs.MissingData.Max, 0)
			}

			// Add issue creation config if enabled
			if data.SafeOutputs.MissingData.CreateIssue {
				createIssueConfig := make(map[string]any)
				createIssueConfig["max"] = 1 // Only create one issue per workflow run

				if data.SafeOutputs.MissingData.TitlePrefix != "" {
					createIssueConfig["title_prefix"] = data.SafeOutputs.MissingData.TitlePrefix
				}

				if len(data.SafeOutputs.MissingData.Labels) > 0 {
					createIssueConfig["labels"] = data.SafeOutputs.MissingData.Labels
				}

				safeOutputsConfig["create_missing_data_issue"] = createIssueConfig
			}

			safeOutputsConfig["missing_data"] = missingDataConfig
		}
		if data.SafeOutputs.UpdateProjects != nil {
			safeOutputsConfig["update_project"] = generateMaxConfig(
				data.SafeOutputs.UpdateProjects.Max,
				10, // default max
			)
		}
		if data.SafeOutputs.CreateProjectStatusUpdates != nil {
			safeOutputsConfig["create_project_status_update"] = generateMaxConfig(
				data.SafeOutputs.CreateProjectStatusUpdates.Max,
				10, // default max
			)
		}
		if data.SafeOutputs.CreateProjects != nil {
			config := generateMaxConfig(
				data.SafeOutputs.CreateProjects.Max,
				1, // default max
			)
			// Add target-owner if specified
			if data.SafeOutputs.CreateProjects.TargetOwner != "" {
				config["target_owner"] = data.SafeOutputs.CreateProjects.TargetOwner
			}
			// Add title-prefix if specified
			if data.SafeOutputs.CreateProjects.TitlePrefix != "" {
				config["title_prefix"] = data.SafeOutputs.CreateProjects.TitlePrefix
			}
			safeOutputsConfig["create_project"] = config
		}
		if data.SafeOutputs.UpdateRelease != nil {
			safeOutputsConfig["update_release"] = generateMaxConfig(
				data.SafeOutputs.UpdateRelease.Max,
				1, // default max
			)
		}
		if data.SafeOutputs.LinkSubIssue != nil {
			safeOutputsConfig["link_sub_issue"] = generateMaxConfig(
				data.SafeOutputs.LinkSubIssue.Max,
				5, // default max
			)
		}
		if data.SafeOutputs.NoOp != nil {
			safeOutputsConfig["noop"] = generateMaxConfig(
				data.SafeOutputs.NoOp.Max,
				1, // default max
			)
		}
		if data.SafeOutputs.HideComment != nil {
			safeOutputsConfig["hide_comment"] = generateHideCommentConfig(
				data.SafeOutputs.HideComment.Max,
				5, // default max
				data.SafeOutputs.HideComment.AllowedReasons,
			)
		}
		if data.SafeOutputs.SetIssueType != nil {
			additionalFields := make(map[string]any)
			if len(data.SafeOutputs.SetIssueType.Allowed) > 0 {
				additionalFields["allowed"] = data.SafeOutputs.SetIssueType.Allowed
			}
			safeOutputsConfig["set_issue_type"] = generateTargetConfigWithRepos(
				data.SafeOutputs.SetIssueType.SafeOutputTargetConfig,
				data.SafeOutputs.SetIssueType.Max,
				5, // default max
				additionalFields,
			)
		}
	}

	// Add safe-jobs configuration from SafeOutputs.Jobs
	if len(data.SafeOutputs.Jobs) > 0 {
		safeOutputsConfigLog.Printf("Processing %d safe job configurations", len(data.SafeOutputs.Jobs))
		for jobName, jobConfig := range data.SafeOutputs.Jobs {
			safeOutputsConfigLog.Printf("Generating config for safe job: %s", jobName)
			safeJobConfig := map[string]any{}

			// Add description if present
			if jobConfig.Description != "" {
				safeJobConfig["description"] = jobConfig.Description
			}

			// Add output if present
			if jobConfig.Output != "" {
				safeJobConfig["output"] = jobConfig.Output
			}

			// Add inputs information
			if len(jobConfig.Inputs) > 0 {
				inputsConfig := make(map[string]any)
				for inputName, inputDef := range jobConfig.Inputs {
					inputConfig := map[string]any{
						"type":        inputDef.Type,
						"description": inputDef.Description,
						"required":    inputDef.Required,
					}
					// NOTE(review): Default is compared against "" here but against nil in
					// generateCustomJobToolDefinition — confirm which comparison is intended.
					if inputDef.Default != "" {
						inputConfig["default"] = inputDef.Default
					}
					if len(inputDef.Options) > 0 {
						inputConfig["options"] = inputDef.Options
					}
					inputsConfig[inputName] = inputConfig
				}
				safeJobConfig["inputs"] = inputsConfig
			}

			safeOutputsConfig[jobName] = safeJobConfig
		}
	}

	// Add mentions configuration
	if data.SafeOutputs.Mentions != nil {
		mentionsConfig := make(map[string]any)

		// Handle enabled flag (simple boolean mode)
		if data.SafeOutputs.Mentions.Enabled != nil {
			mentionsConfig["enabled"] = *data.SafeOutputs.Mentions.Enabled
		}

		// Handle allow-team-members
		if data.SafeOutputs.Mentions.AllowTeamMembers != nil {
			mentionsConfig["allowTeamMembers"] = *data.SafeOutputs.Mentions.AllowTeamMembers
		}

		// Handle allow-context
		if data.SafeOutputs.Mentions.AllowContext != nil {
			mentionsConfig["allowContext"] = *data.SafeOutputs.Mentions.AllowContext
		}

		// Handle allowed list
		if len(data.SafeOutputs.Mentions.Allowed) > 0 {
			mentionsConfig["allowed"] = data.SafeOutputs.Mentions.Allowed
		}

		// Handle max
		if data.SafeOutputs.Mentions.Max != nil {
			mentionsConfig["max"] = *data.SafeOutputs.Mentions.Max
		}

		// Only add mentions config if it has any fields
		if len(mentionsConfig) > 0 {
			safeOutputsConfig["mentions"] = mentionsConfig
		}
	}

	// Add dispatch-workflow configuration
	if data.SafeOutputs.DispatchWorkflow != nil {
		dispatchWorkflowConfig := map[string]any{}

		// Include workflows list
		if len(data.SafeOutputs.DispatchWorkflow.Workflows) > 0 {
			dispatchWorkflowConfig["workflows"] = data.SafeOutputs.DispatchWorkflow.Workflows
		}

		// Include workflow files mapping (file extension for each workflow)
		if len(data.SafeOutputs.DispatchWorkflow.WorkflowFiles) > 0 {
			dispatchWorkflowConfig["workflow_files"] = data.SafeOutputs.DispatchWorkflow.WorkflowFiles
		}

		// Include max count
		dispatchWorkflowConfig["max"] = resolveMaxForConfig(data.SafeOutputs.DispatchWorkflow.Max, 1)

		// Only add if it has fields
		// NOTE(review): "max" is always set above, so this guard is always true.
		if len(dispatchWorkflowConfig) > 0 {
			safeOutputsConfig["dispatch_workflow"] = dispatchWorkflowConfig
		}
	}

	// Add max-bot-mentions if set (templatable integer)
	if data.SafeOutputs.MaxBotMentions != nil {
		v := *data.SafeOutputs.MaxBotMentions
		if n, err := strconv.Atoi(v); err == nil && n > 0 {
			safeOutputsConfig["max_bot_mentions"] = n
		} else if strings.HasPrefix(v, "${{") {
			safeOutputsConfig["max_bot_mentions"] = v
		}
	}

	// Error deliberately ignored: the map is built above from strings, ints, bools,
	// slices, and maps, all of which are JSON-marshalable.
	configJSON, _ := json.Marshal(safeOutputsConfig)
	safeOutputsConfigLog.Printf("Safe outputs config generation complete: %d tool types configured", len(safeOutputsConfig))
	return string(configJSON)
}

var safeOutputsConfigGenLog = logger.New("workflow:safe_outputs_config_generation_helpers")

// ========================================
// Safe Output Configuration Generation Helpers
// ========================================
//
// This file contains helper functions to reduce duplication in safe output
// configuration generation. These helpers extract common patterns for:
// - Generating max value configs with defaults
// - Generating configs with allowed fields (labels, repos, etc.)
// - Generating configs with optional target fields
//
// The goal is to make generateSafeOutputsConfig more maintainable by
// extracting repetitive code patterns into reusable functions.

// resolveMaxForConfig resolves a templatable max *string to a config value.
// For expression strings (e.g. "${{ inputs.max }}"), the expression is stored
// as-is so GitHub Actions can resolve it at runtime.
// For literal numeric strings, the parsed integer is used.
+// Falls back to defaultMax if max is nil or zero. +func resolveMaxForConfig(max *string, defaultMax int) any { + if max != nil { + v := *max + if strings.HasPrefix(v, "${{") { + return v // expression: evaluated at runtime by GitHub Actions + } + if n := templatableIntValue(max); n > 0 { + return n + } + } + return defaultMax +} + +// generateMaxConfig creates a simple config map with just a max value +func generateMaxConfig(max *string, defaultMax int) map[string]any { + config := make(map[string]any) + config["max"] = resolveMaxForConfig(max, defaultMax) + return config +} + +// generateMaxWithAllowedLabelsConfig creates a config with max and optional allowed_labels +func generateMaxWithAllowedLabelsConfig(max *string, defaultMax int, allowedLabels []string) map[string]any { + config := generateMaxConfig(max, defaultMax) + if len(allowedLabels) > 0 { + config["allowed_labels"] = allowedLabels + } + return config +} + +// generateMaxWithTargetConfig creates a config with max and optional target field +func generateMaxWithTargetConfig(max *string, defaultMax int, target string) map[string]any { + config := make(map[string]any) + if target != "" { + config["target"] = target + } + config["max"] = resolveMaxForConfig(max, defaultMax) + return config +} + +// generateMaxWithAllowedConfig creates a config with max and optional allowed list +func generateMaxWithAllowedConfig(max *string, defaultMax int, allowed []string) map[string]any { + config := generateMaxConfig(max, defaultMax) + if len(allowed) > 0 { + config["allowed"] = allowed + } + return config +} + +// generateMaxWithAllowedAndBlockedConfig creates a config with max, optional allowed list, and optional blocked list +func generateMaxWithAllowedAndBlockedConfig(max *string, defaultMax int, allowed []string, blocked []string) map[string]any { + config := generateMaxConfig(max, defaultMax) + if len(allowed) > 0 { + config["allowed"] = allowed + } + if len(blocked) > 0 { + config["blocked"] = blocked + } + 
return config +} + +// generateMaxWithDiscussionFieldsConfig creates a config with discussion-specific filter fields +func generateMaxWithDiscussionFieldsConfig(max *string, defaultMax int, requiredCategory string, requiredLabels []string, requiredTitlePrefix string) map[string]any { + config := generateMaxConfig(max, defaultMax) + if requiredCategory != "" { + config["required_category"] = requiredCategory + } + if len(requiredLabels) > 0 { + config["required_labels"] = requiredLabels + } + if requiredTitlePrefix != "" { + config["required_title_prefix"] = requiredTitlePrefix + } + return config +} + +// generateMaxWithReviewersConfig creates a config with max and optional reviewers list +func generateMaxWithReviewersConfig(max *string, defaultMax int, reviewers []string) map[string]any { + config := generateMaxConfig(max, defaultMax) + if len(reviewers) > 0 { + config["reviewers"] = reviewers + } + return config +} + +// generateAssignToAgentConfig creates a config with optional max, default_agent, target, and allowed +func generateAssignToAgentConfig(max *string, defaultMax int, defaultAgent string, target string, allowed []string) map[string]any { + if safeOutputsConfigGenLog.Enabled() { + safeOutputsConfigGenLog.Printf("Generating assign-to-agent config: max=%v, defaultMax=%d, defaultAgent=%s, target=%s, allowed_count=%d", + max, defaultMax, defaultAgent, target, len(allowed)) + } + config := make(map[string]any) + config["max"] = resolveMaxForConfig(max, defaultMax) + if defaultAgent != "" { + config["default_agent"] = defaultAgent + } + if target != "" { + config["target"] = target + } + if len(allowed) > 0 { + config["allowed"] = allowed + } + return config +} + +// generatePullRequestConfig creates a config with all pull request fields including target-repo, +// allowed_repos, base_branch, draft, reviewers, title_prefix, fallback_as_issue, and more. 
+func generatePullRequestConfig(prConfig *CreatePullRequestsConfig, defaultMax int) map[string]any { + safeOutputsConfigGenLog.Printf("Generating pull request config: max=%v, allowEmpty=%v, autoMerge=%v, expires=%d, labels_count=%d, targetRepo=%s", + prConfig.Max, prConfig.AllowEmpty, prConfig.AutoMerge, prConfig.Expires, len(prConfig.AllowedLabels), prConfig.TargetRepoSlug) + + additionalFields := make(map[string]any) + if len(prConfig.AllowedLabels) > 0 { + additionalFields["allowed_labels"] = prConfig.AllowedLabels + } + // Pass allow_empty flag to MCP server so it can skip patch generation + if prConfig.AllowEmpty != nil && *prConfig.AllowEmpty == "true" { + additionalFields["allow_empty"] = true + } + // Pass auto_merge flag to enable auto-merge for the pull request + if prConfig.AutoMerge != nil && *prConfig.AutoMerge == "true" { + additionalFields["auto_merge"] = true + } + // Pass expires to configure pull request expiration + if prConfig.Expires > 0 { + additionalFields["expires"] = prConfig.Expires + } + // Pass base_branch to configure the base branch for the pull request + if prConfig.BaseBranch != "" { + additionalFields["base_branch"] = prConfig.BaseBranch + } + // Pass draft flag to create the pull request as a draft + if prConfig.Draft != nil && *prConfig.Draft == "true" { + additionalFields["draft"] = true + } + // Pass reviewers to assign reviewers to the pull request + if len(prConfig.Reviewers) > 0 { + additionalFields["reviewers"] = prConfig.Reviewers + } + // Pass title_prefix to prepend to pull request titles + if prConfig.TitlePrefix != "" { + additionalFields["title_prefix"] = prConfig.TitlePrefix + } + // Pass fallback_as_issue if explicitly configured + if prConfig.FallbackAsIssue != nil { + additionalFields["fallback_as_issue"] = *prConfig.FallbackAsIssue + } + + // Use generateTargetConfigWithRepos to include target-repo and allowed_repos + targetConfig := SafeOutputTargetConfig{ + TargetRepoSlug: prConfig.TargetRepoSlug, + 
AllowedRepos: prConfig.AllowedRepos, + } + return generateTargetConfigWithRepos(targetConfig, prConfig.Max, defaultMax, additionalFields) +} + +// generateHideCommentConfig creates a config with max and optional allowed_reasons +func generateHideCommentConfig(max *string, defaultMax int, allowedReasons []string) map[string]any { + config := generateMaxConfig(max, defaultMax) + if len(allowedReasons) > 0 { + config["allowed_reasons"] = allowedReasons + } + return config +} + +// generateTargetConfigWithRepos creates a config with target, target-repo, allowed_repos, and optional fields. +// Note on naming conventions: +// - "target-repo" uses hyphen to match frontmatter YAML format (key in config.json) +// - "allowed_repos" uses underscore to match JavaScript handler expectations (see repo_helpers.cjs) +// This inconsistency is intentional to maintain compatibility with existing handler code. +func generateTargetConfigWithRepos(targetConfig SafeOutputTargetConfig, max *string, defaultMax int, additionalFields map[string]any) map[string]any { + config := generateMaxConfig(max, defaultMax) + + // Add target if specified + if targetConfig.Target != "" { + config["target"] = targetConfig.Target + } + + // Add target-repo if specified (use hyphenated key for consistency with frontmatter) + if targetConfig.TargetRepoSlug != "" { + config["target-repo"] = targetConfig.TargetRepoSlug + } + + // Add allowed_repos if specified (use underscore for consistency with handler code) + if len(targetConfig.AllowedRepos) > 0 { + config["allowed_repos"] = targetConfig.AllowedRepos + } + + // Add any additional fields + maps.Copy(config, additionalFields) + + return config +} + +// ======================================== +// Safe Output Configuration Helpers +// ======================================== + +var safeOutputReflectionLog = logger.New("workflow:safe_outputs_config_helpers_reflection") + +// safeOutputFieldMapping maps struct field names to their tool names +var 
safeOutputFieldMapping = map[string]string{ + "CreateIssues": "create_issue", + "CreateAgentSessions": "create_agent_session", + "CreateDiscussions": "create_discussion", + "UpdateDiscussions": "update_discussion", + "CloseDiscussions": "close_discussion", + "CloseIssues": "close_issue", + "ClosePullRequests": "close_pull_request", + "AddComments": "add_comment", + "CreatePullRequests": "create_pull_request", + "CreatePullRequestReviewComments": "create_pull_request_review_comment", + "SubmitPullRequestReview": "submit_pull_request_review", + "ReplyToPullRequestReviewComment": "reply_to_pull_request_review_comment", + "ResolvePullRequestReviewThread": "resolve_pull_request_review_thread", + "CreateCodeScanningAlerts": "create_code_scanning_alert", + "AddLabels": "add_labels", + "RemoveLabels": "remove_labels", + "AddReviewer": "add_reviewer", + "AssignMilestone": "assign_milestone", + "AssignToAgent": "assign_to_agent", + "AssignToUser": "assign_to_user", + "UpdateIssues": "update_issue", + "UpdatePullRequests": "update_pull_request", + "PushToPullRequestBranch": "push_to_pull_request_branch", + "UploadAssets": "upload_asset", + "UpdateRelease": "update_release", + "UpdateProjects": "update_project", + "CreateProjects": "create_project", + "CreateProjectStatusUpdates": "create_project_status_update", + "LinkSubIssue": "link_sub_issue", + "HideComment": "hide_comment", + "DispatchWorkflow": "dispatch_workflow", + "MissingTool": "missing_tool", + "NoOp": "noop", + "MarkPullRequestAsReadyForReview": "mark_pull_request_as_ready_for_review", +} + +// hasAnySafeOutputEnabled uses reflection to check if any safe output field is non-nil +func hasAnySafeOutputEnabled(safeOutputs *SafeOutputsConfig) bool { + if safeOutputs == nil { + return false + } + + safeOutputReflectionLog.Print("Checking if any safe outputs are enabled using reflection") + + // Check Jobs separately as it's a map + if len(safeOutputs.Jobs) > 0 { + safeOutputReflectionLog.Printf("Found %d custom jobs 
enabled", len(safeOutputs.Jobs)) + return true + } + + // Use reflection to check all pointer fields + val := reflect.ValueOf(safeOutputs).Elem() + for fieldName := range safeOutputFieldMapping { + field := val.FieldByName(fieldName) + if field.IsValid() && !field.IsNil() { + safeOutputReflectionLog.Printf("Found enabled safe output field: %s", fieldName) + return true + } + } + + safeOutputReflectionLog.Print("No safe outputs enabled") + return false +} + +// getEnabledSafeOutputToolNamesReflection uses reflection to get enabled tool names +func getEnabledSafeOutputToolNamesReflection(safeOutputs *SafeOutputsConfig) []string { + if safeOutputs == nil { + return nil + } + + safeOutputReflectionLog.Print("Getting enabled safe output tool names using reflection") + var tools []string + + // Use reflection to check all pointer fields + val := reflect.ValueOf(safeOutputs).Elem() + for fieldName, toolName := range safeOutputFieldMapping { + field := val.FieldByName(fieldName) + if field.IsValid() && !field.IsNil() { + tools = append(tools, toolName) + } + } + + // Add custom job tools + for jobName := range safeOutputs.Jobs { + tools = append(tools, jobName) + safeOutputReflectionLog.Printf("Added custom job tool: %s", jobName) + } + + // Sort tools to ensure deterministic compilation + sort.Strings(tools) + + safeOutputReflectionLog.Printf("Found %d enabled safe output tools", len(tools)) + return tools +} + +// formatSafeOutputsRunsOn formats the runs-on value from SafeOutputsConfig for job output +func (c *Compiler) formatSafeOutputsRunsOn(safeOutputs *SafeOutputsConfig) string { + if safeOutputs == nil || safeOutputs.RunsOn == "" { + return "runs-on: " + constants.DefaultActivationJobRunnerImage + } + + return "runs-on: " + safeOutputs.RunsOn +} + +// formatDetectionRunsOn resolves the runner for the detection job using the following priority: +// 1. safe-outputs.detection.runs-on (detection-specific override) +// 2. 
agentRunsOn (the agent job's runner, passed by the caller)
+func (c *Compiler) formatDetectionRunsOn(safeOutputs *SafeOutputsConfig, agentRunsOn string) string {
+	if safeOutputs != nil && safeOutputs.ThreatDetection != nil && safeOutputs.ThreatDetection.RunsOn != "" {
+		return "runs-on: " + safeOutputs.ThreatDetection.RunsOn
+	}
+	// NOTE(review): only the override branch above prepends "runs-on: "; this
+	// fallback returns agentRunsOn verbatim, so the caller presumably passes a
+	// complete "runs-on: ..." line already — confirm at the call site.
+	return agentRunsOn
+}
+
+// builtinSafeOutputFields contains the struct field names for the built-in safe output types
+// that are excluded from the "non-builtin" check. These are: noop, missing-data, missing-tool.
+var builtinSafeOutputFields = map[string]bool{
+	"NoOp":        true,
+	"MissingData": true,
+	"MissingTool": true,
+}
+
+// nonBuiltinSafeOutputFieldNames is a pre-computed list of field names from safeOutputFieldMapping
+// that are not builtins, used by hasNonBuiltinSafeOutputsEnabled to avoid repeated map iterations.
+// Go initializes package-level vars in dependency order, so safeOutputFieldMapping is
+// guaranteed to be populated before this initializer runs.
+var nonBuiltinSafeOutputFieldNames = func() []string {
+	var fields []string
+	for fieldName := range safeOutputFieldMapping {
+		if !builtinSafeOutputFields[fieldName] {
+			fields = append(fields, fieldName)
+		}
+	}
+	return fields
+}()
+
+// hasNonBuiltinSafeOutputsEnabled checks if any non-builtin safe outputs are configured.
+// The builtin types (noop, missing-data, missing-tool) are excluded from this check
+// because they are always auto-enabled and do not represent a meaningful output action.
+func hasNonBuiltinSafeOutputsEnabled(safeOutputs *SafeOutputsConfig) bool {
+	if safeOutputs == nil {
+		return false
+	}
+
+	// Custom safe-jobs are always non-builtin
+	if len(safeOutputs.Jobs) > 0 {
+		return true
+	}
+
+	// Check non-builtin pointer fields using the pre-computed list.
+	// NOTE(review): assumes every name in safeOutputFieldMapping is a pointer
+	// field on SafeOutputsConfig (IsNil panics otherwise) — confirm when adding fields.
+	val := reflect.ValueOf(safeOutputs).Elem()
+	for _, fieldName := range nonBuiltinSafeOutputFieldNames {
+		field := val.FieldByName(fieldName)
+		if field.IsValid() && !field.IsNil() {
+			return true
+		}
+	}
+
+	return false
+}
+
+// HasSafeOutputsEnabled checks if any safe-outputs are enabled
+func HasSafeOutputsEnabled(safeOutputs *SafeOutputsConfig) bool {
+	enabled := hasAnySafeOutputEnabled(safeOutputs)
+
+	if safeOutputsConfigLog.Enabled() {
+		safeOutputsConfigLog.Printf("Safe outputs enabled check: %v", enabled)
+	}
+
+	return enabled
+}
+
+// applyDefaultCreateIssue injects a default create-issues safe output when safe-outputs is configured
+// but has no non-builtin output types. The injected config uses the workflow ID as the label
+// and [workflowID] as the title prefix. The AutoInjectedCreateIssue flag is set so the prompt
+// generator can add a specific instruction for the agent.
+func applyDefaultCreateIssue(workflowData *WorkflowData) {
+	if workflowData.SafeOutputs == nil {
+		return
+	}
+	if hasNonBuiltinSafeOutputsEnabled(workflowData.SafeOutputs) {
+		return
+	}
+
+	workflowID := workflowData.WorkflowID
+	safeOutputsConfigLog.Printf("Auto-injecting create-issues for workflow %q (no non-builtin safe outputs configured)", workflowID)
+	workflowData.SafeOutputs.CreateIssues = &CreateIssuesConfig{
+		BaseSafeOutputConfig: BaseSafeOutputConfig{Max: defaultIntStr(1)},
+		Labels:               []string{workflowID},
+		TitlePrefix:          fmt.Sprintf("[%s]", workflowID),
+	}
+	workflowData.SafeOutputs.AutoInjectedCreateIssue = true
+}
+
+// GetEnabledSafeOutputToolNames returns a list of enabled safe output tool names.
+// NOTE: Tool names should NOT be included in agent prompts. The agent should query
+// the MCP server to discover available tools. This function is used for generating
+// the tools.json file that the MCP server provides, and for diagnostic logging.
+func GetEnabledSafeOutputToolNames(safeOutputs *SafeOutputsConfig) []string {
+	tools := getEnabledSafeOutputToolNamesReflection(safeOutputs)
+
+	if safeOutputsConfigLog.Enabled() {
+		safeOutputsConfigLog.Printf("Enabled safe output tools: %v", tools)
+	}
+
+	return tools
+}
+
+// usesPatchesAndCheckouts checks if the workflow uses safe outputs that require
+// git patches and checkouts (create-pull-request or push-to-pull-request-branch)
+func usesPatchesAndCheckouts(safeOutputs *SafeOutputsConfig) bool {
+	if safeOutputs == nil {
+		return false
+	}
+	return safeOutputs.CreatePullRequests != nil || safeOutputs.PushToPullRequestBranch != nil
+}
+
+// ========================================
+// Safe Output Tools Generation
+// ========================================
+
+// checkAllEnabledToolsPresent verifies that every tool in enabledTools has a matching entry
+// in filteredTools. This is a compiler error check: if a safe-output type is registered in
+// Go code but its definition is missing from safe-output-tools.json, it will not appear in
+// filteredTools and this function returns an error.
+//
+// Dispatch-workflow and custom-job tools are intentionally excluded from this check because
+// they are generated dynamically and are never part of the static tools JSON.
+func checkAllEnabledToolsPresent(enabledTools map[string]bool, filteredTools []map[string]any) error {
+	presentTools := make(map[string]bool, len(filteredTools))
+	for _, tool := range filteredTools {
+		if name, ok := tool["name"].(string); ok {
+			presentTools[name] = true
+		}
+	}
+
+	var missingTools []string
+	for toolName := range enabledTools {
+		if !presentTools[toolName] {
+			missingTools = append(missingTools, toolName)
+		}
+	}
+
+	if len(missingTools) == 0 {
+		return nil
+	}
+
+	// Sorted for deterministic error text across compilations.
+	sort.Strings(missingTools)
+	return fmt.Errorf("compiler error: safe-output tool(s) %v are registered but missing from safe-output-tools.json; please report this issue to the developer", missingTools)
+}
+
+// generateFilteredToolsJSON filters the ALL_TOOLS array based on enabled safe outputs
+// Returns a JSON string containing only the tools that are enabled in the workflow
+func generateFilteredToolsJSON(data *WorkflowData, markdownPath string) (string, error) {
+	if data.SafeOutputs == nil {
+		return "[]", nil
+	}
+
+	safeOutputsConfigLog.Print("Generating filtered tools JSON for workflow")
+
+	// Load the full tools JSON
+	allToolsJSON := GetSafeOutputsToolsJSON()
+
+	// Parse the JSON to get all tools
+	var allTools []map[string]any
+	if err := json.Unmarshal([]byte(allToolsJSON), &allTools); err != nil {
+		safeOutputsConfigLog.Printf("Failed to parse safe outputs tools JSON: %v", err)
+		return "", fmt.Errorf("failed to parse safe outputs tools JSON: %w", err)
+	}
+
+	// Create a set of enabled tool names.
+	// NOTE(review): this if-chain must stay in sync with the SafeOutputsConfig fields;
+	// checkAllEnabledToolsPresent below only catches tools that are enabled here but
+	// absent from the static JSON — a *missing* mapping here would silently drop a tool.
+	enabledTools := make(map[string]bool)
+
+	// Check which safe outputs are enabled and add their corresponding tool names
+	if data.SafeOutputs.CreateIssues != nil {
+		enabledTools["create_issue"] = true
+	}
+	if data.SafeOutputs.CreateAgentSessions != nil {
+		enabledTools["create_agent_session"] = true
+	}
+	if data.SafeOutputs.CreateDiscussions != nil {
+		enabledTools["create_discussion"] = true
+	}
+	if data.SafeOutputs.UpdateDiscussions != nil {
+		enabledTools["update_discussion"] = true
+	}
+	if data.SafeOutputs.CloseDiscussions != nil {
+		enabledTools["close_discussion"] = true
+	}
+	if data.SafeOutputs.CloseIssues != nil {
+		enabledTools["close_issue"] = true
+	}
+	if data.SafeOutputs.ClosePullRequests != nil {
+		enabledTools["close_pull_request"] = true
+	}
+	if data.SafeOutputs.MarkPullRequestAsReadyForReview != nil {
+		enabledTools["mark_pull_request_as_ready_for_review"] = true
+	}
+	if data.SafeOutputs.AddComments != nil {
+		enabledTools["add_comment"] = true
+	}
+	if data.SafeOutputs.CreatePullRequests != nil {
+		enabledTools["create_pull_request"] = true
+	}
+	if data.SafeOutputs.CreatePullRequestReviewComments != nil {
+		enabledTools["create_pull_request_review_comment"] = true
+	}
+	if data.SafeOutputs.SubmitPullRequestReview != nil {
+		enabledTools["submit_pull_request_review"] = true
+	}
+	if data.SafeOutputs.ReplyToPullRequestReviewComment != nil {
+		enabledTools["reply_to_pull_request_review_comment"] = true
+	}
+	if data.SafeOutputs.ResolvePullRequestReviewThread != nil {
+		enabledTools["resolve_pull_request_review_thread"] = true
+	}
+	if data.SafeOutputs.CreateCodeScanningAlerts != nil {
+		enabledTools["create_code_scanning_alert"] = true
+	}
+	if data.SafeOutputs.AutofixCodeScanningAlert != nil {
+		enabledTools["autofix_code_scanning_alert"] = true
+	}
+	if data.SafeOutputs.AddLabels != nil {
+		enabledTools["add_labels"] = true
+	}
+	if data.SafeOutputs.RemoveLabels != nil {
+		enabledTools["remove_labels"] = true
+	}
+	if data.SafeOutputs.AddReviewer != nil {
+		enabledTools["add_reviewer"] = true
+	}
+	if data.SafeOutputs.AssignMilestone != nil {
+		enabledTools["assign_milestone"] = true
+	}
+	if data.SafeOutputs.AssignToAgent != nil {
+		enabledTools["assign_to_agent"] = true
+	}
+	if data.SafeOutputs.AssignToUser != nil {
+		enabledTools["assign_to_user"] = true
+	}
+	if data.SafeOutputs.UnassignFromUser != nil {
+		enabledTools["unassign_from_user"] = true
+	}
+	if data.SafeOutputs.UpdateIssues != nil {
+		enabledTools["update_issue"] = true
+	}
+	if data.SafeOutputs.UpdatePullRequests != nil {
+		enabledTools["update_pull_request"] = true
+	}
+	if data.SafeOutputs.PushToPullRequestBranch != nil {
+		enabledTools["push_to_pull_request_branch"] = true
+	}
+	if data.SafeOutputs.UploadAssets != nil {
+		enabledTools["upload_asset"] = true
+	}
+	if data.SafeOutputs.MissingTool != nil {
+		enabledTools["missing_tool"] = true
+	}
+	if data.SafeOutputs.MissingData != nil {
+		enabledTools["missing_data"] = true
+	}
+	if data.SafeOutputs.UpdateRelease != nil {
+		enabledTools["update_release"] = true
+	}
+	if data.SafeOutputs.NoOp != nil {
+		enabledTools["noop"] = true
+	}
+	if data.SafeOutputs.LinkSubIssue != nil {
+		enabledTools["link_sub_issue"] = true
+	}
+	if data.SafeOutputs.HideComment != nil {
+		enabledTools["hide_comment"] = true
+	}
+	if data.SafeOutputs.SetIssueType != nil {
+		enabledTools["set_issue_type"] = true
+	}
+	if data.SafeOutputs.UpdateProjects != nil {
+		enabledTools["update_project"] = true
+	}
+	if data.SafeOutputs.CreateProjectStatusUpdates != nil {
+		enabledTools["create_project_status_update"] = true
+	}
+	if data.SafeOutputs.CreateProjects != nil {
+		enabledTools["create_project"] = true
+	}
+	// Note: dispatch_workflow tools are generated dynamically below, not from the static tools list
+
+	// Filter tools to only include enabled ones and enhance descriptions
+	var filteredTools []map[string]any
+	for _, tool := range allTools {
+		toolName, ok := tool["name"].(string)
+		if !ok {
+			continue
+		}
+		if enabledTools[toolName] {
+			// Create a copy of the tool to avoid modifying the original
+			enhancedTool := make(map[string]any)
+			maps.Copy(enhancedTool, tool)
+
+			// Enhance the description with configuration details
+			if description, ok := enhancedTool["description"].(string); ok {
+				enhancedDescription := enhanceToolDescription(toolName, description, data.SafeOutputs)
+				enhancedTool["description"] = enhancedDescription
+			}
+
+			// Add repo parameter to inputSchema if allowed-repos has entries
+			addRepoParameterIfNeeded(enhancedTool, toolName, data.SafeOutputs)
+
+			filteredTools = append(filteredTools, enhancedTool)
+		}
+	}
+
+	// Verify all registered safe-outputs are present in the static tools JSON.
+	// Dispatch-workflow and custom-job tools are excluded because they are generated dynamically.
+	// (This check intentionally runs BEFORE the dynamic tools are appended below.)
+	if err := checkAllEnabledToolsPresent(enabledTools, filteredTools); err != nil {
+		return "", err
+	}
+
+	// Add custom job tools from SafeOutputs.Jobs
+	if len(data.SafeOutputs.Jobs) > 0 {
+		safeOutputsConfigLog.Printf("Adding %d custom job tools", len(data.SafeOutputs.Jobs))
+
+		// Sort job names for deterministic output
+		// This ensures compiled workflows have consistent tool ordering
+		jobNames := make([]string, 0, len(data.SafeOutputs.Jobs))
+		for jobName := range data.SafeOutputs.Jobs {
+			jobNames = append(jobNames, jobName)
+		}
+		sort.Strings(jobNames)
+
+		// Iterate over jobs in sorted order
+		for _, jobName := range jobNames {
+			jobConfig := data.SafeOutputs.Jobs[jobName]
+			// Normalize job name to use underscores for consistency
+			normalizedJobName := stringutil.NormalizeSafeOutputIdentifier(jobName)
+
+			// Create the tool definition for this custom job
+			customTool := generateCustomJobToolDefinition(normalizedJobName, jobConfig)
+			filteredTools = append(filteredTools, customTool)
+		}
+	}
+
+	if safeOutputsConfigLog.Enabled() {
+		safeOutputsConfigLog.Printf("Filtered %d tools from %d total tools (including %d custom jobs)", len(filteredTools), len(allTools), len(data.SafeOutputs.Jobs))
+	}
+
+	// Add dynamic dispatch_workflow tools
+	if data.SafeOutputs.DispatchWorkflow != nil && len(data.SafeOutputs.DispatchWorkflow.Workflows) > 0 {
+		safeOutputsConfigLog.Printf("Adding %d dispatch_workflow tools", len(data.SafeOutputs.DispatchWorkflow.Workflows))
+
+		// Initialize WorkflowFiles map if not already initialized
+		if data.SafeOutputs.DispatchWorkflow.WorkflowFiles == nil {
+			data.SafeOutputs.DispatchWorkflow.WorkflowFiles = make(map[string]string)
+		}
+
+		for _, workflowName := range data.SafeOutputs.DispatchWorkflow.Workflows {
+			// Find the workflow file in multiple locations
+			fileResult, err := findWorkflowFile(workflowName, markdownPath)
+			if err != nil {
+				safeOutputsConfigLog.Printf("Warning: error finding workflow %s: %v", workflowName, err)
+				// Continue with empty inputs
+				tool := generateDispatchWorkflowTool(workflowName, make(map[string]any))
+				filteredTools = append(filteredTools, tool)
+				continue
+			}
+
+			// Determine which file to use - priority: .lock.yml > .yml
+			var workflowPath string
+			var extension string
+			if fileResult.lockExists {
+				workflowPath = fileResult.lockPath
+				extension = ".lock.yml"
+			} else if fileResult.ymlExists {
+				workflowPath = fileResult.ymlPath
+				extension = ".yml"
+			} else {
+				safeOutputsConfigLog.Printf("Warning: workflow file not found for %s (only .md exists, needs compilation)", workflowName)
+				// Continue with empty inputs
+				tool := generateDispatchWorkflowTool(workflowName, make(map[string]any))
+				filteredTools = append(filteredTools, tool)
+				continue
+			}
+
+			// Store the file extension for runtime use
+			data.SafeOutputs.DispatchWorkflow.WorkflowFiles[workflowName] = extension
+
+			// Extract workflow_dispatch inputs
+			workflowInputs, err := extractWorkflowDispatchInputs(workflowPath)
+			if err != nil {
+				safeOutputsConfigLog.Printf("Warning: failed to extract inputs for workflow %s from %s: %v", workflowName, workflowPath, err)
+				// Continue with empty inputs
+				workflowInputs = make(map[string]any)
+			}
+
+			// Generate tool schema
+			tool := generateDispatchWorkflowTool(workflowName, workflowInputs)
+			filteredTools = append(filteredTools, tool)
+		}
+	}
+
+	// Marshal the filtered tools back to JSON with indentation for better readability
+	// and to reduce merge conflicts in generated lockfiles
+	filteredJSON, err := json.MarshalIndent(filteredTools, "", "  ")
+	if err != nil {
+		safeOutputsConfigLog.Printf("Failed to marshal filtered tools: %v", err)
+		return "", fmt.Errorf("failed to marshal filtered tools: %w", err)
+	}
+
+	safeOutputsConfigLog.Printf("Successfully generated filtered tools JSON with %d tools", len(filteredTools))
+	return string(filteredJSON), nil
+}
+
+// addRepoParameterIfNeeded adds a "repo" parameter to the tool's inputSchema
+// if the safe output configuration has allowed-repos entries
+func addRepoParameterIfNeeded(tool map[string]any, toolName string, safeOutputs *SafeOutputsConfig) {
+	if safeOutputs == nil {
+		return
+	}
+
+	// Determine if this tool should have a repo parameter based on allowed-repos configuration
+	var hasAllowedRepos bool
+	var targetRepoSlug string
+
+	switch toolName {
+	case "create_issue":
+		if config := safeOutputs.CreateIssues; config != nil {
+			hasAllowedRepos = len(config.AllowedRepos) > 0
+			targetRepoSlug = config.TargetRepoSlug
+		}
+	case "create_discussion":
+		if config := safeOutputs.CreateDiscussions; config != nil {
+			hasAllowedRepos = len(config.AllowedRepos) > 0
+			targetRepoSlug = config.TargetRepoSlug
+		}
+	case "add_comment":
+		if config := safeOutputs.AddComments; config != nil {
+			hasAllowedRepos = len(config.AllowedRepos) > 0
+			targetRepoSlug = config.TargetRepoSlug
+		}
+	case "create_pull_request":
+		if config := safeOutputs.CreatePullRequests; config != nil {
+			hasAllowedRepos = len(config.AllowedRepos) > 0
+			targetRepoSlug = config.TargetRepoSlug
+		}
+	case "create_pull_request_review_comment":
+		if config := safeOutputs.CreatePullRequestReviewComments; config != nil {
+			hasAllowedRepos = len(config.AllowedRepos) > 0
+			targetRepoSlug = config.TargetRepoSlug
+		}
+	case "reply_to_pull_request_review_comment":
+		if config := safeOutputs.ReplyToPullRequestReviewComment; config != nil {
+			hasAllowedRepos = len(config.AllowedRepos) > 0
+			targetRepoSlug = config.TargetRepoSlug
+		}
+	case "create_agent_session":
if config := safeOutputs.CreateAgentSessions; config != nil {
+			hasAllowedRepos = len(config.AllowedRepos) > 0
+			targetRepoSlug = config.TargetRepoSlug
+		}
+	// NOTE(review): the paired cases below re-check toolName inside each branch,
+	// so only the matching config is ever consulted for a given tool name.
+	case "close_issue", "update_issue":
+		if config := safeOutputs.CloseIssues; config != nil && toolName == "close_issue" {
+			hasAllowedRepos = len(config.AllowedRepos) > 0
+			targetRepoSlug = config.TargetRepoSlug
+		} else if config := safeOutputs.UpdateIssues; config != nil && toolName == "update_issue" {
+			hasAllowedRepos = len(config.AllowedRepos) > 0
+			targetRepoSlug = config.TargetRepoSlug
+		}
+	case "close_discussion", "update_discussion":
+		if config := safeOutputs.CloseDiscussions; config != nil && toolName == "close_discussion" {
+			hasAllowedRepos = len(config.AllowedRepos) > 0
+			targetRepoSlug = config.TargetRepoSlug
+		} else if config := safeOutputs.UpdateDiscussions; config != nil && toolName == "update_discussion" {
+			hasAllowedRepos = len(config.AllowedRepos) > 0
+			targetRepoSlug = config.TargetRepoSlug
+		}
+	case "close_pull_request", "update_pull_request":
+		if config := safeOutputs.ClosePullRequests; config != nil && toolName == "close_pull_request" {
+			hasAllowedRepos = len(config.AllowedRepos) > 0
+			targetRepoSlug = config.TargetRepoSlug
+		} else if config := safeOutputs.UpdatePullRequests; config != nil && toolName == "update_pull_request" {
+			hasAllowedRepos = len(config.AllowedRepos) > 0
+			targetRepoSlug = config.TargetRepoSlug
+		}
+	case "add_labels", "remove_labels", "hide_comment", "link_sub_issue", "mark_pull_request_as_ready_for_review",
+		"add_reviewer", "assign_milestone", "assign_to_agent", "assign_to_user", "unassign_from_user",
+		"set_issue_type":
+		// These use SafeOutputTargetConfig - check the appropriate config
+		switch toolName {
+		case "add_labels":
+			if config := safeOutputs.AddLabels; config != nil {
+				hasAllowedRepos = len(config.AllowedRepos) > 0
+				targetRepoSlug = config.TargetRepoSlug
+			}
+		case "remove_labels":
+			if config := safeOutputs.RemoveLabels; config != nil {
+				hasAllowedRepos = len(config.AllowedRepos) > 0
+				targetRepoSlug = config.TargetRepoSlug
+			}
+		case "hide_comment":
+			if config := safeOutputs.HideComment; config != nil {
+				hasAllowedRepos = len(config.AllowedRepos) > 0
+				targetRepoSlug = config.TargetRepoSlug
+			}
+		case "link_sub_issue":
+			if config := safeOutputs.LinkSubIssue; config != nil {
+				hasAllowedRepos = len(config.AllowedRepos) > 0
+				targetRepoSlug = config.TargetRepoSlug
+			}
+		case "mark_pull_request_as_ready_for_review":
+			if config := safeOutputs.MarkPullRequestAsReadyForReview; config != nil {
+				hasAllowedRepos = len(config.AllowedRepos) > 0
+				targetRepoSlug = config.TargetRepoSlug
+			}
+		case "add_reviewer":
+			if config := safeOutputs.AddReviewer; config != nil {
+				hasAllowedRepos = len(config.AllowedRepos) > 0
+				targetRepoSlug = config.TargetRepoSlug
+			}
+		case "assign_milestone":
+			if config := safeOutputs.AssignMilestone; config != nil {
+				hasAllowedRepos = len(config.AllowedRepos) > 0
+				targetRepoSlug = config.TargetRepoSlug
+			}
+		case "assign_to_agent":
+			if config := safeOutputs.AssignToAgent; config != nil {
+				hasAllowedRepos = len(config.AllowedRepos) > 0
+				targetRepoSlug = config.TargetRepoSlug
+			}
+		case "assign_to_user":
+			if config := safeOutputs.AssignToUser; config != nil {
+				hasAllowedRepos = len(config.AllowedRepos) > 0
+				targetRepoSlug = config.TargetRepoSlug
+			}
+		case "unassign_from_user":
+			if config := safeOutputs.UnassignFromUser; config != nil {
+				hasAllowedRepos = len(config.AllowedRepos) > 0
+				targetRepoSlug = config.TargetRepoSlug
+			}
+		case "set_issue_type":
+			if config := safeOutputs.SetIssueType; config != nil {
+				hasAllowedRepos = len(config.AllowedRepos) > 0
+				targetRepoSlug = config.TargetRepoSlug
+			}
+		}
+	}
+
+	// Only add repo parameter if allowed-repos has entries or target-repo is wildcard ("*")
+	if !hasAllowedRepos && targetRepoSlug != "*" {
+		return
+	}
+
+	// Get the inputSchema
+	inputSchema, ok := tool["inputSchema"].(map[string]any)
+	if !ok {
+		return
+	}
+
+	properties, ok := inputSchema["properties"].(map[string]any)
+	if !ok {
+		return
+	}
+
+	// Build repo parameter description
+	var repoDescription string
+	if targetRepoSlug == "*" {
+		repoDescription = "Target repository for this operation in 'owner/repo' format. Any repository can be targeted."
+	} else if targetRepoSlug != "" {
+		repoDescription = fmt.Sprintf("Target repository for this operation in 'owner/repo' format. Default is %q. Must be the target-repo or in the allowed-repos list.", targetRepoSlug)
+	} else {
+		repoDescription = "Target repository for this operation in 'owner/repo' format. Must be the target-repo or in the allowed-repos list."
+	}
+
+	// Add repo parameter to properties
+	properties["repo"] = map[string]any{
+		"type":        "string",
+		"description": repoDescription,
+	}
+
+	safeOutputsConfigLog.Printf("Added repo parameter to tool: %s (has allowed-repos or wildcard target-repo)", toolName)
+}
+
+// generateDispatchWorkflowTool generates an MCP tool definition for a specific workflow
+// The tool will be named after the workflow and accept the workflow's defined inputs
+func generateDispatchWorkflowTool(workflowName string, workflowInputs map[string]any) map[string]any {
+	// Normalize workflow name to use underscores for tool name
+	toolName := stringutil.NormalizeSafeOutputIdentifier(workflowName)
+
+	// Build the description
+	description := fmt.Sprintf("Dispatch the '%s' workflow with workflow_dispatch trigger. 
This workflow must support workflow_dispatch and be in .github/workflows/ directory in the same repository.", workflowName)
+
+	// Build input schema properties
+	properties := make(map[string]any)
+	required := []string{} // No required fields by default
+
+	// Convert GitHub Actions workflow_dispatch inputs to MCP tool schema
+	for inputName, inputDef := range workflowInputs {
+		inputDefMap, ok := inputDef.(map[string]any)
+		if !ok {
+			continue
+		}
+
+		// Extract input properties
+		inputType := "string" // Default type
+		inputDescription := fmt.Sprintf("Input parameter '%s' for workflow %s", inputName, workflowName)
+		inputRequired := false
+
+		if desc, ok := inputDefMap["description"].(string); ok && desc != "" {
+			inputDescription = desc
+		}
+
+		if req, ok := inputDefMap["required"].(bool); ok {
+			inputRequired = req
+		}
+
+		// GitHub Actions workflow_dispatch supports: string, number, boolean, choice, environment
+		// Map these to JSON schema types
+		if typeStr, ok := inputDefMap["type"].(string); ok {
+			switch typeStr {
+			case "number":
+				inputType = "number"
+			case "boolean":
+				inputType = "boolean"
+			case "choice":
+				inputType = "string"
+				// Add enum if options are provided
+				if options, ok := inputDefMap["options"].([]any); ok && len(options) > 0 {
+					properties[inputName] = map[string]any{
+						"type":        inputType,
+						"description": inputDescription,
+						"enum":        options,
+					}
+					// BUG FIX: the `continue` below skips the shared default-value
+					// handling at the bottom of the loop, which previously dropped
+					// the `default` of choice inputs. Apply it here before continuing,
+					// since workflow_dispatch choice inputs may declare a default.
+					if defaultVal, ok := inputDefMap["default"]; ok {
+						properties[inputName].(map[string]any)["default"] = defaultVal
+					}
+					if inputRequired {
+						required = append(required, inputName)
+					}
+					continue
+				}
+			case "environment":
+				inputType = "string"
+			}
+		}
+
+		properties[inputName] = map[string]any{
+			"type":        inputType,
+			"description": inputDescription,
+		}
+
+		// Add default value if provided
+		if defaultVal, ok := inputDefMap["default"]; ok {
+			properties[inputName].(map[string]any)["default"] = defaultVal
+		}
+
+		if inputRequired {
+			required = append(required, inputName)
+		}
+	}
+
+	// Add internal workflow_name parameter (hidden from description but used internally)
+	// This will be injected by the safe 
output handler
+
+	// Build the complete tool definition
+	tool := map[string]any{
+		"name":           toolName,
+		"description":    description,
+		"_workflow_name": workflowName, // Internal metadata for handler routing
+		"inputSchema": map[string]any{
+			"type":                 "object",
+			"properties":           properties,
+			"additionalProperties": false,
+		},
+	}
+
+	if len(required) > 0 {
+		// Sorted for deterministic schema output across compilations.
+		sort.Strings(required)
+		tool["inputSchema"].(map[string]any)["required"] = required
+	}
+
+	return tool
+}
diff --git a/pkg/workflow/safe_outputs_jobs.go b/pkg/workflow/safe_outputs_jobs.go
index 69e8e60cfbd..224a904147f 100644
--- a/pkg/workflow/safe_outputs_jobs.go
+++ b/pkg/workflow/safe_outputs_jobs.go
@@ -1,6 +1,12 @@
 package workflow
 
 import (
+	"fmt"
+	"slices"
+	"strconv"
+	"strings"
+
+	"github.com/github/gh-aw/pkg/constants"
 	"github.com/github/gh-aw/pkg/logger"
 )
 
@@ -143,3 +149,883 @@ func (c *Compiler) buildSafeOutputJob(data *WorkflowData, config SafeOutputJobCo
 
 	return job, nil
 }
+
+var safeOutputsPermissionsLog = logger.New("workflow:safe_outputs_permissions")
+
+// oidcVaultActions is the list of known GitHub Actions that require id-token: write
+// to authenticate with secret vaults or cloud providers via OIDC (OpenID Connect).
+// Inclusion criteria: actions that use the GitHub OIDC token to authenticate to
+// external cloud providers or secret management systems. Add new entries when
+// a well-known action is identified that exchanges an OIDC JWT for cloud credentials.
+var oidcVaultActions = []string{
+	"aws-actions/configure-aws-credentials", // AWS OIDC / Secrets Manager
+	"azure/login",                           // Azure Key Vault / OIDC
+	"google-github-actions/auth",            // GCP Secret Manager / OIDC
+	"hashicorp/vault-action",                // HashiCorp Vault
+	"cyberark/conjur-action",                // CyberArk Conjur
+}
+
+// stepsRequireIDToken returns true if any of the provided steps use a known
+// OIDC/secret-vault action that requires the id-token: write permission.
+func stepsRequireIDToken(steps []any) bool {
+	// NOTE(review): assumes each step is the raw unmarshalled YAML map of a
+	// user-provided step — confirm the shape at the call site.
+	for _, step := range steps {
+		stepMap, ok := step.(map[string]any)
+		if !ok {
+			continue
+		}
+		uses, ok := stepMap["uses"].(string)
+		if !ok || uses == "" {
+			continue
+		}
+		// Strip the @version suffix before matching
+		actionRef, _, _ := strings.Cut(uses, "@")
+		if slices.Contains(oidcVaultActions, actionRef) {
+			return true
+		}
+	}
+	return false
+}
+
+// ComputePermissionsForSafeOutputs computes the minimal required permissions
+// based on the configured safe-outputs. This function is used by both the
+// consolidated safe outputs job and the conclusion job to ensure they only
+// request the permissions they actually need.
+//
+// This implements the principle of least privilege by only including
+// permissions that are required by the configured safe outputs.
+func ComputePermissionsForSafeOutputs(safeOutputs *SafeOutputsConfig) *Permissions {
+	if safeOutputs == nil {
+		safeOutputsPermissionsLog.Print("No safe outputs configured, returning empty permissions")
+		return NewPermissions()
+	}
+
+	permissions := NewPermissions()
+
+	// Merge permissions for all handler-managed types
+	if safeOutputs.CreateIssues != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for create-issue")
+		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
+	}
+	if safeOutputs.CreateDiscussions != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for create-discussion")
+		permissions.Merge(NewPermissionsContentsReadIssuesWriteDiscussionsWrite())
+	}
+	if safeOutputs.AddComments != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for add-comment")
+		permissions.Merge(buildAddCommentPermissions(safeOutputs.AddComments))
+	}
+	if safeOutputs.CloseIssues != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for close-issue")
+		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
+	}
+	if safeOutputs.CloseDiscussions != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for close-discussion")
+		permissions.Merge(NewPermissionsContentsReadDiscussionsWrite())
+	}
+	if safeOutputs.AddLabels != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for add-labels")
+		permissions.Merge(NewPermissionsContentsReadIssuesWritePRWrite())
+	}
+	if safeOutputs.RemoveLabels != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for remove-labels")
+		permissions.Merge(NewPermissionsContentsReadIssuesWritePRWrite())
+	}
+	if safeOutputs.UpdateIssues != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for update-issue")
+		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
+	}
+	if safeOutputs.UpdateDiscussions != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for update-discussion")
+		permissions.Merge(NewPermissionsContentsReadDiscussionsWrite())
+	}
+	if safeOutputs.LinkSubIssue != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for link-sub-issue")
+		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
+	}
+	if safeOutputs.UpdateRelease != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for update-release")
+		permissions.Merge(NewPermissionsContentsWrite())
+	}
+	if safeOutputs.CreatePullRequestReviewComments != nil || safeOutputs.SubmitPullRequestReview != nil ||
+		safeOutputs.ReplyToPullRequestReviewComment != nil || safeOutputs.ResolvePullRequestReviewThread != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for PR review operations")
+		permissions.Merge(NewPermissionsContentsReadPRWrite())
+	}
+	if safeOutputs.CreatePullRequests != nil {
+		// Check fallback-as-issue setting to determine permissions
+		if getFallbackAsIssue(safeOutputs.CreatePullRequests) {
+			safeOutputsPermissionsLog.Print("Adding permissions for create-pull-request with fallback-as-issue")
+			permissions.Merge(NewPermissionsContentsWriteIssuesWritePRWrite())
+		} else {
+			safeOutputsPermissionsLog.Print("Adding permissions for create-pull-request")
+			permissions.Merge(NewPermissionsContentsWritePRWrite())
+		}
+	}
+	if safeOutputs.PushToPullRequestBranch != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for push-to-pull-request-branch")
+		permissions.Merge(NewPermissionsContentsWritePRWrite())
+	}
+	if safeOutputs.UpdatePullRequests != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for update-pull-request")
+		permissions.Merge(NewPermissionsContentsReadPRWrite())
+	}
+	if safeOutputs.ClosePullRequests != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for close-pull-request")
+		permissions.Merge(NewPermissionsContentsReadPRWrite())
+	}
+	if safeOutputs.MarkPullRequestAsReadyForReview != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for mark-pull-request-as-ready-for-review")
+		permissions.Merge(NewPermissionsContentsReadPRWrite())
+	}
+	if safeOutputs.HideComment != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for hide-comment")
+		// Check if discussions permission should be excluded (discussions: false)
+		// Default (nil or true) includes discussions:write for GitHub Apps with Discussions permission
+		// Note: Hiding comments (issue/PR/discussion) only needs issues:write, not pull_requests:write
+		if safeOutputs.HideComment.Discussions != nil && !*safeOutputs.HideComment.Discussions {
+			permissions.Merge(NewPermissionsContentsReadIssuesWrite())
+		} else {
+			permissions.Merge(NewPermissionsContentsReadIssuesWriteDiscussionsWrite())
+		}
+	}
+	if safeOutputs.DispatchWorkflow != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for dispatch-workflow")
+		permissions.Merge(NewPermissionsActionsWrite())
+	}
+	// Project-related types
+	if safeOutputs.CreateProjects != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for create-project")
+		permissions.Merge(NewPermissionsContentsReadProjectsWrite())
+	}
+	if safeOutputs.UpdateProjects != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for update-project")
+		permissions.Merge(NewPermissionsContentsReadProjectsWrite())
+	}
+	if safeOutputs.CreateProjectStatusUpdates != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for create-project-status-update")
+		permissions.Merge(NewPermissionsContentsReadProjectsWrite())
+	}
+	if safeOutputs.AssignToAgent != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for assign-to-agent")
+		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
+	}
+	if safeOutputs.CreateAgentSessions != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for create-agent-session")
+		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
+	}
+	if safeOutputs.CreateCodeScanningAlerts != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for create-code-scanning-alert")
+		permissions.Merge(NewPermissionsContentsReadSecurityEventsWrite())
+	}
+	if safeOutputs.AutofixCodeScanningAlert != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for autofix-code-scanning-alert")
+		permissions.Merge(NewPermissionsContentsReadSecurityEventsWriteActionsRead())
+	}
+	if safeOutputs.AssignToUser != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for assign-to-user")
+		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
+	}
+	if safeOutputs.UnassignFromUser != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for unassign-from-user")
+		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
+	}
+	if safeOutputs.AssignMilestone != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for assign-milestone")
+		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
+	}
+	if safeOutputs.SetIssueType != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for set-issue-type")
+		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
+	}
+	if safeOutputs.AddReviewer != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for add-reviewer")
+		permissions.Merge(NewPermissionsContentsReadPRWrite())
+	}
+	if safeOutputs.UploadAssets != nil {
+		safeOutputsPermissionsLog.Print("Adding permissions for upload-asset")
+		permissions.Merge(NewPermissionsContentsWrite())
+	}
+
+	// NoOp and MissingTool don't require write permissions beyond what's already included
+	// They only need to comment if add-comment is already configured
+
+	// Handle id-token permission for OIDC/secret vault actions in user-provided steps.
+	// Explicit "none" disables auto-detection; explicit "write" always adds it;
+	// otherwise auto-detect from the steps list.
+	if safeOutputs.IDToken != nil && *safeOutputs.IDToken == "none" {
+		safeOutputsPermissionsLog.Print("id-token permission explicitly disabled (none)")
+	} else if safeOutputs.IDToken != nil && *safeOutputs.IDToken == "write" {
+		safeOutputsPermissionsLog.Print("id-token: write explicitly requested")
+		permissions.Set(PermissionIdToken, PermissionWrite)
+	} else if stepsRequireIDToken(safeOutputs.Steps) {
+		safeOutputsPermissionsLog.Print("Auto-detected OIDC/vault action in steps; adding id-token: write")
+		permissions.Set(PermissionIdToken, PermissionWrite)
+	}
+
+	safeOutputsPermissionsLog.Printf("Computed permissions with %d scopes", len(permissions.permissions))
+	return permissions
+}
+
+// SafeOutputsConfigFromKeys builds a minimal SafeOutputsConfig from a list of safe-output
+// key names (e.g. "create-issue", "add-comment"). Only the fields needed for permission
+// computation are populated. This is used by external callers (e.g. the interactive wizard)
+// that want to call ComputePermissionsForSafeOutputs without constructing a full config.
+func SafeOutputsConfigFromKeys(keys []string) *SafeOutputsConfig { + config := &SafeOutputsConfig{} + for _, key := range keys { + switch key { + case "create-issue": + config.CreateIssues = &CreateIssuesConfig{} + case "create-agent-session": + config.CreateAgentSessions = &CreateAgentSessionConfig{} + case "create-discussion": + config.CreateDiscussions = &CreateDiscussionsConfig{} + case "update-discussion": + config.UpdateDiscussions = &UpdateDiscussionsConfig{} + case "close-discussion": + config.CloseDiscussions = &CloseDiscussionsConfig{} + case "add-comment": + config.AddComments = &AddCommentsConfig{} + case "close-issue": + config.CloseIssues = &CloseIssuesConfig{} + case "close-pull-request": + config.ClosePullRequests = &ClosePullRequestsConfig{} + case "create-pull-request": + config.CreatePullRequests = &CreatePullRequestsConfig{} + case "create-pull-request-review-comment": + config.CreatePullRequestReviewComments = &CreatePullRequestReviewCommentsConfig{} + case "submit-pull-request-review": + config.SubmitPullRequestReview = &SubmitPullRequestReviewConfig{} + case "reply-to-pull-request-review-comment": + config.ReplyToPullRequestReviewComment = &ReplyToPullRequestReviewCommentConfig{} + case "resolve-pull-request-review-thread": + config.ResolvePullRequestReviewThread = &ResolvePullRequestReviewThreadConfig{} + case "create-code-scanning-alert": + config.CreateCodeScanningAlerts = &CreateCodeScanningAlertsConfig{} + case "autofix-code-scanning-alert": + config.AutofixCodeScanningAlert = &AutofixCodeScanningAlertConfig{} + case "add-labels": + config.AddLabels = &AddLabelsConfig{} + case "remove-labels": + config.RemoveLabels = &RemoveLabelsConfig{} + case "add-reviewer": + config.AddReviewer = &AddReviewerConfig{} + case "assign-milestone": + config.AssignMilestone = &AssignMilestoneConfig{} + case "assign-to-agent": + config.AssignToAgent = &AssignToAgentConfig{} + case "assign-to-user": + config.AssignToUser = &AssignToUserConfig{} + case 
"unassign-from-user": + config.UnassignFromUser = &UnassignFromUserConfig{} + case "update-issue": + config.UpdateIssues = &UpdateIssuesConfig{} + case "update-pull-request": + config.UpdatePullRequests = &UpdatePullRequestsConfig{} + case "push-to-pull-request-branch": + config.PushToPullRequestBranch = &PushToPullRequestBranchConfig{} + case "upload-asset": + config.UploadAssets = &UploadAssetsConfig{} + case "update-release": + config.UpdateRelease = &UpdateReleaseConfig{} + case "hide-comment": + config.HideComment = &HideCommentConfig{} + case "link-sub-issue": + config.LinkSubIssue = &LinkSubIssueConfig{} + case "update-project": + config.UpdateProjects = &UpdateProjectConfig{} + case "create-project": + config.CreateProjects = &CreateProjectsConfig{} + case "create-project-status-update": + config.CreateProjectStatusUpdates = &CreateProjectStatusUpdateConfig{} + case "mark-pull-request-as-ready-for-review": + config.MarkPullRequestAsReadyForReview = &MarkPullRequestAsReadyForReviewConfig{} + } + } + return config +} + +var safeOutputsStepsLog = logger.New("workflow:safe_outputs_steps") + +// ======================================== +// Safe Output Step Builders +// ======================================== + +// buildCustomActionStep creates a step that uses a custom action reference +// instead of inline JavaScript via actions/github-script +func (c *Compiler) buildCustomActionStep(data *WorkflowData, config GitHubScriptStepConfig, scriptName string) []string { + safeOutputsStepsLog.Printf("Building custom action step: %s (scriptName=%s, actionMode=%s)", config.StepName, scriptName, c.actionMode) + + var steps []string + + // Get the action path from the script registry + actionPath := DefaultScriptRegistry.GetActionPath(scriptName) + if actionPath == "" { + safeOutputsStepsLog.Printf("WARNING: No action path found for script %s, falling back to inline mode", scriptName) + // Set ScriptFile for inline mode fallback + config.ScriptFile = scriptName + ".cjs" + 
return c.buildGitHubScriptStep(data, config) + } + + // Resolve the action reference based on mode + actionRef := c.resolveActionReference(actionPath, data) + if actionRef == "" { + safeOutputsStepsLog.Printf("WARNING: Could not resolve action reference for %s, falling back to inline mode", actionPath) + // Set ScriptFile for inline mode fallback + config.ScriptFile = scriptName + ".cjs" + return c.buildGitHubScriptStep(data, config) + } + + // Add artifact download steps before the custom action step + steps = append(steps, buildAgentOutputDownloadSteps()...) + + // Step name and metadata + steps = append(steps, fmt.Sprintf(" - name: %s\n", config.StepName)) + steps = append(steps, fmt.Sprintf(" id: %s\n", config.StepID)) + steps = append(steps, fmt.Sprintf(" uses: %s\n", actionRef)) + + // Environment variables section + steps = append(steps, " env:\n") + steps = append(steps, " GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}\n") + steps = append(steps, config.CustomEnvVars...) + c.addCustomSafeOutputEnvVars(&steps, data) + + // With section for inputs (replaces github-token in actions/github-script) + steps = append(steps, " with:\n") + + // Map github-token to token input for custom actions + c.addCustomActionGitHubToken(&steps, data, config) + + return steps +} + +// addCustomActionGitHubToken adds a GitHub token as action input. 
+// The token precedence depends on the tokenType flags: +// - UseCopilotCodingAgentToken: customToken > SafeOutputs.GitHubToken > GH_AW_AGENT_TOKEN || GH_AW_GITHUB_TOKEN || GITHUB_TOKEN +// - UseCopilotRequestsToken: customToken > SafeOutputs.GitHubToken > COPILOT_GITHUB_TOKEN +// - Default: customToken > SafeOutputs.GitHubToken > GH_AW_GITHUB_TOKEN || GITHUB_TOKEN +func (c *Compiler) addCustomActionGitHubToken(steps *[]string, data *WorkflowData, config GitHubScriptStepConfig) { + var token string + + // Get safe-outputs level token + var safeOutputsToken string + if data.SafeOutputs != nil { + safeOutputsToken = data.SafeOutputs.GitHubToken + } + + // Choose the first non-empty custom token for precedence + effectiveCustomToken := config.CustomToken + if effectiveCustomToken == "" { + effectiveCustomToken = safeOutputsToken + } + + // Agent token mode: use full precedence chain for agent assignment + if config.UseCopilotCodingAgentToken { + token = getEffectiveCopilotCodingAgentGitHubToken(effectiveCustomToken) + } else if config.UseCopilotRequestsToken { + // Copilot mode: use getEffectiveCopilotRequestsToken with safe-outputs token precedence + token = getEffectiveCopilotRequestsToken(effectiveCustomToken) + } else { + // Standard mode: use safe output token chain + token = getEffectiveSafeOutputGitHubToken(effectiveCustomToken) + } + + *steps = append(*steps, fmt.Sprintf(" token: %s\n", token)) +} + +// GitHubScriptStepConfig holds configuration for building a GitHub Script step +type GitHubScriptStepConfig struct { + // Step metadata + StepName string // e.g., "Create Output Issue" + StepID string // e.g., "create_issue" + + // Main job reference for agent output + MainJobName string + + // Environment variables specific to this safe output type + // These are added after GH_AW_AGENT_OUTPUT + CustomEnvVars []string + + // JavaScript script constant to format and include (for inline mode) + Script string + + // ScriptFile is the .cjs filename to require 
(e.g., "noop.cjs") + // If empty, Script will be inlined instead + ScriptFile string + + // CustomToken configuration (passed to addSafeOutputGitHubTokenForConfig or addSafeOutputCopilotGitHubTokenForConfig) + CustomToken string + + // UseCopilotRequestsToken indicates whether to use the Copilot token preference chain + // custom token > COPILOT_GITHUB_TOKEN + // This should be true for Copilot-related operations like creating agent tasks, + // assigning copilot to issues, or adding copilot as PR reviewer + UseCopilotRequestsToken bool + + // UseCopilotCodingAgentToken indicates whether to use the agent token preference chain + // (config token > GH_AW_AGENT_TOKEN) + // This should be true for agent assignment operations (assign-to-agent) + UseCopilotCodingAgentToken bool +} + +// buildGitHubScriptStep creates a GitHub Script step with common scaffolding +// This extracts the repeated pattern found across safe output job builders +func (c *Compiler) buildGitHubScriptStep(data *WorkflowData, config GitHubScriptStepConfig) []string { + safeOutputsStepsLog.Printf("Building GitHub Script step: %s (useCopilotRequestsToken=%v, useCopilotCodingAgentToken=%v)", config.StepName, config.UseCopilotRequestsToken, config.UseCopilotCodingAgentToken) + + var steps []string + + // Add artifact download steps before the GitHub Script step + steps = append(steps, buildAgentOutputDownloadSteps()...) 
+ + // Step name and metadata + steps = append(steps, fmt.Sprintf(" - name: %s\n", config.StepName)) + steps = append(steps, fmt.Sprintf(" id: %s\n", config.StepID)) + steps = append(steps, fmt.Sprintf(" uses: %s\n", GetActionPin("actions/github-script"))) + + // Environment variables section + steps = append(steps, " env:\n") + + // Read GH_AW_AGENT_OUTPUT from environment (set by artifact download step) + // instead of directly from job outputs which may be masked by GitHub Actions + steps = append(steps, " GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}\n") + + // Add custom environment variables specific to this safe output type + steps = append(steps, config.CustomEnvVars...) + + // Add custom environment variables from safe-outputs.env + c.addCustomSafeOutputEnvVars(&steps, data) + + // With section for github-token + steps = append(steps, " with:\n") + if config.UseCopilotCodingAgentToken { + c.addSafeOutputAgentGitHubTokenForConfig(&steps, data, config.CustomToken) + } else if config.UseCopilotRequestsToken { + c.addSafeOutputCopilotGitHubTokenForConfig(&steps, data, config.CustomToken) + } else { + c.addSafeOutputGitHubTokenForConfig(&steps, data, config.CustomToken) + } + + steps = append(steps, " script: |\n") + + // Use require() if ScriptFile is specified, otherwise inline the script + if config.ScriptFile != "" { + steps = append(steps, " const { setupGlobals } = require('"+SetupActionDestination+"/setup_globals.cjs');\n") + steps = append(steps, " setupGlobals(core, github, context, exec, io);\n") + steps = append(steps, fmt.Sprintf(" const { main } = require('"+SetupActionDestination+"/%s');\n", config.ScriptFile)) + steps = append(steps, " await main();\n") + } else { + // Add the formatted JavaScript script (inline) + formattedScript := FormatJavaScriptForYAML(config.Script) + steps = append(steps, formattedScript...) 
+ } + + return steps +} + +// buildGitHubScriptStepWithoutDownload creates a GitHub Script step without artifact download steps +// This is useful when multiple script steps are needed in the same job and artifact downloads +// should only happen once at the beginning +func (c *Compiler) buildGitHubScriptStepWithoutDownload(data *WorkflowData, config GitHubScriptStepConfig) []string { + safeOutputsStepsLog.Printf("Building GitHub Script step without download: %s", config.StepName) + + var steps []string + + // Step name and metadata (no artifact download steps) + steps = append(steps, fmt.Sprintf(" - name: %s\n", config.StepName)) + steps = append(steps, fmt.Sprintf(" id: %s\n", config.StepID)) + steps = append(steps, fmt.Sprintf(" uses: %s\n", GetActionPin("actions/github-script"))) + + // Environment variables section + steps = append(steps, " env:\n") + + // Read GH_AW_AGENT_OUTPUT from environment (set by artifact download step) + // instead of directly from job outputs which may be masked by GitHub Actions + steps = append(steps, " GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}\n") + + // Add custom environment variables specific to this safe output type + steps = append(steps, config.CustomEnvVars...) 
+ + // Add custom environment variables from safe-outputs.env + c.addCustomSafeOutputEnvVars(&steps, data) + + // With section for github-token + steps = append(steps, " with:\n") + if config.UseCopilotCodingAgentToken { + c.addSafeOutputAgentGitHubTokenForConfig(&steps, data, config.CustomToken) + } else if config.UseCopilotRequestsToken { + c.addSafeOutputCopilotGitHubTokenForConfig(&steps, data, config.CustomToken) + } else { + c.addSafeOutputGitHubTokenForConfig(&steps, data, config.CustomToken) + } + + steps = append(steps, " script: |\n") + + // Use require() if ScriptFile is specified, otherwise inline the script + if config.ScriptFile != "" { + steps = append(steps, " const { setupGlobals } = require('"+SetupActionDestination+"/setup_globals.cjs');\n") + steps = append(steps, " setupGlobals(core, github, context, exec, io);\n") + steps = append(steps, fmt.Sprintf(" const { main } = require('"+SetupActionDestination+"/%s');\n", config.ScriptFile)) + steps = append(steps, " await main();\n") + } else { + // Add the formatted JavaScript script (inline) + formattedScript := FormatJavaScriptForYAML(config.Script) + steps = append(steps, formattedScript...) + } + + return steps +} + +// buildAgentOutputDownloadSteps creates steps to download the agent output artifact +// and set the GH_AW_AGENT_OUTPUT environment variable for safe-output jobs. +// GH_AW_AGENT_OUTPUT is only set when the artifact was actually downloaded successfully. 
+func buildAgentOutputDownloadSteps() []string { + return buildArtifactDownloadSteps(ArtifactDownloadConfig{ + ArtifactName: "agent-output", // Use hyphenated name without extension + ArtifactFilename: constants.AgentOutputFilename, // Filename inside the artifact directory + DownloadPath: "/tmp/gh-aw/safeoutputs/", + SetupEnvStep: true, + EnvVarName: "GH_AW_AGENT_OUTPUT", + StepName: "Download agent output artifact", + StepID: "download-agent-output", + }) +} + +var safeOutputsEnvLog = logger.New("workflow:safe_outputs_env") + +// ======================================== +// Safe Output Environment Variables +// ======================================== + +// applySafeOutputEnvToMap adds safe-output related environment variables to an env map +// This extracts the duplicated safe-output env setup logic across all engines (copilot, codex, claude, custom) +func applySafeOutputEnvToMap(env map[string]string, data *WorkflowData) { + if data.SafeOutputs == nil { + return + } + + safeOutputsEnvLog.Printf("Applying safe output env vars: trial_mode=%t, staged=%t", data.TrialMode, data.SafeOutputs.Staged) + + env["GH_AW_SAFE_OUTPUTS"] = "${{ env.GH_AW_SAFE_OUTPUTS }}" + + // Add staged flag if specified + if data.TrialMode || data.SafeOutputs.Staged { + env["GH_AW_SAFE_OUTPUTS_STAGED"] = "true" + } + if data.TrialMode && data.TrialLogicalRepo != "" { + env["GH_AW_TARGET_REPO_SLUG"] = data.TrialLogicalRepo + } + + // Add branch name if upload assets is configured + if data.SafeOutputs.UploadAssets != nil { + safeOutputsEnvLog.Printf("Adding upload assets env vars: branch=%s", data.SafeOutputs.UploadAssets.BranchName) + env["GH_AW_ASSETS_BRANCH"] = fmt.Sprintf("%q", data.SafeOutputs.UploadAssets.BranchName) + env["GH_AW_ASSETS_MAX_SIZE_KB"] = strconv.Itoa(data.SafeOutputs.UploadAssets.MaxSizeKB) + env["GH_AW_ASSETS_ALLOWED_EXTS"] = fmt.Sprintf("%q", strings.Join(data.SafeOutputs.UploadAssets.AllowedExts, ",")) + } +} + +// applySafeOutputEnvToSlice adds safe-output related 
// environment variables to a YAML string slice
// This is for engines that build YAML line-by-line (like Claude)
func applySafeOutputEnvToSlice(stepLines *[]string, workflowData *WorkflowData) {
	// Nothing to emit when safe outputs are not configured.
	if workflowData.SafeOutputs == nil {
		return
	}

	*stepLines = append(*stepLines, " GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}")

	// Add staged flag if specified (trial mode always implies staged)
	if workflowData.TrialMode || workflowData.SafeOutputs.Staged {
		*stepLines = append(*stepLines, " GH_AW_SAFE_OUTPUTS_STAGED: \"true\"")
	}
	if workflowData.TrialMode && workflowData.TrialLogicalRepo != "" {
		*stepLines = append(*stepLines, fmt.Sprintf(" GH_AW_TARGET_REPO_SLUG: %q", workflowData.TrialLogicalRepo))
	}

	// Add branch name if upload assets is configured
	if workflowData.SafeOutputs.UploadAssets != nil {
		*stepLines = append(*stepLines, fmt.Sprintf(" GH_AW_ASSETS_BRANCH: %q", workflowData.SafeOutputs.UploadAssets.BranchName))
		*stepLines = append(*stepLines, fmt.Sprintf(" GH_AW_ASSETS_MAX_SIZE_KB: %d", workflowData.SafeOutputs.UploadAssets.MaxSizeKB))
		*stepLines = append(*stepLines, fmt.Sprintf(" GH_AW_ASSETS_ALLOWED_EXTS: %q", strings.Join(workflowData.SafeOutputs.UploadAssets.AllowedExts, ","))) 
	}
}

// buildWorkflowMetadataEnvVars builds workflow name and source environment variables.
// This extracts the duplicated workflow metadata setup logic from safe-output job builders.
// Returns YAML env lines (each already indented and newline-terminated).
func buildWorkflowMetadataEnvVars(workflowName string, workflowSource string) []string {
	var customEnvVars []string

	// Add workflow name
	customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_WORKFLOW_NAME: %q\n", workflowName))

	// Add workflow source and source URL if present
	if workflowSource != "" {
		customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_WORKFLOW_SOURCE: %q\n", workflowSource))
		// buildSourceURL may return "" when no URL can be derived; skip then.
		sourceURL := buildSourceURL(workflowSource)
		if sourceURL != "" {
			customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_WORKFLOW_SOURCE_URL: %q\n", sourceURL))
		}
	}

	return customEnvVars
}

// buildWorkflowMetadataEnvVarsWithTrackerID builds workflow metadata env vars including tracker-id.
func buildWorkflowMetadataEnvVarsWithTrackerID(workflowName string, workflowSource string, trackerID string) []string {
	customEnvVars := buildWorkflowMetadataEnvVars(workflowName, workflowSource)

	// Add tracker-id if present
	if trackerID != "" {
		customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_TRACKER_ID: %q\n", trackerID))
	}

	return customEnvVars
}

// buildSafeOutputJobEnvVars builds environment variables for safe-output jobs with staged/target repo handling.
// This extracts the duplicated env setup logic in safe-output job builders (create_issue, add_comment, etc.).
func buildSafeOutputJobEnvVars(trialMode bool, trialLogicalRepoSlug string, staged bool, targetRepoSlug string) []string {
	var customEnvVars []string

	// Pass the staged flag if it's set to true (trial mode always stages).
	if trialMode || staged {
		safeOutputsEnvLog.Printf("Setting staged flag: trial_mode=%t, staged=%t", trialMode, staged)
		customEnvVars = append(customEnvVars, " GH_AW_SAFE_OUTPUTS_STAGED: \"true\"\n")
	}

	// Set GH_AW_TARGET_REPO_SLUG - prefer target-repo config over trial target repo.
	if targetRepoSlug != "" {
		safeOutputsEnvLog.Printf("Setting target repo slug from config: %s", targetRepoSlug)
		customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_TARGET_REPO_SLUG: %q\n", targetRepoSlug))
	} else if trialMode && trialLogicalRepoSlug != "" {
		safeOutputsEnvLog.Printf("Setting target repo slug from trial mode: %s", trialLogicalRepoSlug)
		customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_TARGET_REPO_SLUG: %q\n", trialLogicalRepoSlug))
	}

	return customEnvVars
}

// buildStandardSafeOutputEnvVars builds the standard set of environment variables
// that all safe-output job builders need: metadata + staged/target repo handling.
// This reduces duplication in safe-output job builders.
func (c *Compiler) buildStandardSafeOutputEnvVars(data *WorkflowData, targetRepoSlug string) []string {
	var customEnvVars []string

	// Add workflow metadata (name, source, and tracker-id)
	customEnvVars = append(customEnvVars, buildWorkflowMetadataEnvVarsWithTrackerID(data.Name, data.Source, data.TrackerID)...)

	// Add engine metadata (id, version, model) for XML comment marker
	customEnvVars = append(customEnvVars, buildEngineMetadataEnvVars(data.EngineConfig)...)

	// Add common safe output job environment variables (staged/target repo)
	customEnvVars = append(customEnvVars, buildSafeOutputJobEnvVars(
		c.trialMode,
		c.trialLogicalRepoSlug,
		data.SafeOutputs.Staged,
		targetRepoSlug,
	)...)

	// Add messages config if present. Serialization failure is non-fatal:
	// we log and simply omit the env var.
	if data.SafeOutputs.Messages != nil {
		messagesJSON, err := serializeMessagesConfig(data.SafeOutputs.Messages)
		if err != nil {
			safeOutputsEnvLog.Printf("Warning: failed to serialize messages config: %v", err)
		} else if messagesJSON != "" {
			customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_SAFE_OUTPUT_MESSAGES: %q\n", messagesJSON))
		}
	}

	return customEnvVars
}

// buildStepLevelSafeOutputEnvVars builds environment variables for consolidated safe output steps.
// This excludes variables that are already set at the job level in consolidated jobs.
func (c *Compiler) buildStepLevelSafeOutputEnvVars(data *WorkflowData, targetRepoSlug string) []string {
	var customEnvVars []string

	// Only add target repo slug if it's different from the job-level setting
	// (i.e., this step has a specific target-repo config that overrides the global trial mode target)
	if targetRepoSlug != "" {
		// Step-specific target repo overrides job-level setting
		customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_TARGET_REPO_SLUG: %q\n", targetRepoSlug))
	} else if !c.trialMode && data.SafeOutputs.Staged {
		// Step needs staged flag but there's no job-level target repo (not in trial mode)
		// Job level only sets this if trialMode is true
		customEnvVars = append(customEnvVars, " GH_AW_SAFE_OUTPUTS_STAGED: \"true\"\n")
	}

	// Note: The following are now set at job level and should NOT be included here:
	// - GH_AW_WORKFLOW_NAME
	// - GH_AW_WORKFLOW_SOURCE
	// - GH_AW_WORKFLOW_SOURCE_URL
	// - GH_AW_TRACKER_ID
	// - GH_AW_ENGINE_ID
	// - GH_AW_ENGINE_VERSION
	// - GH_AW_ENGINE_MODEL
	// - GH_AW_SAFE_OUTPUTS_STAGED (if in trial mode)
	// - GH_AW_TARGET_REPO_SLUG (if in trial mode and no step override)
	// - GH_AW_SAFE_OUTPUT_MESSAGES

	return customEnvVars
}

// buildEngineMetadataEnvVars builds engine metadata environment variables (id, version, model).
// These are used by the JavaScript footer generation to create XML comment markers for traceability.
func buildEngineMetadataEnvVars(engineConfig *EngineConfig) []string {
	var customEnvVars []string

	// No engine configured: return an empty set.
	if engineConfig == nil {
		return customEnvVars
	}

	safeOutputsEnvLog.Printf("Building engine metadata env vars: id=%s, version=%s", engineConfig.ID, engineConfig.Version)

	// Add engine ID if present
	if engineConfig.ID != "" {
		customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ENGINE_ID: %q\n", engineConfig.ID))
	}

	// Add engine version if present
	if engineConfig.Version != "" {
		customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ENGINE_VERSION: %q\n", engineConfig.Version))
	}

	// Add engine model if present
	if engineConfig.Model != "" {
		customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ENGINE_MODEL: %q\n", engineConfig.Model))
	}

	return customEnvVars
}

// ========================================
// Safe Output Environment Helpers
// ========================================

// addCustomSafeOutputEnvVars adds custom environment variables to safe output job steps.
// Values come from the user-provided safe-outputs.env map.
// NOTE(review): map iteration order is random, so emitted env line order may
// vary between compilations — presumably harmless for env blocks; verify.
func (c *Compiler) addCustomSafeOutputEnvVars(steps *[]string, data *WorkflowData) {
	if data.SafeOutputs != nil && len(data.SafeOutputs.Env) > 0 {
		for key, value := range data.SafeOutputs.Env {
			*steps = append(*steps, fmt.Sprintf(" %s: %s\n", key, value))
		}
	}
}

// addSafeOutputGitHubTokenForConfig adds github-token to the with section, preferring per-config token over global.
// Uses precedence: config token > safe-outputs global github-token > GH_AW_GITHUB_TOKEN || GITHUB_TOKEN
func (c *Compiler) addSafeOutputGitHubTokenForConfig(steps *[]string, data *WorkflowData, configToken string) {
	var safeOutputsToken string
	if data.SafeOutputs != nil {
		safeOutputsToken = data.SafeOutputs.GitHubToken
	}

	// If app is configured, use app token (short-circuits the precedence chain)
	if data.SafeOutputs != nil && data.SafeOutputs.GitHubApp != nil {
		*steps = append(*steps, " github-token: ${{ steps.safe-outputs-app-token.outputs.token }}\n")
		return
	}

	// Choose the first non-empty custom token for precedence
	effectiveCustomToken := configToken
	if effectiveCustomToken == "" {
		effectiveCustomToken = safeOutputsToken
	}

	// Get effective token
	effectiveToken := getEffectiveSafeOutputGitHubToken(effectiveCustomToken)
	*steps = append(*steps, fmt.Sprintf(" github-token: %s\n", effectiveToken))
}

// addSafeOutputCopilotGitHubTokenForConfig adds github-token to the with section for Copilot-related operations.
// Uses precedence: config token > safe-outputs global github-token > COPILOT_GITHUB_TOKEN
func (c *Compiler) addSafeOutputCopilotGitHubTokenForConfig(steps *[]string, data *WorkflowData, configToken string) {
	var safeOutputsToken string
	if data.SafeOutputs != nil {
		safeOutputsToken = data.SafeOutputs.GitHubToken
	}

	// If app is configured, use app token (short-circuits the precedence chain)
	if data.SafeOutputs != nil && data.SafeOutputs.GitHubApp != nil {
		*steps = append(*steps, " github-token: ${{ steps.safe-outputs-app-token.outputs.token }}\n")
		return
	}

	// Choose the first non-empty custom token for precedence
	effectiveCustomToken := configToken
	if effectiveCustomToken == "" {
		effectiveCustomToken = safeOutputsToken
	}

	// Get effective token
	effectiveToken := getEffectiveCopilotRequestsToken(effectiveCustomToken)
	*steps = append(*steps, fmt.Sprintf(" github-token: %s\n", effectiveToken))
}

// addSafeOutputAgentGitHubTokenForConfig adds github-token to the with section for agent assignment operations.
// Uses precedence: config token > safe-outputs token > GH_AW_AGENT_TOKEN || GH_AW_GITHUB_TOKEN || GITHUB_TOKEN
// This is specifically for assign-to-agent operations which require elevated permissions.
//
// Note: GitHub App tokens are intentionally NOT used here, even when github-app: is configured.
// The Copilot assignment API only accepts PATs (fine-grained or classic), not GitHub App
// installation tokens. Callers must provide an explicit github-token or rely on GH_AW_AGENT_TOKEN.
func (c *Compiler) addSafeOutputAgentGitHubTokenForConfig(steps *[]string, data *WorkflowData, configToken string) {
	// Get safe-outputs level token
	var safeOutputsToken string
	if data.SafeOutputs != nil {
		safeOutputsToken = data.SafeOutputs.GitHubToken
	}

	// Choose the first non-empty custom token for precedence
	effectiveCustomToken := configToken
	if effectiveCustomToken == "" {
		effectiveCustomToken = safeOutputsToken
	}

	// Get effective token - falls back to ${{ secrets.GH_AW_AGENT_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
	// when no explicit token is provided. GitHub App tokens are never used here because the
	// Copilot assignment API rejects them.
	effectiveToken := getEffectiveCopilotCodingAgentGitHubToken(effectiveCustomToken)
	*steps = append(*steps, fmt.Sprintf(" github-token: %s\n", effectiveToken))
}

// buildTitlePrefixEnvVar builds a title-prefix environment variable line for safe-output jobs.
// envVarName should be the full env var name like "GH_AW_ISSUE_TITLE_PREFIX" or "GH_AW_DISCUSSION_TITLE_PREFIX".
// Returns an empty slice if titlePrefix is empty.
func buildTitlePrefixEnvVar(envVarName string, titlePrefix string) []string {
	// An empty prefix yields no env line at all.
	if titlePrefix == "" {
		return nil
	}
	line := fmt.Sprintf(" %s: %q\n", envVarName, titlePrefix)
	return []string{line}
}

// buildLabelsEnvVar builds a labels environment variable line for safe-output
// jobs. envVarName should be the full env var name like "GH_AW_ISSUE_LABELS"
// or "GH_AW_PR_LABELS"; the labels are comma-joined into a single quoted
// value. Returns an empty slice if labels is empty.
func buildLabelsEnvVar(envVarName string, labels []string) []string {
	if len(labels) == 0 {
		return nil
	}
	joined := strings.Join(labels, ",")
	return []string{fmt.Sprintf(" %s: %q\n", envVarName, joined)}
}

// buildCategoryEnvVar builds a category environment variable line for
// discussion safe-output jobs. envVarName should be the full env var name
// like "GH_AW_DISCUSSION_CATEGORY". Returns an empty slice if category is
// empty.
func buildCategoryEnvVar(envVarName string, category string) []string {
	if category == "" {
		return nil
	}
	line := fmt.Sprintf(" %s: %q\n", envVarName, category)
	return []string{line}
}

// buildAllowedReposEnvVar builds an allowed-repos environment variable line for safe-output jobs.
// envVarName should be the full env var name like "GH_AW_ALLOWED_REPOS".
// Returns an empty slice if allowedRepos is empty.
+func buildAllowedReposEnvVar(envVarName string, allowedRepos []string) []string { + if len(allowedRepos) == 0 { + return nil + } + reposStr := strings.Join(allowedRepos, ",") + return []string{fmt.Sprintf(" %s: %q\n", envVarName, reposStr)} +} diff --git a/pkg/workflow/safe_outputs_permissions.go b/pkg/workflow/safe_outputs_permissions.go deleted file mode 100644 index 833eea6493c..00000000000 --- a/pkg/workflow/safe_outputs_permissions.go +++ /dev/null @@ -1,303 +0,0 @@ -package workflow - -import ( - "slices" - "strings" - - "github.com/github/gh-aw/pkg/logger" -) - -var safeOutputsPermissionsLog = logger.New("workflow:safe_outputs_permissions") - -// oidcVaultActions is the list of known GitHub Actions that require id-token: write -// to authenticate with secret vaults or cloud providers via OIDC (OpenID Connect). -// Inclusion criteria: actions that use the GitHub OIDC token to authenticate to -// external cloud providers or secret management systems. Add new entries when -// a well-known action is identified that exchanges an OIDC JWT for cloud credentials. -var oidcVaultActions = []string{ - "aws-actions/configure-aws-credentials", // AWS OIDC / Secrets Manager - "azure/login", // Azure Key Vault / OIDC - "google-github-actions/auth", // GCP Secret Manager / OIDC - "hashicorp/vault-action", // HashiCorp Vault - "cyberark/conjur-action", // CyberArk Conjur -} - -// stepsRequireIDToken returns true if any of the provided steps use a known -// OIDC/secret-vault action that requires the id-token: write permission. 
// stepsRequireIDToken returns true if any of the provided steps use a known
// OIDC/secret-vault action that requires the id-token: write permission.
// Each step is expected to be a YAML mapping with a "uses:" key; any other
// value shape is skipped rather than treated as an error.
func stepsRequireIDToken(steps []any) bool {
	for _, step := range steps {
		// Non-mapping steps cannot carry a "uses:" key.
		stepMap, ok := step.(map[string]any)
		if !ok {
			continue
		}
		uses, ok := stepMap["uses"].(string)
		if !ok || uses == "" {
			continue
		}
		// Strip the @version suffix before matching
		actionRef, _, _ := strings.Cut(uses, "@")
		if slices.Contains(oidcVaultActions, actionRef) {
			return true
		}
	}
	return false
}

// ComputePermissionsForSafeOutputs computes the minimal required permissions
// based on the configured safe-outputs. This function is used by both the
// consolidated safe outputs job and the conclusion job to ensure they only
// request the permissions they actually need.
//
// This implements the principle of least privilege by only including
// permissions that are required by the configured safe outputs. Each enabled
// safe-output type merges its own scope set into the result; merging is
// additive, so broader scopes win when several outputs overlap.
func ComputePermissionsForSafeOutputs(safeOutputs *SafeOutputsConfig) *Permissions {
	if safeOutputs == nil {
		safeOutputsPermissionsLog.Print("No safe outputs configured, returning empty permissions")
		return NewPermissions()
	}

	permissions := NewPermissions()

	// Merge permissions for all handler-managed types
	if safeOutputs.CreateIssues != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for create-issue")
		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
	}
	if safeOutputs.CreateDiscussions != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for create-discussion")
		permissions.Merge(NewPermissionsContentsReadIssuesWriteDiscussionsWrite())
	}
	if safeOutputs.AddComments != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for add-comment")
		// add-comment permissions depend on the comment targets, so they are
		// computed by a dedicated builder rather than a fixed constructor.
		permissions.Merge(buildAddCommentPermissions(safeOutputs.AddComments))
	}
	if safeOutputs.CloseIssues != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for close-issue")
		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
	}
	if safeOutputs.CloseDiscussions != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for close-discussion")
		permissions.Merge(NewPermissionsContentsReadDiscussionsWrite())
	}
	if safeOutputs.AddLabels != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for add-labels")
		permissions.Merge(NewPermissionsContentsReadIssuesWritePRWrite())
	}
	if safeOutputs.RemoveLabels != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for remove-labels")
		permissions.Merge(NewPermissionsContentsReadIssuesWritePRWrite())
	}
	if safeOutputs.UpdateIssues != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for update-issue")
		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
	}
	if safeOutputs.UpdateDiscussions != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for update-discussion")
		permissions.Merge(NewPermissionsContentsReadDiscussionsWrite())
	}
	if safeOutputs.LinkSubIssue != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for link-sub-issue")
		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
	}
	if safeOutputs.UpdateRelease != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for update-release")
		permissions.Merge(NewPermissionsContentsWrite())
	}
	// All four PR-review operations share the same scope set, so one merged
	// entry covers whichever of them is configured.
	if safeOutputs.CreatePullRequestReviewComments != nil || safeOutputs.SubmitPullRequestReview != nil ||
		safeOutputs.ReplyToPullRequestReviewComment != nil || safeOutputs.ResolvePullRequestReviewThread != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for PR review operations")
		permissions.Merge(NewPermissionsContentsReadPRWrite())
	}
	if safeOutputs.CreatePullRequests != nil {
		// Check fallback-as-issue setting to determine permissions
		if getFallbackAsIssue(safeOutputs.CreatePullRequests) {
			safeOutputsPermissionsLog.Print("Adding permissions for create-pull-request with fallback-as-issue")
			permissions.Merge(NewPermissionsContentsWriteIssuesWritePRWrite())
		} else {
			safeOutputsPermissionsLog.Print("Adding permissions for create-pull-request")
			permissions.Merge(NewPermissionsContentsWritePRWrite())
		}
	}
	if safeOutputs.PushToPullRequestBranch != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for push-to-pull-request-branch")
		permissions.Merge(NewPermissionsContentsWritePRWrite())
	}
	if safeOutputs.UpdatePullRequests != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for update-pull-request")
		permissions.Merge(NewPermissionsContentsReadPRWrite())
	}
	if safeOutputs.ClosePullRequests != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for close-pull-request")
		permissions.Merge(NewPermissionsContentsReadPRWrite())
	}
	if safeOutputs.MarkPullRequestAsReadyForReview != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for mark-pull-request-as-ready-for-review")
		permissions.Merge(NewPermissionsContentsReadPRWrite())
	}
	if safeOutputs.HideComment != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for hide-comment")
		// Check if discussions permission should be excluded (discussions: false)
		// Default (nil or true) includes discussions:write for GitHub Apps with Discussions permission
		// Note: Hiding comments (issue/PR/discussion) only needs issues:write, not pull_requests:write
		if safeOutputs.HideComment.Discussions != nil && !*safeOutputs.HideComment.Discussions {
			permissions.Merge(NewPermissionsContentsReadIssuesWrite())
		} else {
			permissions.Merge(NewPermissionsContentsReadIssuesWriteDiscussionsWrite())
		}
	}
	if safeOutputs.DispatchWorkflow != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for dispatch-workflow")
		permissions.Merge(NewPermissionsActionsWrite())
	}
	// Project-related types
	if safeOutputs.CreateProjects != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for create-project")
		permissions.Merge(NewPermissionsContentsReadProjectsWrite())
	}
	if safeOutputs.UpdateProjects != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for update-project")
		permissions.Merge(NewPermissionsContentsReadProjectsWrite())
	}
	if safeOutputs.CreateProjectStatusUpdates != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for create-project-status-update")
		permissions.Merge(NewPermissionsContentsReadProjectsWrite())
	}
	if safeOutputs.AssignToAgent != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for assign-to-agent")
		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
	}
	if safeOutputs.CreateAgentSessions != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for create-agent-session")
		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
	}
	if safeOutputs.CreateCodeScanningAlerts != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for create-code-scanning-alert")
		permissions.Merge(NewPermissionsContentsReadSecurityEventsWrite())
	}
	if safeOutputs.AutofixCodeScanningAlert != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for autofix-code-scanning-alert")
		permissions.Merge(NewPermissionsContentsReadSecurityEventsWriteActionsRead())
	}
	if safeOutputs.AssignToUser != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for assign-to-user")
		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
	}
	if safeOutputs.UnassignFromUser != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for unassign-from-user")
		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
	}
	if safeOutputs.AssignMilestone != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for assign-milestone")
		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
	}
	if safeOutputs.SetIssueType != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for set-issue-type")
		permissions.Merge(NewPermissionsContentsReadIssuesWrite())
	}
	if safeOutputs.AddReviewer != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for add-reviewer")
		permissions.Merge(NewPermissionsContentsReadPRWrite())
	}
	if safeOutputs.UploadAssets != nil {
		safeOutputsPermissionsLog.Print("Adding permissions for upload-asset")
		permissions.Merge(NewPermissionsContentsWrite())
	}

	// NoOp and MissingTool don't require write permissions beyond what's already included
	// They only need to comment if add-comment is already configured

	// Handle id-token permission for OIDC/secret vault actions in user-provided steps.
	// Explicit "none" disables auto-detection; explicit "write" always adds it;
	// otherwise auto-detect from the steps list.
	if safeOutputs.IDToken != nil && *safeOutputs.IDToken == "none" {
		safeOutputsPermissionsLog.Print("id-token permission explicitly disabled (none)")
	} else if safeOutputs.IDToken != nil && *safeOutputs.IDToken == "write" {
		safeOutputsPermissionsLog.Print("id-token: write explicitly requested")
		permissions.Set(PermissionIdToken, PermissionWrite)
	} else if stepsRequireIDToken(safeOutputs.Steps) {
		safeOutputsPermissionsLog.Print("Auto-detected OIDC/vault action in steps; adding id-token: write")
		permissions.Set(PermissionIdToken, PermissionWrite)
	}

	safeOutputsPermissionsLog.Printf("Computed permissions with %d scopes", len(permissions.permissions))
	return permissions
}

// SafeOutputsConfigFromKeys builds a minimal SafeOutputsConfig from a list of safe-output
// key names (e.g. "create-issue", "add-comment"). Only the fields needed for permission
// computation are populated. This is used by external callers (e.g. the interactive wizard)
// that want to call ComputePermissionsForSafeOutputs without constructing a full config.
func SafeOutputsConfigFromKeys(keys []string) *SafeOutputsConfig {
	config := &SafeOutputsConfig{}
	for _, key := range keys {
		// Each recognized key enables the corresponding safe-output config with
		// its zero value; unrecognized keys are silently ignored.
		switch key {
		case "create-issue":
			config.CreateIssues = &CreateIssuesConfig{}
		case "create-agent-session":
			config.CreateAgentSessions = &CreateAgentSessionConfig{}
		case "create-discussion":
			config.CreateDiscussions = &CreateDiscussionsConfig{}
		case "update-discussion":
			config.UpdateDiscussions = &UpdateDiscussionsConfig{}
		case "close-discussion":
			config.CloseDiscussions = &CloseDiscussionsConfig{}
		case "add-comment":
			config.AddComments = &AddCommentsConfig{}
		case "close-issue":
			config.CloseIssues = &CloseIssuesConfig{}
		case "close-pull-request":
			config.ClosePullRequests = &ClosePullRequestsConfig{}
		case "create-pull-request":
			config.CreatePullRequests = &CreatePullRequestsConfig{}
		case "create-pull-request-review-comment":
			config.CreatePullRequestReviewComments = &CreatePullRequestReviewCommentsConfig{}
		case "submit-pull-request-review":
			config.SubmitPullRequestReview = &SubmitPullRequestReviewConfig{}
		case "reply-to-pull-request-review-comment":
			config.ReplyToPullRequestReviewComment = &ReplyToPullRequestReviewCommentConfig{}
		case "resolve-pull-request-review-thread":
			config.ResolvePullRequestReviewThread = &ResolvePullRequestReviewThreadConfig{}
		case "create-code-scanning-alert":
			config.CreateCodeScanningAlerts = &CreateCodeScanningAlertsConfig{}
		case "autofix-code-scanning-alert":
			config.AutofixCodeScanningAlert = &AutofixCodeScanningAlertConfig{}
		case "add-labels":
			config.AddLabels = &AddLabelsConfig{}
		case "remove-labels":
			config.RemoveLabels = &RemoveLabelsConfig{}
		case "add-reviewer":
			config.AddReviewer = &AddReviewerConfig{}
		case "assign-milestone":
			config.AssignMilestone = &AssignMilestoneConfig{}
		case "assign-to-agent":
			config.AssignToAgent = &AssignToAgentConfig{}
		case "assign-to-user":
			config.AssignToUser = &AssignToUserConfig{}
		case "unassign-from-user":
			config.UnassignFromUser = &UnassignFromUserConfig{}
		case "update-issue":
			config.UpdateIssues = &UpdateIssuesConfig{}
		case "update-pull-request":
			config.UpdatePullRequests = &UpdatePullRequestsConfig{}
		case "push-to-pull-request-branch":
			config.PushToPullRequestBranch = &PushToPullRequestBranchConfig{}
		case "upload-asset":
			config.UploadAssets = &UploadAssetsConfig{}
		case "update-release":
			config.UpdateRelease = &UpdateReleaseConfig{}
		case "hide-comment":
			config.HideComment = &HideCommentConfig{}
		case "link-sub-issue":
			config.LinkSubIssue = &LinkSubIssueConfig{}
		case "update-project":
			config.UpdateProjects = &UpdateProjectConfig{}
		case "create-project":
			config.CreateProjects = &CreateProjectsConfig{}
		case "create-project-status-update":
			config.CreateProjectStatusUpdates = &CreateProjectStatusUpdateConfig{}
		case "mark-pull-request-as-ready-for-review":
			config.MarkPullRequestAsReadyForReview = &MarkPullRequestAsReadyForReviewConfig{}
		}
	}
	return config
}
diff --git a/pkg/workflow/safe_outputs_steps.go b/pkg/workflow/safe_outputs_steps.go
deleted file mode 100644
index 33cfd0a1985..00000000000
--- a/pkg/workflow/safe_outputs_steps.go
+++ /dev/null
@@ -1,254 +0,0 @@
package workflow

import (
	"fmt"

	"github.com/github/gh-aw/pkg/constants"
	"github.com/github/gh-aw/pkg/logger"
)

var safeOutputsStepsLog = logger.New("workflow:safe_outputs_steps")

// ========================================
// Safe Output Step Builders
// ========================================
//
// NOTE(review): the leading indentation inside the YAML string literals below
// may have been collapsed in transit — verify the exact whitespace against the
// original file before relying on it.

// buildCustomActionStep creates a step that uses a custom action reference
// instead of inline JavaScript via actions/github-script
func (c *Compiler) buildCustomActionStep(data *WorkflowData, config GitHubScriptStepConfig, scriptName string) []string {
	safeOutputsStepsLog.Printf("Building custom action step: %s (scriptName=%s, actionMode=%s)", config.StepName, scriptName, c.actionMode)

	var steps []string

	// Get the action path from the script registry
	actionPath := DefaultScriptRegistry.GetActionPath(scriptName)
	if actionPath == "" {
		safeOutputsStepsLog.Printf("WARNING: No action path found for script %s, falling back to inline mode", scriptName)
		// Set ScriptFile for inline mode fallback
		config.ScriptFile = scriptName + ".cjs"
		return c.buildGitHubScriptStep(data, config)
	}

	// Resolve the action reference based on mode
	actionRef := c.resolveActionReference(actionPath, data)
	if actionRef == "" {
		safeOutputsStepsLog.Printf("WARNING: Could not resolve action reference for %s, falling back to inline mode", actionPath)
		// Set ScriptFile for inline mode fallback
		config.ScriptFile = scriptName + ".cjs"
		return c.buildGitHubScriptStep(data, config)
	}

	// Add artifact download steps before the custom action step
	steps = append(steps, buildAgentOutputDownloadSteps()...)

	// Step name and metadata
	steps = append(steps, fmt.Sprintf(" - name: %s\n", config.StepName))
	steps = append(steps, fmt.Sprintf(" id: %s\n", config.StepID))
	steps = append(steps, fmt.Sprintf(" uses: %s\n", actionRef))

	// Environment variables section
	steps = append(steps, " env:\n")
	steps = append(steps, " GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}\n")
	steps = append(steps, config.CustomEnvVars...)
	c.addCustomSafeOutputEnvVars(&steps, data)

	// With section for inputs (replaces github-token in actions/github-script)
	steps = append(steps, " with:\n")

	// Map github-token to token input for custom actions
	c.addCustomActionGitHubToken(&steps, data, config)

	return steps
}

// addCustomActionGitHubToken adds a GitHub token as action input.
// The token precedence depends on the tokenType flags:
//   - UseCopilotCodingAgentToken: customToken > SafeOutputs.GitHubToken > GH_AW_AGENT_TOKEN || GH_AW_GITHUB_TOKEN || GITHUB_TOKEN
//   - UseCopilotRequestsToken: customToken > SafeOutputs.GitHubToken > COPILOT_GITHUB_TOKEN
//   - Default: customToken > SafeOutputs.GitHubToken > GH_AW_GITHUB_TOKEN || GITHUB_TOKEN
func (c *Compiler) addCustomActionGitHubToken(steps *[]string, data *WorkflowData, config GitHubScriptStepConfig) {
	var token string

	// Get safe-outputs level token
	var safeOutputsToken string
	if data.SafeOutputs != nil {
		safeOutputsToken = data.SafeOutputs.GitHubToken
	}

	// Choose the first non-empty custom token for precedence
	effectiveCustomToken := config.CustomToken
	if effectiveCustomToken == "" {
		effectiveCustomToken = safeOutputsToken
	}

	// Agent token mode: use full precedence chain for agent assignment
	if config.UseCopilotCodingAgentToken {
		token = getEffectiveCopilotCodingAgentGitHubToken(effectiveCustomToken)
	} else if config.UseCopilotRequestsToken {
		// Copilot mode: use getEffectiveCopilotRequestsToken with safe-outputs token precedence
		token = getEffectiveCopilotRequestsToken(effectiveCustomToken)
	} else {
		// Standard mode: use safe output token chain
		token = getEffectiveSafeOutputGitHubToken(effectiveCustomToken)
	}

	*steps = append(*steps, fmt.Sprintf(" token: %s\n", token))
}

// GitHubScriptStepConfig holds configuration for building a GitHub Script step
type GitHubScriptStepConfig struct {
	// Step metadata
	StepName string // e.g., "Create Output Issue"
	StepID   string // e.g., "create_issue"

	// Main job reference for agent output
	MainJobName string

	// Environment variables specific to this safe output type
	// These are added after GH_AW_AGENT_OUTPUT
	CustomEnvVars []string

	// JavaScript script constant to format and include (for inline mode)
	Script string

	// ScriptFile is the .cjs filename to require (e.g., "noop.cjs")
	// If empty, Script will be inlined instead
	ScriptFile string

	// CustomToken configuration (passed to addSafeOutputGitHubTokenForConfig or addSafeOutputCopilotGitHubTokenForConfig)
	CustomToken string

	// UseCopilotRequestsToken indicates whether to use the Copilot token preference chain
	// custom token > COPILOT_GITHUB_TOKEN
	// This should be true for Copilot-related operations like creating agent tasks,
	// assigning copilot to issues, or adding copilot as PR reviewer
	UseCopilotRequestsToken bool

	// UseCopilotCodingAgentToken indicates whether to use the agent token preference chain
	// (config token > GH_AW_AGENT_TOKEN)
	// This should be true for agent assignment operations (assign-to-agent)
	UseCopilotCodingAgentToken bool
}

// buildGitHubScriptStep creates a GitHub Script step with common scaffolding
// This extracts the repeated pattern found across safe output job builders
func (c *Compiler) buildGitHubScriptStep(data *WorkflowData, config GitHubScriptStepConfig) []string {
	safeOutputsStepsLog.Printf("Building GitHub Script step: %s (useCopilotRequestsToken=%v, useCopilotCodingAgentToken=%v)", config.StepName, config.UseCopilotRequestsToken, config.UseCopilotCodingAgentToken)

	var steps []string

	// Add artifact download steps before the GitHub Script step
	steps = append(steps, buildAgentOutputDownloadSteps()...)

	// Step name and metadata
	steps = append(steps, fmt.Sprintf(" - name: %s\n", config.StepName))
	steps = append(steps, fmt.Sprintf(" id: %s\n", config.StepID))
	steps = append(steps, fmt.Sprintf(" uses: %s\n", GetActionPin("actions/github-script")))

	// Environment variables section
	steps = append(steps, " env:\n")

	// Read GH_AW_AGENT_OUTPUT from environment (set by artifact download step)
	// instead of directly from job outputs which may be masked by GitHub Actions
	steps = append(steps, " GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}\n")

	// Add custom environment variables specific to this safe output type
	steps = append(steps, config.CustomEnvVars...)

	// Add custom environment variables from safe-outputs.env
	c.addCustomSafeOutputEnvVars(&steps, data)

	// With section for github-token
	steps = append(steps, " with:\n")
	if config.UseCopilotCodingAgentToken {
		c.addSafeOutputAgentGitHubTokenForConfig(&steps, data, config.CustomToken)
	} else if config.UseCopilotRequestsToken {
		c.addSafeOutputCopilotGitHubTokenForConfig(&steps, data, config.CustomToken)
	} else {
		c.addSafeOutputGitHubTokenForConfig(&steps, data, config.CustomToken)
	}

	steps = append(steps, " script: |\n")

	// Use require() if ScriptFile is specified, otherwise inline the script
	if config.ScriptFile != "" {
		steps = append(steps, " const { setupGlobals } = require('"+SetupActionDestination+"/setup_globals.cjs');\n")
		steps = append(steps, " setupGlobals(core, github, context, exec, io);\n")
		steps = append(steps, fmt.Sprintf(" const { main } = require('"+SetupActionDestination+"/%s');\n", config.ScriptFile))
		steps = append(steps, " await main();\n")
	} else {
		// Add the formatted JavaScript script (inline)
		formattedScript := FormatJavaScriptForYAML(config.Script)
		steps = append(steps, formattedScript...)
	}

	return steps
}

// buildGitHubScriptStepWithoutDownload creates a GitHub Script step without artifact download steps
// This is useful when multiple script steps are needed in the same job and artifact downloads
// should only happen once at the beginning
func (c *Compiler) buildGitHubScriptStepWithoutDownload(data *WorkflowData, config GitHubScriptStepConfig) []string {
	safeOutputsStepsLog.Printf("Building GitHub Script step without download: %s", config.StepName)

	var steps []string

	// Step name and metadata (no artifact download steps)
	steps = append(steps, fmt.Sprintf(" - name: %s\n", config.StepName))
	steps = append(steps, fmt.Sprintf(" id: %s\n", config.StepID))
	steps = append(steps, fmt.Sprintf(" uses: %s\n", GetActionPin("actions/github-script")))

	// Environment variables section
	steps = append(steps, " env:\n")

	// Read GH_AW_AGENT_OUTPUT from environment (set by artifact download step)
	// instead of directly from job outputs which may be masked by GitHub Actions
	steps = append(steps, " GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}\n")

	// Add custom environment variables specific to this safe output type
	steps = append(steps, config.CustomEnvVars...)

	// Add custom environment variables from safe-outputs.env
	c.addCustomSafeOutputEnvVars(&steps, data)

	// With section for github-token
	steps = append(steps, " with:\n")
	if config.UseCopilotCodingAgentToken {
		c.addSafeOutputAgentGitHubTokenForConfig(&steps, data, config.CustomToken)
	} else if config.UseCopilotRequestsToken {
		c.addSafeOutputCopilotGitHubTokenForConfig(&steps, data, config.CustomToken)
	} else {
		c.addSafeOutputGitHubTokenForConfig(&steps, data, config.CustomToken)
	}

	steps = append(steps, " script: |\n")

	// Use require() if ScriptFile is specified, otherwise inline the script
	if config.ScriptFile != "" {
		steps = append(steps, " const { setupGlobals } = require('"+SetupActionDestination+"/setup_globals.cjs');\n")
		steps = append(steps, " setupGlobals(core, github, context, exec, io);\n")
		steps = append(steps, fmt.Sprintf(" const { main } = require('"+SetupActionDestination+"/%s');\n", config.ScriptFile))
		steps = append(steps, " await main();\n")
	} else {
		// Add the formatted JavaScript script (inline)
		formattedScript := FormatJavaScriptForYAML(config.Script)
		steps = append(steps, formattedScript...)
	}

	return steps
}

// buildAgentOutputDownloadSteps creates steps to download the agent output artifact
// and set the GH_AW_AGENT_OUTPUT environment variable for safe-output jobs.
// GH_AW_AGENT_OUTPUT is only set when the artifact was actually downloaded successfully.
func buildAgentOutputDownloadSteps() []string {
	return buildArtifactDownloadSteps(ArtifactDownloadConfig{
		ArtifactName:     "agent-output",                 // Use hyphenated name without extension
		ArtifactFilename: constants.AgentOutputFilename,  // Filename inside the artifact directory
		DownloadPath:     "/tmp/gh-aw/safeoutputs/",
		SetupEnvStep:     true,
		EnvVarName:       "GH_AW_AGENT_OUTPUT",
		StepName:         "Download agent output artifact",
		StepID:           "download-agent-output",
	})
}
diff --git a/pkg/workflow/safe_outputs_target_validation.go b/pkg/workflow/safe_outputs_target_validation.go
deleted file mode 100644
index 686245b339c..00000000000
--- a/pkg/workflow/safe_outputs_target_validation.go
+++ /dev/null
@@ -1,161 +0,0 @@
package workflow

import (
	"fmt"
	"strings"

	"github.com/github/gh-aw/pkg/logger"
	"github.com/github/gh-aw/pkg/stringutil"
)

var safeOutputsTargetValidationLog = logger.New("workflow:safe_outputs_target_validation")

// validateSafeOutputsTarget validates target fields in all safe-outputs configurations
// Valid target values:
//   - "" (empty/default) - uses "triggering" behavior
//   - "triggering" - targets the triggering issue/PR/discussion
//   - "*" - targets any item specified in the output
//   - A positive integer as a string (e.g., "123")
//   - A GitHub Actions expression (e.g., "${{ github.event.issue.number }}")
func validateSafeOutputsTarget(config *SafeOutputsConfig) error {
	if config == nil {
		return nil
	}

	safeOutputsTargetValidationLog.Print("Validating safe-outputs target fields")

	// List of configs to validate - each with a name for error messages
	type targetConfig struct {
		name   string
		target string
	}

	var configs []targetConfig

	// Collect all target fields from various safe-output configurations.
	// Only configs that are non-nil contribute an entry; validation failures
	// report the config's kebab-case name so users can find the bad field.
	if config.UpdateIssues != nil {
		configs = append(configs, targetConfig{"update-issue", config.UpdateIssues.Target})
	}
	if config.UpdateDiscussions != nil {
		configs = append(configs, targetConfig{"update-discussion", config.UpdateDiscussions.Target})
	}
	if config.UpdatePullRequests != nil {
		configs = append(configs, targetConfig{"update-pull-request", config.UpdatePullRequests.Target})
	}
	if config.CloseIssues != nil {
		configs = append(configs, targetConfig{"close-issue", config.CloseIssues.Target})
	}
	if config.CloseDiscussions != nil {
		configs = append(configs, targetConfig{"close-discussion", config.CloseDiscussions.Target})
	}
	if config.ClosePullRequests != nil {
		configs = append(configs, targetConfig{"close-pull-request", config.ClosePullRequests.Target})
	}
	if config.AddLabels != nil {
		configs = append(configs, targetConfig{"add-labels", config.AddLabels.Target})
	}
	if config.RemoveLabels != nil {
		configs = append(configs, targetConfig{"remove-labels", config.RemoveLabels.Target})
	}
	if config.AddReviewer != nil {
		configs = append(configs, targetConfig{"add-reviewer", config.AddReviewer.Target})
	}
	if config.AssignMilestone != nil {
		configs = append(configs, targetConfig{"assign-milestone", config.AssignMilestone.Target})
	}
	if config.AssignToAgent != nil {
		configs = append(configs, targetConfig{"assign-to-agent", config.AssignToAgent.Target})
	}
	if config.AssignToUser != nil {
		configs = append(configs, targetConfig{"assign-to-user", config.AssignToUser.Target})
	}
	if config.LinkSubIssue != nil {
		configs = append(configs, targetConfig{"link-sub-issue", config.LinkSubIssue.Target})
	}
	if config.HideComment != nil {
		configs = append(configs, targetConfig{"hide-comment", config.HideComment.Target})
	}
	if config.MarkPullRequestAsReadyForReview != nil {
		configs = append(configs, targetConfig{"mark-pull-request-as-ready-for-review", config.MarkPullRequestAsReadyForReview.Target})
	}
	if config.AddComments != nil {
		configs = append(configs, targetConfig{"add-comment", config.AddComments.Target})
	}
	if config.CreatePullRequestReviewComments != nil {
		configs = append(configs, targetConfig{"create-pull-request-review-comment", config.CreatePullRequestReviewComments.Target})
	}
	if config.SubmitPullRequestReview != nil {
		configs = append(configs, targetConfig{"submit-pull-request-review", config.SubmitPullRequestReview.Target})
	}
	if config.ReplyToPullRequestReviewComment != nil {
		configs = append(configs, targetConfig{"reply-to-pull-request-review-comment", config.ReplyToPullRequestReviewComment.Target})
	}
	if config.PushToPullRequestBranch != nil {
		configs = append(configs, targetConfig{"push-to-pull-request-branch", config.PushToPullRequestBranch.Target})
	}
	// Validate each target field
	for _, cfg := range configs {
		if err := validateTargetValue(cfg.name, cfg.target); err != nil {
			return err
		}
	}

	safeOutputsTargetValidationLog.Printf("Validated %d target fields", len(configs))
	return nil
}

// validateTargetValue validates a single target value.
// Returns nil for valid values; otherwise an error describing the accepted forms.
func validateTargetValue(configName, target string) error {
	// Empty or "triggering" are always valid
	if target == "" || target == "triggering" {
		return nil
	}

	// "*" is valid (any item)
	if target == "*" {
		return nil
	}

	// Check if it's a GitHub Actions expression
	if isGitHubExpression(target) {
		safeOutputsTargetValidationLog.Printf("Target for %s is a GitHub Actions expression", configName)
		return nil
	}

	// Check if it's a positive integer
	if stringutil.IsPositiveInteger(target) {
		safeOutputsTargetValidationLog.Printf("Target for %s is a valid number: %s", configName, target)
		return nil
	}

	// Build a helpful suggestion based on the invalid value
	suggestion := ""
	if target == "event" || strings.Contains(target, "github.event") {
		suggestion = "\n\nDid you mean to use \"${{ github.event.issue.number }}\" instead of \"" + target + "\"?"
	}

	// Invalid target value
	return fmt.Errorf(
		"invalid target value for %s: %q\n\nValid target values are:\n - \"triggering\" (default) - targets the triggering issue/PR/discussion\n - \"*\" - targets any item specified in the output\n - A positive integer (e.g., \"123\")\n - A GitHub Actions expression (e.g., \"${{ github.event.issue.number }}\")%s",
		configName,
		target,
		suggestion,
	)
}

// isGitHubExpression checks if a string is a valid GitHub Actions expression
// A valid expression must have properly balanced ${{ and }} markers
func isGitHubExpression(s string) bool {
	// Must contain both opening and closing markers
	if !strings.Contains(s, "${{") || !strings.Contains(s, "}}") {
		return false
	}

	// Basic validation: opening marker must come before closing marker
	openIndex := strings.Index(s, "${{")
	closeIndex := strings.Index(s, "}}")

	// The closing marker must come after the opening marker
	// and there must be something between them
	return openIndex >= 0 && closeIndex > openIndex+3
}
diff --git a/pkg/workflow/safe_outputs_tools_generation.go b/pkg/workflow/safe_outputs_tools_generation.go
deleted file mode 100644
index 028ad55118e..00000000000
--- a/pkg/workflow/safe_outputs_tools_generation.go
+++ /dev/null
@@ -1,572 +0,0 @@
package workflow

import (
	"encoding/json"
	"fmt"
	"maps"
	"sort"

	"github.com/github/gh-aw/pkg/stringutil"
)

// ========================================
// Safe Output Tools Generation
// ========================================

// checkAllEnabledToolsPresent verifies that every tool in enabledTools has a matching entry
// in filteredTools. This is a compiler error check: if a safe-output type is registered in
// Go code but its definition is missing from safe-output-tools.json, it will not appear in
// filteredTools and this function returns an error.
-// -// Dispatch-workflow and custom-job tools are intentionally excluded from this check because -// they are generated dynamically and are never part of the static tools JSON. -func checkAllEnabledToolsPresent(enabledTools map[string]bool, filteredTools []map[string]any) error { - presentTools := make(map[string]bool, len(filteredTools)) - for _, tool := range filteredTools { - if name, ok := tool["name"].(string); ok { - presentTools[name] = true - } - } - - var missingTools []string - for toolName := range enabledTools { - if !presentTools[toolName] { - missingTools = append(missingTools, toolName) - } - } - - if len(missingTools) == 0 { - return nil - } - - sort.Strings(missingTools) - return fmt.Errorf("compiler error: safe-output tool(s) %v are registered but missing from safe-output-tools.json; please report this issue to the developer", missingTools) -} - -// generateFilteredToolsJSON filters the ALL_TOOLS array based on enabled safe outputs -// Returns a JSON string containing only the tools that are enabled in the workflow -func generateFilteredToolsJSON(data *WorkflowData, markdownPath string) (string, error) { - if data.SafeOutputs == nil { - return "[]", nil - } - - safeOutputsConfigLog.Print("Generating filtered tools JSON for workflow") - - // Load the full tools JSON - allToolsJSON := GetSafeOutputsToolsJSON() - - // Parse the JSON to get all tools - var allTools []map[string]any - if err := json.Unmarshal([]byte(allToolsJSON), &allTools); err != nil { - safeOutputsConfigLog.Printf("Failed to parse safe outputs tools JSON: %v", err) - return "", fmt.Errorf("failed to parse safe outputs tools JSON: %w", err) - } - - // Create a set of enabled tool names - enabledTools := make(map[string]bool) - - // Check which safe outputs are enabled and add their corresponding tool names - if data.SafeOutputs.CreateIssues != nil { - enabledTools["create_issue"] = true - } - if data.SafeOutputs.CreateAgentSessions != nil { - enabledTools["create_agent_session"] = 
true - } - if data.SafeOutputs.CreateDiscussions != nil { - enabledTools["create_discussion"] = true - } - if data.SafeOutputs.UpdateDiscussions != nil { - enabledTools["update_discussion"] = true - } - if data.SafeOutputs.CloseDiscussions != nil { - enabledTools["close_discussion"] = true - } - if data.SafeOutputs.CloseIssues != nil { - enabledTools["close_issue"] = true - } - if data.SafeOutputs.ClosePullRequests != nil { - enabledTools["close_pull_request"] = true - } - if data.SafeOutputs.MarkPullRequestAsReadyForReview != nil { - enabledTools["mark_pull_request_as_ready_for_review"] = true - } - if data.SafeOutputs.AddComments != nil { - enabledTools["add_comment"] = true - } - if data.SafeOutputs.CreatePullRequests != nil { - enabledTools["create_pull_request"] = true - } - if data.SafeOutputs.CreatePullRequestReviewComments != nil { - enabledTools["create_pull_request_review_comment"] = true - } - if data.SafeOutputs.SubmitPullRequestReview != nil { - enabledTools["submit_pull_request_review"] = true - } - if data.SafeOutputs.ReplyToPullRequestReviewComment != nil { - enabledTools["reply_to_pull_request_review_comment"] = true - } - if data.SafeOutputs.ResolvePullRequestReviewThread != nil { - enabledTools["resolve_pull_request_review_thread"] = true - } - if data.SafeOutputs.CreateCodeScanningAlerts != nil { - enabledTools["create_code_scanning_alert"] = true - } - if data.SafeOutputs.AutofixCodeScanningAlert != nil { - enabledTools["autofix_code_scanning_alert"] = true - } - if data.SafeOutputs.AddLabels != nil { - enabledTools["add_labels"] = true - } - if data.SafeOutputs.RemoveLabels != nil { - enabledTools["remove_labels"] = true - } - if data.SafeOutputs.AddReviewer != nil { - enabledTools["add_reviewer"] = true - } - if data.SafeOutputs.AssignMilestone != nil { - enabledTools["assign_milestone"] = true - } - if data.SafeOutputs.AssignToAgent != nil { - enabledTools["assign_to_agent"] = true - } - if data.SafeOutputs.AssignToUser != nil { - 
enabledTools["assign_to_user"] = true - } - if data.SafeOutputs.UnassignFromUser != nil { - enabledTools["unassign_from_user"] = true - } - if data.SafeOutputs.UpdateIssues != nil { - enabledTools["update_issue"] = true - } - if data.SafeOutputs.UpdatePullRequests != nil { - enabledTools["update_pull_request"] = true - } - if data.SafeOutputs.PushToPullRequestBranch != nil { - enabledTools["push_to_pull_request_branch"] = true - } - if data.SafeOutputs.UploadAssets != nil { - enabledTools["upload_asset"] = true - } - if data.SafeOutputs.MissingTool != nil { - enabledTools["missing_tool"] = true - } - if data.SafeOutputs.MissingData != nil { - enabledTools["missing_data"] = true - } - if data.SafeOutputs.UpdateRelease != nil { - enabledTools["update_release"] = true - } - if data.SafeOutputs.NoOp != nil { - enabledTools["noop"] = true - } - if data.SafeOutputs.LinkSubIssue != nil { - enabledTools["link_sub_issue"] = true - } - if data.SafeOutputs.HideComment != nil { - enabledTools["hide_comment"] = true - } - if data.SafeOutputs.SetIssueType != nil { - enabledTools["set_issue_type"] = true - } - if data.SafeOutputs.UpdateProjects != nil { - enabledTools["update_project"] = true - } - if data.SafeOutputs.CreateProjectStatusUpdates != nil { - enabledTools["create_project_status_update"] = true - } - if data.SafeOutputs.CreateProjects != nil { - enabledTools["create_project"] = true - } - // Note: dispatch_workflow tools are generated dynamically below, not from the static tools list - - // Filter tools to only include enabled ones and enhance descriptions - var filteredTools []map[string]any - for _, tool := range allTools { - toolName, ok := tool["name"].(string) - if !ok { - continue - } - if enabledTools[toolName] { - // Create a copy of the tool to avoid modifying the original - enhancedTool := make(map[string]any) - maps.Copy(enhancedTool, tool) - - // Enhance the description with configuration details - if description, ok := 
enhancedTool["description"].(string); ok { - enhancedDescription := enhanceToolDescription(toolName, description, data.SafeOutputs) - enhancedTool["description"] = enhancedDescription - } - - // Add repo parameter to inputSchema if allowed-repos has entries - addRepoParameterIfNeeded(enhancedTool, toolName, data.SafeOutputs) - - filteredTools = append(filteredTools, enhancedTool) - } - } - - // Verify all registered safe-outputs are present in the static tools JSON. - // Dispatch-workflow and custom-job tools are excluded because they are generated dynamically. - if err := checkAllEnabledToolsPresent(enabledTools, filteredTools); err != nil { - return "", err - } - - // Add custom job tools from SafeOutputs.Jobs - if len(data.SafeOutputs.Jobs) > 0 { - safeOutputsConfigLog.Printf("Adding %d custom job tools", len(data.SafeOutputs.Jobs)) - - // Sort job names for deterministic output - // This ensures compiled workflows have consistent tool ordering - jobNames := make([]string, 0, len(data.SafeOutputs.Jobs)) - for jobName := range data.SafeOutputs.Jobs { - jobNames = append(jobNames, jobName) - } - sort.Strings(jobNames) - - // Iterate over jobs in sorted order - for _, jobName := range jobNames { - jobConfig := data.SafeOutputs.Jobs[jobName] - // Normalize job name to use underscores for consistency - normalizedJobName := stringutil.NormalizeSafeOutputIdentifier(jobName) - - // Create the tool definition for this custom job - customTool := generateCustomJobToolDefinition(normalizedJobName, jobConfig) - filteredTools = append(filteredTools, customTool) - } - } - - if safeOutputsConfigLog.Enabled() { - safeOutputsConfigLog.Printf("Filtered %d tools from %d total tools (including %d custom jobs)", len(filteredTools), len(allTools), len(data.SafeOutputs.Jobs)) - } - - // Add dynamic dispatch_workflow tools - if data.SafeOutputs.DispatchWorkflow != nil && len(data.SafeOutputs.DispatchWorkflow.Workflows) > 0 { - safeOutputsConfigLog.Printf("Adding %d dispatch_workflow 
tools", len(data.SafeOutputs.DispatchWorkflow.Workflows)) - - // Initialize WorkflowFiles map if not already initialized - if data.SafeOutputs.DispatchWorkflow.WorkflowFiles == nil { - data.SafeOutputs.DispatchWorkflow.WorkflowFiles = make(map[string]string) - } - - for _, workflowName := range data.SafeOutputs.DispatchWorkflow.Workflows { - // Find the workflow file in multiple locations - fileResult, err := findWorkflowFile(workflowName, markdownPath) - if err != nil { - safeOutputsConfigLog.Printf("Warning: error finding workflow %s: %v", workflowName, err) - // Continue with empty inputs - tool := generateDispatchWorkflowTool(workflowName, make(map[string]any)) - filteredTools = append(filteredTools, tool) - continue - } - - // Determine which file to use - priority: .lock.yml > .yml - var workflowPath string - var extension string - if fileResult.lockExists { - workflowPath = fileResult.lockPath - extension = ".lock.yml" - } else if fileResult.ymlExists { - workflowPath = fileResult.ymlPath - extension = ".yml" - } else { - safeOutputsConfigLog.Printf("Warning: workflow file not found for %s (only .md exists, needs compilation)", workflowName) - // Continue with empty inputs - tool := generateDispatchWorkflowTool(workflowName, make(map[string]any)) - filteredTools = append(filteredTools, tool) - continue - } - - // Store the file extension for runtime use - data.SafeOutputs.DispatchWorkflow.WorkflowFiles[workflowName] = extension - - // Extract workflow_dispatch inputs - workflowInputs, err := extractWorkflowDispatchInputs(workflowPath) - if err != nil { - safeOutputsConfigLog.Printf("Warning: failed to extract inputs for workflow %s from %s: %v", workflowName, workflowPath, err) - // Continue with empty inputs - workflowInputs = make(map[string]any) - } - - // Generate tool schema - tool := generateDispatchWorkflowTool(workflowName, workflowInputs) - filteredTools = append(filteredTools, tool) - } - } - - // Marshal the filtered tools back to JSON with 
indentation for better readability - // and to reduce merge conflicts in generated lockfiles - filteredJSON, err := json.MarshalIndent(filteredTools, "", " ") - if err != nil { - safeOutputsConfigLog.Printf("Failed to marshal filtered tools: %v", err) - return "", fmt.Errorf("failed to marshal filtered tools: %w", err) - } - - safeOutputsConfigLog.Printf("Successfully generated filtered tools JSON with %d tools", len(filteredTools)) - return string(filteredJSON), nil -} - -// addRepoParameterIfNeeded adds a "repo" parameter to the tool's inputSchema -// if the safe output configuration has allowed-repos entries -func addRepoParameterIfNeeded(tool map[string]any, toolName string, safeOutputs *SafeOutputsConfig) { - if safeOutputs == nil { - return - } - - // Determine if this tool should have a repo parameter based on allowed-repos configuration - var hasAllowedRepos bool - var targetRepoSlug string - - switch toolName { - case "create_issue": - if config := safeOutputs.CreateIssues; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "create_discussion": - if config := safeOutputs.CreateDiscussions; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "add_comment": - if config := safeOutputs.AddComments; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "create_pull_request": - if config := safeOutputs.CreatePullRequests; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "create_pull_request_review_comment": - if config := safeOutputs.CreatePullRequestReviewComments; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "reply_to_pull_request_review_comment": - if config := safeOutputs.ReplyToPullRequestReviewComment; config != nil { - 
hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "create_agent_session": - if config := safeOutputs.CreateAgentSessions; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "close_issue", "update_issue": - if config := safeOutputs.CloseIssues; config != nil && toolName == "close_issue" { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } else if config := safeOutputs.UpdateIssues; config != nil && toolName == "update_issue" { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "close_discussion", "update_discussion": - if config := safeOutputs.CloseDiscussions; config != nil && toolName == "close_discussion" { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } else if config := safeOutputs.UpdateDiscussions; config != nil && toolName == "update_discussion" { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "close_pull_request", "update_pull_request": - if config := safeOutputs.ClosePullRequests; config != nil && toolName == "close_pull_request" { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } else if config := safeOutputs.UpdatePullRequests; config != nil && toolName == "update_pull_request" { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "add_labels", "remove_labels", "hide_comment", "link_sub_issue", "mark_pull_request_as_ready_for_review", - "add_reviewer", "assign_milestone", "assign_to_agent", "assign_to_user", "unassign_from_user", - "set_issue_type": - // These use SafeOutputTargetConfig - check the appropriate config - switch toolName { - case "add_labels": - if config := safeOutputs.AddLabels; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - 
targetRepoSlug = config.TargetRepoSlug - } - case "remove_labels": - if config := safeOutputs.RemoveLabels; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "hide_comment": - if config := safeOutputs.HideComment; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "link_sub_issue": - if config := safeOutputs.LinkSubIssue; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "mark_pull_request_as_ready_for_review": - if config := safeOutputs.MarkPullRequestAsReadyForReview; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "add_reviewer": - if config := safeOutputs.AddReviewer; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "assign_milestone": - if config := safeOutputs.AssignMilestone; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "assign_to_agent": - if config := safeOutputs.AssignToAgent; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "assign_to_user": - if config := safeOutputs.AssignToUser; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "unassign_from_user": - if config := safeOutputs.UnassignFromUser; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - case "set_issue_type": - if config := safeOutputs.SetIssueType; config != nil { - hasAllowedRepos = len(config.AllowedRepos) > 0 - targetRepoSlug = config.TargetRepoSlug - } - } - } - - // Only add repo parameter if allowed-repos has entries or target-repo is wildcard ("*") - if !hasAllowedRepos && targetRepoSlug != 
"*" { - return - } - - // Get the inputSchema - inputSchema, ok := tool["inputSchema"].(map[string]any) - if !ok { - return - } - - properties, ok := inputSchema["properties"].(map[string]any) - if !ok { - return - } - - // Build repo parameter description - var repoDescription string - if targetRepoSlug == "*" { - repoDescription = "Target repository for this operation in 'owner/repo' format. Any repository can be targeted." - } else if targetRepoSlug != "" { - repoDescription = fmt.Sprintf("Target repository for this operation in 'owner/repo' format. Default is %q. Must be the target-repo or in the allowed-repos list.", targetRepoSlug) - } else { - repoDescription = "Target repository for this operation in 'owner/repo' format. Must be the target-repo or in the allowed-repos list." - } - - // Add repo parameter to properties - properties["repo"] = map[string]any{ - "type": "string", - "description": repoDescription, - } - - safeOutputsConfigLog.Printf("Added repo parameter to tool: %s (has allowed-repos or wildcard target-repo)", toolName) -} - -// generateDispatchWorkflowTool generates an MCP tool definition for a specific workflow -// The tool will be named after the workflow and accept the workflow's defined inputs -func generateDispatchWorkflowTool(workflowName string, workflowInputs map[string]any) map[string]any { - // Normalize workflow name to use underscores for tool name - toolName := stringutil.NormalizeSafeOutputIdentifier(workflowName) - - // Build the description - description := fmt.Sprintf("Dispatch the '%s' workflow with workflow_dispatch trigger. 
This workflow must support workflow_dispatch and be in .github/workflows/ directory in the same repository.", workflowName) - - // Build input schema properties - properties := make(map[string]any) - required := []string{} // No required fields by default - - // Convert GitHub Actions workflow_dispatch inputs to MCP tool schema - for inputName, inputDef := range workflowInputs { - inputDefMap, ok := inputDef.(map[string]any) - if !ok { - continue - } - - // Extract input properties - inputType := "string" // Default type - inputDescription := fmt.Sprintf("Input parameter '%s' for workflow %s", inputName, workflowName) - inputRequired := false - - if desc, ok := inputDefMap["description"].(string); ok && desc != "" { - inputDescription = desc - } - - if req, ok := inputDefMap["required"].(bool); ok { - inputRequired = req - } - - // GitHub Actions workflow_dispatch supports: string, number, boolean, choice, environment - // Map these to JSON schema types - if typeStr, ok := inputDefMap["type"].(string); ok { - switch typeStr { - case "number": - inputType = "number" - case "boolean": - inputType = "boolean" - case "choice": - inputType = "string" - // Add enum if options are provided - if options, ok := inputDefMap["options"].([]any); ok && len(options) > 0 { - properties[inputName] = map[string]any{ - "type": inputType, - "description": inputDescription, - "enum": options, - } - if inputRequired { - required = append(required, inputName) - } - continue - } - case "environment": - inputType = "string" - } - } - - properties[inputName] = map[string]any{ - "type": inputType, - "description": inputDescription, - } - - // Add default value if provided - if defaultVal, ok := inputDefMap["default"]; ok { - properties[inputName].(map[string]any)["default"] = defaultVal - } - - if inputRequired { - required = append(required, inputName) - } - } - - // Add internal workflow_name parameter (hidden from description but used internally) - // This will be injected by the safe 
output handler - - // Build the complete tool definition - tool := map[string]any{ - "name": toolName, - "description": description, - "_workflow_name": workflowName, // Internal metadata for handler routing - "inputSchema": map[string]any{ - "type": "object", - "properties": properties, - "additionalProperties": false, - }, - } - - if len(required) > 0 { - sort.Strings(required) - tool["inputSchema"].(map[string]any)["required"] = required - } - - return tool -} diff --git a/pkg/workflow/safe_outputs_domains_validation.go b/pkg/workflow/safe_outputs_validation.go similarity index 58% rename from pkg/workflow/safe_outputs_domains_validation.go rename to pkg/workflow/safe_outputs_validation.go index 5f186c2b9d3..338f09d1aa3 100644 --- a/pkg/workflow/safe_outputs_domains_validation.go +++ b/pkg/workflow/safe_outputs_validation.go @@ -5,10 +5,10 @@ import ( "regexp" "strings" - "github.com/github/gh-aw/pkg/logger" + "github.com/github/gh-aw/pkg/stringutil" ) -var safeOutputsDomainsValidationLog = logger.New("workflow:safe_outputs_domains_validation") +var safeOutputsDomainsValidationLog = newValidationLogger("safe_outputs_domains") // validateNetworkAllowedDomains validates the allowed domains in network configuration func (c *Compiler) validateNetworkAllowedDomains(network *NetworkPermissions) error { @@ -236,3 +236,155 @@ func validateDomainPattern(domain string) error { return nil } + +var safeOutputsTargetValidationLog = newValidationLogger("safe_outputs_target") + +// validateSafeOutputsTarget validates target fields in all safe-outputs configurations +// Valid target values: +// - "" (empty/default) - uses "triggering" behavior +// - "triggering" - targets the triggering issue/PR/discussion +// - "*" - targets any item specified in the output +// - A positive integer as a string (e.g., "123") +// - A GitHub Actions expression (e.g., "${{ github.event.issue.number }}") +func validateSafeOutputsTarget(config *SafeOutputsConfig) error { + if config == nil { + 
return nil + } + + safeOutputsTargetValidationLog.Print("Validating safe-outputs target fields") + + // List of configs to validate - each with a name for error messages + type targetConfig struct { + name string + target string + } + + var configs []targetConfig + + // Collect all target fields from various safe-output configurations + if config.UpdateIssues != nil { + configs = append(configs, targetConfig{"update-issue", config.UpdateIssues.Target}) + } + if config.UpdateDiscussions != nil { + configs = append(configs, targetConfig{"update-discussion", config.UpdateDiscussions.Target}) + } + if config.UpdatePullRequests != nil { + configs = append(configs, targetConfig{"update-pull-request", config.UpdatePullRequests.Target}) + } + if config.CloseIssues != nil { + configs = append(configs, targetConfig{"close-issue", config.CloseIssues.Target}) + } + if config.CloseDiscussions != nil { + configs = append(configs, targetConfig{"close-discussion", config.CloseDiscussions.Target}) + } + if config.ClosePullRequests != nil { + configs = append(configs, targetConfig{"close-pull-request", config.ClosePullRequests.Target}) + } + if config.AddLabels != nil { + configs = append(configs, targetConfig{"add-labels", config.AddLabels.Target}) + } + if config.RemoveLabels != nil { + configs = append(configs, targetConfig{"remove-labels", config.RemoveLabels.Target}) + } + if config.AddReviewer != nil { + configs = append(configs, targetConfig{"add-reviewer", config.AddReviewer.Target}) + } + if config.AssignMilestone != nil { + configs = append(configs, targetConfig{"assign-milestone", config.AssignMilestone.Target}) + } + if config.AssignToAgent != nil { + configs = append(configs, targetConfig{"assign-to-agent", config.AssignToAgent.Target}) + } + if config.AssignToUser != nil { + configs = append(configs, targetConfig{"assign-to-user", config.AssignToUser.Target}) + } + if config.LinkSubIssue != nil { + configs = append(configs, targetConfig{"link-sub-issue", 
config.LinkSubIssue.Target}) + } + if config.HideComment != nil { + configs = append(configs, targetConfig{"hide-comment", config.HideComment.Target}) + } + if config.MarkPullRequestAsReadyForReview != nil { + configs = append(configs, targetConfig{"mark-pull-request-as-ready-for-review", config.MarkPullRequestAsReadyForReview.Target}) + } + if config.AddComments != nil { + configs = append(configs, targetConfig{"add-comment", config.AddComments.Target}) + } + if config.CreatePullRequestReviewComments != nil { + configs = append(configs, targetConfig{"create-pull-request-review-comment", config.CreatePullRequestReviewComments.Target}) + } + if config.SubmitPullRequestReview != nil { + configs = append(configs, targetConfig{"submit-pull-request-review", config.SubmitPullRequestReview.Target}) + } + if config.ReplyToPullRequestReviewComment != nil { + configs = append(configs, targetConfig{"reply-to-pull-request-review-comment", config.ReplyToPullRequestReviewComment.Target}) + } + if config.PushToPullRequestBranch != nil { + configs = append(configs, targetConfig{"push-to-pull-request-branch", config.PushToPullRequestBranch.Target}) + } + // Validate each target field + for _, cfg := range configs { + if err := validateTargetValue(cfg.name, cfg.target); err != nil { + return err + } + } + + safeOutputsTargetValidationLog.Printf("Validated %d target fields", len(configs)) + return nil +} + +// validateTargetValue validates a single target value +func validateTargetValue(configName, target string) error { + // Empty or "triggering" are always valid + if target == "" || target == "triggering" { + return nil + } + + // "*" is valid (any item) + if target == "*" { + return nil + } + + // Check if it's a GitHub Actions expression + if isGitHubExpression(target) { + safeOutputsTargetValidationLog.Printf("Target for %s is a GitHub Actions expression", configName) + return nil + } + + // Check if it's a positive integer + if stringutil.IsPositiveInteger(target) { + 
safeOutputsTargetValidationLog.Printf("Target for %s is a valid number: %s", configName, target) + return nil + } + + // Build a helpful suggestion based on the invalid value + suggestion := "" + if target == "event" || strings.Contains(target, "github.event") { + suggestion = "\n\nDid you mean to use \"${{ github.event.issue.number }}\" instead of \"" + target + "\"?" + } + + // Invalid target value + return fmt.Errorf( + "invalid target value for %s: %q\n\nValid target values are:\n - \"triggering\" (default) - targets the triggering issue/PR/discussion\n - \"*\" - targets any item specified in the output\n - A positive integer (e.g., \"123\")\n - A GitHub Actions expression (e.g., \"${{ github.event.issue.number }}\")%s", + configName, + target, + suggestion, + ) +} + +// isGitHubExpression checks if a string is a valid GitHub Actions expression +// A valid expression must have properly balanced ${{ and }} markers +func isGitHubExpression(s string) bool { + // Must contain both opening and closing markers + if !strings.Contains(s, "${{") || !strings.Contains(s, "}}") { + return false + } + + // Basic validation: opening marker must come before closing marker + openIndex := strings.Index(s, "${{") + closeIndex := strings.Index(s, "}}") + + // The closing marker must come after the opening marker + // and there must be something between them + return openIndex >= 0 && closeIndex > openIndex+3 +} diff --git a/pkg/workflow/sandbox_validation.go b/pkg/workflow/sandbox_validation.go index 9ae96c95c5f..0915265fcc7 100644 --- a/pkg/workflow/sandbox_validation.go +++ b/pkg/workflow/sandbox_validation.go @@ -14,10 +14,9 @@ import ( "fmt" "github.com/github/gh-aw/pkg/constants" - "github.com/github/gh-aw/pkg/logger" ) -var sandboxValidationLog = logger.New("workflow:sandbox_validation") +var sandboxValidationLog = newValidationLogger("sandbox") // validateMountsSyntax validates that mount strings follow the correct syntax // Expected format: "source:destination:mode" where 
mode is either "ro" or "rw" diff --git a/pkg/workflow/schema_validation.go b/pkg/workflow/schema_validation.go index 18468c0d9b8..3e73178a605 100644 --- a/pkg/workflow/schema_validation.go +++ b/pkg/workflow/schema_validation.go @@ -45,12 +45,11 @@ import ( "strings" "sync" - "github.com/github/gh-aw/pkg/logger" "github.com/goccy/go-yaml" "github.com/santhosh-tekuri/jsonschema/v6" ) -var schemaValidationLog = logger.New("workflow:schema_validation") +var schemaValidationLog = newValidationLogger("schema") // Cached compiled schema to avoid recompiling on every validation var ( diff --git a/pkg/workflow/secrets_validation.go b/pkg/workflow/secrets_validation.go index 1f42e0cefb4..8da858dde1c 100644 --- a/pkg/workflow/secrets_validation.go +++ b/pkg/workflow/secrets_validation.go @@ -3,11 +3,9 @@ package workflow import ( "errors" "regexp" - - "github.com/github/gh-aw/pkg/logger" ) -var secretsValidationLog = logger.New("workflow:secrets_validation") +var secretsValidationLog = newValidationLogger("secrets") // secretsExpressionPattern matches GitHub Actions secrets expressions for jobs.secrets validation. // Pattern matches: ${{ secrets.NAME }} or ${{ secrets.NAME1 || secrets.NAME2 }} diff --git a/pkg/workflow/shared_workflow_error.go b/pkg/workflow/shared_workflow_error.go deleted file mode 100644 index 859e63d9203..00000000000 --- a/pkg/workflow/shared_workflow_error.go +++ /dev/null @@ -1,48 +0,0 @@ -package workflow - -import ( - "fmt" - "path/filepath" - - "github.com/github/gh-aw/pkg/logger" -) - -var sharedWorkflowLog = logger.New("workflow:shared_workflow_error") - -// SharedWorkflowError represents a workflow that is missing the 'on' field -// and should be treated as a shared/importable workflow component rather than -// a standalone workflow. This is not an actual error - it's a signal that -// compilation should be skipped with an informational message. 
-type SharedWorkflowError struct { - Path string // File path of the shared workflow -} - -// Error implements the error interface -// Returns a formatted info message explaining that this is a shared workflow -func (e *SharedWorkflowError) Error() string { - sharedWorkflowLog.Printf("Formatting info message for shared workflow: %s", e.Path) - - filename := filepath.Base(e.Path) - - return fmt.Sprintf( - "ℹ️ Shared agentic workflow detected: %s\n\n"+ - "This workflow is missing the 'on' field and will be treated as a shared workflow component.\n"+ - "Shared workflows are reusable components meant to be imported by other workflows.\n\n"+ - "To use this shared workflow:\n"+ - " 1. Import it in another workflow's frontmatter:\n"+ - " ---\n"+ - " on: issues\n"+ - " imports:\n"+ - " - %s\n"+ - " ---\n\n"+ - " 2. Compile the workflow that imports it\n\n"+ - "Skipping compilation.", - filename, - e.Path, - ) -} - -// IsSharedWorkflow returns true, indicating this is a shared workflow -func (e *SharedWorkflowError) IsSharedWorkflow() bool { - return true -} diff --git a/pkg/workflow/step_order_validation.go b/pkg/workflow/step_order_validation.go index 2332ff7f592..2a7fd1f94cb 100644 --- a/pkg/workflow/step_order_validation.go +++ b/pkg/workflow/step_order_validation.go @@ -6,11 +6,9 @@ import ( "path/filepath" "slices" "strings" - - "github.com/github/gh-aw/pkg/logger" ) -var stepOrderLog = logger.New("workflow:step_order_validation") +var stepOrderLog = newValidationLogger("step_order") // StepType represents the type of step being generated type StepType int diff --git a/pkg/workflow/strict_mode_validation.go b/pkg/workflow/strict_mode_validation.go index e7a6b077c43..cff339b5d41 100644 --- a/pkg/workflow/strict_mode_validation.go +++ b/pkg/workflow/strict_mode_validation.go @@ -47,11 +47,10 @@ import ( "strings" "github.com/github/gh-aw/pkg/console" - "github.com/github/gh-aw/pkg/logger" "github.com/github/gh-aw/pkg/parser" ) -var strictModeValidationLog = 
logger.New("workflow:strict_mode_validation") +var strictModeValidationLog = newValidationLogger("strict_mode") // validateStrictPermissions refuses write permissions in strict mode func (c *Compiler) validateStrictPermissions(frontmatter map[string]any) error { diff --git a/pkg/workflow/template_injection_validation.go b/pkg/workflow/template_injection_validation.go index fd0e0aff16d..73758eb297c 100644 --- a/pkg/workflow/template_injection_validation.go +++ b/pkg/workflow/template_injection_validation.go @@ -52,11 +52,10 @@ import ( "regexp" "strings" - "github.com/github/gh-aw/pkg/logger" "github.com/goccy/go-yaml" ) -var templateInjectionValidationLog = logger.New("workflow:template_injection_validation") +var templateInjectionValidationLog = newValidationLogger("template_injection") // Pre-compiled regex patterns for template injection detection var ( diff --git a/pkg/workflow/template_validation.go b/pkg/workflow/template_validation.go index 59583d1a243..478099c1e96 100644 --- a/pkg/workflow/template_validation.go +++ b/pkg/workflow/template_validation.go @@ -35,11 +35,10 @@ import ( "regexp" "strings" - "github.com/github/gh-aw/pkg/logger" "github.com/github/gh-aw/pkg/parser" ) -var templateValidationLog = logger.New("workflow:template_validation") +var templateValidationLog = newValidationLogger("template") // Pre-compiled regexes for performance (avoid recompilation in hot paths) var ( diff --git a/pkg/workflow/tools_validation.go b/pkg/workflow/tools_validation.go index 24c2bb7dd17..b5b8775911a 100644 --- a/pkg/workflow/tools_validation.go +++ b/pkg/workflow/tools_validation.go @@ -3,11 +3,9 @@ package workflow import ( "errors" "strings" - - "github.com/github/gh-aw/pkg/logger" ) -var toolsValidationLog = logger.New("workflow:tools_validation") +var toolsValidationLog = newValidationLogger("tools") // validateBashToolConfig validates that bash tool configuration is explicit (not nil/anonymous) func validateBashToolConfig(tools *Tools, workflowName 
string) error { diff --git a/pkg/workflow/validation_helpers.go b/pkg/workflow/validation_helpers.go index 12f957d184e..b75a59c3b2b 100644 --- a/pkg/workflow/validation_helpers.go +++ b/pkg/workflow/validation_helpers.go @@ -6,6 +6,7 @@ // // # Available Helper Functions // +// - newValidationLogger() - Creates a standardized logger for a validation domain // - validateIntRange() - Validates that an integer value is within a specified range // - validateMountStringFormat() - Parses and validates a "source:dest:mode" mount string // @@ -30,6 +31,18 @@ import ( "github.com/github/gh-aw/pkg/logger" ) +// newValidationLogger creates a standardized logger for a validation domain. +// It follows the naming convention "workflow:_validation" used across +// all *_validation.go files. +// +// Example: +// +// var engineValidationLog = newValidationLogger("engine") +// // produces logger named "workflow:engine_validation" +func newValidationLogger(domain string) *logger.Logger { + return logger.New("workflow:" + domain + "_validation") +} + // validateIntRange validates that a value is within the specified inclusive range [min, max]. // It returns an error if the value is outside the range, with a descriptive message // including the field name and the actual value. diff --git a/pkg/workflow/workflow_errors.go b/pkg/workflow/workflow_errors.go new file mode 100644 index 00000000000..2e63d1ffc29 --- /dev/null +++ b/pkg/workflow/workflow_errors.go @@ -0,0 +1,307 @@ +// This file defines all error types and aggregation utilities for the workflow package. 
+// +// # Error Types +// +// - WorkflowValidationError - validation errors for workflow configuration fields +// - OperationError - errors that occurred during an operation (e.g., fetching a resource) +// - ConfigurationError - errors in safe-outputs configuration +// - SharedWorkflowError - signal that a workflow is a shared/importable component +// +// # Error Aggregation +// +// ErrorCollector collects multiple validation errors, supporting both fail-fast and +// collect-all modes. Use NewErrorCollector(failFast) to create one, then Add() errors +// and call Error() or FormattedError() to retrieve the aggregated result. + +package workflow + +import ( + "errors" + "fmt" + "path/filepath" + "strings" + "time" + + "github.com/github/gh-aw/pkg/logger" +) + +var errorHelpersLog = logger.New("workflow:error_helpers") + +// WorkflowValidationError represents an error that occurred during input validation +type WorkflowValidationError struct { + Field string + Value string + Reason string + Suggestion string + Timestamp time.Time +} + +// Error implements the error interface +func (e *WorkflowValidationError) Error() string { + var b strings.Builder + + fmt.Fprintf(&b, "[%s] Validation failed for field '%s'", + e.Timestamp.Format(time.RFC3339), e.Field) + + if e.Value != "" { + // Truncate long values + truncatedValue := e.Value + if len(truncatedValue) > 100 { + truncatedValue = truncatedValue[:97] + "..." 
+ } + fmt.Fprintf(&b, "\n\nValue: %s", truncatedValue) + } + + fmt.Fprintf(&b, "\nReason: %s", e.Reason) + + if e.Suggestion != "" { + fmt.Fprintf(&b, "\nSuggestion: %s", e.Suggestion) + } + + return b.String() +} + +// NewValidationError creates a new validation error with context +func NewValidationError(field, value, reason, suggestion string) *WorkflowValidationError { + errorHelpersLog.Printf("Creating validation error: field=%s, reason=%s", field, reason) + return &WorkflowValidationError{ + Field: field, + Value: value, + Reason: reason, + Suggestion: suggestion, + Timestamp: time.Now(), + } +} + +// OperationError represents an error that occurred during an operation +type OperationError struct { + Operation string + EntityType string + EntityID string + Cause error + Suggestion string + Timestamp time.Time +} + +// Error implements the error interface +func (e *OperationError) Error() string { + var b strings.Builder + + fmt.Fprintf(&b, "[%s] Failed to %s %s", + e.Timestamp.Format(time.RFC3339), e.Operation, e.EntityType) + + if e.EntityID != "" { + fmt.Fprintf(&b, " #%s", e.EntityID) + } + + if e.Cause != nil { + fmt.Fprintf(&b, "\n\nUnderlying error: %v", e.Cause) + } + + if e.Suggestion != "" { + fmt.Fprintf(&b, "\nSuggestion: %s", e.Suggestion) + } else { + // Provide default suggestion + fmt.Fprintf(&b, "\nSuggestion: Check that the %s exists and you have the necessary permissions.", e.EntityType) + } + + return b.String() +} + +// Unwrap returns the underlying error +func (e *OperationError) Unwrap() error { + return e.Cause +} + +// NewOperationError creates a new operation error with context +func NewOperationError(operation, entityType, entityID string, cause error, suggestion string) *OperationError { + if errorHelpersLog.Enabled() { + errorHelpersLog.Printf("Creating operation error: operation=%s, entityType=%s, entityID=%s, cause=%v", + operation, entityType, entityID, cause) + } + return &OperationError{ + Operation: operation, + EntityType: 
entityType, + EntityID: entityID, + Cause: cause, + Suggestion: suggestion, + Timestamp: time.Now(), + } +} + +// ConfigurationError represents an error in safe-outputs configuration +type ConfigurationError struct { + ConfigKey string + Value string + Reason string + Suggestion string + Timestamp time.Time +} + +// Error implements the error interface +func (e *ConfigurationError) Error() string { + var b strings.Builder + + fmt.Fprintf(&b, "[%s] Configuration error in '%s'", + e.Timestamp.Format(time.RFC3339), e.ConfigKey) + + if e.Value != "" { + // Truncate long values + truncatedValue := e.Value + if len(truncatedValue) > 100 { + truncatedValue = truncatedValue[:97] + "..." + } + fmt.Fprintf(&b, "\n\nValue: %s", truncatedValue) + } + + fmt.Fprintf(&b, "\nReason: %s", e.Reason) + + if e.Suggestion != "" { + fmt.Fprintf(&b, "\nSuggestion: %s", e.Suggestion) + } else { + // Provide default suggestion + fmt.Fprintf(&b, "\nSuggestion: Check the safe-outputs configuration in your workflow frontmatter and ensure '%s' is correctly specified.", e.ConfigKey) + } + + return b.String() +} + +// NewConfigurationError creates a new configuration error with context +func NewConfigurationError(configKey, value, reason, suggestion string) *ConfigurationError { + errorHelpersLog.Printf("Creating configuration error: configKey=%s, reason=%s", configKey, reason) + return &ConfigurationError{ + ConfigKey: configKey, + Value: value, + Reason: reason, + Suggestion: suggestion, + Timestamp: time.Now(), + } +} + +var errorAggregationLog = logger.New("workflow:error_aggregation") + +// ErrorCollector collects multiple validation errors +type ErrorCollector struct { + errors []error + failFast bool +} + +// NewErrorCollector creates a new error collector +// If failFast is true, the collector will stop at the first error +func NewErrorCollector(failFast bool) *ErrorCollector { + errorAggregationLog.Printf("Creating error collector: fail_fast=%v", failFast) + return &ErrorCollector{ + 
errors: make([]error, 0), + failFast: failFast, + } +} + +// Add adds an error to the collector +// If failFast is enabled, returns the error immediately +// Otherwise, adds it to the collection and returns nil +func (c *ErrorCollector) Add(err error) error { + if err == nil { + return nil + } + + errorAggregationLog.Printf("Adding error to collector: %v", err) + + if c.failFast { + errorAggregationLog.Print("Fail-fast enabled, returning error immediately") + return err + } + + c.errors = append(c.errors, err) + return nil +} + +// HasErrors returns true if any errors have been collected +func (c *ErrorCollector) HasErrors() bool { + return len(c.errors) > 0 +} + +// Count returns the number of errors collected +func (c *ErrorCollector) Count() int { + return len(c.errors) +} + +// Error returns the aggregated error using errors.Join +// Returns nil if no errors were collected +func (c *ErrorCollector) Error() error { + if len(c.errors) == 0 { + return nil + } + + errorAggregationLog.Printf("Aggregating %d errors", len(c.errors)) + + if len(c.errors) == 1 { + return c.errors[0] + } + + return errors.Join(c.errors...) 
+} + +// FormattedError returns the aggregated error with a formatted header showing the count +// Returns nil if no errors were collected +// This method is preferred over Error() + FormatAggregatedError for better accuracy +func (c *ErrorCollector) FormattedError(category string) error { + if len(c.errors) == 0 { + return nil + } + + errorAggregationLog.Printf("Formatting %d errors for category: %s", len(c.errors), category) + + if len(c.errors) == 1 { + return c.errors[0] + } + + // Build formatted error with count header + var sb strings.Builder + fmt.Fprintf(&sb, "Found %d %s errors:", len(c.errors), category) + for _, err := range c.errors { + sb.WriteString("\n • ") + sb.WriteString(err.Error()) + } + + return fmt.Errorf("%s", sb.String()) +} + +var sharedWorkflowLog = logger.New("workflow:shared_workflow_error") + +// SharedWorkflowError represents a workflow that is missing the 'on' field +// and should be treated as a shared/importable workflow component rather than +// a standalone workflow. This is not an actual error - it's a signal that +// compilation should be skipped with an informational message. +type SharedWorkflowError struct { + Path string // File path of the shared workflow +} + +// Error implements the error interface +// Returns a formatted info message explaining that this is a shared workflow +func (e *SharedWorkflowError) Error() string { + sharedWorkflowLog.Printf("Formatting info message for shared workflow: %s", e.Path) + + filename := filepath.Base(e.Path) + + return fmt.Sprintf( + "ℹ️ Shared agentic workflow detected: %s\n\n"+ + "This workflow is missing the 'on' field and will be treated as a shared workflow component.\n"+ + "Shared workflows are reusable components meant to be imported by other workflows.\n\n"+ + "To use this shared workflow:\n"+ + " 1. Import it in another workflow's frontmatter:\n"+ + " ---\n"+ + " on: issues\n"+ + " imports:\n"+ + " - %s\n"+ + " ---\n\n"+ + " 2. 
Compile the workflow that imports it\n\n"+ + "Skipping compilation.", + filename, + e.Path, + ) +} + +// IsSharedWorkflow returns true, indicating this is a shared workflow +func (e *SharedWorkflowError) IsSharedWorkflow() bool { + return true +}