Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions pkg/cli/add_command.go
Original file line number Diff line number Diff line change
Expand Up @@ -455,6 +455,16 @@ func addWorkflowWithTracking(workflow *WorkflowSpec, number int, verbose bool, e
content = updatedContent
}

// Process imports field and replace with workflowspec
processedImportsContent, err := processImportsWithWorkflowSpec(content, workflow, sourceInfo.CommitSHA, verbose)
if err != nil {
if verbose {
fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to process imports: %v", err)))
}
} else {
content = processedImportsContent
}

// Process @include directives and replace with workflowspec
processedContent, err := processIncludesWithWorkflowSpec(content, workflow, sourceInfo.CommitSHA, sourceInfo.PackagePath, verbose)
if err != nil {
Expand Down
106 changes: 105 additions & 1 deletion pkg/cli/imports.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,101 @@ import (
"strings"

"github.com/githubnext/gh-aw/pkg/console"
"github.com/githubnext/gh-aw/pkg/constants"
"github.com/githubnext/gh-aw/pkg/parser"
"github.com/githubnext/gh-aw/pkg/workflow"
)

// processImportsWithWorkflowSpec processes the imports field in frontmatter and replaces
// local file references with workflowspec format (owner/repo/path@sha) for all imports found.
//
// The parameter is named spec (not workflow) so it does not shadow the imported
// workflow package, which sibling helpers in this file rely on.
//
// Parse problems are treated as non-fatal: content without frontmatter, without an
// imports field, or with an unsupported imports shape is returned unchanged with a
// nil error, so callers can continue the add/update flow.
func processImportsWithWorkflowSpec(content string, spec *WorkflowSpec, commitSHA string, verbose bool) (string, error) {
	if verbose {
		fmt.Fprintln(os.Stderr, console.FormatVerboseMessage("Processing imports field to replace with workflowspec"))
	}

	// Extract frontmatter from content; pass content through untouched if there is none.
	result, err := parser.ExtractFrontmatterFromContent(content)
	if err != nil {
		return content, nil
	}

	importsField, exists := result.Frontmatter["imports"]
	if !exists {
		return content, nil // no imports field, nothing to rewrite
	}

	// Normalize the imports field to []string; YAML decoding typically yields []any.
	var imports []string
	switch v := importsField.(type) {
	case []any:
		for _, item := range v {
			if str, ok := item.(string); ok {
				imports = append(imports, str)
			}
		}
	case []string:
		imports = v
	default:
		return content, nil // unsupported shape, skip processing
	}

	// Rewrite each local import to owner/repo/path@ref, pinning to the commit SHA
	// when available and falling back to the requested version.
	processedImports := make([]string, 0, len(imports))
	for _, importPath := range imports {
		// Entries already in workflowspec form are kept as-is.
		if isWorkflowSpecFormat(importPath) {
			processedImports = append(processedImports, importPath)
			continue
		}

		workflowSpec := spec.Repo + "/" + importPath
		if commitSHA != "" {
			workflowSpec += "@" + commitSHA
		} else if spec.Version != "" {
			workflowSpec += "@" + spec.Version
		}

		processedImports = append(processedImports, workflowSpec)
	}

	result.Frontmatter["imports"] = processedImports

	// Re-serialize the file so top-level fields keep their conventional order.
	return reconstructWorkflowFileFromMap(result.Frontmatter, result.Markdown)
}

// reconstructWorkflowFileFromMap reconstructs a workflow file from a frontmatter map
// and a markdown body, serializing the frontmatter with the conventional top-level
// field order and wrapping it in "---" delimiters.
func reconstructWorkflowFileFromMap(frontmatter map[string]any, markdown string) (string, error) {
	// Serialize the frontmatter with PriorityWorkflowFields ordering so output is stable.
	yamlBytes, err := workflow.MarshalWithFieldOrder(frontmatter, constants.PriorityWorkflowFields)
	if err != nil {
		return "", fmt.Errorf("failed to marshal frontmatter: %w", err)
	}

	// Drop the trailing newline and restore the conventional unquoted "on" key.
	yamlStr := workflow.UnquoteYAMLKey(strings.TrimSuffix(string(yamlBytes), "\n"), "on")

	// Assemble the final document: delimiter, frontmatter (if any), delimiter, markdown (if any).
	var b strings.Builder
	b.WriteString("---")
	if yamlStr != "" {
		b.WriteString("\n")
		b.WriteString(yamlStr)
	}
	b.WriteString("\n---")
	if markdown != "" {
		b.WriteString("\n")
		b.WriteString(markdown)
	}

	return b.String(), nil
}

// processIncludesWithWorkflowSpec processes @include directives in content and replaces local file references
// with workflowspec format (owner/repo/path@sha) for all includes found in the package
func processIncludesWithWorkflowSpec(content string, workflow *WorkflowSpec, commitSHA, packagePath string, verbose bool) (string, error) {
Expand Down Expand Up @@ -157,8 +249,20 @@ func processIncludesWithWorkflowSpec(content string, workflow *WorkflowSpec, com
}

// processIncludesInContent processes @include directives in workflow content for update command
// and also processes imports field in frontmatter
func processIncludesInContent(content string, workflow *WorkflowSpec, commitSHA string, verbose bool) (string, error) {
scanner := bufio.NewScanner(strings.NewReader(content))
// First process imports field in frontmatter
processedImportsContent, err := processImportsWithWorkflowSpec(content, workflow, commitSHA, verbose)
if err != nil {
if verbose {
fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Failed to process imports: %v", err)))
}
// Continue with original content on error
processedImportsContent = content
}

// Then process @include directives in markdown
scanner := bufio.NewScanner(strings.NewReader(processedImportsContent))
var result strings.Builder

for scanner.Scan() {
Expand Down
2 changes: 1 addition & 1 deletion pkg/constants/constants.go
Original file line number Diff line number Diff line change
Expand Up @@ -169,4 +169,4 @@ var PriorityJobFields = []string{"name", "runs-on", "needs", "if", "permissions"

// PriorityWorkflowFields defines the conventional field order for top-level GitHub Actions workflow frontmatter
// Fields appear in this order first, followed by remaining fields alphabetically
var PriorityWorkflowFields = []string{"on", "permissions", "if", "network", "safe-outputs", "steps"}
var PriorityWorkflowFields = []string{"on", "permissions", "if", "network", "imports", "safe-outputs", "steps"}
80 changes: 80 additions & 0 deletions pkg/parser/frontmatter.go
Original file line number Diff line number Diff line change
Expand Up @@ -288,6 +288,86 @@ func ExtractMarkdown(filePath string) (string, error) {
return ExtractMarkdownContent(string(content))
}

// ProcessImportsFromFrontmatter processes the imports field from frontmatter.
// It returns the merged tool definitions and engine declarations collected from
// every imported file. A missing or empty imports field yields empty results;
// a non-array imports field is an error.
func ProcessImportsFromFrontmatter(frontmatter map[string]any, baseDir string) (mergedTools string, mergedEngines []string, err error) {
	raw, ok := frontmatter["imports"]
	if !ok {
		return "", nil, nil
	}

	// Normalize the field to []string; YAML decoding may produce []any.
	var importPaths []string
	switch field := raw.(type) {
	case []string:
		importPaths = field
	case []any:
		for _, entry := range field {
			if s, isStr := entry.(string); isStr {
				importPaths = append(importPaths, s)
			}
		}
	default:
		return "", nil, fmt.Errorf("imports field must be an array of strings")
	}

	if len(importPaths) == 0 {
		return "", nil, nil
	}

	// Guard against visiting the same resolved file twice (duplicate/cycle protection).
	seen := make(map[string]bool)

	var toolsBuilder strings.Builder
	var engines []string

	for _, entry := range importPaths {
		// A "#Section" suffix restricts the import to one named section of the file.
		file, section, _ := strings.Cut(entry, "#")

		// Resolve the import path (supports workflowspec format).
		fullPath, resolveErr := resolveIncludePath(file, baseDir)
		if resolveErr != nil {
			return "", nil, fmt.Errorf("failed to resolve import '%s': %w", file, resolveErr)
		}

		if seen[fullPath] {
			continue
		}
		seen[fullPath] = true

		// Collect tool definitions from the imported file.
		toolsContent, procErr := processIncludedFileWithVisited(fullPath, section, true, baseDir, seen)
		if procErr != nil {
			return "", nil, fmt.Errorf("failed to process imported file '%s': %w", fullPath, procErr)
		}
		toolsBuilder.WriteString(toolsContent + "\n")

		// Collect the engine declaration, if the imported file has one.
		data, readErr := os.ReadFile(fullPath)
		if readErr != nil {
			return "", nil, fmt.Errorf("failed to read imported file '%s': %w", fullPath, readErr)
		}
		if engineContent, engineErr := extractEngineFromContent(string(data)); engineErr == nil && engineContent != "" {
			engines = append(engines, engineContent)
		}
	}

	return toolsBuilder.String(), engines, nil
}

// ProcessIncludes processes @include and @import directives in markdown content
// This matches the bash process_includes function behavior
func ProcessIncludes(content, baseDir string, extractTools bool) (string, error) {
Expand Down
111 changes: 111 additions & 0 deletions pkg/parser/frontmatter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -1914,3 +1914,114 @@ func TestProcessIncludesWithCycleDetection(t *testing.T) {
t.Errorf("ProcessIncludes result should contain File B content")
}
}

// TestProcessImportsFromFrontmatter exercises imports-field processing: a missing
// field and an empty list are no-ops, a valid import produces merged tools JSON,
// and a non-array imports field is rejected with an error.
func TestProcessImportsFromFrontmatter(t *testing.T) {
	// Create temp directory for test files
	tempDir := t.TempDir()

	// Create a test include file whose frontmatter declares bash tools.
	// NOTE(review): YAML nesting inside the raw string reconstructed with
	// conventional 2-space indentation — confirm against the original file.
	includeFile := filepath.Join(tempDir, "include.md")
	includeContent := `---
tools:
  bash:
    allowed:
      - ls
      - cat
---
# Include Content
This is an included file.`
	if err := os.WriteFile(includeFile, []byte(includeContent), 0644); err != nil {
		t.Fatalf("Failed to write include file: %v", err)
	}

	tests := []struct {
		name          string
		frontmatter   map[string]any
		wantToolsJSON bool // expect non-empty, JSON-parseable tools output
		wantEngines   bool // expect at least one engine entry
		wantErr       bool // expect ProcessImportsFromFrontmatter to fail
	}{
		{
			name: "no imports field",
			frontmatter: map[string]any{
				"on": "push",
			},
			wantToolsJSON: false,
			wantEngines:   false,
			wantErr:       false,
		},
		{
			name: "empty imports array",
			frontmatter: map[string]any{
				"on":      "push",
				"imports": []string{},
			},
			wantToolsJSON: false,
			wantEngines:   false,
			wantErr:       false,
		},
		{
			name: "valid imports",
			frontmatter: map[string]any{
				"on":      "push",
				"imports": []string{"include.md"},
			},
			wantToolsJSON: true,
			wantEngines:   false,
			wantErr:       false,
		},
		{
			// imports must be an array; a bare string is a type error.
			name: "invalid imports type",
			frontmatter: map[string]any{
				"on":      "push",
				"imports": "not-an-array",
			},
			wantToolsJSON: false,
			wantEngines:   false,
			wantErr:       true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tools, engines, err := ProcessImportsFromFrontmatter(tt.frontmatter, tempDir)

			if tt.wantErr {
				if err == nil {
					t.Errorf("ProcessImportsFromFrontmatter() expected error but got none")
				}
				return
			}

			if err != nil {
				t.Errorf("ProcessImportsFromFrontmatter() unexpected error: %v", err)
				return
			}

			if tt.wantToolsJSON {
				if tools == "" {
					t.Errorf("ProcessImportsFromFrontmatter() expected tools JSON but got empty string")
				}
				// Verify it's valid JSON
				var toolsMap map[string]any
				if err := json.Unmarshal([]byte(tools), &toolsMap); err != nil {
					t.Errorf("ProcessImportsFromFrontmatter() tools not valid JSON: %v", err)
				}
			} else {
				if tools != "" {
					t.Errorf("ProcessImportsFromFrontmatter() expected no tools but got: %s", tools)
				}
			}

			if tt.wantEngines {
				if len(engines) == 0 {
					t.Errorf("ProcessImportsFromFrontmatter() expected engines but got none")
				}
			} else {
				if len(engines) != 0 {
					t.Errorf("ProcessImportsFromFrontmatter() expected no engines but got: %v", engines)
				}
			}
		})
	}
}
8 changes: 8 additions & 0 deletions pkg/parser/schemas/main_workflow_schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,14 @@
"type": "string",
"description": "Optional source reference indicating where this workflow was added from. Format: owner/repo/path@ref (e.g., githubnext/agentics/workflows/ci-doctor.md@v1.0.0). Rendered as a comment in the generated lock file."
},
"imports": {
"type": "array",
"description": "Optional array of workflow specifications to import (similar to @include directives but defined in frontmatter). Format: owner/repo/path@ref (e.g., githubnext/agentics/workflows/shared/common.md@v1.0.0).",
"items": {
"type": "string",
"description": "Workflow specification in format owner/repo/path@ref"
}
},
"on": {
"description": "Workflow triggers that define when the agentic workflow should run. Supports standard GitHub Actions trigger events plus special command triggers for /commands (required)",
"oneOf": [
Expand Down
Loading