Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
62 changes: 61 additions & 1 deletion pkg/cli/update_command.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,9 @@ import (
"context"
"fmt"
"os"
"path/filepath"
"sort"
"strings"

"github.com/github/gh-aw/pkg/console"
"github.com/github/gh-aw/pkg/constants"
Expand Down Expand Up @@ -133,9 +136,14 @@ func RunUpdateWorkflows(ctx context.Context, workflowNames []string, allowMajor,

// Resolve and store SHA-256 digest pins for container images referenced in lock files.
updateLog.Print("Updating container image digest pins")
if err := UpdateContainerPins(ctx, workflowsDir, verbose); err != nil {
pinsUpdated, err := UpdateContainerPins(ctx, workflowsDir, verbose)
if err != nil {
// Non-fatal: Docker may not be available in all environments.
fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Warning: Failed to update container pins: %v", err)))
} else if pinsUpdated && !noCompile {
if err := recompileWorkflowsForContainerPins(workflowsDir, engineOverride, verbose); err != nil {
fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Warning: Failed to apply container pins to lock files: %v", err)))
}
}

// Update action references in user-provided steps within workflow .md files.
Expand All @@ -149,3 +157,55 @@ func RunUpdateWorkflows(ctx context.Context, workflowNames []string, allowMajor,
updateLog.Printf("Update process complete: had_error=%v", firstErr != nil)
return firstErr
}

// recompileWorkflowsForContainerPins recompiles workflows with existing lock files
// so newly resolved container digest pins are applied in the same update run.
func recompileWorkflowsForContainerPins(workflowsDir, engineOverride string, verbose bool) error {
workflowFiles, err := workflowFilesForExistingLocks(workflowsDir)
if err != nil {
return err
}
for _, workflowFile := range workflowFiles {
quiet := false
refreshStopTime := false
if err := compileWorkflowWithRefresh(workflowFile, verbose, quiet, engineOverride, refreshStopTime); err != nil {
return fmt.Errorf("failed to recompile %s after updating container pins: %w", filepath.Base(workflowFile), err)
}
}
Comment on lines +168 to +174
Copy link

Copilot AI Apr 22, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

recompileWorkflowsForContainerPins returns immediately on the first compilation error, which can leave remaining workflows with existing lock files un-recompiled (and therefore not updated to use the newly pinned image digests) in the same gh aw update run. Consider continuing through all workflows, collecting failures, and returning a combined error (or logging per-workflow warnings) so one bad workflow doesn't prevent pins being applied everywhere.

Suggested change
for _, workflowFile := range workflowFiles {
quiet := false
refreshStopTime := false
if err := compileWorkflowWithRefresh(workflowFile, verbose, quiet, engineOverride, refreshStopTime); err != nil {
return fmt.Errorf("failed to recompile %s after updating container pins: %w", filepath.Base(workflowFile), err)
}
}
var failures []string
for _, workflowFile := range workflowFiles {
quiet := false
refreshStopTime := false
if err := compileWorkflowWithRefresh(workflowFile, verbose, quiet, engineOverride, refreshStopTime); err != nil {
failures = append(failures, fmt.Sprintf("%s: %v", filepath.Base(workflowFile), err))
}
}
if len(failures) > 0 {
return fmt.Errorf("failed to recompile workflows after updating container pins: %s", strings.Join(failures, "; "))
}

Copilot uses AI. Check for mistakes.
return nil
}

// workflowFilesForExistingLocks returns sorted workflow markdown file paths that
// have a matching .lock.yml file in the same workflow directory.
func workflowFilesForExistingLocks(workflowsDir string) ([]string, error) {
	if workflowsDir == "" {
		workflowsDir = getWorkflowsDir()
	}

	entries, err := os.ReadDir(workflowsDir)
	if err != nil {
		return nil, fmt.Errorf("failed to read workflows directory %s: %w", workflowsDir, err)
	}

	// First pass: record the base name of every lock file.
	locked := make(map[string]struct{})
	for _, entry := range entries {
		name := entry.Name()
		if !entry.IsDir() && strings.HasSuffix(name, ".lock.yml") {
			locked[strings.TrimSuffix(name, ".lock.yml")] = struct{}{}
		}
	}

	// Second pass: keep markdown files whose base name has a lock counterpart.
	matched := make([]string, 0, len(locked))
	for _, entry := range entries {
		name := entry.Name()
		if entry.IsDir() || !strings.HasSuffix(name, ".md") {
			continue
		}
		if _, ok := locked[strings.TrimSuffix(name, ".md")]; ok {
			matched = append(matched, filepath.Join(workflowsDir, name))
		}
	}
	sort.Strings(matched)
	return matched, nil
}
49 changes: 49 additions & 0 deletions pkg/cli/update_command_container_pins_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
//go:build !integration

package cli

import (
"os"
"path/filepath"
"testing"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

func TestWorkflowFilesForExistingLocks(t *testing.T) {
	// writeFile creates a small fixture file inside dir, failing the test on error.
	writeFile := func(t *testing.T, dir, name, content string) {
		t.Helper()
		require.NoError(t, os.WriteFile(filepath.Join(dir, name), []byte(content), 0o644))
	}

	t.Run("returns only workflow markdown files with matching lock files", func(t *testing.T) {
		workflowsDir := filepath.Join(t.TempDir(), ".github", "workflows")
		require.NoError(t, os.MkdirAll(workflowsDir, 0o755))

		writeFile(t, workflowsDir, "a.md", "---\non: issues\n---")
		writeFile(t, workflowsDir, "a.lock.yml", "name: a")
		writeFile(t, workflowsDir, "b.md", "---\non: issues\n---")
		writeFile(t, workflowsDir, "b.lock.yml", "name: b")
		writeFile(t, workflowsDir, "c.md", "---\non: issues\n---")
		writeFile(t, workflowsDir, "notes.md", "# include")
		writeFile(t, workflowsDir, "notes.lock.yml", "name: notes")

		files, err := workflowFilesForExistingLocks(workflowsDir)
		require.NoError(t, err, "workflow file discovery should succeed")
		want := []string{
			filepath.Join(workflowsDir, "a.md"),
			filepath.Join(workflowsDir, "b.md"),
			filepath.Join(workflowsDir, "notes.md"),
		}
		assert.Equal(t, want, files, "only markdown files with lock counterparts should be returned in sorted order")
	})

	t.Run("returns empty slice when no matching pairs exist", func(t *testing.T) {
		workflowsDir := filepath.Join(t.TempDir(), ".github", "workflows")
		require.NoError(t, os.MkdirAll(workflowsDir, 0o755))

		writeFile(t, workflowsDir, "only-md.md", "---\non: issues\n---")
		writeFile(t, workflowsDir, "only-lock.lock.yml", "name: only-lock")

		files, err := workflowFilesForExistingLocks(workflowsDir)
		require.NoError(t, err, "workflow file discovery should succeed")
		assert.Empty(t, files, "no matching markdown/lock pairs should produce an empty result")
	})
}
17 changes: 9 additions & 8 deletions pkg/cli/update_container_pins.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,9 +48,10 @@ var buildxDigestPattern = regexp.MustCompile(`(?m)^Digest:\s+(sha256:[a-f0-9]{64
// as they are already pinned. Images without a cached pin are queried via the
// Docker CLI ("docker buildx imagetools inspect").
//
// When Docker is unavailable the function logs a warning and returns nil so that
// the overall upgrade flow is not interrupted.
func UpdateContainerPins(ctx context.Context, workflowDir string, verbose bool) error {
// When Docker is unavailable the function logs a warning and returns (false, nil)
// so that the overall upgrade flow is not interrupted.
// The bool return value indicates whether any container pin entries were newly added.
func UpdateContainerPins(ctx context.Context, workflowDir string, verbose bool) (bool, error) {
containerPinsLog.Print("Starting container pin update")

if verbose {
Expand All @@ -65,14 +66,14 @@ func UpdateContainerPins(ctx context.Context, workflowDir string, verbose bool)
if verbose {
fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Warning: Failed to collect container images: %v", err)))
}
return nil
return false, nil
}

if len(images) == 0 {
if verbose {
fmt.Fprintln(os.Stderr, console.FormatVerboseMessage("No container images found in lock files"))
}
return nil
return false, nil
}

containerPinsLog.Printf("Found %d unique container image(s) across lock files", len(images))
Expand All @@ -82,7 +83,7 @@ func UpdateContainerPins(ctx context.Context, workflowDir string, verbose bool)
actionCache := workflow.NewActionCache(".")
if _, statErr := os.Stat(actionsLockPath); statErr == nil {
if loadErr := actionCache.Load(); loadErr != nil {
return fmt.Errorf("failed to load actions-lock.json: %w", loadErr)
return false, fmt.Errorf("failed to load actions-lock.json: %w", loadErr)
}
}

Expand Down Expand Up @@ -154,12 +155,12 @@ func UpdateContainerPins(ctx context.Context, workflowDir string, verbose bool)

if len(updatedImages) > 0 {
if err := actionCache.Save(); err != nil {
return fmt.Errorf("failed to save actions-lock.json: %w", err)
return false, fmt.Errorf("failed to save actions-lock.json: %w", err)
}
fmt.Fprintln(os.Stderr, console.FormatInfoMessage("Updated container pins in actions-lock.json"))
}

return nil
return len(updatedImages) > 0, nil
}

// collectImagesFromLockFiles scans all .lock.yml files under workflowDir and returns
Expand Down
2 changes: 1 addition & 1 deletion pkg/cli/upgrade_command.go
Original file line number Diff line number Diff line change
Expand Up @@ -248,7 +248,7 @@ func runUpgradeCommand(ctx context.Context, verbose bool, workflowDir string, no
// pinned @sha256: references in the generated lock files.
if !noFix && !noActions {
upgradeLog.Print("Updating container image digest pins")
if err := UpdateContainerPins(ctx, workflowDir, verbose); err != nil {
if _, err := UpdateContainerPins(ctx, workflowDir, verbose); err != nil {
upgradeLog.Printf("Failed to update container pins: %v", err)
// Non-critical — Docker may not be available in all environments.
fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Warning: Failed to update container pins: %v", err)))
Expand Down
168 changes: 166 additions & 2 deletions pkg/parser/frontmatter_hash.go
Original file line number Diff line number Diff line change
Expand Up @@ -192,7 +192,7 @@ func computeFrontmatterHashFromContent(content string, parsedFrontmatter map[str
}

// Compute hash using text-based approach with custom file reader
return computeFrontmatterHashTextBasedWithReader(frontmatterText, fullBody, baseDir, cache, relevantExpressions, fileReader)
return computeFrontmatterHashTextBasedWithReader(frontmatterText, fullBody, baseDir, filePath, cache, relevantExpressions, fileReader)
}

// extractRelevantTemplateExpressions extracts template expressions from markdown
Expand Down Expand Up @@ -421,7 +421,7 @@ func processImportsTextBased(frontmatterText, baseDir string, visited map[string
// computeFrontmatterHashTextBasedWithReader computes the hash using text-based approach with custom file reader.
// When markdown is non-empty, it is included as the full body text in the canonical data (used for
// inlined-imports mode where the entire body is compiled into the lock file).
func computeFrontmatterHashTextBasedWithReader(frontmatterText, markdown, baseDir string, cache *ImportCache, expressions []string, fileReader FileReader) (string, error) {
func computeFrontmatterHashTextBasedWithReader(frontmatterText, markdown, baseDir, filePath string, cache *ImportCache, expressions []string, fileReader FileReader) (string, error) {
frontmatterHashLog.Print("Computing frontmatter hash using text-based approach")

// Process imports using text-based parsing with custom file reader
Expand Down Expand Up @@ -462,6 +462,10 @@ func computeFrontmatterHashTextBasedWithReader(frontmatterText, markdown, baseDi
canonical["template-expressions"] = expressions
}

// Include relevant container digest pins in the canonical hash input.
// This ensures lock metadata changes when resolved container pins change.
addContainerPinsToCanonical(canonical, frontmatterText, filePath, fileReader)

// Serialize to canonical JSON
canonicalJSON := marshalSorted(canonical)

Expand All @@ -474,3 +478,163 @@ func computeFrontmatterHashTextBasedWithReader(frontmatterText, markdown, baseDi
frontmatterHashLog.Printf("Computed hash: %s", hashHex)
return hashHex, nil
}

// addContainerPinsToCanonical augments canonical hash data with digest pins relevant
// to container images declared in frontmatter tools.
func addContainerPinsToCanonical(canonical map[string]any, frontmatterText, workflowPath string, fileReader FileReader) {
	frontmatter, err := ExtractFrontmatterFromContent("---\n" + frontmatterText + "\n---\n")
	if err != nil {
		frontmatterHashLog.Printf("Skipping container pins in hash for %s: failed to parse frontmatter for container pin extraction: %v", workflowPath, err)
		return
	}

	// Nothing to pin if the frontmatter declares no container images.
	images := extractContainerImagesFromFrontmatter(frontmatter.Frontmatter)
	if len(images) == 0 {
		return
	}

	lockData, ok := findActionsLockContent(workflowPath, fileReader)
	if !ok {
		return
	}

	// Decode only the "containers" section of actions-lock.json.
	type containerPin struct {
		Digest      string `json:"digest"`
		PinnedImage string `json:"pinned_image"`
	}
	var lock struct {
		Containers map[string]containerPin `json:"containers"`
	}
	if err := json.Unmarshal(lockData, &lock); err != nil {
		frontmatterHashLog.Printf("Skipping container pins in hash for %s: failed to parse actions-lock.json: %v", workflowPath, err)
		return
	}
	if len(lock.Containers) == 0 {
		return
	}

	// Collect a normalized pinned reference for each declared image that has
	// an entry in the lock file.
	pinned := make(map[string]any, len(images))
	for _, image := range images {
		entry, found := lock.Containers[image]
		if !found {
			continue
		}
		if ref := normalizePinnedContainerReference(image, entry); ref != "" {
			pinned[image] = ref
		}
	}

	if len(pinned) > 0 {
		canonical["container-pins"] = pinned
	}
}

// normalizePinnedContainerReference returns a normalized container pin reference
// in the canonical image@sha256:<digest> syntax when available.
func normalizePinnedContainerReference(image string, pin struct {
	Digest      string `json:"digest"`
	PinnedImage string `json:"pinned_image"`
}) string {
	switch {
	case strings.Contains(pin.PinnedImage, "@sha256:"):
		// The stored image reference is already digest-pinned; use it verbatim.
		return pin.PinnedImage
	case strings.HasPrefix(pin.Digest, "sha256:"):
		// Compose the pinned reference from the image name and bare digest.
		return image + "@" + pin.Digest
	default:
		// No usable digest information available.
		return ""
	}
}

// findActionsLockContent searches upward from workflowPath for .github/aw/actions-lock.json.
func findActionsLockContent(workflowPath string, fileReader FileReader) ([]byte, bool) {
	for dir := filepath.Dir(workflowPath); ; {
		lockPath := filepath.Join(dir, ".github", "aw", "actions-lock.json")
		if data, err := fileReader(lockPath); err == nil {
			return data, true
		}

		parent := filepath.Dir(dir)
		if parent == dir {
			// Reached the filesystem root without finding a lock file.
			return nil, false
		}
		dir = parent
	}
}

// extractContainerImagesFromFrontmatter returns sorted, deduplicated container images
// from tool definitions in frontmatter.
func extractContainerImagesFromFrontmatter(frontmatter map[string]any) []string {
	if frontmatter == nil {
		return nil
	}

	tools, ok := frontmatter["tools"].(map[string]any)
	if !ok || len(tools) == 0 {
		return nil
	}

	seen := make(map[string]bool)
	for _, raw := range tools {
		tool, ok := raw.(map[string]any)
		if !ok {
			continue
		}

		// An explicit container field takes precedence over docker args.
		if img, ok := tool["container"].(string); ok && img != "" {
			seen[img] = true
			continue
		}

		// Only docker commands are expected to carry a container image in
		// their args; tool definitions using other commands are skipped.
		if cmd, ok := tool["command"].(string); !ok || cmd != "docker" {
			continue
		}
		if img := extractImageFromDockerRunArgs(tool["args"]); img != "" {
			seen[img] = true
		}
	}

	if len(seen) == 0 {
		return nil
	}

	images := make([]string, 0, len(seen))
	for img := range seen {
		images = append(images, img)
	}
	sort.Strings(images)
	return images
}

// extractImageFromDockerRunArgs returns the last argument from docker run args,
// assuming it is the image reference for `docker run ... <image>`.
// Returns empty string if args is empty, cannot be converted to []any/[]string,
// the last value is not a string, or the last argument starts with "-" (likely
// a flag rather than an image name).
// This heuristic is intentionally narrow for frontmatter tool definitions where
// args are expected to end at the image reference and not include a container
// command after the image.
func extractImageFromDockerRunArgs(args any) string {
	var candidate string
	switch list := args.(type) {
	case []any:
		if n := len(list); n > 0 {
			// A non-string last element leaves candidate empty, which is
			// rejected below.
			candidate, _ = list[n-1].(string)
		}
	case []string:
		if n := len(list); n > 0 {
			candidate = list[n-1]
		}
	}
	if candidate == "" || strings.HasPrefix(candidate, "-") {
		return ""
	}
	return candidate
}
Loading