diff --git a/.github/shared/editorial.md b/.github/shared/editorial.md new file mode 100644 index 00000000000..79fb62bab63 --- /dev/null +++ b/.github/shared/editorial.md @@ -0,0 +1,3 @@ +## Writing Style + +Write in a **newspaper editorial tone**: clear, authoritative, and concise. Lead with the most important finding, use active voice, and keep sentences tight. Present data-driven observations as confident conclusions, not hedged guesses. Favor short paragraphs over long ones, and use subheadings to guide the reader through the report. diff --git a/.github/workflows/daily-fact.lock.yml b/.github/workflows/daily-fact.lock.yml index 1386e8bbd48..d4690f1b948 100644 --- a/.github/workflows/daily-fact.lock.yml +++ b/.github/workflows/daily-fact.lock.yml @@ -1395,18 +1395,18 @@ jobs: DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0') export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e 
GITHUB_BASE_REF -e CODEX_HOME -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.0' - cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_d8e5c0475d93e829_EOF + cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_6757623b9ba12653_EOF [history] persistence = "none" [shell_environment_policy] inherit = "core" include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"] - GH_AW_MCP_CONFIG_d8e5c0475d93e829_EOF + GH_AW_MCP_CONFIG_6757623b9ba12653_EOF # Generate JSON config for MCP gateway GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node) - cat << GH_AW_MCP_CONFIG_c44dd6d6554730bc_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs" + cat << GH_AW_MCP_CONFIG_ee671eb9410510b8_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs" { "mcpServers": { }, @@ -1417,11 +1417,11 @@ jobs: "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } - GH_AW_MCP_CONFIG_c44dd6d6554730bc_EOF + GH_AW_MCP_CONFIG_ee671eb9410510b8_EOF # Sync converter output to writable CODEX_HOME for Codex mkdir -p /tmp/gh-aw/mcp-config - cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_1d0a0d86b055804d_EOF + cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_73fd68f19c59a61a_EOF model_provider = "openai-proxy" [model_providers.openai-proxy] name = "OpenAI AWF proxy" @@ -1431,7 +1431,7 @@ jobs: [shell_environment_policy] inherit = "core" include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"] - GH_AW_CODEX_SHELL_POLICY_1d0a0d86b055804d_EOF + GH_AW_CODEX_SHELL_POLICY_73fd68f19c59a61a_EOF awk ' BEGIN { skip_openai_proxy = 0 } /^[[:space:]]*model_provider[[:space:]]*=/ { next } diff --git a/.github/workflows/daily-issues-report.lock.yml b/.github/workflows/daily-issues-report.lock.yml index 98553f9a4a5..412c11dd6ef 100644 --- 
a/.github/workflows/daily-issues-report.lock.yml +++ b/.github/workflows/daily-issues-report.lock.yml @@ -260,6 +260,7 @@ jobs: {{#runtime-import .github/workflows/shared/trends.md}} {{#runtime-import .github/workflows/shared/reporting.md}} {{#runtime-import .github/workflows/shared/observability-otlp.md}} + {{#runtime-import .github/shared/editorial.md}} {{#runtime-import .github/workflows/daily-issues-report.md}} GH_AW_PROMPT_9e1e7ac2d1f2b660_EOF } > "$GH_AW_PROMPT" diff --git a/.github/workflows/daily-issues-report.md b/.github/workflows/daily-issues-report.md index 49e2ff4818d..30876759757 100644 --- a/.github/workflows/daily-issues-report.md +++ b/.github/workflows/daily-issues-report.md @@ -35,6 +35,8 @@ features: --- {{#runtime-import? .github/shared-instructions.md}} +{{#runtime-import .github/shared/editorial.md}} + # Daily Issues Report Generator You are an expert analyst that generates comprehensive daily reports about repository issues, using Python for clustering and visualization. diff --git a/.github/workflows/daily-news.lock.yml b/.github/workflows/daily-news.lock.yml index 4fd66d92f5a..1a44abd0a94 100644 --- a/.github/workflows/daily-news.lock.yml +++ b/.github/workflows/daily-news.lock.yml @@ -250,6 +250,7 @@ jobs: {{#runtime-import .github/workflows/shared/reporting.md}} {{#runtime-import .github/workflows/shared/observability-otlp.md}} {{#runtime-import .github/workflows/shared/python-dataviz.md}} + {{#runtime-import .github/shared/editorial.md}} {{#runtime-import .github/workflows/daily-news.md}} GH_AW_PROMPT_d515ff7b72bcd152_EOF } > "$GH_AW_PROMPT" diff --git a/.github/workflows/daily-news.md b/.github/workflows/daily-news.md index 43ee2dfb838..0f24e156097 100644 --- a/.github/workflows/daily-news.md +++ b/.github/workflows/daily-news.md @@ -301,6 +301,8 @@ features: {{#runtime-import? 
.github/shared-instructions.md}} +{{#runtime-import .github/shared/editorial.md}} + # Daily News Write an upbeat, friendly, motivating summary of recent activity in the repo. diff --git a/.github/workflows/daily-observability-report.lock.yml b/.github/workflows/daily-observability-report.lock.yml index 470455a0317..fc13bade1dc 100644 --- a/.github/workflows/daily-observability-report.lock.yml +++ b/.github/workflows/daily-observability-report.lock.yml @@ -1313,18 +1313,18 @@ jobs: DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0') export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e CODEX_HOME -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.0' - cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_3c598095e22dad2c_EOF + cat > 
"${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_04708a94aec321c6_EOF [history] persistence = "none" [shell_environment_policy] inherit = "core" include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"] - GH_AW_MCP_CONFIG_3c598095e22dad2c_EOF + GH_AW_MCP_CONFIG_04708a94aec321c6_EOF # Generate JSON config for MCP gateway GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node) - cat << GH_AW_MCP_CONFIG_cdd8152942b1db40_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs" + cat << GH_AW_MCP_CONFIG_29862326540731be_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs" { "mcpServers": { }, @@ -1335,11 +1335,11 @@ jobs: "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } - GH_AW_MCP_CONFIG_cdd8152942b1db40_EOF + GH_AW_MCP_CONFIG_29862326540731be_EOF # Sync converter output to writable CODEX_HOME for Codex mkdir -p /tmp/gh-aw/mcp-config - cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_642d534dcaee7f48_EOF + cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_bf62a3cca3cc7f92_EOF model_provider = "openai-proxy" [model_providers.openai-proxy] name = "OpenAI AWF proxy" @@ -1349,7 +1349,7 @@ jobs: [shell_environment_policy] inherit = "core" include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"] - GH_AW_CODEX_SHELL_POLICY_642d534dcaee7f48_EOF + GH_AW_CODEX_SHELL_POLICY_bf62a3cca3cc7f92_EOF awk ' BEGIN { skip_openai_proxy = 0 } /^[[:space:]]*model_provider[[:space:]]*=/ { next } diff --git a/.github/workflows/daily-team-status.lock.yml b/.github/workflows/daily-team-status.lock.yml index f0870ca9a77..0e64ea02ad7 100644 --- a/.github/workflows/daily-team-status.lock.yml +++ b/.github/workflows/daily-team-status.lock.yml @@ -249,6 +249,7 @@ jobs: {{#runtime-import .github/workflows/shared/reporting-otlp.md}} {{#runtime-import .github/workflows/shared/reporting.md}} {{#runtime-import .github/workflows/shared/observability-otlp.md}} + 
{{#runtime-import .github/shared/editorial.md}} {{#runtime-import .github/workflows/daily-team-status.md}} GH_AW_PROMPT_31fef7a17811ecb8_EOF } > "$GH_AW_PROMPT" diff --git a/.github/workflows/daily-team-status.md b/.github/workflows/daily-team-status.md index d6a0940ee2b..339168eded7 100644 --- a/.github/workflows/daily-team-status.md +++ b/.github/workflows/daily-team-status.md @@ -36,6 +36,8 @@ features: {{#runtime-import? .github/shared-instructions.md}} +{{#runtime-import .github/shared/editorial.md}} + # Daily Team Status Create an upbeat daily status report for the team as a GitHub issue. diff --git a/.github/workflows/grumpy-reviewer.lock.yml b/.github/workflows/grumpy-reviewer.lock.yml index 5d1a91199bb..c5069d16d9b 100644 --- a/.github/workflows/grumpy-reviewer.lock.yml +++ b/.github/workflows/grumpy-reviewer.lock.yml @@ -1323,18 +1323,18 @@ jobs: DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0') export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA 
-e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e CODEX_HOME -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.0' - cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_25c512d1212ef215_EOF + cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_06df42b0a8eed999_EOF [history] persistence = "none" [shell_environment_policy] inherit = "core" include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"] - GH_AW_MCP_CONFIG_25c512d1212ef215_EOF + GH_AW_MCP_CONFIG_06df42b0a8eed999_EOF # Generate JSON config for MCP gateway GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node) - cat << GH_AW_MCP_CONFIG_2bd1284a143a926f_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs" + cat << GH_AW_MCP_CONFIG_3d62a58a41823422_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs" { "mcpServers": { }, @@ -1345,11 +1345,11 @@ jobs: "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } - GH_AW_MCP_CONFIG_2bd1284a143a926f_EOF + GH_AW_MCP_CONFIG_3d62a58a41823422_EOF # Sync converter output to writable CODEX_HOME for Codex mkdir -p /tmp/gh-aw/mcp-config - cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_dfc1313a4964ffae_EOF + cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_74d2ecfbdbcb1834_EOF model_provider = "openai-proxy" [model_providers.openai-proxy] name = "OpenAI AWF proxy" @@ -1359,7 +1359,7 @@ jobs: [shell_environment_policy] inherit = "core" include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"] - GH_AW_CODEX_SHELL_POLICY_dfc1313a4964ffae_EOF + GH_AW_CODEX_SHELL_POLICY_74d2ecfbdbcb1834_EOF awk ' BEGIN { skip_openai_proxy = 0 } /^[[:space:]]*model_provider[[:space:]]*=/ { next } diff --git a/.github/workflows/issue-arborist.lock.yml 
b/.github/workflows/issue-arborist.lock.yml index a0cebedaf68..f21868c5ce2 100644 --- a/.github/workflows/issue-arborist.lock.yml +++ b/.github/workflows/issue-arborist.lock.yml @@ -1302,18 +1302,18 @@ jobs: DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0') export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e CODEX_HOME -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.0' - cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_d53aa27ebe5905f7_EOF + cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_f4b4647a500ad2fd_EOF [history] persistence = "none" [shell_environment_policy] inherit = "core" include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"] - GH_AW_MCP_CONFIG_d53aa27ebe5905f7_EOF + 
GH_AW_MCP_CONFIG_f4b4647a500ad2fd_EOF # Generate JSON config for MCP gateway GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node) - cat << GH_AW_MCP_CONFIG_36ef1df3aa5a1da9_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs" + cat << GH_AW_MCP_CONFIG_6e9b8c3a68bdbe31_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs" { "mcpServers": { }, @@ -1324,11 +1324,11 @@ jobs: "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } - GH_AW_MCP_CONFIG_36ef1df3aa5a1da9_EOF + GH_AW_MCP_CONFIG_6e9b8c3a68bdbe31_EOF # Sync converter output to writable CODEX_HOME for Codex mkdir -p /tmp/gh-aw/mcp-config - cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_727a5a52c1cdbc23_EOF + cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_2a0e97535a4dc3cb_EOF model_provider = "openai-proxy" [model_providers.openai-proxy] name = "OpenAI AWF proxy" @@ -1338,7 +1338,7 @@ jobs: [shell_environment_policy] inherit = "core" include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"] - GH_AW_CODEX_SHELL_POLICY_727a5a52c1cdbc23_EOF + GH_AW_CODEX_SHELL_POLICY_2a0e97535a4dc3cb_EOF awk ' BEGIN { skip_openai_proxy = 0 } /^[[:space:]]*model_provider[[:space:]]*=/ { next } diff --git a/.github/workflows/smoke-codex.lock.yml b/.github/workflows/smoke-codex.lock.yml index 5c8c1f70cec..0905702b163 100644 --- a/.github/workflows/smoke-codex.lock.yml +++ b/.github/workflows/smoke-codex.lock.yml @@ -1762,18 +1762,18 @@ jobs: DOCKER_SOCK_GID=$(stat -c '%g' /var/run/docker.sock 2>/dev/null || echo '0') export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host --add-host host.docker.internal:127.0.0.1 --user '"${MCP_GATEWAY_UID}"':'"${MCP_GATEWAY_GID}"' --group-add '"${DOCKER_SOCK_GID}"' -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e MCP_GATEWAY_PAYLOAD_SIZE_THRESHOLD -e DEBUG -e MCP_GATEWAY_LOG_DIR -e 
GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_GUARD_MIN_INTEGRITY -e GITHUB_MCP_GUARD_REPOS -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e CODEX_HOME -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.3.0' - cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_d51129f50258a9ce_EOF + cat > "${RUNNER_TEMP}/gh-aw/mcp-config/config.toml" << GH_AW_MCP_CONFIG_ff522ea58250e51b_EOF [history] persistence = "none" [shell_environment_policy] inherit = "core" include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"] - GH_AW_MCP_CONFIG_d51129f50258a9ce_EOF + GH_AW_MCP_CONFIG_ff522ea58250e51b_EOF # Generate JSON config for MCP gateway GH_AW_NODE=$(which node 2>/dev/null || command -v node 2>/dev/null || echo node) - cat << GH_AW_MCP_CONFIG_738764a67943cfb7_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs" + cat << GH_AW_MCP_CONFIG_096a4d11ac009ce5_EOF | "$GH_AW_NODE" "${RUNNER_TEMP}/gh-aw/actions/start_mcp_gateway.cjs" { "mcpServers": { }, @@ -1784,11 +1784,11 @@ jobs: "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" } } - GH_AW_MCP_CONFIG_738764a67943cfb7_EOF + GH_AW_MCP_CONFIG_096a4d11ac009ce5_EOF # Sync converter output to writable CODEX_HOME for Codex mkdir -p /tmp/gh-aw/mcp-config - cat > "/tmp/gh-aw/mcp-config/config.toml" << 
GH_AW_CODEX_SHELL_POLICY_fe86f8648f17b1d8_EOF + cat > "/tmp/gh-aw/mcp-config/config.toml" << GH_AW_CODEX_SHELL_POLICY_6c7dbcec13843414_EOF model_provider = "openai-proxy" [model_providers.openai-proxy] name = "OpenAI AWF proxy" @@ -1798,7 +1798,7 @@ jobs: [shell_environment_policy] inherit = "core" include_only = ["CODEX_API_KEY", "HOME", "OPENAI_API_KEY", "PATH"] - GH_AW_CODEX_SHELL_POLICY_fe86f8648f17b1d8_EOF + GH_AW_CODEX_SHELL_POLICY_6c7dbcec13843414_EOF awk ' BEGIN { skip_openai_proxy = 0 } /^[[:space:]]*model_provider[[:space:]]*=/ { next } diff --git a/actions/setup/js/runtime_import.cjs b/actions/setup/js/runtime_import.cjs index 5870887b5dc..885815ceab6 100644 --- a/actions/setup/js/runtime_import.cjs +++ b/actions/setup/js/runtime_import.cjs @@ -906,7 +906,10 @@ async function processRuntimeImport(filepathOrUrl, optional, workspaceDir, start } /** - * Processes all runtime-import macros in the content recursively + * Processes all runtime-import macros in the content recursively. + * Also handles body-level {{#import}} directives by normalizing them to + * {{#runtime-import}} before processing, so that both the frontmatter `imports:` + * style and the inline `{{#import filepath}}` style resolve correctly at runtime. * @param {string} content - The markdown content containing runtime-import macros * @param {string} workspaceDir - The GITHUB_WORKSPACE directory path * @param {Set} [importedFiles] - Set of already imported files (for recursion tracking) @@ -915,6 +918,23 @@ async function processRuntimeImport(filepathOrUrl, optional, workspaceDir, start * @returns {Promise} - Content with runtime-import macros replaced by file/URL contents */ async function processRuntimeImports(content, workspaceDir, importedFiles = new Set(), importCache = new Map(), importStack = []) { + // Normalize body-level {{#import}} directives to {{#runtime-import}} equivalents. + // {{#import}} is deprecated — use {{#runtime-import}} or the 'imports:' frontmatter field instead. 
+ // Both colon and no-colon syntax are supported for backward compatibility: + // {{#import filepath}} {{#import? filepath}} + // {{#import: filepath}} {{#import?: filepath}} + // Use [^\{\}] to avoid matching across brace boundaries (e.g. nested expressions). + const bodyImportRe = /\{\{#import(\?)?(?:[ \t]+|[ \t]*:[ \t]*)([^\{\}]+?)\}\}/g; + let bodyImportCount = 0; + content = content.replace(bodyImportRe, (_, optional, importPath) => { + bodyImportCount++; + const trimmedPath = importPath.trim(); + return `{{#runtime-import${optional || ""} ${trimmedPath}}}`; + }); + if (bodyImportCount > 0) { + core.warning(`Deprecated: ${bodyImportCount} {{#import}} directive(s) found. ` + `Use {{#runtime-import}} or the 'imports:' frontmatter field instead.`); + } + // Pattern to match {{#runtime-import filepath}} or {{#runtime-import? filepath}} // Captures: optional flag (?), whitespace, filepath/URL (which may include :startline-endline) const pattern = /\{\{#runtime-import(\?)?[ \t]+([^\}]+?)\}\}/g; @@ -983,9 +1003,11 @@ async function processRuntimeImports(content, workspaceDir, importedFiles = new // Import the file content let importedContent = await processRuntimeImport(filepathOrUrl, optional, workspaceDir, startLine, endLine); - // Recursively process any runtime-import macros in the imported content - if (importedContent && /\{\{#runtime-import/.test(importedContent)) { - core.info(`Recursively processing runtime-imports in ${filepathWithRange}`); + // Recursively process any runtime-import or body-level {{#import}} macros in the + // imported content. The recursive call to processRuntimeImports will normalize + // any {{#import}} directives before processing them. 
+ if (importedContent && /\{\{#(?:runtime-import|import)/.test(importedContent)) { + core.info(`Recursively processing imports in ${filepathWithRange}`); importedContent = await processRuntimeImports(importedContent, workspaceDir, importedFiles, importCache, [...importStack]); } diff --git a/actions/setup/js/runtime_import.test.cjs b/actions/setup/js/runtime_import.test.cjs index 5ecf330eba5..ce34626f2f7 100644 --- a/actions/setup/js/runtime_import.test.cjs +++ b/actions/setup/js/runtime_import.test.cjs @@ -616,6 +616,62 @@ describe("runtime_import", () => { expect(result).toBe("Content"); })); }), + describe("body-level {{#import}} directives (deprecated)", () => { + (it("should resolve {{#import filepath}} (no colon) as runtime-import and emit deprecation warning", async () => { + fs.writeFileSync(path.join(workflowsDir, "import.md"), "Imported content"); + const result = await processRuntimeImports("Before\n{{#import import.md}}\nAfter", tempDir); + expect(result).toBe("Before\nImported content\nAfter"); + expect(core.warning).toHaveBeenCalledWith(expect.stringContaining("Deprecated")); + }), + it("should resolve {{#import? filepath}} optional variant", async () => { + fs.writeFileSync(path.join(workflowsDir, "import.md"), "Optional content"); + const result = await processRuntimeImports("Before\n{{#import? 
import.md}}\nAfter", tempDir); + expect(result).toBe("Before\nOptional content\nAfter"); + }), + it("should resolve {{#import: filepath}} colon syntax", async () => { + fs.writeFileSync(path.join(workflowsDir, "import.md"), "Colon content"); + const result = await processRuntimeImports("Before\n{{#import: import.md}}\nAfter", tempDir); + expect(result).toBe("Before\nColon content\nAfter"); + }), + it("should resolve {{#import?: filepath}} optional colon syntax", async () => { + fs.writeFileSync(path.join(workflowsDir, "import.md"), "Optional colon content"); + const result = await processRuntimeImports("Before\n{{#import?: import.md}}\nAfter", tempDir); + expect(result).toBe("Before\nOptional colon content\nAfter"); + }), + it("should return empty string for missing optional {{#import?}} file", async () => { + const result = await processRuntimeImports("Before\n{{#import? missing.md}}\nAfter", tempDir); + expect(result).toBe("Before\n\nAfter"); + expect(core.warning).toHaveBeenCalled(); + }), + it("should throw for missing required {{#import}} file", async () => { + await expect(processRuntimeImports("Before\n{{#import missing.md}}\nAfter", tempDir)).rejects.toThrow(); + }), + it("should resolve {{#import}} inside a file read via {{#runtime-import}} (nested fix)", async () => { + // This is the main bug scenario: a workflow file is loaded via {{#runtime-import}}, + // and its body contains {{#import shared/broken.md}} which must be resolved. 
+ const sharedDir = path.join(workflowsDir, "shared"); + fs.mkdirSync(sharedDir, { recursive: true }); + fs.writeFileSync(path.join(sharedDir, "broken.md"), "Shared instructions"); + // The workflow body contains a body-level {{#import}} directive + fs.writeFileSync(path.join(workflowsDir, "my-workflow.md"), "# My Workflow\n\n{{#import shared/broken.md}}\n\nDo the work."); + // Simulate the compiled prompt: {{#runtime-import .github/workflows/my-workflow.md}} + // (the .github/ prefix is stripped by processRuntimeImport and resolved against the .github folder) + const result = await processRuntimeImports("{{#runtime-import .github/workflows/my-workflow.md}}", tempDir); + expect(result).toContain("Shared instructions"); + expect(result).not.toContain("{{#import"); + }), + it("should resolve multiple {{#import}} directives in one file", async () => { + fs.writeFileSync(path.join(workflowsDir, "a.md"), "Content A"); + fs.writeFileSync(path.join(workflowsDir, "b.md"), "Content B"); + const result = await processRuntimeImports("Before\n{{#import a.md}}\nMiddle\n{{#import b.md}}\nAfter", tempDir); + expect(result).toBe("Before\nContent A\nMiddle\nContent B\nAfter"); + }), + it("should not treat {{#importantthing}} (no space/colon) as an import directive", async () => { + const content = "Text {{#importantthing}} more text"; + const result = await processRuntimeImports(content, tempDir); + expect(result).toBe(content); + })); + }), describe("Edge Cases", () => { (it("should handle very large files", async () => { const largeContent = "x".repeat(1e5); @@ -1027,8 +1083,8 @@ describe("runtime_import", () => { const result = await processRuntimeImports("{{#runtime-import main.md}}", tempDir); expect(result).toBe("Main before\nLevel 1 before\nLevel 2 content\nLevel 1 after\nMain after"); - expect(core.info).toHaveBeenCalledWith(expect.stringContaining("Recursively processing runtime-imports in main.md")); - 
expect(core.info).toHaveBeenCalledWith(expect.stringContaining("Recursively processing runtime-imports in level1.md")); + expect(core.info).toHaveBeenCalledWith(expect.stringContaining("Recursively processing imports in main.md")); + expect(core.info).toHaveBeenCalledWith(expect.stringContaining("Recursively processing imports in level1.md")); }); it("should handle multiple recursive imports at different levels", async () => { diff --git a/docs/adr/28366-two-phase-resolution-of-body-level-import-directives.md b/docs/adr/28366-two-phase-resolution-of-body-level-import-directives.md new file mode 100644 index 00000000000..fc238ac6827 --- /dev/null +++ b/docs/adr/28366-two-phase-resolution-of-body-level-import-directives.md @@ -0,0 +1,78 @@ +# ADR-28366: Two-Phase Resolution of Body-Level `{{#import}}` Directives + +**Date**: 2026-04-25 +**Status**: Draft +**Deciders**: Unknown (copilot-swe-agent, pelikhan) + +--- + +## Part 1 — Narrative (Human-Friendly) + +### Context + +Workflow markdown files support two ways to pull in shared content: frontmatter `imports:` entries (resolved at compile time) and inline `{{#import filepath}}` / `{{#runtime-import filepath}}` directives (resolved at runtime by `runtime_import.cjs`). Until this change, `{{#import}}` directives placed directly in the workflow *body* (rather than in frontmatter) were silently ignored at runtime — the agent received the raw macro string instead of the imported file's content. Additionally, the compiled lock file gave no visibility into which sibling-directory files (e.g. `.github/shared/editorial.md`) would be pulled in at runtime, making lock-file integrity checks incomplete. + +### Decision + +We will resolve body-level `{{#import}}` directives via a two-phase approach. 
At **compile time**, the Go compiler (`pkg/workflow/compiler_orchestrator_tools.go`) scans the markdown body for `{{#import:}}` directives (the colon form) and promotes them to explicit `{{#runtime-import}}` macros in the compiled lock file, giving the lock file full visibility into the import graph before runtime. At **runtime**, `runtime_import.cjs` normalises all remaining `{{#import}}` variants (colon, no-colon, optional `?` forms) to `{{#runtime-import}}` at the start of `processRuntimeImports`, ensuring any directive that was not promoted at compile time is still resolved. A deduplication `Set` in `runtime_import.cjs` prevents double-importing when both phases have emitted the same macro. + +### Alternatives Considered + +#### Alternative 1: Compile-Time Inline Expansion Only + +Expand `{{#import}}` directives fully at compile time and inline the file content directly into the compiled prompt, matching the behaviour of frontmatter `imports:` entries that carry `inputs:`. This would eliminate the runtime dependency and make the compiled artefact self-contained. It was rejected because it creates a static snapshot of imported content that goes stale when the shared file is updated without recompiling the workflow — the runtime-import model exists precisely to get fresh shared-file content on every run. + +#### Alternative 2: Runtime Normalisation Only (No Compile-Time Promotion) + +Add the `{{#import}} → {{#runtime-import}}` normalisation solely inside `runtime_import.cjs` and leave the Go compiler unchanged. This is the simpler path and fixes the agent prompt bug. It was rejected because it leaves the lock file blind to body-level imports: the `Includes:` manifest header would not list `.github/shared/editorial.md`, so lock-file content-hash checks and dependency auditing tools would miss those files. 
+ +### Consequences + +#### Positive +- Body-level `{{#import}}` directives (all four syntax variants: colon, no-colon, optional, optional-colon) are now correctly resolved and injected into the agent prompt at runtime. +- The compiled lock file's `Includes:` header now explicitly lists body-level imported files, enabling accurate lock-file integrity checks and dependency tracking. +- The `importedFiles` deduplication set in `runtime_import.cjs` prevents the same file being imported twice when both the compile-time promotion and the runtime normalisation emit the same macro. +- The repo-root-relative path helper (`findGitHubRepoRoot`) ensures that files in sibling `.github/` subdirectories are recorded with clean paths (e.g. `.github/shared/editorial.md`) rather than absolute system paths. + +#### Negative +- Two separate codepaths now handle `{{#import}}`: compile-time promotion in Go (`ExtractBodyLevelImportPaths`) and runtime normalisation in JavaScript (`processRuntimeImports`). Keeping these in sync when the directive syntax evolves requires changes in both languages. +- The compile-time phase only promotes the `{{#import:}}` colon form (as used by `ParseImportDirective`). The no-colon `{{#import filepath}}` form is handled only at runtime, creating a syntax asymmetry that is not immediately obvious to workflow authors. +- Adding `ExtractBodyLevelImportPaths` introduces a second scan of the markdown body at compile time (the first scan is `ExpandIncludesWithManifest`), adding minor overhead for workflows with large bodies. + +#### Neutral +- All daily agentic workflow `.lock.yml` files are regenerated to include the new `{{#runtime-import .github/shared/editorial.md}}` macro — this is a mechanical, non-semantic change to the compiled artefacts. +- The new `findGitHubRepoRoot` helper is a pure function with no side effects; it is already unit-tested via `TestManifestIncludePathRelativeToRepoRoot`. 
+ +--- + +## Part 2 — Normative Specification (RFC 2119) + +> The key words **MUST**, **MUST NOT**, **REQUIRED**, **SHALL**, **SHALL NOT**, **SHOULD**, **SHOULD NOT**, **RECOMMENDED**, **MAY**, and **OPTIONAL** in this section are to be interpreted as described in [RFC 2119](https://www.rfc-editor.org/rfc/rfc2119). + +### Compile-Time Promotion + +1. The Go compiler **MUST** call `ExtractBodyLevelImportPaths` on the markdown body after frontmatter has been stripped and before generating the lock file's `{{#runtime-import}}` macro list. +2. Each path returned by `ExtractBodyLevelImportPaths` **MUST** be emitted as an explicit required `{{#runtime-import }}` macro in the compiled lock file, appearing before the main workflow-file macro; optional (`?`) directives **MUST NOT** be promoted at compile time — promoting them as required macros would fail when the file is missing — and are instead resolved at runtime with skip-if-missing semantics. +3. `ExtractBodyLevelImportPaths` **MUST** convert relative paths to workspace-root-relative form (e.g. `.github/workflows/shared/tools.md`) using `findGitHubRepoRoot` before returning them. +4. `ExtractBodyLevelImportPaths` **MUST NOT** process legacy `@include` or `@import` directives; those are handled by `ExpandIncludesWithManifest`. +5. The `Includes:` manifest header in the lock file **MUST** list every file referenced by a body-level `{{#import:}}` directive using a repo-root-relative path, not an absolute system path. + +### Runtime Normalisation + +1. `processRuntimeImports` **MUST** normalise all `{{#import}}` directive variants (with and without colon separator, with and without `?` optional marker) to their `{{#runtime-import}}` equivalents before the main macro processing loop executes. +2. The normalisation regex **MUST** match `{{#import filepath}}`, `{{#import: filepath}}`, `{{#import? filepath}}`, and `{{#import?: filepath}}`, but **MUST NOT** match tokens such as `{{#importantthing}}` that lack a whitespace or colon separator after `import`. +3. 
`processRuntimeImports` **MUST** apply deduplication via an `importedFiles` Set so that a file promoted at compile time and also referenced via a body-level `{{#import}}` at runtime is imported exactly once. +4. Recursive calls to `processRuntimeImports` on imported file content **MUST** pass the same `importedFiles` Set and `importCache` Map to propagate deduplication state. + +### Path Resolution + +1. `findGitHubRepoRoot` **MUST** walk up the directory tree from `baseDir` until it finds a directory named `.github` and return its parent, or return an empty string if no `.github` ancestor is found. +2. When building the `Includes:` manifest, the compiler **MUST** prefer repo-root-relative paths over `baseDir`-relative paths for files located in sibling `.github/` subdirectories (e.g. `.github/shared/`), falling back to absolute paths only when neither relative form is computable without a `..` prefix. + +### Conformance + +An implementation is considered conformant with this ADR if it satisfies all **MUST** and **MUST NOT** requirements above. Failure to meet any **MUST** or **MUST NOT** requirement constitutes non-conformance. + +--- + +*This is a DRAFT ADR generated by the [Design Decision Gate](https://github.com/github/gh-aw/actions/runs/24919963187) workflow. The PR author must review, complete, and finalize this document before the PR can merge.* diff --git a/docs/src/content/docs/reference/imports.md b/docs/src/content/docs/reference/imports.md index c02d6c7379a..1d0e0613606 100644 --- a/docs/src/content/docs/reference/imports.md +++ b/docs/src/content/docs/reference/imports.md @@ -51,20 +51,36 @@ An imported workflow can only be imported once per workflow. New 'with': {"languages":["typescript"]} ``` -In markdown, use the special `{{#import ...}}` directive: +In markdown, use `{{#runtime-import filepath}}` to inject the content of another file directly into the body at that position. 
This is useful for sharing reusable prompt snippets, tone instructions, or reference material across workflows. ```aw wrap --- -... +on: schedule +engine: copilot --- -# Your Workflow +{{#runtime-import .github/shared/editorial.md}} -Workflow instructions here... +# Daily Report -{{#import shared/common-tools.md}} +Generate the daily report. ``` +Use `{{#runtime-import? filepath}}` to silently skip a missing file instead of failing: + +```aw wrap +{{#runtime-import .github/shared/editorial.md}} # required — fails if missing +{{#runtime-import? .github/shared/optional.md}} # optional — skipped if missing +``` + +Paths are resolved within the `.github` folder. You can specify paths with or without the `.github/` prefix — both `.github/shared/editorial.md` and `shared/editorial.md` refer to the same file. See [Runtime Imports](/gh-aw/reference/templating/#runtime-imports) for URLs, line ranges, and security details. + +> [!NOTE] +> `{{#runtime-import}}` injects **content** (markdown text) at the insertion point. It does not merge frontmatter configuration. To share tools, permissions, or MCP servers across workflows, use the `imports:` frontmatter field instead. + +> [!WARNING] +> The `{{#import filepath}}` body-level directive is **deprecated**. Replace it with `{{#runtime-import filepath}}`. The old syntax still works at runtime (it normalizes to `{{#runtime-import}}` automatically) but emits deprecation warnings at both compile time and runtime. + ## Shared Workflow Components Files without an `on` field are shared workflow components — validated but not compiled into GitHub Actions, only imported by other workflows. The compiler skips them with an informative message. @@ -251,7 +267,18 @@ imports: - shared/tools.md#WebSearch ``` -Use the `{{#import? ...}}` syntax to mark an import as optional, which skips missing files silently instead of failing compilation. 
+Use `?` after `import` to mark an import as optional — missing files are skipped silently instead of failing compilation. This applies to both frontmatter imports and body-level directives: + +```yaml +# Frontmatter — optional +imports: + - shared/optional-tools.md? +``` + +```aw wrap +# Body — optional content injection +{{#runtime-import? .github/shared/optional.md}} +``` ## Remote Repository Imports diff --git a/docs/src/content/docs/reference/templating.md b/docs/src/content/docs/reference/templating.md index cfb9b2fbfcd..c09faa15b83 100644 --- a/docs/src/content/docs/reference/templating.md +++ b/docs/src/content/docs/reference/templating.md @@ -182,10 +182,13 @@ Runtime imports are processed before other substitutions: - **`.github` folder only:** File paths are restricted to `.github` folder for security - **No authentication:** URL fetching doesn't support private URLs with tokens -- **No recursion:** Imported content cannot contain additional runtime imports - **Per-run cache:** URL cache doesn't persist across workflow runs - **Line numbers:** Refer to raw file content before front matter removal +### Deprecated `{{#import}}` + +`{{#import filepath}}` (without `runtime-`) is a **deprecated** body-level shorthand. It normalizes to `{{#runtime-import filepath}}` at runtime for backward compatibility, but emits deprecation warnings at both compile time and runtime. Use `{{#runtime-import}}` directly for all new workflows. See [Imports](/gh-aw/reference/imports/) for details. 
+ ### Error Handling | Error | Message | diff --git a/pkg/parser/import_directive.go b/pkg/parser/import_directive.go index c365157e607..349ff51ae1e 100644 --- a/pkg/parser/import_directive.go +++ b/pkg/parser/import_directive.go @@ -9,11 +9,11 @@ import ( var importDirectiveLog = logger.New("parser:import_directive") -// IncludeDirectivePattern matches @include, @import (deprecated), or {{#import (new) directives +// IncludeDirectivePattern matches @include, @import (deprecated), or {{#import (deprecated) directives // The colon after #import is optional and ignored if present var IncludeDirectivePattern = regexp.MustCompile(`^(?:@(?:include|import)(\?)?\s+(.+)|{{#import(\?)?\s*:?\s*(.+?)\s*}})$`) -// LegacyIncludeDirectivePattern matches only the deprecated @include and @import directives +// LegacyIncludeDirectivePattern matches the deprecated @include, @import, and {{#import}} directives var LegacyIncludeDirectivePattern = regexp.MustCompile(`^@(?:include|import)(\?)?\s+(.+)$`) // ImportDirectiveMatch holds the parsed components of an import directive @@ -39,21 +39,23 @@ func ParseImportDirective(line string) *ImportDirectiveMatch { return nil } - // Determine legacy vs new syntax from the captured groups of the first match. - // Group 2 (path for @include/@import) is non-empty iff the legacy alternative matched. - isLegacy := matches[2] != "" - importDirectiveLog.Printf("Parsing import directive: legacy=%t, line=%s", isLegacy, trimmedLine) + // All matched forms are now deprecated/legacy. + // Group 2 non-empty → @-style (@include/@import), Group 4 non-empty → {{#import}} style. + // Both are legacy; the distinction is kept for message formatting. 
+ atStyleLegacy := matches[2] != "" + isLegacy := true // every form matched by IncludeDirectivePattern is deprecated + importDirectiveLog.Printf("Parsing import directive: legacy=%t, atStyle=%t, line=%s", isLegacy, atStyleLegacy, trimmedLine) var isOptional bool var path string - if isLegacy { - // Legacy syntax: @include? path or @import? path + if atStyleLegacy { + // @-style legacy syntax: @include? path or @import? path // Group 1: optional marker, Group 2: path isOptional = matches[1] == "?" path = strings.TrimSpace(matches[2]) } else { - // New syntax: {{#import?: path}} or {{#import: path}} (colon is optional) + // {{#import}} deprecated syntax: {{#import?: path}} or {{#import: path}} (colon is optional) // Group 3: optional marker, Group 4: path isOptional = matches[3] == "?" path = strings.TrimSpace(matches[4]) diff --git a/pkg/parser/import_syntax_test.go b/pkg/parser/import_syntax_test.go index ae8c60d2422..ee22b103bdf 100644 --- a/pkg/parser/import_syntax_test.go +++ b/pkg/parser/import_syntax_test.go @@ -16,71 +16,71 @@ func TestParseImportDirective(t *testing.T) { wantOptional bool wantLegacy bool }{ - // New syntax tests + // Deprecated {{#import}} syntax tests (all forms are now legacy) { - name: "new syntax - basic import", + name: "deprecated - basic import", input: "{{#import: shared/tools.md}}", wantMatch: true, wantPath: "shared/tools.md", wantOptional: false, - wantLegacy: false, + wantLegacy: true, }, { - name: "new syntax - optional import", + name: "deprecated - optional import", input: "{{#import?: shared/tools.md}}", wantMatch: true, wantPath: "shared/tools.md", wantOptional: true, - wantLegacy: false, + wantLegacy: true, }, { - name: "new syntax - with extra spaces", + name: "deprecated - with extra spaces", input: "{{#import: shared/tools.md }}", wantMatch: true, wantPath: "shared/tools.md", wantOptional: false, - wantLegacy: false, + wantLegacy: true, }, { - name: "new syntax - with section", + name: "deprecated - with section", 
input: "{{#import: shared/tools.md#Security}}", wantMatch: true, wantPath: "shared/tools.md#Security", wantOptional: false, - wantLegacy: false, + wantLegacy: true, }, { - name: "new syntax - optional with section", + name: "deprecated - optional with section", input: "{{#import?: shared/tools.md#Security}}", wantMatch: true, wantPath: "shared/tools.md#Security", wantOptional: true, - wantLegacy: false, + wantLegacy: true, }, - // New syntax without colon tests + // Deprecated {{#import}} syntax without colon tests { - name: "new syntax - basic import without colon", + name: "deprecated - basic import without colon", input: "{{#import shared/tools.md}}", wantMatch: true, wantPath: "shared/tools.md", wantOptional: false, - wantLegacy: false, + wantLegacy: true, }, { - name: "new syntax - optional import without colon", + name: "deprecated - optional import without colon", input: "{{#import? shared/tools.md}}", wantMatch: true, wantPath: "shared/tools.md", wantOptional: true, - wantLegacy: false, + wantLegacy: true, }, { - name: "new syntax - with section without colon", + name: "deprecated - with section without colon", input: "{{#import shared/tools.md#Security}}", wantMatch: true, wantPath: "shared/tools.md#Security", wantOptional: false, - wantLegacy: false, + wantLegacy: true, }, // Legacy syntax tests { diff --git a/pkg/parser/include_expander.go b/pkg/parser/include_expander.go index 0c93c03b6a1..76a8d9b2932 100644 --- a/pkg/parser/include_expander.go +++ b/pkg/parser/include_expander.go @@ -5,6 +5,7 @@ import ( "bytes" "fmt" "path/filepath" + "regexp" "strings" "github.com/github/gh-aw/pkg/logger" @@ -48,20 +49,37 @@ func ExpandIncludesWithManifest(content, baseDir string, extractTools bool) (str currentContent = processedContent } - // Convert visited map to slice of file paths (make them relative to baseDir if possible) + // Find the repo root by walking up from baseDir to the parent of the .github folder. + // This allows files outside baseDir (e.g. 
.github/shared/ when baseDir is .github/workflows/) + // to be recorded with a clean repo-root-relative path instead of an absolute path. + repoRoot := findGitHubRepoRoot(baseDir) + + // Convert visited map to slice of file paths (make them relative to baseDir if possible, + // falling back to repo-root-relative, and only as a last resort using the absolute path) var includedFiles []string for filePath := range visited { - // Try to make path relative to baseDir for cleaner output + // First: try to make path relative to baseDir for cleaner output relPath, err := filepath.Rel(baseDir, filePath) if err == nil && !strings.HasPrefix(relPath, "..") { // Normalize to Unix paths (forward slashes) for cross-platform compatibility relPath = filepath.ToSlash(relPath) includedFiles = append(includedFiles, relPath) - } else { - // Normalize to Unix paths (forward slashes) for cross-platform compatibility - filePath = filepath.ToSlash(filePath) - includedFiles = append(includedFiles, filePath) + continue } + + // Second: try repo-root-relative path to avoid absolute paths for files in sibling + // directories (e.g. 
.github/shared/ relative to .github/workflows/) + if repoRoot != "" { + repoRelPath, repoRelErr := filepath.Rel(repoRoot, filePath) + if repoRelErr == nil && !strings.HasPrefix(repoRelPath, "..") { + repoRelPath = filepath.ToSlash(repoRelPath) + includedFiles = append(includedFiles, repoRelPath) + continue + } + } + + // Fallback: use the absolute path (should be rare) + includedFiles = append(includedFiles, filepath.ToSlash(filePath)) } includeExpanderLog.Printf("Include expansion complete: visited_files=%d", len(includedFiles)) @@ -74,7 +92,103 @@ func ExpandIncludesWithManifest(content, baseDir string, extractTools bool) (str return currentContent, includedFiles, nil } -// ExpandIncludesForEngines recursively expands @include and @import directives to extract engine configurations +// findGitHubRepoRoot walks up the directory tree from dir to find the parent of the +// first ".github" directory encountered. It is used to compute repo-root-relative +// paths for files that live in sibling .github/ subdirectories (e.g. .github/shared/) +// so that the lock file Includes header shows ".github/shared/editorial.md" rather +// than an absolute system path. +// +// Returns the repo root directory (the parent of ".github"), or "" if no ".github" +// ancestor directory is found before reaching the filesystem root. +func findGitHubRepoRoot(dir string) string { + current := filepath.Clean(dir) + for { + if filepath.Base(current) == ".github" { + return filepath.Dir(current) + } + parent := filepath.Dir(current) + if parent == current { + // Reached filesystem root + return "" + } + current = parent + } +} + +// BodyLevelImport represents a single {{#runtime-import}} or deprecated {{#import}} directive +// found in a markdown body, with the path resolved to be workspace-root-relative. +type BodyLevelImport struct { + Path string // workspace-root-relative path for the {{#runtime-import}} macro + Optional bool // true when the original directive used the ? 
form +} + +// bodyLevelRuntimeImportRe matches {{#runtime-import}} and {{#runtime-import?}} directives +// in a single line of markdown (same pattern as runtime_import.cjs uses at runtime). +var bodyLevelRuntimeImportRe = regexp.MustCompile(`^\{\{#runtime-import(\?)?[ \t]+([^\}]+?)\}\}$`) + +// ExtractBodyLevelImportPaths scans the markdown body (content is the body after frontmatter +// has been stripped) for {{#runtime-import}} directives and returns them as BodyLevelImport entries +// whose Path fields are ready to use in explicit {{#runtime-import}} macros in the compiled lock file. +// +// Relative paths (e.g. "shared/tools.md") are converted to workspace-root-relative form +// (e.g. ".github/workflows/shared/tools.md") using baseDir and the repo root. +// Paths that already start with ".github/" are kept as-is. +// Deprecated {{#import}} and legacy @include / @import directives are ignored; +// they are handled (with deprecation warnings) by include_processor.go. +func ExtractBodyLevelImportPaths(content, baseDir string) []BodyLevelImport { + repoRoot := findGitHubRepoRoot(baseDir) + + var results []BodyLevelImport + scanner := bufio.NewScanner(strings.NewReader(content)) + for scanner.Scan() { + line := strings.TrimSpace(scanner.Text()) + + // Match {{#runtime-import}} directives only. + m := bodyLevelRuntimeImportRe.FindStringSubmatch(line) + if m == nil { + continue + } + optional := m[1] == "?" + + // Skip optional directives — they are handled with proper semantics at runtime + // when runtime_import.cjs processes the workflow body. Promoting an optional + // directive as a required macro would cause failures if the file is missing. + if optional { + continue + } + importPath := strings.TrimSpace(m[2]) + + // Strip section reference (e.g. 
"file.md#Section" → "file.md") + if idx := strings.Index(importPath, "#"); idx >= 0 { + importPath = importPath[:idx] + } + importPath = strings.TrimSpace(importPath) + + // Skip URLs — these are fetched at runtime and don't need promotion. + if strings.HasPrefix(importPath, "http://") || strings.HasPrefix(importPath, "https://") { + continue + } + + // Convert relative paths to workspace-root-relative. + // Paths already starting with ".github/" are workspace-root-relative. + // Absolute paths are used as-is. + if !strings.HasPrefix(importPath, ".github/") && !filepath.IsAbs(importPath) { + if repoRoot != "" { + fullPath := filepath.Join(baseDir, importPath) + if rel, err := filepath.Rel(repoRoot, fullPath); err == nil && !strings.HasPrefix(rel, "..") { + importPath = rel + } + } + } + + results = append(results, BodyLevelImport{ + Path: filepath.ToSlash(importPath), + Optional: false, // optional directives are skipped above; only required imports are promoted + }) + } + return results +} + func ExpandIncludesForEngines(content, baseDir string) ([]string, error) { includeExpanderLog.Printf("Expanding includes for engines: baseDir=%s", baseDir) return expandIncludesForField(content, baseDir, func(c string) (string, error) { diff --git a/pkg/parser/include_processor.go b/pkg/parser/include_processor.go index 62bec99d5ae..10d9d881ee3 100644 --- a/pkg/parser/include_processor.go +++ b/pkg/parser/include_processor.go @@ -38,10 +38,23 @@ func processIncludesWithVisited(content, baseDir string, extractTools bool, visi if directive.IsOptional { optionalMarker = "?" } - fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Deprecated syntax: %q. Use {{#import%s %s}} instead.", + // Choose the recommended replacement based on which deprecated form was used. + // {{#import}} directives → recommend {{#runtime-import}} or imports: frontmatter. 
+ // @include / @import directives → also recommend {{#runtime-import}} directly + // ({{#import}} is itself deprecated, so it is no longer suggested as a replacement). + var suggestion string + if strings.HasPrefix(strings.TrimSpace(directive.Original), "{{") { + suggestion = fmt.Sprintf("Use {{#runtime-import%s %s}} for content injection or the 'imports:' frontmatter field for configuration merging.", + optionalMarker, + directive.Path) + } else { + suggestion = fmt.Sprintf("Use {{#runtime-import%s %s}} instead.", + optionalMarker, + directive.Path) + } + fmt.Fprintln(os.Stderr, console.FormatWarningMessage(fmt.Sprintf("Deprecated syntax: %q. %s", + directive.Original, - optionalMarker, - directive.Path))) + suggestion))) } isOptional := directive.IsOptional diff --git a/pkg/workflow/compiler_orchestrator_tools.go b/pkg/workflow/compiler_orchestrator_tools.go index 81bcd1f86da..c2703e7e81b 100644 --- a/pkg/workflow/compiler_orchestrator_tools.go +++ b/pkg/workflow/compiler_orchestrator_tools.go @@ -240,6 +240,20 @@ func (c *Compiler) processToolsAndMarkdown(result *parser.FrontmatterResult, cle orchestratorToolsLog.Printf("Found %d import paths for runtime-import macros", len(importPaths)) } + // Extract body-level {{#runtime-import}} directives and append them to importPaths so they + // appear as explicit macros in the compiled lock file (before the main workflow-file macro). + // This makes imported files visible in the lock file at a glance and ensures they are + // fetched before the main workflow body is processed. + // At runtime, runtime_import.cjs deduplicates via an importedFiles Set, so files listed + // here won't be imported a second time when the main workflow file body is processed. 
+ bodyImports := parser.ExtractBodyLevelImportPaths(result.Markdown, markdownDir) + if len(bodyImports) > 0 { + orchestratorToolsLog.Printf("Found %d body-level {{#runtime-import}} directive(s) to promote to lock-file macros", len(bodyImports)) + for _, bi := range bodyImports { + importPaths = append(importPaths, bi.Path) + } + } + // Handle imported markdown from frontmatter imports field // Only imports WITH inputs will have markdown content (for compile-time substitution) var importedMarkdown string diff --git a/pkg/workflow/manifest_test.go b/pkg/workflow/manifest_test.go index a6929b9dd38..161465a8f34 100644 --- a/pkg/workflow/manifest_test.go +++ b/pkg/workflow/manifest_test.go @@ -311,3 +311,169 @@ Handle the issue.` } } } + +// TestManifestIncludePathRelativeToRepoRoot verifies that included files in sibling +// .github/ subdirectories (e.g. .github/shared/ when the workflow is in .github/workflows/) +// are recorded with a repo-root-relative path instead of an absolute path. +func TestManifestIncludePathRelativeToRepoRoot(t *testing.T) { + tmpDir := testutil.TempDir(t, "manifest-sibling-test") + + // Create .github/workflows/ and .github/shared/ structure + workflowsDir := filepath.Join(tmpDir, ".github", "workflows") + sharedDir := filepath.Join(tmpDir, ".github", "shared") + if err := os.MkdirAll(workflowsDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(sharedDir, 0755); err != nil { + t.Fatal(err) + } + + // Create an include file in .github/shared/ (sibling of .github/workflows/) + editorialFile := filepath.Join(sharedDir, "editorial.md") + editorialContent := `## Writing Style + +Write in a newspaper editorial tone.` + if err := os.WriteFile(editorialFile, []byte(editorialContent), 0644); err != nil { + t.Fatal(err) + } + + // Create workflow that includes the file via .github/-prefixed path + workflowContent := `--- +on: issues +permissions: + contents: read + issues: read + pull-requests: read +engine: copilot +--- + +# Test 
Workflow + +{{#import: .github/shared/editorial.md}} + +Handle the issue.` + + compiler := NewCompiler() + testFile := filepath.Join(workflowsDir, "test-workflow.md") + if err := os.WriteFile(testFile, []byte(workflowContent), 0644); err != nil { + t.Fatal(err) + } + + if err := compiler.CompileWorkflow(testFile); err != nil { + t.Fatalf("Unexpected error compiling workflow: %v", err) + } + + lockFile := stringutil.MarkdownToLockFile(testFile) + content, err := os.ReadFile(lockFile) + if err != nil { + t.Fatalf("Failed to read generated lock file: %v", err) + } + + lockContent := string(content) + + // The Includes section should show .github/shared/editorial.md (relative to repo root), + // NOT an absolute path like /tmp/.../.../.github/shared/editorial.md + expectedLine := "# - .github/shared/editorial.md" + if !strings.Contains(lockContent, expectedLine) { + t.Errorf("Expected relative include path %q in lock file, but not found.\nLock file content excerpt:\n%s", + expectedLine, extractLockFileHeader(lockContent)) + } + + // Verify no absolute path appears in the Includes section + for line := range strings.SplitSeq(lockContent, "\n") { + if strings.HasPrefix(line, "# - /") { + t.Errorf("Found absolute path in lock file Includes section: %q", line) + } + } +} + +// extractLockFileHeader returns the first 50 lines of a lock file for test diagnostics. +func extractLockFileHeader(content string) string { + lines := strings.Split(content, "\n") + if len(lines) > 50 { + lines = lines[:50] + } + return strings.Join(lines, "\n") +} + +// TestBodyLevelRuntimeImportPromotedToMacro verifies that a body-level {{#runtime-import}} directive +// in the workflow markdown generates an explicit {{#runtime-import}} macro in the compiled lock-file prompt, +// making the imported content visible without having to chase the workflow file at runtime. 
+func TestBodyLevelRuntimeImportPromotedToMacro(t *testing.T) { + tmpDir := testutil.TempDir(t, "body-import-test") + + // Create .github/workflows/ and .github/shared/ structure + workflowsDir := filepath.Join(tmpDir, ".github", "workflows") + sharedDir := filepath.Join(tmpDir, ".github", "shared") + if err := os.MkdirAll(workflowsDir, 0755); err != nil { + t.Fatal(err) + } + if err := os.MkdirAll(sharedDir, 0755); err != nil { + t.Fatal(err) + } + + // Create the shared editorial file + editorialFile := filepath.Join(sharedDir, "editorial.md") + if err := os.WriteFile(editorialFile, []byte("## Writing Style\n\nNewspaper editorial tone.\n"), 0644); err != nil { + t.Fatal(err) + } + + // Workflow uses {{#runtime-import}} directly (preferred form) + workflowContent := `--- +on: + schedule: + - cron: "0 9 * * *" +permissions: + contents: read + issues: read +engine: copilot +safe-outputs: + create-issue: {} +--- + +{{#runtime-import .github/shared/editorial.md}} + +# Daily Report + +Generate the daily report.` + + compiler := NewCompiler() + testFile := filepath.Join(workflowsDir, "daily-report.md") + if err := os.WriteFile(testFile, []byte(workflowContent), 0644); err != nil { + t.Fatal(err) + } + + if err := compiler.CompileWorkflow(testFile); err != nil { + t.Fatalf("Unexpected error compiling workflow: %v", err) + } + + lockFile := stringutil.MarkdownToLockFile(testFile) + content, err := os.ReadFile(lockFile) + if err != nil { + t.Fatalf("Failed to read generated lock file: %v", err) + } + + lockContent := string(content) + + // The compiled prompt must contain an explicit {{#runtime-import .github/shared/editorial.md}} + // BEFORE the {{#runtime-import ...daily-report.md}} line so that the editorial content + // is visible in the lock file and imported before the main workflow body is processed. 
+ expectedEditorialMacro := "{{#runtime-import .github/shared/editorial.md}}" + if !strings.Contains(lockContent, expectedEditorialMacro) { + t.Errorf("Expected %q in compiled lock file prompt, but not found.\nContent excerpt:\n%s", + expectedEditorialMacro, extractLockFileHeader(lockContent)) + } + + // The main workflow file must still be imported after the editorial import + expectedMainMacro := "{{#runtime-import .github/workflows/daily-report.md}}" + if !strings.Contains(lockContent, expectedMainMacro) { + t.Errorf("Expected %q in compiled lock file prompt, but not found", expectedMainMacro) + } + + // editorial macro must come before the main workflow macro + editorialIdx := strings.Index(lockContent, expectedEditorialMacro) + mainIdx := strings.Index(lockContent, expectedMainMacro) + if editorialIdx >= mainIdx { + t.Errorf("Expected editorial import (%d) to appear before main workflow import (%d)", editorialIdx, mainIdx) + } +}