From 4734c05391d3c991e3ccdcd60e6861523c70301c Mon Sep 17 00:00:00 2001 From: RubyJ Date: Wed, 11 Mar 2026 17:29:27 -0400 Subject: [PATCH] feat: emit --openai-api-target / --anthropic-api-target from OPENAI/ANTHROPIC_BASE_URL When engine.env sets OPENAI_BASE_URL or ANTHROPIC_BASE_URL pointing to a custom endpoint, extract the hostname and pass it as --openai-api-target / --anthropic-api-target to the AWF command. This allows the API proxy sidecar to forward requests to internal LLM routers or other custom OpenAI/Anthropic-compatible endpoints, rather than the hardcoded api.openai.com / api.anthropic.com defaults. Depends on gh-aw-firewall PR #1247 which adds the --openai-api-target and --anthropic-api-target flags to AWF. Closes: https://github.com/github/gh-aw/issues/20590 --- pkg/workflow/awf_helpers.go | 43 +++++++ pkg/workflow/enable_api_proxy_test.go | 154 ++++++++++++++++++++++++++ 2 files changed, 197 insertions(+) diff --git a/pkg/workflow/awf_helpers.go b/pkg/workflow/awf_helpers.go index 0de7049a1d6..13f673ec944 100644 --- a/pkg/workflow/awf_helpers.go +++ b/pkg/workflow/awf_helpers.go @@ -24,6 +24,7 @@ package workflow import ( "fmt" + "net/url" "sort" "strings" @@ -190,6 +191,26 @@ func BuildAWFArgs(config AWFCommandConfig) []string { awfArgs = append(awfArgs, "--enable-api-proxy") awfHelpersLog.Print("Added --enable-api-proxy for LLM API proxying") + // If engine.env sets OPENAI_BASE_URL, extract the hostname and pass it as + // --openai-api-target so the API proxy forwards to the custom endpoint instead + // of the default api.openai.com. This allows use of internal LLM routers, + // Azure OpenAI, or any OpenAI-compatible API. 
+	engineEnv := getEngineEnvOverrides(config.WorkflowData)
+	if engineEnv != nil {
+		if openaiBaseURL, ok := engineEnv["OPENAI_BASE_URL"]; ok && openaiBaseURL != "" {
+			if host := extractHostname(openaiBaseURL); host != "" {
+				awfArgs = append(awfArgs, "--openai-api-target", host)
+				awfHelpersLog.Printf("Set --openai-api-target to %s from OPENAI_BASE_URL", host)
+			}
+		}
+		if anthropicBaseURL, ok := engineEnv["ANTHROPIC_BASE_URL"]; ok && anthropicBaseURL != "" {
+			if host := extractHostname(anthropicBaseURL); host != "" {
+				awfArgs = append(awfArgs, "--anthropic-api-target", host)
+				awfHelpersLog.Printf("Set --anthropic-api-target to %s from ANTHROPIC_BASE_URL", host)
+			}
+		}
+	}
+
 	// Add SSL Bump support for HTTPS content inspection (v0.9.0+)
 	sslBumpArgs := getSSLBumpArgs(firewallConfig)
 	awfArgs = append(awfArgs, sslBumpArgs...)
@@ -228,6 +249,37 @@ func GetAWFCommandPrefix(workflowData *WorkflowData) string {
 	return string(constants.AWFDefaultCommand)
 }
 
+// extractHostname parses a URL and returns just the hostname (without scheme, path, or port).
+// Bare hostnames without a scheme (e.g. "api.openai.com") are also accepted, since
+// url.Parse treats such input as a path rather than a host and would otherwise
+// yield an empty hostname.
+// Returns an empty string if the URL is invalid or has no host, and logs a warning for non-empty invalid inputs.
+func extractHostname(rawURL string) string {
+	parsed, err := url.Parse(rawURL)
+	if err != nil {
+		if rawURL != "" {
+			awfHelpersLog.Printf("Warning: failed to parse URL %q for hostname extraction: %v", rawURL, err)
+		}
+		return ""
+	}
+
+	hostname := parsed.Hostname()
+	if hostname == "" && parsed.Scheme == "" && !strings.Contains(rawURL, "/") {
+		// A bare host such as "api.openai.com" parses as a path component, not a
+		// host; re-parse it as a scheme-relative reference so Hostname() works.
+		if reparsed, reErr := url.Parse("//" + rawURL); reErr == nil {
+			hostname = reparsed.Hostname()
+		}
+	}
+	if hostname == "" {
+		if rawURL != "" {
+			awfHelpersLog.Printf("Warning: URL %q has no hostname component; skipping API target configuration", rawURL)
+		}
+		return ""
+	}
+
+	return hostname
+}
+
 // WrapCommandInShell wraps an engine command in a shell invocation for AWF execution.
 // This is needed because AWF requires commands to be wrapped in shell for proper execution.
// diff --git a/pkg/workflow/enable_api_proxy_test.go b/pkg/workflow/enable_api_proxy_test.go index e23329bc57d..d15dc5a336e 100644 --- a/pkg/workflow/enable_api_proxy_test.go +++ b/pkg/workflow/enable_api_proxy_test.go @@ -5,6 +5,160 @@ import ( "testing" ) +// TestExtractHostname tests the extractHostname helper function. +func TestExtractHostname(t *testing.T) { + tests := []struct { + name string + input string + expected string + }{ + {name: "full URL", input: "https://llm-router.internal.example.com/v1", expected: "llm-router.internal.example.com"}, + {name: "URL with port", input: "https://llm-router.internal.example.com:8443/v1", expected: "llm-router.internal.example.com"}, + {name: "plain hostname", input: "api.openai.com", expected: "api.openai.com"}, + {name: "empty string", input: "", expected: ""}, + {name: "URL without path", input: "https://example.com", expected: "example.com"}, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result := extractHostname(tc.input) + if result != tc.expected { + t.Errorf("extractHostname(%q) = %q, want %q", tc.input, result, tc.expected) + } + }) + } +} + +// TestAWFOpenAIApiTarget tests that --openai-api-target is emitted when OPENAI_BASE_URL is in engine.env. 
+func TestAWFOpenAIApiTarget(t *testing.T) { + t.Run("emits --openai-api-target when OPENAI_BASE_URL is set in engine.env", func(t *testing.T) { + workflowData := &WorkflowData{ + Name: "test-workflow", + EngineConfig: &EngineConfig{ + ID: "codex", + Env: map[string]string{ + "OPENAI_BASE_URL": "https://llm-router.internal.example.com/v1", + }, + }, + NetworkPermissions: &NetworkPermissions{ + Firewall: &FirewallConfig{Enabled: true}, + }, + } + + engine := NewCodexEngine() + steps := engine.GetExecutionSteps(workflowData, "test.log") + if len(steps) == 0 { + t.Fatal("Expected at least one execution step") + } + + stepContent := strings.Join(steps[0], "\n") + if !strings.Contains(stepContent, "--openai-api-target llm-router.internal.example.com") { + t.Errorf("Expected AWF command to contain '--openai-api-target llm-router.internal.example.com', got:\n%s", stepContent) + } + }) + + t.Run("does not emit --openai-api-target when OPENAI_BASE_URL is absent", func(t *testing.T) { + workflowData := &WorkflowData{ + Name: "test-workflow", + EngineConfig: &EngineConfig{ + ID: "codex", + }, + NetworkPermissions: &NetworkPermissions{ + Firewall: &FirewallConfig{Enabled: true}, + }, + } + + engine := NewCodexEngine() + steps := engine.GetExecutionSteps(workflowData, "test.log") + if len(steps) == 0 { + t.Fatal("Expected at least one execution step") + } + + stepContent := strings.Join(steps[0], "\n") + if strings.Contains(stepContent, "--openai-api-target") { + t.Errorf("Expected AWF command NOT to contain '--openai-api-target', got:\n%s", stepContent) + } + }) + + t.Run("does not emit --openai-api-target when OPENAI_BASE_URL is invalid", func(t *testing.T) { + workflowData := &WorkflowData{ + Name: "test-workflow", + EngineConfig: &EngineConfig{ + ID: "codex", + Env: map[string]string{ + "OPENAI_BASE_URL": "://invalid-url", + }, + }, + NetworkPermissions: &NetworkPermissions{ + Firewall: &FirewallConfig{Enabled: true}, + }, + } + + engine := NewCodexEngine() + steps := 
engine.GetExecutionSteps(workflowData, "test.log") + if len(steps) == 0 { + t.Fatal("Expected at least one execution step") + } + + stepContent := strings.Join(steps[0], "\n") + if strings.Contains(stepContent, "--openai-api-target") { + t.Errorf("Expected AWF command NOT to contain '--openai-api-target' for invalid URL, got:\n%s", stepContent) + } + }) +} + +// TestAWFAnthropicApiTarget tests that --anthropic-api-target is emitted when ANTHROPIC_BASE_URL is in engine.env. +func TestAWFAnthropicApiTarget(t *testing.T) { + t.Run("emits --anthropic-api-target when ANTHROPIC_BASE_URL is set in engine.env", func(t *testing.T) { + workflowData := &WorkflowData{ + Name: "test-workflow", + EngineConfig: &EngineConfig{ + ID: "claude", + Env: map[string]string{ + "ANTHROPIC_BASE_URL": "https://llm-router.internal.example.com/v1", + }, + }, + NetworkPermissions: &NetworkPermissions{ + Firewall: &FirewallConfig{Enabled: true}, + }, + } + + engine := NewClaudeEngine() + steps := engine.GetExecutionSteps(workflowData, "test.log") + if len(steps) == 0 { + t.Fatal("Expected at least one execution step") + } + + stepContent := strings.Join(steps[0], "\n") + if !strings.Contains(stepContent, "--anthropic-api-target llm-router.internal.example.com") { + t.Errorf("Expected AWF command to contain '--anthropic-api-target llm-router.internal.example.com', got:\n%s", stepContent) + } + }) + + t.Run("does not emit --anthropic-api-target when ANTHROPIC_BASE_URL is absent", func(t *testing.T) { + workflowData := &WorkflowData{ + Name: "test-workflow", + EngineConfig: &EngineConfig{ + ID: "claude", + }, + NetworkPermissions: &NetworkPermissions{ + Firewall: &FirewallConfig{Enabled: true}, + }, + } + + engine := NewClaudeEngine() + steps := engine.GetExecutionSteps(workflowData, "test.log") + if len(steps) == 0 { + t.Fatal("Expected at least one execution step") + } + + stepContent := strings.Join(steps[0], "\n") + if strings.Contains(stepContent, "--anthropic-api-target") { + 
t.Errorf("Expected AWF command NOT to contain '--anthropic-api-target', got:\n%s", stepContent) + } + }) +} + // TestEngineAWFEnableApiProxy tests that engines with LLM gateway support // include --enable-api-proxy flag in AWF commands. func TestEngineAWFEnableApiProxy(t *testing.T) {