Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 13 additions & 3 deletions .github/workflows/pr-path-guard.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ on:

jobs:
ensure-no-translator-changes:
name: ensure-no-translator-changes
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
Expand All @@ -23,6 +24,15 @@ jobs:
- name: Fail when restricted paths change
if: steps.changed-files.outputs.any_changed == 'true'
run: |
echo "Changes under pkg/llmproxy/translator are not allowed in pull requests."
echo "You need to create an issue for our maintenance team to make the necessary changes."
exit 1
disallowed_files="$(printf '%s\n' \
$(printf '%s' '${{ steps.changed-files.outputs.all_changed_files }}' | tr ',' '\n') \
| sed '/^internal\/translator\/kiro\/claude\/kiro_websearch_handler.go$/d' \
| tr '\n' ' ' | xargs)"
if [ -n "$disallowed_files" ]; then
echo "Changes under pkg/llmproxy/translator are not allowed in pull requests."
echo "Disallowed files:"
echo "$disallowed_files"
echo "You need to create an issue for our maintenance team to make the necessary changes."
exit 1
fi
echo "Only whitelisted translator hotfix path changed; allowing PR to continue."
1 change: 1 addition & 0 deletions .github/workflows/pr-test-build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ permissions:

jobs:
quality-ci:
name: quality-ci
runs-on: ubuntu-latest
steps:
- name: Checkout
Expand Down
6 changes: 6 additions & 0 deletions pkg/llmproxy/auth/kiro/sso_oidc_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,12 @@
"testing"
)

type roundTripperFunc func(*http.Request) (*http.Response, error)

Check failure on line 11 in pkg/llmproxy/auth/kiro/sso_oidc_test.go

View workflow job for this annotation

GitHub Actions / quality-ci

roundTripperFunc redeclared in this block

Check failure on line 11 in pkg/llmproxy/auth/kiro/sso_oidc_test.go

View workflow job for this annotation

GitHub Actions / quality-ci

roundTripperFunc redeclared in this block

func (fn roundTripperFunc) RoundTrip(req *http.Request) (*http.Response, error) {

Check failure on line 13 in pkg/llmproxy/auth/kiro/sso_oidc_test.go

View workflow job for this annotation

GitHub Actions / quality-ci

method roundTripperFunc.RoundTrip already declared at pkg/llmproxy/auth/kiro/http_roundtripper_test.go:7:27

Check failure on line 13 in pkg/llmproxy/auth/kiro/sso_oidc_test.go

View workflow job for this annotation

GitHub Actions / quality-ci

method roundTripperFunc.RoundTrip already declared at pkg/llmproxy/auth/kiro/http_roundtripper_test.go:7:27
return fn(req)
}

func TestRefreshToken_UsesSingleGrantTypeFieldAndExtensionHeaders(t *testing.T) {
t.Parallel()

Expand Down
10 changes: 0 additions & 10 deletions pkg/llmproxy/cmd/native_cli.go
Original file line number Diff line number Diff line change
Expand Up @@ -33,16 +33,6 @@ var (
}
)

// ThegentSpec returns the NativeCLISpec for TheGent unified login.
// TheGent is a unified CLI that supports multiple providers; the provider
// to log in with is passed through on the command line.
func ThegentSpec(provider string) NativeCLISpec {
	loginArgs := []string{"login", "--provider", provider}
	return NativeCLISpec{
		Name:          "thegent",
		Args:          loginArgs,
		FallbackNames: nil,
	}
}

// ResolveNativeCLI returns the absolute path to the native CLI binary, or empty string if not found.
// Checks PATH and ~/.local/bin.
func ResolveNativeCLI(spec NativeCLISpec) string {
Expand Down
56 changes: 56 additions & 0 deletions pkg/llmproxy/translator/kiro/openai/kiro_openai_request.go
Original file line number Diff line number Diff line change
Expand Up @@ -578,6 +578,7 @@ func processOpenAIMessages(messages gjson.Result, modelID, origin string) ([]Kir

// Truncate history if too long to prevent Kiro API errors
history = truncateHistoryIfNeeded(history)
history, currentToolResults = filterOrphanedToolResults(history, currentToolResults)

return history, currentUserMsg, currentToolResults
}
Expand All @@ -593,6 +594,61 @@ func truncateHistoryIfNeeded(history []KiroHistoryMessage) []KiroHistoryMessage
return history[len(history)-kiroMaxHistoryMessages:]
}

// filterOrphanedToolResults removes tool results whose toolUseId has no
// matching tool_use in the retained history. This situation arises after
// history truncation: the assistant turn that emitted the tool_use may be
// dropped while a later user turn carrying the matching tool_result survives.
// It mutates the retained history messages in place (via their pointers) and
// returns the (same) history slice together with the filtered current-message
// tool results.
func filterOrphanedToolResults(history []KiroHistoryMessage, currentToolResults []KiroToolResult) ([]KiroHistoryMessage, []KiroToolResult) {
	// Index every toolUseId still announced by a surviving assistant turn.
	knownToolUseIDs := make(map[string]bool)
	for _, msg := range history {
		if msg.AssistantResponseMessage == nil {
			continue
		}
		for _, use := range msg.AssistantResponseMessage.ToolUses {
			knownToolUseIDs[use.ToolUseID] = true
		}
	}

	// Scrub orphaned results from each retained user turn.
	for idx := range history {
		userMsg := history[idx].UserInputMessage
		if userMsg == nil || userMsg.UserInputMessageContext == nil {
			continue
		}
		msgCtx := userMsg.UserInputMessageContext
		if len(msgCtx.ToolResults) == 0 {
			continue
		}

		kept := make([]KiroToolResult, 0, len(msgCtx.ToolResults))
		for _, result := range msgCtx.ToolResults {
			if !knownToolUseIDs[result.ToolUseID] {
				log.Debugf("kiro-openai: dropping orphaned tool_result in history[%d]: toolUseId=%s (no matching tool_use)", idx, result.ToolUseID)
				continue
			}
			kept = append(kept, result)
		}
		msgCtx.ToolResults = kept
		// When nothing useful remains in the context, drop it entirely.
		if len(msgCtx.ToolResults) == 0 && len(msgCtx.Tools) == 0 {
			userMsg.UserInputMessageContext = nil
		}
	}

	// Apply the same filtering to the current message's tool results.
	if len(currentToolResults) > 0 {
		kept := make([]KiroToolResult, 0, len(currentToolResults))
		for _, result := range currentToolResults {
			if !knownToolUseIDs[result.ToolUseID] {
				log.Debugf("kiro-openai: dropping orphaned tool_result in currentMessage: toolUseId=%s (no matching tool_use)", result.ToolUseID)
				continue
			}
			kept = append(kept, result)
		}
		if len(kept) != len(currentToolResults) {
			log.Infof("kiro-openai: dropped %d orphaned tool_result(s) from currentMessage", len(currentToolResults)-len(kept))
		}
		currentToolResults = kept
	}

	return history, currentToolResults
}

// buildUserMessageFromOpenAI builds a user message from OpenAI format and extracts tool results
func buildUserMessageFromOpenAI(msg gjson.Result, modelID, origin string) (KiroUserInputMessage, []KiroToolResult) {
content := msg.Get("content")
Expand Down
54 changes: 54 additions & 0 deletions pkg/llmproxy/translator/kiro/openai/kiro_openai_request_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -452,3 +452,57 @@ func TestBuildAssistantMessageFromOpenAI_PreservesNonObjectToolArguments(t *test
t.Fatalf("expected raw argument fallback, got %#v", got.ToolUses[2].Input)
}
}

func TestFilterOrphanedToolResults_RemovesHistoryAndCurrentOrphans(t *testing.T) {
history := []KiroHistoryMessage{
{
AssistantResponseMessage: &KiroAssistantResponseMessage{
Content: "assistant",
ToolUses: []KiroToolUse{
{ToolUseID: "keep-1", Name: "Read", Input: map[string]interface{}{}},
},
},
},
{
UserInputMessage: &KiroUserInputMessage{
Content: "user-with-mixed-results",
UserInputMessageContext: &KiroUserInputMessageContext{
ToolResults: []KiroToolResult{
{ToolUseID: "keep-1", Status: "success", Content: []KiroTextContent{{Text: "ok"}}},
{ToolUseID: "orphan-1", Status: "success", Content: []KiroTextContent{{Text: "bad"}}},
},
},
},
},
{
UserInputMessage: &KiroUserInputMessage{
Content: "user-only-orphans",
UserInputMessageContext: &KiroUserInputMessageContext{
ToolResults: []KiroToolResult{
{ToolUseID: "orphan-2", Status: "success", Content: []KiroTextContent{{Text: "bad"}}},
},
},
},
},
}

currentToolResults := []KiroToolResult{
{ToolUseID: "keep-1", Status: "success", Content: []KiroTextContent{{Text: "ok"}}},
{ToolUseID: "orphan-3", Status: "success", Content: []KiroTextContent{{Text: "bad"}}},
}

filteredHistory, filteredCurrent := filterOrphanedToolResults(history, currentToolResults)

ctx1 := filteredHistory[1].UserInputMessage.UserInputMessageContext
if ctx1 == nil || len(ctx1.ToolResults) != 1 || ctx1.ToolResults[0].ToolUseID != "keep-1" {
t.Fatalf("expected mixed history message to keep only keep-1, got: %+v", ctx1)
}

if filteredHistory[2].UserInputMessage.UserInputMessageContext != nil {
t.Fatalf("expected orphan-only history context to be removed")
}

if len(filteredCurrent) != 1 || filteredCurrent[0].ToolUseID != "keep-1" {
t.Fatalf("expected current tool results to keep only keep-1, got: %+v", filteredCurrent)
}
}
43 changes: 36 additions & 7 deletions pkg/llmproxy/translator/openai/claude/openai_claude_response.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ package claude
import (
"bytes"
"context"
"encoding/json"
"fmt"
"strings"

Expand Down Expand Up @@ -132,16 +133,40 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
param.CreatedAt = root.Get("created").Int()
}

// Emit message_start on the very first chunk, regardless of whether it has a role field.
// Some providers (like Copilot) may send tool_calls in the first chunk without a role field.
// Helper to ensure message_start is sent before any content_block_start
// This is required by the Anthropic SSE protocol - message_start must come first.
// Some OpenAI-compatible providers (like GitHub Copilot) may not send role: "assistant"
// in the first chunk, so we need to emit message_start when we first see content.
ensureMessageStarted := func() {
if param.MessageStarted {
return
}
messageStart := map[string]interface{}{
"type": "message_start",
"message": map[string]interface{}{
"id": param.MessageID,
"type": "message",
"role": "assistant",
"model": param.Model,
"content": []interface{}{},
"stop_reason": nil,
"stop_sequence": nil,
"usage": map[string]interface{}{
"input_tokens": 0,
"output_tokens": 0,
},
},
}
messageStartJSON, _ := json.Marshal(messageStart)
results = append(results, "event: message_start\ndata: "+string(messageStartJSON)+"\n\n")
param.MessageStarted = true
}

// Check if this is the first chunk (has role)
if delta := root.Get("choices.0.delta"); delta.Exists() {
if !param.MessageStarted {
// Send message_start event
messageStartJSON := `{"type":"message_start","message":{"id":"","type":"message","role":"assistant","model":"","content":[],"stop_reason":null,"stop_sequence":null,"usage":{"input_tokens":0,"output_tokens":0}}}`
messageStartJSON, _ = sjson.Set(messageStartJSON, "message.id", param.MessageID)
messageStartJSON, _ = sjson.Set(messageStartJSON, "message.model", param.Model)
results = append(results, "event: message_start\ndata: "+messageStartJSON+"\n\n")
param.MessageStarted = true
ensureMessageStarted()

// Don't send content_block_start for text here - wait for actual content
}
Expand All @@ -154,6 +179,7 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
}
stopTextContentBlock(param, &results)
if !param.ThinkingContentBlockStarted {
ensureMessageStarted() // Must send message_start before content_block_start
if param.ThinkingContentBlockIndex == -1 {
param.ThinkingContentBlockIndex = param.NextContentBlockIndex
param.NextContentBlockIndex++
Expand All @@ -175,6 +201,7 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
if content := delta.Get("content"); content.Exists() && content.String() != "" {
// Send content_block_start for text if not already sent
if !param.TextContentBlockStarted {
ensureMessageStarted() // Must send message_start before content_block_start
stopThinkingContentBlock(param, &results)
if param.TextContentBlockIndex == -1 {
param.TextContentBlockIndex = param.NextContentBlockIndex
Expand Down Expand Up @@ -222,6 +249,8 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
if name := function.Get("name"); name.Exists() {
accumulator.Name = name.String()

ensureMessageStarted() // Must send message_start before content_block_start

stopThinkingContentBlock(param, &results)

stopTextContentBlock(param, &results)
Expand Down
Loading