diff --git a/internal/translator/antigravity/claude/antigravity_claude_response.go b/internal/translator/antigravity/claude/antigravity_claude_response.go
index 875e54a71..532f2be42 100644
--- a/internal/translator/antigravity/claude/antigravity_claude_response.go
+++ b/internal/translator/antigravity/claude/antigravity_claude_response.go
@@ -350,14 +350,12 @@ func resolveStopReason(params *Params) string {
 		return "tool_use"
 	}
 
-	switch params.FinishReason {
-	case "MAX_TOKENS":
+	if params.FinishReason == "MAX_TOKENS" {
 		return "max_tokens"
-	case "STOP", "FINISH_REASON_UNSPECIFIED", "UNKNOWN":
-		return "end_turn"
 	}
 
 	return "end_turn"
+
 }
 
 // ConvertAntigravityResponseToClaudeNonStream converts a non-streaming Gemini CLI response to a non-streaming Claude response.
@@ -498,13 +496,8 @@ func ConvertAntigravityResponseToClaudeNonStream(_ context.Context, _ string, or
 		stopReason = "tool_use"
 	} else {
 		if finish := root.Get("response.candidates.0.finishReason"); finish.Exists() {
-			switch finish.String() {
-			case "MAX_TOKENS":
+			if finish.String() == "MAX_TOKENS" {
 				stopReason = "max_tokens"
-			case "STOP", "FINISH_REASON_UNSPECIFIED", "UNKNOWN":
-				stopReason = "end_turn"
-			default:
-				stopReason = "end_turn"
 			}
 		}
 	}
diff --git a/internal/translator/antigravity/openai/chat-completions/antigravity_openai_response.go b/internal/translator/antigravity/openai/chat-completions/antigravity_openai_response.go
index 1b7866d01..143ebefd2 100644
--- a/internal/translator/antigravity/openai/chat-completions/antigravity_openai_response.go
+++ b/internal/translator/antigravity/openai/chat-completions/antigravity_openai_response.go
@@ -79,11 +79,17 @@ func ConvertAntigravityResponseToOpenAI(_ context.Context, _ string, originalReq
 		template, _ = sjson.Set(template, "id", responseIDResult.String())
 	}
 
-	// Extract and set the finish reason.
-	if finishReasonResult := gjson.GetBytes(rawJSON, "response.candidates.0.finishReason"); finishReasonResult.Exists() {
-		template, _ = sjson.Set(template, "choices.0.finish_reason", strings.ToLower(finishReasonResult.String()))
-		template, _ = sjson.Set(template, "choices.0.native_finish_reason", strings.ToLower(finishReasonResult.String()))
+	// Extract the finish reason (set after payload processing).
+	finishReason := ""
+	if stopReasonResult := gjson.GetBytes(rawJSON, "response.stop_reason"); stopReasonResult.Exists() {
+		finishReason = stopReasonResult.String()
 	}
+	if finishReason == "" {
+		if finishReasonResult := gjson.GetBytes(rawJSON, "response.candidates.0.finishReason"); finishReasonResult.Exists() {
+			finishReason = finishReasonResult.String()
+		}
+	}
+	finishReason = strings.ToLower(finishReason)
 
 	// Extract and set usage metadata (token counts).
 	if usageResult := gjson.GetBytes(rawJSON, "response.usageMetadata"); usageResult.Exists() {
@@ -198,6 +204,12 @@ func ConvertAntigravityResponseToOpenAI(_ context.Context, _ string, originalReq
 	if hasFunctionCall {
 		template, _ = sjson.Set(template, "choices.0.finish_reason", "tool_calls")
 		template, _ = sjson.Set(template, "choices.0.native_finish_reason", "tool_calls")
+	} else if finishReason != "" && (*param).(*convertCliResponseToOpenAIChatParams).FunctionIndex == 0 {
+		// Only pass through specific finish reasons
+		if finishReason == "max_tokens" || finishReason == "stop" {
+			template, _ = sjson.Set(template, "choices.0.finish_reason", finishReason)
+			template, _ = sjson.Set(template, "choices.0.native_finish_reason", finishReason)
+		}
 	}
 
 	return []string{template}
diff --git a/internal/translator/codex/claude/codex_claude_response.go b/internal/translator/codex/claude/codex_claude_response.go
index e3909d45e..e02da1a5a 100644
--- a/internal/translator/codex/claude/codex_claude_response.go
+++ b/internal/translator/codex/claude/codex_claude_response.go
@@ -99,12 +99,16 @@ func ConvertCodexResponseToClaude(_ context.Context, _ string, originalRequestRa
 		output = "event: content_block_stop\n"
 		output += fmt.Sprintf("data: %s\n\n", template)
 	} else if typeStr == "response.completed" {
-		template = `{"type":"message_delta","delta":{"stop_reason":"tool_use","stop_sequence":null},"usage":{"input_tokens":0,"output_tokens":0}}`
+		template = `{"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"input_tokens":0,"output_tokens":0}}`
 		p := (*param).(*bool)
 		if *p {
 			template, _ = sjson.Set(template, "delta.stop_reason", "tool_use")
 		} else {
-			template, _ = sjson.Set(template, "delta.stop_reason", "end_turn")
+			stopReason := rootResult.Get("response.stop_reason").String()
+			// Only pass through specific stop reasons
+			if stopReason == "max_tokens" || stopReason == "stop" || stopReason == "end_turn" {
+				template, _ = sjson.Set(template, "delta.stop_reason", stopReason)
+			}
 		}
 		template, _ = sjson.Set(template, "usage.input_tokens", rootResult.Get("response.usage.input_tokens").Int())
 		template, _ = sjson.Set(template, "usage.output_tokens", rootResult.Get("response.usage.output_tokens").Int())
diff --git a/internal/translator/gemini-cli/claude/gemini-cli_claude_response.go b/internal/translator/gemini-cli/claude/gemini-cli_claude_response.go
index 2f8e95488..f9d5b016f 100644
--- a/internal/translator/gemini-cli/claude/gemini-cli_claude_response.go
+++ b/internal/translator/gemini-cli/claude/gemini-cli_claude_response.go
@@ -244,6 +244,12 @@ func ConvertGeminiCLIResponseToClaude(_ context.Context, _ string, originalReque
 	// Set tool_use stop reason if tools were used in this response
 	if usedTool {
 		template = `{"type":"message_delta","delta":{"stop_reason":"tool_use","stop_sequence":null},"usage":{"input_tokens":0,"output_tokens":0}}`
+	} else {
+		if finish := gjson.GetBytes(rawJSON, "response.candidates.0.finishReason"); finish.Exists() {
+			if finish.String() == "MAX_TOKENS" {
+				template, _ = sjson.Set(template, "delta.stop_reason", "max_tokens")
+			}
+		}
 	}
 
 	// Include thinking tokens in output token count if present
@@ -352,13 +358,8 @@ func ConvertGeminiCLIResponseToClaudeNonStream(_ context.Context, _ string, orig
 		stopReason = "tool_use"
 	} else {
 		if finish := root.Get("response.candidates.0.finishReason"); finish.Exists() {
-			switch finish.String() {
-			case "MAX_TOKENS":
+			if finish.String() == "MAX_TOKENS" {
 				stopReason = "max_tokens"
-			case "STOP", "FINISH_REASON_UNSPECIFIED", "UNKNOWN":
-				stopReason = "end_turn"
-			default:
-				stopReason = "end_turn"
 			}
 		}
 	}
diff --git a/internal/translator/gemini-cli/openai/chat-completions/gemini-cli_openai_response.go b/internal/translator/gemini-cli/openai/chat-completions/gemini-cli_openai_response.go
index 5a1faf510..e94d8ea7a 100644
--- a/internal/translator/gemini-cli/openai/chat-completions/gemini-cli_openai_response.go
+++ b/internal/translator/gemini-cli/openai/chat-completions/gemini-cli_openai_response.go
@@ -77,11 +77,17 @@ func ConvertCliResponseToOpenAI(_ context.Context, _ string, originalRequestRawJ
 		template, _ = sjson.Set(template, "id", responseIDResult.String())
 	}
 
-	// Extract and set the finish reason.
-	if finishReasonResult := gjson.GetBytes(rawJSON, "response.candidates.0.finishReason"); finishReasonResult.Exists() {
-		template, _ = sjson.Set(template, "choices.0.finish_reason", strings.ToLower(finishReasonResult.String()))
-		template, _ = sjson.Set(template, "choices.0.native_finish_reason", strings.ToLower(finishReasonResult.String()))
+	// Extract the finish reason (set after payload processing).
+	finishReason := ""
+	if stopReasonResult := gjson.GetBytes(rawJSON, "response.stop_reason"); stopReasonResult.Exists() {
+		finishReason = stopReasonResult.String()
 	}
+	if finishReason == "" {
+		if finishReasonResult := gjson.GetBytes(rawJSON, "response.candidates.0.finishReason"); finishReasonResult.Exists() {
+			finishReason = finishReasonResult.String()
+		}
+	}
+	finishReason = strings.ToLower(finishReason)
 
 	// Extract and set usage metadata (token counts).
 	if usageResult := gjson.GetBytes(rawJSON, "response.usageMetadata"); usageResult.Exists() {
@@ -187,6 +193,12 @@ func ConvertCliResponseToOpenAI(_ context.Context, _ string, originalRequestRawJ
 	if hasFunctionCall {
 		template, _ = sjson.Set(template, "choices.0.finish_reason", "tool_calls")
 		template, _ = sjson.Set(template, "choices.0.native_finish_reason", "tool_calls")
+	} else if finishReason != "" && (*param).(*convertCliResponseToOpenAIChatParams).FunctionIndex == 0 {
+		// Only pass through specific finish reasons
+		if finishReason == "max_tokens" || finishReason == "stop" {
+			template, _ = sjson.Set(template, "choices.0.finish_reason", finishReason)
+			template, _ = sjson.Set(template, "choices.0.native_finish_reason", finishReason)
+		}
 	}
 
 	return []string{template}
diff --git a/internal/translator/gemini/claude/gemini_claude_response.go b/internal/translator/gemini/claude/gemini_claude_response.go
index db14c78a1..d245ca9f4 100644
--- a/internal/translator/gemini/claude/gemini_claude_response.go
+++ b/internal/translator/gemini/claude/gemini_claude_response.go
@@ -251,6 +251,12 @@ func ConvertGeminiResponseToClaude(_ context.Context, _ string, originalRequestR
 	template := `{"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"input_tokens":0,"output_tokens":0}}`
 	if usedTool {
 		template = `{"type":"message_delta","delta":{"stop_reason":"tool_use","stop_sequence":null},"usage":{"input_tokens":0,"output_tokens":0}}`
+	} else {
+		if finish := gjson.GetBytes(rawJSON, "candidates.0.finishReason"); finish.Exists() {
+			if finish.String() == "MAX_TOKENS" {
+				template, _ = sjson.Set(template, "delta.stop_reason", "max_tokens")
+			}
+		}
 	}
 
 	thoughtsTokenCount := usageResult.Get("thoughtsTokenCount").Int()
@@ -358,13 +364,8 @@ func ConvertGeminiResponseToClaudeNonStream(_ context.Context, _ string, origina
 		stopReason = "tool_use"
 	} else {
 		if finish := root.Get("candidates.0.finishReason"); finish.Exists() {
-			switch finish.String() {
-			case "MAX_TOKENS":
+			if finish.String() == "MAX_TOKENS" {
 				stopReason = "max_tokens"
-			case "STOP", "FINISH_REASON_UNSPECIFIED", "UNKNOWN":
-				stopReason = "end_turn"
-			default:
-				stopReason = "end_turn"
 			}
 		}
 	}
diff --git a/internal/translator/gemini/openai/chat-completions/gemini_openai_response.go b/internal/translator/gemini/openai/chat-completions/gemini_openai_response.go
index 52fbba430..0efdead69 100644
--- a/internal/translator/gemini/openai/chat-completions/gemini_openai_response.go
+++ b/internal/translator/gemini/openai/chat-completions/gemini_openai_response.go
@@ -81,11 +81,17 @@ func ConvertGeminiResponseToOpenAI(_ context.Context, _ string, originalRequestR
 		template, _ = sjson.Set(template, "id", responseIDResult.String())
 	}
 
-	// Extract and set the finish reason.
-	if finishReasonResult := gjson.GetBytes(rawJSON, "candidates.0.finishReason"); finishReasonResult.Exists() {
-		template, _ = sjson.Set(template, "choices.0.finish_reason", strings.ToLower(finishReasonResult.String()))
-		template, _ = sjson.Set(template, "choices.0.native_finish_reason", strings.ToLower(finishReasonResult.String()))
+	// Extract the finish reason (set after payload processing).
+	finishReason := ""
+	if stopReasonResult := gjson.GetBytes(rawJSON, "stop_reason"); stopReasonResult.Exists() {
+		finishReason = stopReasonResult.String()
 	}
+	if finishReason == "" {
+		if finishReasonResult := gjson.GetBytes(rawJSON, "candidates.0.finishReason"); finishReasonResult.Exists() {
+			finishReason = finishReasonResult.String()
+		}
+	}
+	finishReason = strings.ToLower(finishReason)
 
 	// Extract and set usage metadata (token counts).
 	if usageResult := gjson.GetBytes(rawJSON, "usageMetadata"); usageResult.Exists() {
@@ -199,6 +205,12 @@ func ConvertGeminiResponseToOpenAI(_ context.Context, _ string, originalRequestR
 	if hasFunctionCall {
 		template, _ = sjson.Set(template, "choices.0.finish_reason", "tool_calls")
 		template, _ = sjson.Set(template, "choices.0.native_finish_reason", "tool_calls")
+	} else if finishReason != "" && (*param).(*convertGeminiResponseToOpenAIChatParams).FunctionIndex == 0 {
+		// Only pass through specific finish reasons
+		if finishReason == "max_tokens" || finishReason == "stop" {
+			template, _ = sjson.Set(template, "choices.0.finish_reason", finishReason)
+			template, _ = sjson.Set(template, "choices.0.native_finish_reason", finishReason)
+		}
 	}
 
 	return []string{template}