53 changes: 34 additions & 19 deletions internal/translator/openai/claude/openai_claude_response.go
@@ -128,29 +128,40 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
param.CreatedAt = root.Get("created").Int()
}

// Helper to ensure message_start is sent before any content_block_start
// This is required by the Anthropic SSE protocol - message_start must come first.
// Some OpenAI-compatible providers (like GitHub Copilot) may not send role: "assistant"
// in the first chunk, so we need to emit message_start when we first see content.
ensureMessageStarted := func() {
if param.MessageStarted {
return
}
messageStart := map[string]interface{}{
"type": "message_start",
"message": map[string]interface{}{
"id": param.MessageID,
"type": "message",
"role": "assistant",
"model": param.Model,
"content": []interface{}{},
"stop_reason": nil,
"stop_sequence": nil,
"usage": map[string]interface{}{
"input_tokens": 0,
"output_tokens": 0,
},
},
}
messageStartJSON, _ := json.Marshal(messageStart)
results = append(results, "event: message_start\ndata: "+string(messageStartJSON)+"\n\n")
Comment on lines +155 to +156

medium

The error from json.Marshal is ignored. While it's unlikely to fail for this static data structure, it's good practice to handle the error: check it and return if it's non-nil so a malformed event is never sent, and ideally log the failure.

		messageStartJSON, err := json.Marshal(messageStart)
		if err != nil {
			return
		}
		results = append(results, "event: message_start\ndata: "+string(messageStartJSON)+"\n\n")
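To also cover the logging point above, a minimal variant using the standard library log package could look like the following; the file's actual logging facility isn't visible in this diff, so log.Printf is only a stand-in and would need the log import:

		messageStartJSON, err := json.Marshal(messageStart)
		if err != nil {
			// Log and skip rather than emit a malformed SSE event.
			log.Printf("failed to marshal message_start event: %v", err)
			return
		}
		results = append(results, "event: message_start\ndata: "+string(messageStartJSON)+"\n\n")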

param.MessageStarted = true
}
Comment on lines +135 to +158

medium

This new helper is defined as a closure. For consistency with the other helpers in this file, such as stopThinkingContentBlock and stopTextContentBlock, consider making it a standalone package-level function, e.g. func ensureMessageStarted(param *ConvertOpenAIResponseToAnthropicParams, results *[]string). That would improve consistency and maintainability; a sketch follows below.
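A rough sketch of that standalone form, assuming the signature suggested above and reusing the body of the closure from this diff (MessageStarted, MessageID, and Model already exist on the params struct, and encoding/json is already imported by this file):

func ensureMessageStarted(param *ConvertOpenAIResponseToAnthropicParams, results *[]string) {
	// Emit the Anthropic message_start SSE event exactly once per message,
	// before any content_block_start event.
	if param.MessageStarted {
		return
	}
	messageStart := map[string]interface{}{
		"type": "message_start",
		"message": map[string]interface{}{
			"id":            param.MessageID,
			"type":          "message",
			"role":          "assistant",
			"model":         param.Model,
			"content":       []interface{}{},
			"stop_reason":   nil,
			"stop_sequence": nil,
			"usage": map[string]interface{}{
				"input_tokens":  0,
				"output_tokens": 0,
			},
		},
	}
	messageStartJSON, err := json.Marshal(messageStart)
	if err != nil {
		// Skip the event rather than emit malformed SSE data.
		return
	}
	*results = append(*results, "event: message_start\ndata: "+string(messageStartJSON)+"\n\n")
	param.MessageStarted = true
}

Call sites inside convertOpenAIStreamingChunkToAnthropic would then pass the slice by pointer, e.g. ensureMessageStarted(param, &results), matching how stopThinkingContentBlock and stopTextContentBlock are already invoked.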


// Check if this is the first chunk (has role)
if delta := root.Get("choices.0.delta"); delta.Exists() {
if role := delta.Get("role"); role.Exists() && role.String() == "assistant" && !param.MessageStarted {
// Send message_start event
messageStart := map[string]interface{}{
"type": "message_start",
"message": map[string]interface{}{
"id": param.MessageID,
"type": "message",
"role": "assistant",
"model": param.Model,
"content": []interface{}{},
"stop_reason": nil,
"stop_sequence": nil,
"usage": map[string]interface{}{
"input_tokens": 0,
"output_tokens": 0,
},
},
}
messageStartJSON, _ := json.Marshal(messageStart)
results = append(results, "event: message_start\ndata: "+string(messageStartJSON)+"\n\n")
param.MessageStarted = true
ensureMessageStarted()

// Don't send content_block_start for text here - wait for actual content
}
@@ -163,6 +174,7 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
}
stopTextContentBlock(param, &results)
if !param.ThinkingContentBlockStarted {
ensureMessageStarted() // Must send message_start before content_block_start
if param.ThinkingContentBlockIndex == -1 {
param.ThinkingContentBlockIndex = param.NextContentBlockIndex
param.NextContentBlockIndex++
@@ -197,6 +209,7 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
if content := delta.Get("content"); content.Exists() && content.String() != "" {
// Send content_block_start for text if not already sent
if !param.TextContentBlockStarted {
ensureMessageStarted() // Must send message_start before content_block_start
stopThinkingContentBlock(param, &results)
if param.TextContentBlockIndex == -1 {
param.TextContentBlockIndex = param.NextContentBlockIndex
@@ -257,6 +270,8 @@ func convertOpenAIStreamingChunkToAnthropic(rawJSON []byte, param *ConvertOpenAI
if name := function.Get("name"); name.Exists() {
accumulator.Name = name.String()

ensureMessageStarted() // Must send message_start before content_block_start

stopThinkingContentBlock(param, &results)

stopTextContentBlock(param, &results)