Don't serialise empty content on ChatCompletionMessage #841

Open · wants to merge 2 commits into base: master
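This change stops ChatCompletionMessage.MarshalJSON from emitting an empty "content" field when an assistant message carries only tool calls. A minimal sketch of the intended behaviour, assuming the github.com/sashabaranov/go-openai import path and a hypothetical tool-call ID:

package main

import (
    "encoding/json"
    "fmt"

    openai "github.com/sashabaranov/go-openai"
)

func main() {
    // An assistant message that carries only a tool call; Content is left empty.
    msg := openai.ChatCompletionMessage{
        Role: openai.ChatMessageRoleAssistant,
        ToolCalls: []openai.ToolCall{{
            ID:       "call_123", // hypothetical ID, for illustration only
            Type:     openai.ToolTypeFunction,
            Function: openai.FunctionCall{Name: "my_func", Arguments: "{}"},
        }},
    }

    b, _ := json.Marshal(msg)
    // Before this change the marshalled payload also carried "content":""; with it, the key is omitted:
    // {"role":"assistant","tool_calls":[{"id":"call_123","type":"function","function":{"name":"my_func","arguments":"{}"}}]}
    fmt.Println(string(b))
}

The unit test added to chat_test.go below asserts exactly this output shape.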
111 changes: 111 additions & 0 deletions api_integration_test.go
@@ -312,3 +312,114 @@ func TestChatCompletionStructuredOutputsFunctionCalling(t *testing.T) {
        }
    }
}

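// TestChatCompletionJsonSchemaWithFunctionCallingResponse sends a conversation whose assistant turn
// carries only tool calls (no content), requests a json_schema response, and unmarshals the result.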
func TestChatCompletionJsonSchemaWithFunctionCallingResponse(t *testing.T) {
    apiToken := os.Getenv("OPENAI_TOKEN")
    if apiToken == "" {
        t.Skip("Skipping testing against production OpenAI API. Set OPENAI_TOKEN environment variable to enable it.")
    }

    var err error
    c := openai.NewClient(apiToken)
    ctx := context.Background()

    type MyStructuredResponse struct {
        PascalCase string `json:"pascal_case" required:"true" description:"PascalCase"`
        CamelCase  string `json:"camel_case" required:"true" description:"CamelCase"`
        KebabCase  string `json:"kebab_case" required:"true" description:"KebabCase"`
        SnakeCase  string `json:"snake_case" required:"true" description:"SnakeCase"`
    }
    var result MyStructuredResponse
    schema, err := jsonschema.GenerateSchemaForType(result)
    if err != nil {
        t.Fatal("CreateChatCompletion (use json_schema response) GenerateSchemaForType error")
    }

    resp, err := c.CreateChatCompletion(
        ctx,
        openai.ChatCompletionRequest{
            Model: openai.GPT4oMini,
            Messages: []openai.ChatCompletionMessage{
                {
                    Role: openai.ChatMessageRoleSystem,
                    Content: "Please enter a string, and we will convert it into the following naming conventions:" +
                        "1. PascalCase: Each word starts with an uppercase letter, with no spaces or separators." +
                        "2. CamelCase: The first word starts with a lowercase letter, " +
                        "and subsequent words start with an uppercase letter, with no spaces or separators." +
                        "3. KebabCase: All letters are lowercase, with words separated by hyphens `-`." +
                        "4. SnakeCase: All letters are lowercase, with words separated by underscores `_`.",
                },
                {
                    Role:    openai.ChatMessageRoleUser,
                    Content: "Hello World",
                },
                {
                    Role: openai.ChatMessageRoleAssistant,
                    ToolCalls: []openai.ToolCall{
                        {
                            ID:   "call_cTSjmyVCPkRh870yFvkUrql5",
                            Type: openai.ToolTypeFunction,
                            Function: openai.FunctionCall{
                                Name:      "display_cases",
                                Arguments: `{"PascalCase":"HelloWorld","CamelCase":"helloWorld","KebabCase":"hello-world","SnakeCase":"hello_world"}`,
                            },
                        },
                    },
                },
                {
                    Role:       openai.ChatMessageRoleTool,
                    ToolCallID: "call_cTSjmyVCPkRh870yFvkUrql5",
                    Content: "Here are the naming conventions for 'Hello World':\n" +
                        "PascalCase: HelloWorld\n" +
                        "CamelCase: helloWorld\n" +
                        "KebabCase: hello-world\n" +
                        "SnakeCase: hello_world\n",
                },
            },
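            // Request a structured response validated against the schema generated above.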
            ResponseFormat: &openai.ChatCompletionResponseFormat{
                Type: openai.ChatCompletionResponseFormatTypeJSONSchema,
                JSONSchema: &openai.ChatCompletionResponseFormatJSONSchema{
                    Name:   "cases",
                    Schema: schema,
                    Strict: true,
                },
            },
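            // Declare the display_cases tool that the assistant's earlier tool call refers to.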
            Tools: []openai.Tool{
                {
                    Type: openai.ToolTypeFunction,
                    Function: &openai.FunctionDefinition{
                        Name:   "display_cases",
                        Strict: true,
                        Parameters: &jsonschema.Definition{
                            Type: jsonschema.Object,
                            Properties: map[string]jsonschema.Definition{
                                "PascalCase": {
                                    Type: jsonschema.String,
                                },
                                "CamelCase": {
                                    Type: jsonschema.String,
                                },
                                "KebabCase": {
                                    Type: jsonschema.String,
                                },
                                "SnakeCase": {
                                    Type: jsonschema.String,
                                },
                            },
                            Required:             []string{"PascalCase", "CamelCase", "KebabCase", "SnakeCase"},
                            AdditionalProperties: false,
                        },
                    },
                },
            },
        },
    )
    checks.NoError(t, err, "CreateChatCompletion returned error")
    if err == nil {
        err = schema.Unmarshal(resp.Choices[0].Message.Content, &result)
        checks.NoError(t, err, "CreateChatCompletion (use json_schema response) unmarshal error")
    }
    if result.PascalCase != "HelloWorld" {
        t.Errorf("PascalCase: expected 'HelloWorld', got '%s'", result.PascalCase)
    }
}
11 changes: 11 additions & 0 deletions chat.go
@@ -114,6 +114,17 @@ func (m ChatCompletionMessage) MarshalJSON() ([]byte, error) {
            ToolCallID   string            `json:"tool_call_id,omitempty"`
        }(m)
        return json.Marshal(msg)
    } else if len(m.ToolCalls) > 0 && m.Content == "" {
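        // The message carries only tool calls, so marshal it through a shadow struct whose
        // Content field is tagged json:"-", dropping the empty content from the payload.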
        msg := struct {
            Role         string            `json:"role"`
            Content      string            `json:"-"`
            MultiContent []ChatMessagePart `json:"-"`
            Name         string            `json:"name,omitempty"`
            FunctionCall *FunctionCall     `json:"function_call,omitempty"`
            ToolCalls    []ToolCall        `json:"tool_calls,omitempty"`
            ToolCallID   string            `json:"tool_call_id,omitempty"`
        }(m)
        return json.Marshal(msg)
    }
    msg := struct {
        Role         string            `json:"role"`
23 changes: 23 additions & 0 deletions chat_test.go
@@ -425,6 +425,29 @@ func TestMultipartChatMessageSerialization(t *testing.T) {
    }
}

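// TestToolCallChatMessageSerialization checks that an assistant message containing only
// tool calls marshals without a "content" field.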
func TestToolCallChatMessageSerialization(t *testing.T) {
    jsonText := `{"role":"assistant","tool_calls":` +
        `[{"id":"123","type":"function","function":{"name":"my_func","arguments":"{}"}}]}`

    toolCallMsg := openai.ChatCompletionMessage{
        Role: openai.ChatMessageRoleAssistant,
        ToolCalls: []openai.ToolCall{{
            ID:       "123",
            Type:     openai.ToolTypeFunction,
            Function: openai.FunctionCall{Name: "my_func", Arguments: "{}"},
        }},
    }

    s, err := json.Marshal(toolCallMsg)
    if err != nil {
        t.Fatalf("Expected no error: %s", err)
    }
    res := strings.ReplaceAll(string(s), " ", "")
    if res != jsonText {
        t.Fatalf("invalid message: %s", string(s))
    }
}

// handleChatCompletionEndpoint Handles the ChatGPT completion endpoint by the test server.
func handleChatCompletionEndpoint(w http.ResponseWriter, r *http.Request) {
    var err error