From f40a67ffc52532dbadf4342bcbaccc92fcf49a90 Mon Sep 17 00:00:00 2001 From: Ken VanDine Date: Fri, 27 Mar 2026 17:01:12 -0400 Subject: [PATCH] =?UTF-8?q?fix(ollama=5Fapi):=20normalise=20tool=5Fcalls?= =?UTF-8?q?=20during=20Ollama=E2=86=92OpenAI=20conversion?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit llama.cpp's common_chat_msgs_parse_oaicompat strictly validates the OpenAI spec when processing message history. Ollama clients violate the spec in two ways that cause 500 errors: 1. Missing "type":"function" on tool_call objects The Ollama API spec does not require a "type" field, but llama.cpp throws "Missing tool call type" if it is absent. Fix by injecting "type":"function" on any tool_call that lacks it. 2. Non-JSON arguments string (e.g. arguments="{") Ollama clients may persist incomplete streaming state to conversation history mid-stream. llama.cpp's func_args_not_string() tries to parse the arguments string as JSON and throws a parse_error 500. Fix by skipping tool calls whose arguments are not valid JSON rather than forwarding them to the backend. --- src/cpp/server/ollama_api.cpp | 31 +++++++++++++++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/src/cpp/server/ollama_api.cpp b/src/cpp/server/ollama_api.cpp index 19b62cba2..f0d2ca5e8 100644 --- a/src/cpp/server/ollama_api.cpp +++ b/src/cpp/server/ollama_api.cpp @@ -301,9 +301,36 @@ json OllamaApi::convert_ollama_to_openai_chat(const json& ollama_request) { openai_msg["content"] = msg.value("content", ""); } - // Forward tool_calls if present + // Forward tool_calls if present, with two normalisations required + // by llama.cpp's strict OpenAI-spec validation: + // 1. Inject "type":"function" — Ollama clients omit this field. + // 2. Skip tool calls whose arguments are not valid JSON — these + // are streaming artifacts (e.g. 
arguments="{") that an Ollama + // client may persist to history mid-stream; forwarding them + // causes func_args_not_string() to throw a parse error 500. if (msg.contains("tool_calls")) { - openai_msg["tool_calls"] = msg["tool_calls"]; + json tool_calls = json::array(); + for (auto tc : msg["tool_calls"]) { + if (!tc.contains("type")) { + tc["type"] = "function"; + } + // Validate arguments JSON if present + if (tc.contains("function") && tc["function"].contains("arguments")) { + const auto& args = tc["function"]["arguments"]; + if (args.is_string()) { + try { + json::parse(args.get<std::string>()); + } catch (const std::exception&) { + // Skip this tool call — arguments are not valid JSON + continue; + } + } + } + tool_calls.push_back(tc); + } + if (!tool_calls.empty()) { + openai_msg["tool_calls"] = tool_calls; + } } messages.push_back(openai_msg);