fix: pass OpenAI-style tool_calls from provider to parser

The OpenAI-compatible provider was not properly handling tool_calls
in API responses. When providers like MiniMax return tool_calls in
OpenAI's native format, the provider was only extracting the content
field and discarding the tool_calls.

Changes:
- Update ResponseMessage struct to include optional tool_calls field
- Add ToolCall and Function structs for deserializing tool_calls
- Serialize full message as JSON when tool_calls are present
- Fall back to plain content when no tool_calls

This allows the parse_tool_calls function in the agent loop to
properly handle OpenAI-style tool_calls format.

All 1080 tests pass.

Related to #226

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
Author: argenis de la rosa
Date:   2026-02-15 20:50:40 -05:00
Parent: 82ffb36f90
Commit: 7456692e9c

View file

@ -90,9 +90,25 @@ struct Choice {
message: ResponseMessage, message: ResponseMessage,
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize, Serialize)]
struct ResponseMessage { struct ResponseMessage {
content: String, #[serde(default)]
content: Option<String>,
#[serde(default)]
tool_calls: Option<Vec<ToolCall>>,
}
/// One tool invocation in OpenAI's `tool_calls` wire format.
///
/// All fields are optional so that partially-populated responses from
/// loosely OpenAI-compatible providers still deserialize.
// NOTE(review): the OpenAI schema also includes an `id` field on each
// tool call; this struct drops it on the serialize round-trip — confirm
// parse_tool_calls does not need it.
#[derive(Debug, Deserialize, Serialize)]
struct ToolCall {
    /// Call type, `"function"` in the OpenAI schema. Renamed because
    /// `type` is a Rust keyword.
    #[serde(rename = "type")]
    kind: Option<String>,
    /// The function being invoked: name plus JSON-encoded arguments.
    function: Option<Function>,
}
#[derive(Debug, Deserialize, Serialize)]
struct Function {
name: Option<String>,
arguments: Option<String>,
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
@ -287,7 +303,17 @@ impl Provider for OpenAiCompatibleProvider {
.choices .choices
.into_iter() .into_iter()
.next() .next()
.map(|c| c.message.content) .map(|c| {
// If tool_calls are present, serialize the full message as JSON
// so parse_tool_calls can handle the OpenAI-style format
if c.message.tool_calls.is_some() && c.message.tool_calls.as_ref().map_or(false, |t| !t.is_empty()) {
serde_json::to_string(&c.message)
.unwrap_or_else(|_| c.message.content.unwrap_or_default())
} else {
// No tool calls, return content as-is
c.message.content.unwrap_or_default()
}
})
.ok_or_else(|| anyhow::anyhow!("No response from {}", self.name)) .ok_or_else(|| anyhow::anyhow!("No response from {}", self.name))
} }
@ -359,7 +385,17 @@ impl Provider for OpenAiCompatibleProvider {
.choices .choices
.into_iter() .into_iter()
.next() .next()
.map(|c| c.message.content) .map(|c| {
// If tool_calls are present, serialize the full message as JSON
// so parse_tool_calls can handle the OpenAI-style format
if c.message.tool_calls.is_some() && c.message.tool_calls.as_ref().map_or(false, |t| !t.is_empty()) {
serde_json::to_string(&c.message)
.unwrap_or_else(|_| c.message.content.unwrap_or_default())
} else {
// No tool calls, return content as-is
c.message.content.unwrap_or_default()
}
})
.ok_or_else(|| anyhow::anyhow!("No response from {}", self.name)) .ok_or_else(|| anyhow::anyhow!("No response from {}", self.name))
} }
} }
@ -431,7 +467,7 @@ mod tests {
/// A minimal OpenAI-style payload — one choice, text content, no
/// `tool_calls` key — must deserialize with `content` populated and
/// `tool_calls` defaulting to `None`.
fn response_deserializes() {
    let json = r#"{"choices":[{"message":{"content":"Hello from Venice!"}}]}"#;
    let resp: ApiChatResponse = serde_json::from_str(json).unwrap();
    // content is now Option<String>; compare via as_deref to avoid
    // allocating an expected String.
    assert_eq!(
        resp.choices[0].message.content.as_deref(),
        Some("Hello from Venice!")
    );
    // The absent tool_calls key must hit #[serde(default)] and be None.
    assert!(resp.choices[0].message.tool_calls.is_none());
}
#[test] #[test]