fix(providers): correct GLM API base URL to /api/paas/v4

* fix: add OpenAI-style tool_calls support for MiniMax and other providers

MiniMax and some other providers return tool calls in OpenAI's native
JSON format instead of ZeroClaw's XML-style <tool_call> tag format.

This fix adds support for parsing OpenAI-style tool_calls:
- {"tool_calls": [{"type": "function", "function": {"name": "...", "arguments": "{...}"}}]}

The parser now:
1. First tries to parse as OpenAI-style JSON with tool_calls array
2. Falls back to ZeroClaw's original <tool_call> tag format
3. Correctly handles the nested JSON string in the arguments field

Added 3 new tests covering:
- Single tool call in OpenAI format
- Multiple tool calls in OpenAI format
- Tool calls without content field

Fixes #226

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* fix(providers): correct GLM API base URL to /api/paas/v4

The GLM (Zhipu) provider was using the incorrect base URL
`https://open.bigmodel.cn/api/paas` which resulted in 404 errors
when making API calls. The correct endpoint is
`https://open.bigmodel.cn/api/paas/v4`.

This fixes issue #238 where the agent would appear unresponsive
when using GLM-5 as the default model.

The fix aligns with the existing test `chat_completions_url_glm`
which already expected the correct v4 endpoint.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

---------

Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Argenis 2026-02-15 20:21:19 -05:00 committed by GitHub
parent 8eb57836d8
commit 3014926687
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 76 additions and 1 deletions

View file

@ -77,6 +77,45 @@ fn parse_tool_calls(response: &str) -> (String, Vec<ParsedToolCall>) {
let mut calls = Vec::new();
let mut remaining = response;
// First, try to parse as OpenAI-style JSON response with tool_calls array
// This handles providers like Minimax that return tool_calls in native JSON format
if let Ok(json_value) = serde_json::from_str::<serde_json::Value>(response.trim()) {
if let Some(tool_calls) = json_value.get("tool_calls").and_then(|v| v.as_array()) {
for tc in tool_calls {
if let Some(function) = tc.get("function") {
let name = function
.get("name")
.and_then(|v| v.as_str())
.unwrap_or("")
.to_string();
// Arguments in OpenAI format are a JSON string that needs parsing
let arguments = if let Some(args_str) = function.get("arguments").and_then(|v| v.as_str()) {
serde_json::from_str::<serde_json::Value>(args_str)
.unwrap_or(serde_json::Value::Object(serde_json::Map::new()))
} else {
serde_json::Value::Object(serde_json::Map::new())
};
if !name.is_empty() {
calls.push(ParsedToolCall { name, arguments });
}
}
}
// If we found tool_calls, extract any content field as text
if !calls.is_empty() {
if let Some(content) = json_value.get("content").and_then(|v| v.as_str()) {
if !content.trim().is_empty() {
text_parts.push(content.trim().to_string());
}
}
return (text_parts.join("\n"), calls);
}
}
}
// Fall back to XML-style <invoke> tag parsing (ZeroClaw's original format)
while let Some(start) = remaining.find("<tool_call>") {
// Everything before the tag is text
let before = &remaining[..start];
@ -538,6 +577,42 @@ After text."#;
assert_eq!(calls.len(), 1);
}
#[test]
fn parse_tool_calls_handles_openai_format() {
    // Provider response in OpenAI's native JSON shape: a `tool_calls`
    // array plus an accompanying `content` field. Both must survive parsing.
    let response = r#"{"content": "Let me check that for you.", "tool_calls": [{"type": "function", "function": {"name": "shell", "arguments": "{\"command\": \"ls -la\"}"}}]}"#;

    let (text, calls) = parse_tool_calls(response);

    // The content field becomes the returned text, trimmed of nothing here.
    assert_eq!(text, "Let me check that for you.");
    assert_eq!(calls.len(), 1);

    let call = &calls[0];
    assert_eq!(call.name, "shell");
    // The nested `arguments` JSON string must have been parsed into a value.
    let command = call.arguments.get("command").and_then(|v| v.as_str());
    assert_eq!(command, Some("ls -la"));
}
#[test]
fn parse_tool_calls_handles_openai_format_multiple_calls() {
    // An OpenAI-style payload carrying two entries in `tool_calls`
    // must yield two parsed calls, in order.
    let response = r#"{"tool_calls": [{"type": "function", "function": {"name": "file_read", "arguments": "{\"path\": \"a.txt\"}"}}, {"type": "function", "function": {"name": "file_read", "arguments": "{\"path\": \"b.txt\"}"}}]}"#;

    let (_text, calls) = parse_tool_calls(response);

    assert_eq!(calls.len(), 2);
    // Both entries name the same tool here; each must be parsed independently.
    for call in &calls {
        assert_eq!(call.name, "file_read");
    }
}
#[test]
fn parse_tool_calls_openai_format_without_content() {
    // Some providers omit the `content` field entirely when returning
    // tool_calls; the parser must produce empty text rather than failing.
    let response = r#"{"tool_calls": [{"type": "function", "function": {"name": "memory_recall", "arguments": "{}"}}]}"#;

    let (text, calls) = parse_tool_calls(response);

    assert_eq!(text, "");
    assert_eq!(calls.len(), 1);
    let only = calls.first().expect("exactly one parsed call");
    assert_eq!(only.name, "memory_recall");
}
#[test]
fn build_tool_instructions_includes_all_tools() {
use crate::security::SecurityPolicy;

View file

@ -194,7 +194,7 @@ pub fn create_provider(name: &str, api_key: Option<&str>) -> anyhow::Result<Box<
"Z.AI", "https://api.z.ai/api/coding/paas/v4", key, AuthStyle::Bearer,
))),
"glm" | "zhipu" => Ok(Box::new(OpenAiCompatibleProvider::new(
"GLM", "https://open.bigmodel.cn/api/paas", key, AuthStyle::Bearer,
"GLM", "https://open.bigmodel.cn/api/paas/v4", key, AuthStyle::Bearer,
))),
"minimax" => Ok(Box::new(OpenAiCompatibleProvider::new(
"MiniMax", "https://api.minimax.chat", key, AuthStyle::Bearer,