feat(providers): add native tool calling for OpenAI-compatible providers

Implement chat_with_tools() on CompatibleProvider so OpenAI-compatible
endpoints (OpenRouter, local LLMs, etc.) can use structured tool calling
instead of prompt-injected tool descriptions.

Changes:
- CompatibleProvider: capabilities() reports native_tool_calling, new
  chat_with_tools() sends tools in API request and parses tool_calls
  from response, chat() bridges to chat_with_tools() when ToolSpecs
  are provided
- RouterProvider: chat_with_tools() delegation with model hint resolution
- loop_.rs: expose tools_to_openai_format as pub(crate), add
  tools_to_openai_format_from_specs for ToolSpec-based conversion

Adds 9 new tests and updates 1 existing test.
This commit is contained in:
Vernon Stinebaker 2026-02-18 17:15:02 +08:00 committed by Chummy
parent 6acec94666
commit 3b0133596c
3 changed files with 388 additions and 7 deletions

View file

@@ -644,7 +644,8 @@ fn parse_tool_calls(response: &str) -> (String, Vec<ParsedToolCall>) {
remaining = &after_open[close_idx + close_tag.len()..];
} else {
if let Some(json_end) = find_json_end(after_open) {
-if let Ok(value) = serde_json::from_str::<serde_json::Value>(&after_open[..json_end])
+if let Ok(value) =
+    serde_json::from_str::<serde_json::Value>(&after_open[..json_end])
{
let parsed_calls = parse_tool_calls_from_json_value(&value);
if !parsed_calls.is_empty() {