zeroclaw/src/providers/openai.rs
Edvard 1336c2f03e feat(providers): add warmup() for OpenAI, Anthropic, Gemini, Compatible, GLM
All five providers have HTTP clients but did not implement warmup(),
relying on the trait default no-op. This adds lightweight warmup calls
to establish TLS + HTTP/2 connection pools on startup, reducing
first-request latency. Each warmup is skipped when credentials are
absent, matching the OpenRouter pattern.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-18 14:35:03 +08:00

459 lines
14 KiB
Rust

use crate::providers::traits::{
ChatMessage, ChatRequest as ProviderChatRequest, ChatResponse as ProviderChatResponse,
Provider, ToolCall as ProviderToolCall,
};
use crate::tools::ToolSpec;
use async_trait::async_trait;
use reqwest::Client;
use serde::{Deserialize, Serialize};
/// Provider backed by the OpenAI Chat Completions API.
pub struct OpenAiProvider {
    /// Bearer token for `api.openai.com`; `None` makes chat calls fail fast
    /// with a configuration error and turns `warmup()` into a no-op.
    credential: Option<String>,
    /// Pooled HTTP client configured with timeouts in `new()`.
    client: Client,
}
/// Minimal request body for the plain-text `chat_with_system` path
/// (no tool calling). Field names match the OpenAI wire format.
#[derive(Debug, Serialize)]
struct ChatRequest {
    model: String,
    messages: Vec<Message>,
    temperature: f64,
}

/// A single role/content turn in the plain-text request.
#[derive(Debug, Serialize)]
struct Message {
    role: String,
    content: String,
}

/// Response envelope for the plain-text path; only `choices` is read.
#[derive(Debug, Deserialize)]
struct ChatResponse {
    choices: Vec<Choice>,
}

/// One completion choice; only the message payload is used.
#[derive(Debug, Deserialize)]
struct Choice {
    message: ResponseMessage,
}

/// Assistant message for the plain-text path; content is required here,
/// unlike the tool-calling `NativeResponseMessage`.
#[derive(Debug, Deserialize)]
struct ResponseMessage {
    content: String,
}
/// Request body for the native tool-calling `chat` path.
/// `tools`/`tool_choice` are omitted from the JSON when `None`.
#[derive(Debug, Serialize)]
struct NativeChatRequest {
    model: String,
    messages: Vec<NativeMessage>,
    temperature: f64,
    #[serde(skip_serializing_if = "Option::is_none")]
    tools: Option<Vec<NativeToolSpec>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    tool_choice: Option<String>,
}

/// A turn in the tool-calling conversation. Optional fields cover the
/// three shapes OpenAI accepts: plain text, assistant tool-call requests
/// (`tool_calls` set), and tool results (`tool_call_id` set).
#[derive(Debug, Serialize)]
struct NativeMessage {
    role: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    content: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    tool_call_id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    tool_calls: Option<Vec<NativeToolCall>>,
}

/// Tool advertisement; `kind` serializes as the reserved word `type`
/// and is always the string "function".
#[derive(Debug, Serialize)]
struct NativeToolSpec {
    #[serde(rename = "type")]
    kind: String,
    function: NativeToolFunctionSpec,
}

/// Function metadata inside a tool spec; `parameters` is a JSON Schema.
#[derive(Debug, Serialize)]
struct NativeToolFunctionSpec {
    name: String,
    description: String,
    parameters: serde_json::Value,
}
/// A tool call, used both when echoing assistant history back to the API
/// (Serialize) and when reading the model's response (Deserialize).
/// `id`/`kind` are optional because some responses omit them.
#[derive(Debug, Serialize, Deserialize)]
struct NativeToolCall {
    #[serde(skip_serializing_if = "Option::is_none")]
    id: Option<String>,
    #[serde(rename = "type", skip_serializing_if = "Option::is_none")]
    kind: Option<String>,
    function: NativeFunctionCall,
}

/// The function invocation payload; `arguments` is a JSON string, not an
/// object, per the OpenAI wire format.
#[derive(Debug, Serialize, Deserialize)]
struct NativeFunctionCall {
    name: String,
    arguments: String,
}

/// Response envelope for the tool-calling path.
#[derive(Debug, Deserialize)]
struct NativeChatResponse {
    choices: Vec<NativeChoice>,
}

/// One completion choice from the tool-calling path.
#[derive(Debug, Deserialize)]
struct NativeChoice {
    message: NativeResponseMessage,
}

/// Assistant reply: either/both of text content and tool calls may be
/// present, so both default to `None` when absent from the JSON.
#[derive(Debug, Deserialize)]
struct NativeResponseMessage {
    #[serde(default)]
    content: Option<String>,
    #[serde(default)]
    tool_calls: Option<Vec<NativeToolCall>>,
}
impl OpenAiProvider {
    /// Build a provider, storing the optional API key and a pooled HTTP
    /// client with a 120 s request timeout and a 10 s connect timeout.
    /// Falls back to a default client if the builder fails.
    pub fn new(credential: Option<&str>) -> Self {
        let client = Client::builder()
            .timeout(std::time::Duration::from_secs(120))
            .connect_timeout(std::time::Duration::from_secs(10))
            .build()
            .unwrap_or_else(|_| Client::new());
        Self {
            credential: credential.map(str::to_owned),
            client,
        }
    }

    /// Translate internal tool specs into OpenAI's `function` tool format.
    /// `None` in means `None` out (tools omitted from the request).
    fn convert_tools(tools: Option<&[ToolSpec]>) -> Option<Vec<NativeToolSpec>> {
        let items = tools?;
        let converted = items
            .iter()
            .map(|tool| NativeToolSpec {
                kind: "function".to_string(),
                function: NativeToolFunctionSpec {
                    name: tool.name.clone(),
                    description: tool.description.clone(),
                    parameters: tool.parameters.clone(),
                },
            })
            .collect();
        Some(converted)
    }

    /// Try to decode an assistant turn whose content is a JSON envelope
    /// carrying `tool_calls` (and optionally `content`). Returns `None`
    /// when the content is not such an envelope, so the caller falls back
    /// to a plain text message.
    fn assistant_from_envelope(content: &str) -> Option<NativeMessage> {
        let value: serde_json::Value = serde_json::from_str(content).ok()?;
        let raw_calls = value.get("tool_calls")?.clone();
        let parsed: Vec<ProviderToolCall> = serde_json::from_value(raw_calls).ok()?;
        let tool_calls = parsed
            .into_iter()
            .map(|tc| NativeToolCall {
                id: Some(tc.id),
                kind: Some("function".to_string()),
                function: NativeFunctionCall {
                    name: tc.name,
                    arguments: tc.arguments,
                },
            })
            .collect::<Vec<_>>();
        Some(NativeMessage {
            role: "assistant".to_string(),
            content: value
                .get("content")
                .and_then(serde_json::Value::as_str)
                .map(str::to_owned),
            tool_call_id: None,
            tool_calls: Some(tool_calls),
        })
    }

    /// Try to decode a tool-result turn whose content is a JSON envelope
    /// with `tool_call_id` and `content`. Returns `None` only when the
    /// content is not valid JSON; missing fields simply stay `None`.
    fn tool_from_envelope(content: &str) -> Option<NativeMessage> {
        let value: serde_json::Value = serde_json::from_str(content).ok()?;
        let string_field = |key: &str| {
            value
                .get(key)
                .and_then(serde_json::Value::as_str)
                .map(str::to_owned)
        };
        Some(NativeMessage {
            role: "tool".to_string(),
            content: string_field("content"),
            tool_call_id: string_field("tool_call_id"),
            tool_calls: None,
        })
    }

    /// Map generic chat history onto OpenAI's native message shapes.
    /// Assistant and tool turns may carry JSON envelopes (see the helpers
    /// above); anything else passes through as plain role/content.
    fn convert_messages(messages: &[ChatMessage]) -> Vec<NativeMessage> {
        messages
            .iter()
            .map(|m| {
                let decoded = match m.role.as_str() {
                    "assistant" => Self::assistant_from_envelope(&m.content),
                    "tool" => Self::tool_from_envelope(&m.content),
                    _ => None,
                };
                decoded.unwrap_or_else(|| NativeMessage {
                    role: m.role.clone(),
                    content: Some(m.content.clone()),
                    tool_call_id: None,
                    tool_calls: None,
                })
            })
            .collect()
    }

    /// Convert a native response message into the provider-agnostic form.
    /// Tool calls without an id get a fresh UUID so downstream bookkeeping
    /// always has one.
    fn parse_native_response(message: NativeResponseMessage) -> ProviderChatResponse {
        let mut tool_calls = Vec::new();
        for tc in message.tool_calls.unwrap_or_default() {
            tool_calls.push(ProviderToolCall {
                id: tc.id.unwrap_or_else(|| uuid::Uuid::new_v4().to_string()),
                name: tc.function.name,
                arguments: tc.function.arguments,
            });
        }
        ProviderChatResponse {
            text: message.content,
            tool_calls,
        }
    }
}
#[async_trait]
impl Provider for OpenAiProvider {
async fn chat_with_system(
&self,
system_prompt: Option<&str>,
message: &str,
model: &str,
temperature: f64,
) -> anyhow::Result<String> {
let credential = self.credential.as_ref().ok_or_else(|| {
anyhow::anyhow!("OpenAI API key not set. Set OPENAI_API_KEY or edit config.toml.")
})?;
let mut messages = Vec::new();
if let Some(sys) = system_prompt {
messages.push(Message {
role: "system".to_string(),
content: sys.to_string(),
});
}
messages.push(Message {
role: "user".to_string(),
content: message.to_string(),
});
let request = ChatRequest {
model: model.to_string(),
messages,
temperature,
};
let response = self
.client
.post("https://api.openai.com/v1/chat/completions")
.header("Authorization", format!("Bearer {credential}"))
.json(&request)
.send()
.await?;
if !response.status().is_success() {
return Err(super::api_error("OpenAI", response).await);
}
let chat_response: ChatResponse = response.json().await?;
chat_response
.choices
.into_iter()
.next()
.map(|c| c.message.content)
.ok_or_else(|| anyhow::anyhow!("No response from OpenAI"))
}
async fn chat(
&self,
request: ProviderChatRequest<'_>,
model: &str,
temperature: f64,
) -> anyhow::Result<ProviderChatResponse> {
let credential = self.credential.as_ref().ok_or_else(|| {
anyhow::anyhow!("OpenAI API key not set. Set OPENAI_API_KEY or edit config.toml.")
})?;
let tools = Self::convert_tools(request.tools);
let native_request = NativeChatRequest {
model: model.to_string(),
messages: Self::convert_messages(request.messages),
temperature,
tool_choice: tools.as_ref().map(|_| "auto".to_string()),
tools,
};
let response = self
.client
.post("https://api.openai.com/v1/chat/completions")
.header("Authorization", format!("Bearer {credential}"))
.json(&native_request)
.send()
.await?;
if !response.status().is_success() {
return Err(super::api_error("OpenAI", response).await);
}
let native_response: NativeChatResponse = response.json().await?;
let message = native_response
.choices
.into_iter()
.next()
.map(|c| c.message)
.ok_or_else(|| anyhow::anyhow!("No response from OpenAI"))?;
Ok(Self::parse_native_response(message))
}
fn supports_native_tools(&self) -> bool {
true
}
async fn warmup(&self) -> anyhow::Result<()> {
if let Some(credential) = self.credential.as_ref() {
self.client
.get("https://api.openai.com/v1/models")
.header("Authorization", format!("Bearer {credential}"))
.send()
.await?
.error_for_status()?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn creates_with_key() {
        let provider = OpenAiProvider::new(Some("openai-test-credential"));
        assert_eq!(
            provider.credential,
            Some("openai-test-credential".to_string())
        );
    }

    #[test]
    fn creates_without_key() {
        let provider = OpenAiProvider::new(None);
        assert_eq!(provider.credential, None);
    }

    #[test]
    fn creates_with_empty_key() {
        // An empty string is stored as-is; validation happens server-side.
        let provider = OpenAiProvider::new(Some(""));
        assert_eq!(provider.credential, Some(String::new()));
    }

    #[tokio::test]
    async fn chat_fails_without_key() {
        let provider = OpenAiProvider::new(None);
        let err = provider
            .chat_with_system(None, "hello", "gpt-4o", 0.7)
            .await
            .unwrap_err();
        assert!(err.to_string().contains("API key not set"));
    }

    #[tokio::test]
    async fn chat_with_system_fails_without_key() {
        let provider = OpenAiProvider::new(None);
        let outcome = provider
            .chat_with_system(Some("You are ZeroClaw"), "test", "gpt-4o", 0.5)
            .await;
        assert!(outcome.is_err());
    }

    #[test]
    fn request_serializes_with_system_message() {
        let request = ChatRequest {
            model: "gpt-4o".to_string(),
            messages: vec![
                Message {
                    role: "system".to_string(),
                    content: "You are ZeroClaw".to_string(),
                },
                Message {
                    role: "user".to_string(),
                    content: "hello".to_string(),
                },
            ],
            temperature: 0.7,
        };
        let value = serde_json::to_value(&request).unwrap();
        assert_eq!(value["messages"][0]["role"], "system");
        assert_eq!(value["messages"][1]["role"], "user");
        assert_eq!(value["model"], "gpt-4o");
    }

    #[test]
    fn request_serializes_without_system() {
        let request = ChatRequest {
            model: "gpt-4o".to_string(),
            messages: vec![Message {
                role: "user".to_string(),
                content: "hello".to_string(),
            }],
            temperature: 0.0,
        };
        let value = serde_json::to_value(&request).unwrap();
        // Exactly one message, and it is the user turn — no system entry.
        assert_eq!(value["messages"].as_array().unwrap().len(), 1);
        assert_eq!(value["messages"][0]["role"], "user");
        assert_eq!(value["temperature"], 0.0);
    }

    #[test]
    fn response_deserializes_single_choice() {
        let raw = r#"{"choices":[{"message":{"content":"Hi!"}}]}"#;
        let parsed: ChatResponse = serde_json::from_str(raw).unwrap();
        assert_eq!(parsed.choices.len(), 1);
        assert_eq!(parsed.choices[0].message.content, "Hi!");
    }

    #[test]
    fn response_deserializes_empty_choices() {
        let parsed: ChatResponse = serde_json::from_str(r#"{"choices":[]}"#).unwrap();
        assert_eq!(parsed.choices.len(), 0);
    }

    #[test]
    fn response_deserializes_multiple_choices() {
        let raw = r#"{"choices":[{"message":{"content":"A"}},{"message":{"content":"B"}}]}"#;
        let parsed: ChatResponse = serde_json::from_str(raw).unwrap();
        assert_eq!(parsed.choices.len(), 2);
        assert_eq!(parsed.choices[0].message.content, "A");
    }

    #[test]
    fn response_with_unicode() {
        let raw = r#"{"choices":[{"message":{"content":"こんにちは 🦀"}}]}"#;
        let parsed: ChatResponse = serde_json::from_str(raw).unwrap();
        assert_eq!(parsed.choices[0].message.content, "こんにちは 🦀");
    }

    #[test]
    fn response_with_long_content() {
        let long = "x".repeat(100_000);
        let raw = format!(r#"{{"choices":[{{"message":{{"content":"{long}"}}}}]}}"#);
        let parsed: ChatResponse = serde_json::from_str(&raw).unwrap();
        assert_eq!(parsed.choices[0].message.content.len(), 100_000);
    }

    #[tokio::test]
    async fn warmup_without_key_is_noop() {
        // Without a credential, warmup must succeed without any network call.
        let provider = OpenAiProvider::new(None);
        assert!(provider.warmup().await.is_ok());
    }
}