feat: initial release — ZeroClaw v0.1.0
- 22 AI providers (OpenRouter, Anthropic, OpenAI, Mistral, etc.) - 7 channels (CLI, Telegram, Discord, Slack, iMessage, Matrix, Webhook) - 5-step onboarding wizard with Project Context personalization - OpenClaw-aligned system prompt (SOUL.md, IDENTITY.md, USER.md, AGENTS.md, etc.) - SQLite memory backend with auto-save - Skills system with on-demand loading - Security: autonomy levels, command allowlists, cost limits - 532 tests passing, 0 clippy warnings
This commit is contained in:
commit
05cb353f7f
71 changed files with 15757 additions and 0 deletions
212
src/providers/anthropic.rs
Normal file
212
src/providers/anthropic.rs
Normal file
|
|
@ -0,0 +1,212 @@
|
|||
use crate::providers::traits::Provider;
|
||||
use async_trait::async_trait;
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
pub struct AnthropicProvider {
|
||||
api_key: Option<String>,
|
||||
client: Client,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct ChatRequest {
|
||||
model: String,
|
||||
max_tokens: u32,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
system: Option<String>,
|
||||
messages: Vec<Message>,
|
||||
temperature: f64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct Message {
|
||||
role: String,
|
||||
content: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct ChatResponse {
|
||||
content: Vec<ContentBlock>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct ContentBlock {
|
||||
text: String,
|
||||
}
|
||||
|
||||
impl AnthropicProvider {
|
||||
pub fn new(api_key: Option<&str>) -> Self {
|
||||
Self {
|
||||
api_key: api_key.map(ToString::to_string),
|
||||
client: Client::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Provider for AnthropicProvider {
|
||||
async fn chat_with_system(
|
||||
&self,
|
||||
system_prompt: Option<&str>,
|
||||
message: &str,
|
||||
model: &str,
|
||||
temperature: f64,
|
||||
) -> anyhow::Result<String> {
|
||||
let api_key = self.api_key.as_ref().ok_or_else(|| {
|
||||
anyhow::anyhow!(
|
||||
"Anthropic API key not set. Set ANTHROPIC_API_KEY or edit config.toml."
|
||||
)
|
||||
})?;
|
||||
|
||||
let request = ChatRequest {
|
||||
model: model.to_string(),
|
||||
max_tokens: 4096,
|
||||
system: system_prompt.map(ToString::to_string),
|
||||
messages: vec![Message {
|
||||
role: "user".to_string(),
|
||||
content: message.to_string(),
|
||||
}],
|
||||
temperature,
|
||||
};
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.post("https://api.anthropic.com/v1/messages")
|
||||
.header("x-api-key", api_key)
|
||||
.header("anthropic-version", "2023-06-01")
|
||||
.header("content-type", "application/json")
|
||||
.json(&request)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let error = response.text().await?;
|
||||
anyhow::bail!("Anthropic API error: {error}");
|
||||
}
|
||||
|
||||
let chat_response: ChatResponse = response.json().await?;
|
||||
|
||||
chat_response
|
||||
.content
|
||||
.into_iter()
|
||||
.next()
|
||||
.map(|c| c.text)
|
||||
.ok_or_else(|| anyhow::anyhow!("No response from Anthropic"))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Build a request against the fixed test model for serialization tests.
    fn opus_request(system: Option<&str>, messages: Vec<Message>, temperature: f64) -> ChatRequest {
        ChatRequest {
            model: "claude-3-opus".to_string(),
            max_tokens: 4096,
            system: system.map(ToString::to_string),
            messages,
            temperature,
        }
    }

    fn user_message(content: &str) -> Message {
        Message {
            role: "user".to_string(),
            content: content.to_string(),
        }
    }

    #[test]
    fn creates_with_key() {
        let provider = AnthropicProvider::new(Some("sk-ant-test123"));
        assert!(provider.api_key.is_some());
        assert_eq!(provider.api_key.as_deref(), Some("sk-ant-test123"));
    }

    #[test]
    fn creates_without_key() {
        let provider = AnthropicProvider::new(None);
        assert!(provider.api_key.is_none());
    }

    #[test]
    fn creates_with_empty_key() {
        let provider = AnthropicProvider::new(Some(""));
        assert!(provider.api_key.is_some());
        assert_eq!(provider.api_key.as_deref(), Some(""));
    }

    #[tokio::test]
    async fn chat_fails_without_key() {
        let provider = AnthropicProvider::new(None);
        let result = provider
            .chat_with_system(None, "hello", "claude-3-opus", 0.7)
            .await;
        assert!(result.is_err());
        let err = result.unwrap_err().to_string();
        assert!(err.contains("API key not set"), "Expected key error, got: {err}");
    }

    #[tokio::test]
    async fn chat_with_system_fails_without_key() {
        let provider = AnthropicProvider::new(None);
        let result = provider
            .chat_with_system(Some("You are ZeroClaw"), "hello", "claude-3-opus", 0.7)
            .await;
        assert!(result.is_err());
    }

    #[test]
    fn chat_request_serializes_without_system() {
        let request = opus_request(None, vec![user_message("hello")], 0.7);
        let json = serde_json::to_string(&request).unwrap();
        assert!(!json.contains("system"), "system field should be skipped when None");
        assert!(json.contains("claude-3-opus"));
        assert!(json.contains("hello"));
    }

    #[test]
    fn chat_request_serializes_with_system() {
        let request = opus_request(Some("You are ZeroClaw"), vec![user_message("hello")], 0.7);
        let json = serde_json::to_string(&request).unwrap();
        assert!(json.contains("\"system\":\"You are ZeroClaw\""));
    }

    #[test]
    fn chat_response_deserializes() {
        let json = r#"{"content":[{"type":"text","text":"Hello there!"}]}"#;
        let response: ChatResponse = serde_json::from_str(json).unwrap();
        assert_eq!(response.content.len(), 1);
        assert_eq!(response.content[0].text, "Hello there!");
    }

    #[test]
    fn chat_response_empty_content() {
        let response: ChatResponse = serde_json::from_str(r#"{"content":[]}"#).unwrap();
        assert!(response.content.is_empty());
    }

    #[test]
    fn chat_response_multiple_blocks() {
        let json =
            r#"{"content":[{"type":"text","text":"First"},{"type":"text","text":"Second"}]}"#;
        let response: ChatResponse = serde_json::from_str(json).unwrap();
        assert_eq!(response.content.len(), 2);
        assert_eq!(response.content[0].text, "First");
        assert_eq!(response.content[1].text, "Second");
    }

    #[test]
    fn temperature_range_serializes() {
        for temp in [0.0, 0.5, 1.0, 2.0] {
            let request = opus_request(None, vec![], temp);
            let json = serde_json::to_string(&request).unwrap();
            assert!(json.contains(&format!("{temp}")));
        }
    }
}
|
||||
245
src/providers/compatible.rs
Normal file
245
src/providers/compatible.rs
Normal file
|
|
@ -0,0 +1,245 @@
|
|||
//! Generic OpenAI-compatible provider.
|
||||
//! Most LLM APIs follow the same `/v1/chat/completions` format.
|
||||
//! This module provides a single implementation that works for all of them.
|
||||
|
||||
use crate::providers::traits::Provider;
|
||||
use async_trait::async_trait;
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// A provider that speaks the OpenAI-compatible chat completions API.
|
||||
/// Used by: Venice, Vercel AI Gateway, Cloudflare AI Gateway, Moonshot,
|
||||
/// Synthetic, `OpenCode` Zen, `Z.AI`, `GLM`, `MiniMax`, Bedrock, Qianfan, Groq, Mistral, `xAI`, etc.
|
||||
pub struct OpenAiCompatibleProvider {
|
||||
pub(crate) name: String,
|
||||
pub(crate) base_url: String,
|
||||
pub(crate) api_key: Option<String>,
|
||||
pub(crate) auth_header: AuthStyle,
|
||||
client: Client,
|
||||
}
|
||||
|
||||
/// How the provider expects the API key to be sent.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum AuthStyle {
|
||||
/// `Authorization: Bearer <key>`
|
||||
Bearer,
|
||||
/// `x-api-key: <key>` (used by some Chinese providers)
|
||||
XApiKey,
|
||||
/// Custom header name
|
||||
Custom(String),
|
||||
}
|
||||
|
||||
impl OpenAiCompatibleProvider {
|
||||
pub fn new(name: &str, base_url: &str, api_key: Option<&str>, auth_style: AuthStyle) -> Self {
|
||||
Self {
|
||||
name: name.to_string(),
|
||||
base_url: base_url.trim_end_matches('/').to_string(),
|
||||
api_key: api_key.map(ToString::to_string),
|
||||
auth_header: auth_style,
|
||||
client: Client::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct ChatRequest {
|
||||
model: String,
|
||||
messages: Vec<Message>,
|
||||
temperature: f64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct Message {
|
||||
role: String,
|
||||
content: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct ChatResponse {
|
||||
choices: Vec<Choice>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Choice {
|
||||
message: ResponseMessage,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct ResponseMessage {
|
||||
content: String,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Provider for OpenAiCompatibleProvider {
|
||||
async fn chat_with_system(
|
||||
&self,
|
||||
system_prompt: Option<&str>,
|
||||
message: &str,
|
||||
model: &str,
|
||||
temperature: f64,
|
||||
) -> anyhow::Result<String> {
|
||||
let api_key = self.api_key.as_ref().ok_or_else(|| {
|
||||
anyhow::anyhow!(
|
||||
"{} API key not set. Run `zeroclaw onboard` or set the appropriate env var.",
|
||||
self.name
|
||||
)
|
||||
})?;
|
||||
|
||||
let mut messages = Vec::new();
|
||||
|
||||
if let Some(sys) = system_prompt {
|
||||
messages.push(Message {
|
||||
role: "system".to_string(),
|
||||
content: sys.to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
messages.push(Message {
|
||||
role: "user".to_string(),
|
||||
content: message.to_string(),
|
||||
});
|
||||
|
||||
let request = ChatRequest {
|
||||
model: model.to_string(),
|
||||
messages,
|
||||
temperature,
|
||||
};
|
||||
|
||||
let url = format!("{}/v1/chat/completions", self.base_url);
|
||||
|
||||
let mut req = self.client.post(&url).json(&request);
|
||||
|
||||
match &self.auth_header {
|
||||
AuthStyle::Bearer => {
|
||||
req = req.header("Authorization", format!("Bearer {api_key}"));
|
||||
}
|
||||
AuthStyle::XApiKey => {
|
||||
req = req.header("x-api-key", api_key.as_str());
|
||||
}
|
||||
AuthStyle::Custom(header) => {
|
||||
req = req.header(header.as_str(), api_key.as_str());
|
||||
}
|
||||
}
|
||||
|
||||
let response = req.send().await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let error = response.text().await?;
|
||||
anyhow::bail!("{} API error: {error}", self.name);
|
||||
}
|
||||
|
||||
let chat_response: ChatResponse = response.json().await?;
|
||||
|
||||
chat_response
|
||||
.choices
|
||||
.into_iter()
|
||||
.next()
|
||||
.map(|c| c.message.content)
|
||||
.ok_or_else(|| anyhow::anyhow!("No response from {}", self.name))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Shorthand for a Bearer-auth provider.
    fn make_provider(name: &str, url: &str, key: Option<&str>) -> OpenAiCompatibleProvider {
        OpenAiCompatibleProvider::new(name, url, key, AuthStyle::Bearer)
    }

    #[test]
    fn creates_with_key() {
        let provider = make_provider("venice", "https://api.venice.ai", Some("vn-key"));
        assert_eq!(provider.name, "venice");
        assert_eq!(provider.base_url, "https://api.venice.ai");
        assert_eq!(provider.api_key.as_deref(), Some("vn-key"));
    }

    #[test]
    fn creates_without_key() {
        let provider = make_provider("test", "https://example.com", None);
        assert!(provider.api_key.is_none());
    }

    #[test]
    fn strips_trailing_slash() {
        let provider = make_provider("test", "https://example.com/", None);
        assert_eq!(provider.base_url, "https://example.com");
    }

    #[tokio::test]
    async fn chat_fails_without_key() {
        let provider = make_provider("Venice", "https://api.venice.ai", None);
        let result = provider
            .chat_with_system(None, "hello", "llama-3.3-70b", 0.7)
            .await;
        assert!(result.is_err());
        assert!(result
            .unwrap_err()
            .to_string()
            .contains("Venice API key not set"));
    }

    #[test]
    fn request_serializes_correctly() {
        let request = ChatRequest {
            model: "llama-3.3-70b".to_string(),
            messages: vec![
                Message {
                    role: "system".to_string(),
                    content: "You are ZeroClaw".to_string(),
                },
                Message {
                    role: "user".to_string(),
                    content: "hello".to_string(),
                },
            ],
            temperature: 0.7,
        };
        let json = serde_json::to_string(&request).unwrap();
        assert!(json.contains("llama-3.3-70b"));
        assert!(json.contains("system"));
        assert!(json.contains("user"));
    }

    #[test]
    fn response_deserializes() {
        let json = r#"{"choices":[{"message":{"content":"Hello from Venice!"}}]}"#;
        let response: ChatResponse = serde_json::from_str(json).unwrap();
        assert_eq!(response.choices[0].message.content, "Hello from Venice!");
    }

    #[test]
    fn response_empty_choices() {
        let response: ChatResponse = serde_json::from_str(r#"{"choices":[]}"#).unwrap();
        assert!(response.choices.is_empty());
    }

    #[test]
    fn x_api_key_auth_style() {
        let provider = OpenAiCompatibleProvider::new(
            "moonshot",
            "https://api.moonshot.cn",
            Some("ms-key"),
            AuthStyle::XApiKey,
        );
        assert!(matches!(provider.auth_header, AuthStyle::XApiKey));
    }

    #[test]
    fn custom_auth_style() {
        let provider = OpenAiCompatibleProvider::new(
            "custom",
            "https://api.example.com",
            Some("key"),
            AuthStyle::Custom("X-Custom-Key".into()),
        );
        assert!(matches!(provider.auth_header, AuthStyle::Custom(_)));
    }

    #[tokio::test]
    async fn all_compatible_providers_fail_without_key() {
        let endpoints = [
            ("Venice", "https://api.venice.ai"),
            ("Moonshot", "https://api.moonshot.cn"),
            ("GLM", "https://open.bigmodel.cn"),
            ("MiniMax", "https://api.minimax.chat"),
            ("Groq", "https://api.groq.com/openai"),
            ("Mistral", "https://api.mistral.ai"),
            ("xAI", "https://api.x.ai"),
        ];

        for (name, url) in endpoints {
            let provider = make_provider(name, url, None);
            let result = provider.chat_with_system(None, "test", "model", 0.7).await;
            assert!(result.is_err(), "{name} should fail without key");
            assert!(
                result.unwrap_err().to_string().contains("API key not set"),
                "{name} error should mention key"
            );
        }
    }
}
|
||||
266
src/providers/mod.rs
Normal file
266
src/providers/mod.rs
Normal file
|
|
@ -0,0 +1,266 @@
|
|||
pub mod anthropic;
|
||||
pub mod compatible;
|
||||
pub mod ollama;
|
||||
pub mod openai;
|
||||
pub mod openrouter;
|
||||
pub mod traits;
|
||||
|
||||
pub use traits::Provider;
|
||||
|
||||
use compatible::{AuthStyle, OpenAiCompatibleProvider};
|
||||
|
||||
/// Factory: create the right provider from config
|
||||
#[allow(clippy::too_many_lines)]
|
||||
pub fn create_provider(name: &str, api_key: Option<&str>) -> anyhow::Result<Box<dyn Provider>> {
|
||||
match name {
|
||||
// ── Primary providers (custom implementations) ───────
|
||||
"openrouter" => Ok(Box::new(openrouter::OpenRouterProvider::new(api_key))),
|
||||
"anthropic" => Ok(Box::new(anthropic::AnthropicProvider::new(api_key))),
|
||||
"openai" => Ok(Box::new(openai::OpenAiProvider::new(api_key))),
|
||||
"ollama" => Ok(Box::new(ollama::OllamaProvider::new(
|
||||
api_key.filter(|k| !k.is_empty()),
|
||||
))),
|
||||
|
||||
// ── OpenAI-compatible providers ──────────────────────
|
||||
"venice" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"Venice", "https://api.venice.ai", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
"vercel" | "vercel-ai" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"Vercel AI Gateway", "https://api.vercel.ai", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
"cloudflare" | "cloudflare-ai" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"Cloudflare AI Gateway",
|
||||
"https://gateway.ai.cloudflare.com/v1",
|
||||
api_key,
|
||||
AuthStyle::Bearer,
|
||||
))),
|
||||
"moonshot" | "kimi" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"Moonshot", "https://api.moonshot.cn", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
"synthetic" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"Synthetic", "https://api.synthetic.com", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
"opencode" | "opencode-zen" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"OpenCode Zen", "https://api.opencode.ai", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
"zai" | "z.ai" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"Z.AI", "https://api.z.ai", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
"glm" | "zhipu" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"GLM", "https://open.bigmodel.cn/api/paas", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
"minimax" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"MiniMax", "https://api.minimax.chat", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
"bedrock" | "aws-bedrock" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"Amazon Bedrock",
|
||||
"https://bedrock-runtime.us-east-1.amazonaws.com",
|
||||
api_key,
|
||||
AuthStyle::Bearer,
|
||||
))),
|
||||
"qianfan" | "baidu" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"Qianfan", "https://aip.baidubce.com", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
|
||||
// ── Extended ecosystem (community favorites) ─────────
|
||||
"groq" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"Groq", "https://api.groq.com/openai", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
"mistral" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"Mistral", "https://api.mistral.ai", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
"xai" | "grok" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"xAI", "https://api.x.ai", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
"deepseek" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"DeepSeek", "https://api.deepseek.com", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
"together" | "together-ai" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"Together AI", "https://api.together.xyz", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
"fireworks" | "fireworks-ai" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"Fireworks AI", "https://api.fireworks.ai/inference", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
"perplexity" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"Perplexity", "https://api.perplexity.ai", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
"cohere" => Ok(Box::new(OpenAiCompatibleProvider::new(
|
||||
"Cohere", "https://api.cohere.com/compatibility", api_key, AuthStyle::Bearer,
|
||||
))),
|
||||
|
||||
_ => anyhow::bail!(
|
||||
"Unknown provider: {name}. Run `zeroclaw integrations list -c ai` to see all available providers."
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// True when the factory succeeds for this (name, key) pair.
    fn ok(name: &str, key: Option<&str>) -> bool {
        create_provider(name, key).is_ok()
    }

    // ── Primary providers ────────────────────────────────────

    #[test]
    fn factory_openrouter() {
        assert!(ok("openrouter", Some("sk-test")));
        assert!(ok("openrouter", None));
    }

    #[test]
    fn factory_anthropic() {
        assert!(ok("anthropic", Some("sk-test")));
    }

    #[test]
    fn factory_openai() {
        assert!(ok("openai", Some("sk-test")));
    }

    #[test]
    fn factory_ollama() {
        assert!(ok("ollama", None));
    }

    // ── OpenAI-compatible providers ──────────────────────────

    #[test]
    fn factory_venice() {
        assert!(ok("venice", Some("vn-key")));
    }

    #[test]
    fn factory_vercel() {
        assert!(ok("vercel", Some("key")));
        assert!(ok("vercel-ai", Some("key")));
    }

    #[test]
    fn factory_cloudflare() {
        assert!(ok("cloudflare", Some("key")));
        assert!(ok("cloudflare-ai", Some("key")));
    }

    #[test]
    fn factory_moonshot() {
        assert!(ok("moonshot", Some("key")));
        assert!(ok("kimi", Some("key")));
    }

    #[test]
    fn factory_synthetic() {
        assert!(ok("synthetic", Some("key")));
    }

    #[test]
    fn factory_opencode() {
        assert!(ok("opencode", Some("key")));
        assert!(ok("opencode-zen", Some("key")));
    }

    #[test]
    fn factory_zai() {
        assert!(ok("zai", Some("key")));
        assert!(ok("z.ai", Some("key")));
    }

    #[test]
    fn factory_glm() {
        assert!(ok("glm", Some("key")));
        assert!(ok("zhipu", Some("key")));
    }

    #[test]
    fn factory_minimax() {
        assert!(ok("minimax", Some("key")));
    }

    #[test]
    fn factory_bedrock() {
        assert!(ok("bedrock", Some("key")));
        assert!(ok("aws-bedrock", Some("key")));
    }

    #[test]
    fn factory_qianfan() {
        assert!(ok("qianfan", Some("key")));
        assert!(ok("baidu", Some("key")));
    }

    // ── Extended ecosystem ───────────────────────────────────

    #[test]
    fn factory_groq() {
        assert!(ok("groq", Some("key")));
    }

    #[test]
    fn factory_mistral() {
        assert!(ok("mistral", Some("key")));
    }

    #[test]
    fn factory_xai() {
        assert!(ok("xai", Some("key")));
        assert!(ok("grok", Some("key")));
    }

    #[test]
    fn factory_deepseek() {
        assert!(ok("deepseek", Some("key")));
    }

    #[test]
    fn factory_together() {
        assert!(ok("together", Some("key")));
        assert!(ok("together-ai", Some("key")));
    }

    #[test]
    fn factory_fireworks() {
        assert!(ok("fireworks", Some("key")));
        assert!(ok("fireworks-ai", Some("key")));
    }

    #[test]
    fn factory_perplexity() {
        assert!(ok("perplexity", Some("key")));
    }

    #[test]
    fn factory_cohere() {
        assert!(ok("cohere", Some("key")));
    }

    // ── Error cases ──────────────────────────────────────────

    #[test]
    fn factory_unknown_provider_errors() {
        let result = create_provider("nonexistent", None);
        assert!(result.is_err());
        let msg = result.err().unwrap().to_string();
        assert!(msg.contains("Unknown provider"));
        assert!(msg.contains("nonexistent"));
    }

    #[test]
    fn factory_empty_name_errors() {
        assert!(create_provider("", None).is_err());
    }

    #[test]
    fn factory_all_providers_create_successfully() {
        let names = [
            "openrouter", "anthropic", "openai", "ollama",
            "venice", "vercel", "cloudflare", "moonshot", "synthetic",
            "opencode", "zai", "glm", "minimax", "bedrock", "qianfan",
            "groq", "mistral", "xai", "deepseek", "together",
            "fireworks", "perplexity", "cohere",
        ];
        for name in names {
            assert!(
                ok(name, Some("test-key")),
                "Provider '{name}' should create successfully"
            );
        }
    }
}
|
||||
177
src/providers/ollama.rs
Normal file
177
src/providers/ollama.rs
Normal file
|
|
@ -0,0 +1,177 @@
|
|||
use crate::providers::traits::Provider;
|
||||
use async_trait::async_trait;
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
pub struct OllamaProvider {
|
||||
base_url: String,
|
||||
client: Client,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct ChatRequest {
|
||||
model: String,
|
||||
messages: Vec<Message>,
|
||||
stream: bool,
|
||||
options: Options,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct Message {
|
||||
role: String,
|
||||
content: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct Options {
|
||||
temperature: f64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct ChatResponse {
|
||||
message: ResponseMessage,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct ResponseMessage {
|
||||
content: String,
|
||||
}
|
||||
|
||||
impl OllamaProvider {
|
||||
pub fn new(base_url: Option<&str>) -> Self {
|
||||
Self {
|
||||
base_url: base_url
|
||||
.unwrap_or("http://localhost:11434")
|
||||
.trim_end_matches('/')
|
||||
.to_string(),
|
||||
client: Client::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Provider for OllamaProvider {
|
||||
async fn chat_with_system(
|
||||
&self,
|
||||
system_prompt: Option<&str>,
|
||||
message: &str,
|
||||
model: &str,
|
||||
temperature: f64,
|
||||
) -> anyhow::Result<String> {
|
||||
let mut messages = Vec::new();
|
||||
|
||||
if let Some(sys) = system_prompt {
|
||||
messages.push(Message {
|
||||
role: "system".to_string(),
|
||||
content: sys.to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
messages.push(Message {
|
||||
role: "user".to_string(),
|
||||
content: message.to_string(),
|
||||
});
|
||||
|
||||
let request = ChatRequest {
|
||||
model: model.to_string(),
|
||||
messages,
|
||||
stream: false,
|
||||
options: Options { temperature },
|
||||
};
|
||||
|
||||
let url = format!("{}/api/chat", self.base_url);
|
||||
|
||||
let response = self.client.post(&url).json(&request).send().await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let error = response.text().await?;
|
||||
anyhow::bail!("Ollama error: {error}. Is Ollama running? (brew install ollama && ollama serve)");
|
||||
}
|
||||
|
||||
let chat_response: ChatResponse = response.json().await?;
|
||||
Ok(chat_response.message.content)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn default_url() {
        let provider = OllamaProvider::new(None);
        assert_eq!(provider.base_url, "http://localhost:11434");
    }

    #[test]
    fn custom_url_trailing_slash() {
        let provider = OllamaProvider::new(Some("http://192.168.1.100:11434/"));
        assert_eq!(provider.base_url, "http://192.168.1.100:11434");
    }

    #[test]
    fn custom_url_no_trailing_slash() {
        let provider = OllamaProvider::new(Some("http://myserver:11434"));
        assert_eq!(provider.base_url, "http://myserver:11434");
    }

    #[test]
    fn empty_url_uses_empty() {
        let provider = OllamaProvider::new(Some(""));
        assert_eq!(provider.base_url, "");
    }

    #[test]
    fn request_serializes_with_system() {
        let request = ChatRequest {
            model: "llama3".to_string(),
            messages: vec![
                Message {
                    role: "system".to_string(),
                    content: "You are ZeroClaw".to_string(),
                },
                Message {
                    role: "user".to_string(),
                    content: "hello".to_string(),
                },
            ],
            stream: false,
            options: Options { temperature: 0.7 },
        };
        let json = serde_json::to_string(&request).unwrap();
        assert!(json.contains("\"stream\":false"));
        assert!(json.contains("llama3"));
        assert!(json.contains("system"));
        assert!(json.contains("\"temperature\":0.7"));
    }

    #[test]
    fn request_serializes_without_system() {
        let request = ChatRequest {
            model: "mistral".to_string(),
            messages: vec![Message {
                role: "user".to_string(),
                content: "test".to_string(),
            }],
            stream: false,
            options: Options { temperature: 0.0 },
        };
        let json = serde_json::to_string(&request).unwrap();
        assert!(!json.contains("\"role\":\"system\""));
        assert!(json.contains("mistral"));
    }

    #[test]
    fn response_deserializes() {
        let json = r#"{"message":{"role":"assistant","content":"Hello from Ollama!"}}"#;
        let response: ChatResponse = serde_json::from_str(json).unwrap();
        assert_eq!(response.message.content, "Hello from Ollama!");
    }

    #[test]
    fn response_with_empty_content() {
        let json = r#"{"message":{"role":"assistant","content":""}}"#;
        let response: ChatResponse = serde_json::from_str(json).unwrap();
        assert!(response.message.content.is_empty());
    }

    #[test]
    fn response_with_multiline() {
        let json = r#"{"message":{"role":"assistant","content":"line1\nline2\nline3"}}"#;
        let response: ChatResponse = serde_json::from_str(json).unwrap();
        assert!(response.message.content.contains("line1"));
    }
}
|
||||
211
src/providers/openai.rs
Normal file
211
src/providers/openai.rs
Normal file
|
|
@ -0,0 +1,211 @@
|
|||
use crate::providers::traits::Provider;
|
||||
use async_trait::async_trait;
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
pub struct OpenAiProvider {
|
||||
api_key: Option<String>,
|
||||
client: Client,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct ChatRequest {
|
||||
model: String,
|
||||
messages: Vec<Message>,
|
||||
temperature: f64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct Message {
|
||||
role: String,
|
||||
content: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct ChatResponse {
|
||||
choices: Vec<Choice>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct Choice {
|
||||
message: ResponseMessage,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct ResponseMessage {
|
||||
content: String,
|
||||
}
|
||||
|
||||
impl OpenAiProvider {
|
||||
pub fn new(api_key: Option<&str>) -> Self {
|
||||
Self {
|
||||
api_key: api_key.map(ToString::to_string),
|
||||
client: Client::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Provider for OpenAiProvider {
|
||||
async fn chat_with_system(
|
||||
&self,
|
||||
system_prompt: Option<&str>,
|
||||
message: &str,
|
||||
model: &str,
|
||||
temperature: f64,
|
||||
) -> anyhow::Result<String> {
|
||||
let api_key = self.api_key.as_ref().ok_or_else(|| {
|
||||
anyhow::anyhow!("OpenAI API key not set. Set OPENAI_API_KEY or edit config.toml.")
|
||||
})?;
|
||||
|
||||
let mut messages = Vec::new();
|
||||
|
||||
if let Some(sys) = system_prompt {
|
||||
messages.push(Message {
|
||||
role: "system".to_string(),
|
||||
content: sys.to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
messages.push(Message {
|
||||
role: "user".to_string(),
|
||||
content: message.to_string(),
|
||||
});
|
||||
|
||||
let request = ChatRequest {
|
||||
model: model.to_string(),
|
||||
messages,
|
||||
temperature,
|
||||
};
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.post("https://api.openai.com/v1/chat/completions")
|
||||
.header("Authorization", format!("Bearer {api_key}"))
|
||||
.json(&request)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let error = response.text().await?;
|
||||
anyhow::bail!("OpenAI API error: {error}");
|
||||
}
|
||||
|
||||
let chat_response: ChatResponse = response.json().await?;
|
||||
|
||||
chat_response
|
||||
.choices
|
||||
.into_iter()
|
||||
.next()
|
||||
.map(|c| c.message.content)
|
||||
.ok_or_else(|| anyhow::anyhow!("No response from OpenAI"))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // --- Constructor: the key is stored verbatim, presence is not validated. ---

    #[test]
    fn creates_with_key() {
        let p = OpenAiProvider::new(Some("sk-proj-abc123"));
        assert_eq!(p.api_key.as_deref(), Some("sk-proj-abc123"));
    }

    #[test]
    fn creates_without_key() {
        let p = OpenAiProvider::new(None);
        assert!(p.api_key.is_none());
    }

    // An empty string is stored as-is; validity is the API's concern.
    #[test]
    fn creates_with_empty_key() {
        let p = OpenAiProvider::new(Some(""));
        assert_eq!(p.api_key.as_deref(), Some(""));
    }

    // --- Missing-key errors surface before any network I/O happens. ---

    #[tokio::test]
    async fn chat_fails_without_key() {
        let p = OpenAiProvider::new(None);
        let result = p.chat_with_system(None, "hello", "gpt-4o", 0.7).await;
        assert!(result.is_err());
        assert!(result.unwrap_err().to_string().contains("API key not set"));
    }

    #[tokio::test]
    async fn chat_with_system_fails_without_key() {
        let p = OpenAiProvider::new(None);
        let result = p
            .chat_with_system(Some("You are ZeroClaw"), "test", "gpt-4o", 0.5)
            .await;
        assert!(result.is_err());
    }

    // --- Request serialization: wire-format shape of ChatRequest. ---

    #[test]
    fn request_serializes_with_system_message() {
        let req = ChatRequest {
            model: "gpt-4o".to_string(),
            messages: vec![
                Message { role: "system".to_string(), content: "You are ZeroClaw".to_string() },
                Message { role: "user".to_string(), content: "hello".to_string() },
            ],
            temperature: 0.7,
        };
        let json = serde_json::to_string(&req).unwrap();
        assert!(json.contains("\"role\":\"system\""));
        assert!(json.contains("\"role\":\"user\""));
        assert!(json.contains("gpt-4o"));
    }

    // With no system turn, the word "system" must not appear anywhere in
    // the payload (no role, no stray field).
    #[test]
    fn request_serializes_without_system() {
        let req = ChatRequest {
            model: "gpt-4o".to_string(),
            messages: vec![
                Message { role: "user".to_string(), content: "hello".to_string() },
            ],
            temperature: 0.0,
        };
        let json = serde_json::to_string(&req).unwrap();
        assert!(!json.contains("system"));
        assert!(json.contains("\"temperature\":0.0"));
    }

    // --- Response deserialization: only `choices[].message.content` is read. ---

    #[test]
    fn response_deserializes_single_choice() {
        let json = r#"{"choices":[{"message":{"content":"Hi!"}}]}"#;
        let resp: ChatResponse = serde_json::from_str(json).unwrap();
        assert_eq!(resp.choices.len(), 1);
        assert_eq!(resp.choices[0].message.content, "Hi!");
    }

    // An empty choices array deserializes fine; the provider turns it
    // into a "No response" error at a higher level.
    #[test]
    fn response_deserializes_empty_choices() {
        let json = r#"{"choices":[]}"#;
        let resp: ChatResponse = serde_json::from_str(json).unwrap();
        assert!(resp.choices.is_empty());
    }

    #[test]
    fn response_deserializes_multiple_choices() {
        let json = r#"{"choices":[{"message":{"content":"A"}},{"message":{"content":"B"}}]}"#;
        let resp: ChatResponse = serde_json::from_str(json).unwrap();
        assert_eq!(resp.choices.len(), 2);
        assert_eq!(resp.choices[0].message.content, "A");
    }

    // Non-ASCII content must round-trip untouched.
    #[test]
    fn response_with_unicode() {
        let json = r#"{"choices":[{"message":{"content":"こんにちは 🦀"}}]}"#;
        let resp: ChatResponse = serde_json::from_str(json).unwrap();
        assert_eq!(resp.choices[0].message.content, "こんにちは 🦀");
    }

    // Large payloads (100 KB) deserialize without truncation.
    #[test]
    fn response_with_long_content() {
        let long = "x".repeat(100_000);
        let json = format!(r#"{{"choices":[{{"message":{{"content":"{long}"}}}}]}}"#);
        let resp: ChatResponse = serde_json::from_str(&json).unwrap();
        assert_eq!(resp.choices[0].message.content.len(), 100_000);
    }
}
|
||||
107
src/providers/openrouter.rs
Normal file
107
src/providers/openrouter.rs
Normal file
|
|
@ -0,0 +1,107 @@
|
|||
use crate::providers::traits::Provider;
|
||||
use async_trait::async_trait;
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Provider backed by the OpenRouter chat completions API
/// (OpenAI-compatible wire format).
pub struct OpenRouterProvider {
    // Bearer token for openrouter.ai; `None` until configured —
    // checked lazily at request time.
    api_key: Option<String>,
    // Reusable HTTP client (connection pooling across requests).
    client: Client,
}

/// JSON body for `POST /api/v1/chat/completions`.
#[derive(Debug, Serialize)]
struct ChatRequest {
    model: String,
    messages: Vec<Message>,
    temperature: f64,
}

/// One chat turn; `role` is "system" or "user" in this crate's usage.
#[derive(Debug, Serialize)]
struct Message {
    role: String,
    content: String,
}

/// Minimal view of the completion response: only the fields we read.
#[derive(Debug, Deserialize)]
struct ChatResponse {
    choices: Vec<Choice>,
}

#[derive(Debug, Deserialize)]
struct Choice {
    message: ResponseMessage,
}

#[derive(Debug, Deserialize)]
struct ResponseMessage {
    content: String,
}
|
||||
|
||||
impl OpenRouterProvider {
|
||||
pub fn new(api_key: Option<&str>) -> Self {
|
||||
Self {
|
||||
api_key: api_key.map(ToString::to_string),
|
||||
client: Client::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Provider for OpenRouterProvider {
|
||||
async fn chat_with_system(
|
||||
&self,
|
||||
system_prompt: Option<&str>,
|
||||
message: &str,
|
||||
model: &str,
|
||||
temperature: f64,
|
||||
) -> anyhow::Result<String> {
|
||||
let api_key = self.api_key.as_ref()
|
||||
.ok_or_else(|| anyhow::anyhow!("OpenRouter API key not set. Run `zeroclaw onboard` or set OPENROUTER_API_KEY env var."))?;
|
||||
|
||||
let mut messages = Vec::new();
|
||||
|
||||
if let Some(sys) = system_prompt {
|
||||
messages.push(Message {
|
||||
role: "system".to_string(),
|
||||
content: sys.to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
messages.push(Message {
|
||||
role: "user".to_string(),
|
||||
content: message.to_string(),
|
||||
});
|
||||
|
||||
let request = ChatRequest {
|
||||
model: model.to_string(),
|
||||
messages,
|
||||
temperature,
|
||||
};
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.post("https://openrouter.ai/api/v1/chat/completions")
|
||||
.header("Authorization", format!("Bearer {api_key}"))
|
||||
.header(
|
||||
"HTTP-Referer",
|
||||
"https://github.com/theonlyhennygod/zeroclaw",
|
||||
)
|
||||
.header("X-Title", "ZeroClaw")
|
||||
.json(&request)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let error = response.text().await?;
|
||||
anyhow::bail!("OpenRouter API error: {error}");
|
||||
}
|
||||
|
||||
let chat_response: ChatResponse = response.json().await?;
|
||||
|
||||
chat_response
|
||||
.choices
|
||||
.into_iter()
|
||||
.next()
|
||||
.map(|c| c.message.content)
|
||||
.ok_or_else(|| anyhow::anyhow!("No response from OpenRouter"))
|
||||
}
|
||||
}
|
||||
22
src/providers/traits.rs
Normal file
22
src/providers/traits.rs
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
use async_trait::async_trait;
|
||||
|
||||
/// Common interface implemented by every chat backend.
///
/// `Send + Sync` lets trait objects be shared across async tasks.
#[async_trait]
pub trait Provider: Send + Sync {
    /// Convenience wrapper: chat with no system prompt. Delegates to
    /// [`Provider::chat_with_system`] with `system_prompt = None`.
    async fn chat(
        &self,
        message: &str,
        model: &str,
        temperature: f64,
    ) -> anyhow::Result<String> {
        self.chat_with_system(None, message, model, temperature)
            .await
    }

    /// Sends `message` (optionally preceded by `system_prompt`) to `model`
    /// at the given sampling `temperature` and returns the assistant's
    /// reply text.
    async fn chat_with_system(
        &self,
        system_prompt: Option<&str>,
        message: &str,
        model: &str,
        temperature: f64,
    ) -> anyhow::Result<String>;
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue