feat: initial release — ZeroClaw v0.1.0
- 22 AI providers (OpenRouter, Anthropic, OpenAI, Mistral, etc.) - 7 channels (CLI, Telegram, Discord, Slack, iMessage, Matrix, Webhook) - 5-step onboarding wizard with Project Context personalization - OpenClaw-aligned system prompt (SOUL.md, IDENTITY.md, USER.md, AGENTS.md, etc.) - SQLite memory backend with auto-save - Skills system with on-demand loading - Security: autonomy levels, command allowlists, cost limits - 532 tests passing, 0 clippy warnings
This commit is contained in:
commit
05cb353f7f
71 changed files with 15757 additions and 0 deletions
65
examples/custom_provider.rs
Normal file
65
examples/custom_provider.rs
Normal file
|
|
@@ -0,0 +1,65 @@
|
|||
//! Example: Implementing a custom Provider for ZeroClaw
|
||||
//!
|
||||
//! This shows how to add a new LLM backend in ~30 lines of code.
|
||||
//! Copy this file, modify the API call, and register in `src/providers/mod.rs`.
|
||||
|
||||
use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
|
||||
// In a real implementation, you'd import from the crate:
|
||||
// use zeroclaw::providers::traits::Provider;
|
||||
|
||||
/// Minimal Provider trait (mirrors src/providers/traits.rs)
///
/// A provider is any LLM backend; it must be `Send + Sync` so it can be
/// shared across async tasks.
#[async_trait]
pub trait Provider: Send + Sync {
    /// Send one user `message` to the named `model` at the given sampling
    /// `temperature`, resolving to the model's text reply.
    async fn chat(&self, message: &str, model: &str, temperature: f64) -> Result<String>;
}
|
||||
|
||||
/// Example: Ollama local provider
///
/// Talks to a locally running Ollama daemon over HTTP.
pub struct OllamaProvider {
    // Root URL of the Ollama server, e.g. "http://localhost:11434".
    base_url: String,
    // HTTP client reused for every request made by this provider.
    client: reqwest::Client,
}
|
||||
|
||||
impl OllamaProvider {
|
||||
pub fn new(base_url: Option<&str>) -> Self {
|
||||
Self {
|
||||
base_url: base_url.unwrap_or("http://localhost:11434").to_string(),
|
||||
client: reqwest::Client::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Provider for OllamaProvider {
|
||||
async fn chat(&self, message: &str, model: &str, temperature: f64) -> Result<String> {
|
||||
let url = format!("{}/api/generate", self.base_url);
|
||||
|
||||
let body = serde_json::json!({
|
||||
"model": model,
|
||||
"prompt": message,
|
||||
"temperature": temperature,
|
||||
"stream": false,
|
||||
});
|
||||
|
||||
let resp = self
|
||||
.client
|
||||
.post(&url)
|
||||
.json(&body)
|
||||
.send()
|
||||
.await?
|
||||
.json::<serde_json::Value>()
|
||||
.await?;
|
||||
|
||||
resp["response"]
|
||||
.as_str()
|
||||
.map(|s| s.to_string())
|
||||
.ok_or_else(|| anyhow::anyhow!("No response field in Ollama reply"))
|
||||
}
|
||||
}
|
||||
|
||||
/// Entry point: this example only prints integration instructions;
/// the provider defined above is never invoked here.
fn main() {
    let instructions = [
        "This is an example — see CONTRIBUTING.md for integration steps.",
        "Register your provider in src/providers/mod.rs:",
        "  \"ollama\" => Ok(Box::new(ollama::OllamaProvider::new(None))),",
    ];
    for line in instructions {
        println!("{line}");
    }
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue