diff --git a/README.md b/README.md
index 7cbc822..efc06de 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,7 @@
The fastest, smallest, fully autonomous AI assistant — deploy anywhere, swap anything.
```
-~3MB binary · <10ms startup · 649 tests · 22 providers · Pluggable everything
+~3MB binary · <10ms startup · 657 tests · 22+ providers · Pluggable everything
```
## Quick Start
@@ -54,54 +54,13 @@ cargo run --release -- tools test memory_recall '{"query": "Rust"}'
Every subsystem is a **trait** — swap implementations with a config change, zero code changes.
-```
-┌─────────────────────────────────────────────────────────────────────┐
-│ ZeroClaw Architecture │
-├─────────────────────────────────────────────────────────────────────┤
-│ │
-│ ┌──────────────┐ ┌──────────────────────────────────────────┐ │
-│ │ Chat Apps │ │ Security Layer │ │
-│ │ │ │ │ │
-│ │ Telegram ───┤ │ ┌─────────────┐ ┌──────────────────┐ │ │
-│ │ Discord ───┤ │ │ Auth Gate │ │ Rate Limiter │ │ │
-│ │ Slack ───┼───►│ │ │ │ │ │ │
-│ │ iMessage ───┤ │ │ • allowed_ │ │ • sliding window │ │ │
-│ │ Matrix ───┤ │ │ users │ │ • max actions/hr │ │ │
-│ │ CLI ───┤ │ │ • webhook │ │ • max cost/day │ │ │
-│ │ Webhook ───┤ │ │ secret │ │ │ │ │
-│ └──────────────┘ │ └──────┬──────┘ └────────┬─────────┘ │ │
-│ │ │ │ │ │
-│ └─────────┼──────────────────┼────────────┘ │
-│ ▼ ▼ │
-│ ┌──────────────────────────────────────┐ │
-│ │ Agent Loop │ │
-│ │ │ │
-│ │ Message ──► LLM ──► Tools ──► Reply │ │
-│ │ ▲ │ │ │
-│ │ │ ┌─────────────┘ │ │
-│ │ │ ▼ │ │
-│ │ ┌──────────────┐ ┌─────────────┐ │ │
-│ │ │ Context │ │ Sandbox │ │ │
-│ │ │ │ │ │ │ │
-│ │ │ • Memory │ │ • allowlist │ │ │
-│ │ │ • Skills │ │ • path jail │ │ │
-│ │ │ • Workspace │ │ • forbidden │ │ │
-│ │ │ MD files │ │ paths │ │ │
-│ │ └──────────────┘ └─────────────┘ │ │
-│ └──────────────────────────────────────┘ │
-│ │
-│ ┌──────────────────────────────────────────────────────────────┐ │
-│ │ AI Providers (22) │ │
-│ │ OpenRouter · Anthropic · OpenAI · Mistral · Groq · Venice │ │
-│ │ Ollama · xAI · DeepSeek · Cerebras · Fireworks · Together │ │
-│ │ Cloudflare · Moonshot · GLM · MiniMax · Qianfan · + more │ │
-│ └──────────────────────────────────────────────────────────────┘ │
-└─────────────────────────────────────────────────────────────────────┘
-```
+
+![ZeroClaw Architecture](docs/architecture.svg)
+
| Subsystem | Trait | Ships with | Extend |
|-----------|-------|------------|--------|
-| **AI Models** | `Provider` | 22 providers (OpenRouter, Anthropic, OpenAI, Venice, Groq, Mistral, etc.) | Any OpenAI-compatible API |
+| **AI Models** | `Provider` | 22+ providers (OpenRouter, Anthropic, OpenAI, Venice, Groq, Mistral, etc.) | `custom:https://your-api.com` — any OpenAI-compatible API |
| **Channels** | `Channel` | CLI, Telegram, Discord, Slack, iMessage, Matrix, Webhook | Any messaging API |
| **Memory** | `Memory` | SQLite (default), Markdown | Any persistence |
| **Tools** | `Tool` | shell, file_read, file_write, memory_store, memory_recall, memory_forget | Any capability |
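+
+Swapping a backend is a config-only change. A minimal sketch, assuming a TOML config with top-level `provider` and `model` keys (key names illustrative; the `custom:` format matches the provider factory):
+
+```toml
+# Point the agent at any OpenAI-compatible endpoint, e.g. LM Studio or vLLM
+provider = "custom:http://localhost:1234"
+model = "llama3"
+```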
@@ -342,7 +301,7 @@ interval_minutes = 30
```bash
cargo build # Dev build
cargo build --release # Release build (~3MB)
-cargo test # 649 tests
+cargo test # 657 tests
cargo clippy # Lint (0 warnings)
# Run the SQLite vs Markdown benchmark
diff --git a/docs/architecture.svg b/docs/architecture.svg
new file mode 100644
index 0000000..8dcfb77
--- /dev/null
+++ b/docs/architecture.svg
@@ -0,0 +1,249 @@
+
diff --git a/src/onboard/wizard.rs b/src/onboard/wizard.rs
index eaef76b..679957c 100644
--- a/src/onboard/wizard.rs
+++ b/src/onboard/wizard.rs
@@ -55,19 +55,22 @@ pub fn run_wizard() -> Result<Config> {
);
println!();
- print_step(1, 5, "Workspace Setup");
+ print_step(1, 6, "Workspace Setup");
let (workspace_dir, config_path) = setup_workspace()?;
- print_step(2, 5, "AI Provider & API Key");
+ print_step(2, 6, "AI Provider & API Key");
let (provider, api_key, model) = setup_provider()?;
- print_step(3, 5, "Channels (How You Talk to ZeroClaw)");
+ print_step(3, 6, "Channels (How You Talk to ZeroClaw)");
let channels_config = setup_channels()?;
- print_step(4, 5, "Project Context (Personalize Your Agent)");
+ print_step(4, 6, "Tunnel (Expose to Internet)");
+ let tunnel_config = setup_tunnel()?;
+
+ print_step(5, 6, "Project Context (Personalize Your Agent)");
let project_ctx = setup_project_context()?;
- print_step(5, 5, "Workspace Files");
+ print_step(6, 6, "Workspace Files");
scaffold_workspace(&workspace_dir, &project_ctx)?;
// ── Build config ──
@@ -93,7 +96,7 @@ pub fn run_wizard() -> Result {
heartbeat: HeartbeatConfig::default(),
channels_config,
memory: MemoryConfig::default(), // SQLite + auto-save by default
- tunnel: crate::config::TunnelConfig::default(),
+ tunnel: tunnel_config,
};
println!(
@@ -208,11 +211,12 @@ fn setup_workspace() -> Result<(PathBuf, PathBuf)> {
fn setup_provider() -> Result<(String, String, String)> {
// ── Tier selection ──
let tiers = vec![
- "Recommended (OpenRouter, Venice, Anthropic, OpenAI)",
- "Fast inference (Groq, Fireworks, Together AI)",
- "Gateway / proxy (Vercel AI, Cloudflare AI, Amazon Bedrock)",
- "Specialized (Moonshot/Kimi, GLM/Zhipu, MiniMax, Qianfan, Z.AI, Synthetic, OpenCode Zen, Cohere)",
- "Local / private (Ollama — no API key needed)",
+ "⭐ Recommended (OpenRouter, Venice, Anthropic, OpenAI)",
+ "⚡ Fast inference (Groq, Fireworks, Together AI)",
+ "🌐 Gateway / proxy (Vercel AI, Cloudflare AI, Amazon Bedrock)",
+ "🔬 Specialized (Moonshot/Kimi, GLM/Zhipu, MiniMax, Qianfan, Z.AI, Synthetic, OpenCode Zen, Cohere)",
+ "🏠 Local / private (Ollama — no API key needed)",
+ "🔧 Custom — bring your own OpenAI-compatible API",
];
let tier_idx = Select::new()
@@ -255,9 +259,53 @@ fn setup_provider() -> Result<(String, String, String)> {
("opencode", "OpenCode Zen — code-focused AI"),
("cohere", "Cohere — Command R+ & embeddings"),
],
- _ => vec![("ollama", "Ollama — local models (Llama, Mistral, Phi)")],
+ 4 => vec![("ollama", "Ollama — local models (Llama, Mistral, Phi)")],
+ _ => vec![], // Custom — handled below
};
+ // ── Custom / BYOP flow ──
+ if providers.is_empty() {
+ println!();
+ println!(
+ " {} {}",
+ style("Custom Provider Setup").white().bold(),
+ style("— any OpenAI-compatible API").dim()
+ );
+ print_bullet("ZeroClaw works with ANY API that speaks the OpenAI chat completions format.");
+ print_bullet("Examples: LiteLLM, LocalAI, vLLM, text-generation-webui, LM Studio, etc.");
+ println!();
+
+ let base_url: String = Input::new()
+ .with_prompt(" API base URL (e.g. http://localhost:1234 or https://my-api.com)")
+ .interact_text()?;
+
+ let base_url = base_url.trim().trim_end_matches('/').to_string();
+ if base_url.is_empty() {
+ anyhow::bail!("Custom provider requires a base URL.");
+ }
+
+ let api_key: String = Input::new()
+ .with_prompt(" API key (or Enter to skip if not needed)")
+ .allow_empty(true)
+ .interact_text()?;
+
+ let model: String = Input::new()
+ .with_prompt(" Model name (e.g. llama3, gpt-4o, mistral)")
+ .default("default".into())
+ .interact_text()?;
+
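+ // This "custom:<base-url>" id is parsed back by providers::create_provider at startup.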
+ let provider_name = format!("custom:{base_url}");
+
+ println!(
+ " {} Provider: {} | Model: {}",
+ style("✓").green().bold(),
+ style(&provider_name).green(),
+ style(&model).green()
+ );
+
+ return Ok((provider_name, api_key, model));
+ }
+
let provider_labels: Vec<&str> = providers.iter().map(|(_, label)| *label).collect();
let provider_idx = Select::new()
@@ -1055,6 +1103,159 @@ fn setup_channels() -> Result<ChannelsConfig> {
Ok(config)
}
+// ── Step 4: Tunnel ──────────────────────────────────────────────
+
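+/// Interactive tunnel selection (step 4 of 6). Returns a `TunnelConfig` that
+/// serializes roughly as the sketch below (illustrative; the exact shape
+/// depends on the serde derives in `config::schema`):
+///
+/// ```toml
+/// [tunnel]
+/// provider = "cloudflare"
+///
+/// [tunnel.cloudflare]
+/// token = "..."
+/// ```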
+#[allow(clippy::too_many_lines)]
+fn setup_tunnel() -> Result<TunnelConfig> {
+ use crate::config::schema::{
+ CloudflareTunnelConfig, CustomTunnelConfig, NgrokTunnelConfig, TailscaleTunnelConfig,
+ TunnelConfig,
+ };
+
+ print_bullet("A tunnel exposes your gateway to the internet securely.");
+ print_bullet("Skip this if you only use CLI or local channels.");
+ println!();
+
+ let options = vec![
+ "Skip — local only (default)",
+ "Cloudflare Tunnel — Zero Trust, free tier",
+ "Tailscale — private tailnet or public Funnel",
+ "ngrok — instant public URLs",
+ "Custom — bring your own (bore, frp, ssh, etc.)",
+ ];
+
+ let choice = Select::new()
+ .with_prompt(" Select tunnel provider")
+ .items(&options)
+ .default(0)
+ .interact()?;
+
+ let config = match choice {
+ 1 => {
+ println!();
+ print_bullet("Get your tunnel token from the Cloudflare Zero Trust dashboard.");
+ let token: String = Input::new()
+ .with_prompt(" Cloudflare tunnel token")
+ .interact_text()?;
+ if token.trim().is_empty() {
+ println!(" {} Skipped", style("→").dim());
+ TunnelConfig::default()
+ } else {
+ println!(
+ " {} Tunnel: {}",
+ style("✓").green().bold(),
+ style("Cloudflare").green()
+ );
+ TunnelConfig {
+ provider: "cloudflare".into(),
+ cloudflare: Some(CloudflareTunnelConfig { token }),
+ ..TunnelConfig::default()
+ }
+ }
+ }
+ 2 => {
+ println!();
+ print_bullet("Tailscale must be installed and authenticated (tailscale up).");
+ let funnel = Confirm::new()
+ .with_prompt(" Use Funnel (public internet)? No = tailnet only")
+ .default(false)
+ .interact()?;
+ println!(
+ " {} Tunnel: {} ({})",
+ style("✓").green().bold(),
+ style("Tailscale").green(),
+ if funnel {
+ "Funnel — public"
+ } else {
+ "Serve — tailnet only"
+ }
+ );
+ TunnelConfig {
+ provider: "tailscale".into(),
+ tailscale: Some(TailscaleTunnelConfig {
+ funnel,
+ hostname: None,
+ }),
+ ..TunnelConfig::default()
+ }
+ }
+ 3 => {
+ println!();
+ print_bullet(
+ "Get your auth token at https://dashboard.ngrok.com/get-started/your-authtoken",
+ );
+ let auth_token: String = Input::new()
+ .with_prompt(" ngrok auth token")
+ .interact_text()?;
+ if auth_token.trim().is_empty() {
+ println!(" {} Skipped", style("→").dim());
+ TunnelConfig::default()
+ } else {
+ let domain: String = Input::new()
+ .with_prompt(" Custom domain (optional, Enter to skip)")
+ .allow_empty(true)
+ .interact_text()?;
+ println!(
+ " {} Tunnel: {}",
+ style("✓").green().bold(),
+ style("ngrok").green()
+ );
+ TunnelConfig {
+ provider: "ngrok".into(),
+ ngrok: Some(NgrokTunnelConfig {
+ auth_token,
+ domain: if domain.is_empty() {
+ None
+ } else {
+ Some(domain)
+ },
+ }),
+ ..TunnelConfig::default()
+ }
+ }
+ }
+ 4 => {
+ println!();
+ print_bullet("Enter the command to start your tunnel.");
+ print_bullet("Use {port} and {host} as placeholders.");
+ print_bullet("Example: bore local {port} --to bore.pub");
+ let cmd: String = Input::new()
+ .with_prompt(" Start command")
+ .interact_text()?;
+ if cmd.trim().is_empty() {
+ println!(" {} Skipped", style("→").dim());
+ TunnelConfig::default()
+ } else {
+ println!(
+ " {} Tunnel: {} ({})",
+ style("✓").green().bold(),
+ style("Custom").green(),
+ style(&cmd).dim()
+ );
+ TunnelConfig {
+ provider: "custom".into(),
+ custom: Some(CustomTunnelConfig {
+ start_command: cmd,
+ health_url: None,
+ url_pattern: None,
+ }),
+ ..TunnelConfig::default()
+ }
+ }
+ }
+ _ => {
+ println!(
+ " {} Tunnel: {}",
+ style("✓").green().bold(),
+ style("none (local only)").dim()
+ );
+ TunnelConfig::default()
+ }
+ };
+
+ Ok(config)
+}
+
// ── Step 6: Scaffold workspace files ─────────────────────────────
#[allow(clippy::too_many_lines)]
diff --git a/src/providers/mod.rs b/src/providers/mod.rs
index 436984e..8828a18 100644
--- a/src/providers/mod.rs
+++ b/src/providers/mod.rs
@@ -88,8 +88,24 @@ pub fn create_provider(name: &str, api_key: Option<&str>) -> anyhow::Result<Box<dyn Provider>> {
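+ // "custom:<base-url>" selects the generic OpenAI-compatible client with Bearer auth.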
+ _ if name.starts_with("custom:") => {
+ let base_url = name.strip_prefix("custom:").unwrap_or("");
+ if base_url.is_empty() {
+ anyhow::bail!("Custom provider requires a URL. Format: custom:https://your-api.com");
+ }
+ Ok(Box::new(OpenAiCompatibleProvider::new(
+ "Custom",
+ base_url,
+ api_key,
+ AuthStyle::Bearer,
+ )))
+ }
+
_ => anyhow::bail!(
- "Unknown provider: {name}. Run `zeroclaw integrations list -c ai` to see all available providers."
+ "Unknown provider: {name}. Run `zeroclaw integrations list -c ai` to see all available providers.\n\
+ Tip: Use \"custom:https://your-api.com\" for any OpenAI-compatible endpoint."
),
}
}
@@ -231,6 +247,37 @@ mod tests {
assert!(create_provider("cohere", Some("key")).is_ok());
}
+ // ── Custom / BYOP provider ─────────────────────────────
+
+ #[test]
+ fn factory_custom_url() {
+ let p = create_provider("custom:https://my-llm.example.com", Some("key"));
+ assert!(p.is_ok());
+ }
+
+ #[test]
+ fn factory_custom_localhost() {
+ let p = create_provider("custom:http://localhost:1234", Some("key"));
+ assert!(p.is_ok());
+ }
+
+ #[test]
+ fn factory_custom_no_key() {
+ let p = create_provider("custom:https://my-llm.example.com", None);
+ assert!(p.is_ok());
+ }
+
+ #[test]
+ fn factory_custom_empty_url_errors() {
+ match create_provider("custom:", None) {
+ Err(e) => assert!(
+ e.to_string().contains("requires a URL"),
+ "Expected 'requires a URL', got: {e}"
+ ),
+ Ok(_) => panic!("Expected error for empty custom URL"),
+ }
+ }
+
// ── Error cases ──────────────────────────────────────────
#[test]