feat(ollama): unify local and remote endpoint routing

Integrate cloud endpoint behavior into the existing ollama provider flow, document it alongside the existing provider configuration docs instead of a standalone page, and keep configuration minimal via api_url/api_key.

Also align reply_target and memory trait call sites for compatibility with the current baseline.
Chummy 2026-02-17 22:49:40 +08:00
parent 85de9b5625
commit d94d7baa14
4 changed files with 195 additions and 24 deletions


@@ -451,6 +451,23 @@ format = "openclaw" # "openclaw" (default, markdown files) or "aieos
# aieos_inline = '{"identity":{"names":{"first":"Nova"}}}' # inline AIEOS JSON
```
### Ollama Local and Remote Endpoints
ZeroClaw uses one provider key (`ollama`) for both local and remote Ollama deployments:
- Local Ollama: keep `api_url` unset, run `ollama serve`, and use models like `llama3.2`.
- Remote Ollama endpoint (including Ollama Cloud): set `api_url` to the remote endpoint and set `api_key` (or `OLLAMA_API_KEY`) when required.
- Optional `:cloud` suffix: model IDs like `qwen3:cloud` are normalized to `qwen3` before the request.
Example remote configuration:
```toml
default_provider = "ollama"
default_model = "qwen3:cloud"
api_url = "https://ollama.com"
api_key = "ollama_api_key_here"
```
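For comparison, a minimal local configuration (using a locally pulled model such as `llama3.2`) needs neither `api_url` nor `api_key`:
```toml
default_provider = "ollama"
default_model = "llama3.2"
# api_url unset: requests go to http://localhost:11434 with no bearer auth
```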
## Python Companion Package (`zeroclaw-tools`)
For LLM providers with inconsistent native tool calling (e.g., GLM-5/Zhipu), ZeroClaw ships a Python companion package with **LangGraph-based tool calling** for guaranteed consistency:


@@ -73,7 +73,7 @@ pub fn run_wizard() -> Result<Config> {
let (workspace_dir, config_path) = setup_workspace()?;
print_step(2, 9, "AI Provider & API Key");
let (provider, api_key, model) = setup_provider(&workspace_dir)?;
let (provider, api_key, model, provider_api_url) = setup_provider(&workspace_dir)?;
print_step(3, 9, "Channels (How You Talk to ZeroClaw)");
let channels_config = setup_channels()?;
@@ -106,7 +106,7 @@ pub fn run_wizard() -> Result<Config> {
} else {
Some(api_key)
},
api_url: None,
api_url: provider_api_url,
default_provider: Some(provider),
default_model: Some(model),
default_temperature: 0.7,
@@ -1329,7 +1329,7 @@ fn setup_workspace() -> Result<(PathBuf, PathBuf)> {
// ── Step 2: Provider & API Key ───────────────────────────────────
#[allow(clippy::too_many_lines)]
fn setup_provider(workspace_dir: &Path) -> Result<(String, String, String)> {
fn setup_provider(workspace_dir: &Path) -> Result<(String, String, String, Option<String>)> {
// ── Tier selection ──
let tiers = vec![
"⭐ Recommended (OpenRouter, Venice, Anthropic, OpenAI, Gemini)",
@@ -1441,7 +1441,7 @@ fn setup_provider(workspace_dir: &Path) -> Result<(String, String, String)> {
style(&model).green()
);
return Ok((provider_name, api_key, model));
return Ok((provider_name, api_key, model, None));
}
let provider_labels: Vec<&str> = providers.iter().map(|(_, label)| *label).collect();
@@ -1454,10 +1454,53 @@ fn setup_provider(workspace_dir: &Path) -> Result<(String, String, String)> {
let provider_name = providers[provider_idx].0;
// ── API key ──
// ── API key / endpoint ──
let mut provider_api_url: Option<String> = None;
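// For Ollama, offer an optional remote endpoint first; the key prompt depends on that choice.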
let api_key = if provider_name == "ollama" {
print_bullet("Ollama runs locally — no API key needed!");
let use_remote_ollama = Confirm::new()
.with_prompt(" Use a remote Ollama endpoint (for example Ollama Cloud)?")
.default(false)
.interact()?;
if use_remote_ollama {
let raw_url: String = Input::new()
.with_prompt(" Remote Ollama endpoint URL")
.default("https://ollama.com".into())
.interact_text()?;
let normalized_url = raw_url.trim().trim_end_matches('/').to_string();
if normalized_url.is_empty() {
anyhow::bail!("Remote Ollama endpoint URL cannot be empty.");
}
provider_api_url = Some(normalized_url.clone());
print_bullet(&format!(
"Remote endpoint configured: {}",
style(&normalized_url).cyan()
));
print_bullet(&format!(
"If you use cloud-only models, append {} to the model ID.",
style(":cloud").yellow()
));
let key: String = Input::new()
.with_prompt(" API key for remote Ollama endpoint (or Enter to skip)")
.allow_empty(true)
.interact_text()?;
if key.trim().is_empty() {
print_bullet(&format!(
"No API key provided. Set {} later if required by your endpoint.",
style("OLLAMA_API_KEY").yellow()
));
}
key
} else {
print_bullet("Using local Ollama at http://localhost:11434 (no API key needed).");
String::new()
}
} else if canonical_provider_name(provider_name) == "gemini" {
// Special handling for Gemini: check for CLI auth first
if crate::providers::gemini::GeminiProvider::has_cli_credentials() {
@@ -1751,7 +1794,11 @@ fn setup_provider(workspace_dir: &Path) -> Result<(String, String, String)> {
.collect();
let mut live_options: Option<Vec<(String, String)>> = None;
if supports_live_model_fetch(provider_name) {
if provider_name == "ollama" && provider_api_url.is_some() {
print_bullet(
"Skipping local Ollama model discovery because a remote endpoint is configured.",
);
} else if supports_live_model_fetch(provider_name) {
let can_fetch_without_key = matches!(provider_name, "openrouter" | "ollama");
let has_api_key = !api_key.trim().is_empty()
|| std::env::var(provider_env_var(provider_name))
@@ -1907,7 +1954,7 @@ fn setup_provider(workspace_dir: &Path) -> Result<(String, String, String)> {
style(&model).green()
);
Ok((provider_name.to_string(), api_key, model))
Ok((provider_name.to_string(), api_key, model, provider_api_url))
}
/// Map provider name to its conventional env var
@@ -1916,6 +1963,7 @@ fn provider_env_var(name: &str) -> &'static str {
"openrouter" => "OPENROUTER_API_KEY",
"anthropic" => "ANTHROPIC_API_KEY",
"openai" => "OPENAI_API_KEY",
"ollama" => "OLLAMA_API_KEY",
"venice" => "VENICE_API_KEY",
"groq" => "GROQ_API_KEY",
"mistral" => "MISTRAL_API_KEY",
@@ -4614,7 +4662,7 @@ mod tests {
assert_eq!(provider_env_var("openrouter"), "OPENROUTER_API_KEY");
assert_eq!(provider_env_var("anthropic"), "ANTHROPIC_API_KEY");
assert_eq!(provider_env_var("openai"), "OPENAI_API_KEY");
assert_eq!(provider_env_var("ollama"), "API_KEY"); // fallback
assert_eq!(provider_env_var("ollama"), "OLLAMA_API_KEY");
assert_eq!(provider_env_var("xai"), "XAI_API_KEY");
assert_eq!(provider_env_var("grok"), "XAI_API_KEY"); // alias
assert_eq!(provider_env_var("together"), "TOGETHER_API_KEY"); // alias


@@ -172,6 +172,7 @@ fn resolve_provider_credential(name: &str, credential_override: Option<&str>) ->
"anthropic" => vec!["ANTHROPIC_OAUTH_TOKEN", "ANTHROPIC_API_KEY"],
"openrouter" => vec!["OPENROUTER_API_KEY"],
"openai" => vec!["OPENAI_API_KEY"],
"ollama" => vec!["OLLAMA_API_KEY"],
"venice" => vec!["VENICE_API_KEY"],
"groq" => vec!["GROQ_API_KEY"],
"mistral" => vec!["MISTRAL_API_KEY"],
@@ -274,7 +275,7 @@ pub fn create_provider_with_url(
"anthropic" => Ok(Box::new(anthropic::AnthropicProvider::new(key))),
"openai" => Ok(Box::new(openai::OpenAiProvider::new(key))),
// Ollama uses api_url for custom base URL (e.g. remote Ollama instance)
"ollama" => Ok(Box::new(ollama::OllamaProvider::new(api_url))),
"ollama" => Ok(Box::new(ollama::OllamaProvider::new(api_url, key))),
"gemini" | "google" | "google-gemini" => {
Ok(Box::new(gemini::GeminiProvider::new(key)))
}
@@ -600,7 +601,7 @@ mod tests {
#[test]
fn factory_ollama() {
assert!(create_provider("ollama", None).is_ok());
// Ollama ignores the api_key parameter since it's a local service
// Ollama may use API key when a remote endpoint is configured.
assert!(create_provider("ollama", Some("dummy")).is_ok());
assert!(create_provider("ollama", Some("any-value-here")).is_ok());
}
@@ -951,6 +952,13 @@ mod tests {
assert!(provider.is_ok());
}
#[test]
fn ollama_cloud_with_custom_url() {
let provider =
create_provider_with_url("ollama", Some("ollama-key"), Some("https://ollama.com"));
assert!(provider.is_ok());
}
#[test]
fn factory_all_providers_create_successfully() {
let providers = [


@@ -5,6 +5,7 @@ use serde::{Deserialize, Serialize};
pub struct OllamaProvider {
base_url: String,
api_key: Option<String>,
client: Client,
}
@@ -63,12 +64,18 @@ struct OllamaFunction {
// ─── Implementation ───────────────────────────────────────────────────────────
impl OllamaProvider {
pub fn new(base_url: Option<&str>) -> Self {
pub fn new(base_url: Option<&str>, api_key: Option<&str>) -> Self {
let api_key = api_key.and_then(|value| {
let trimmed = value.trim();
(!trimmed.is_empty()).then(|| trimmed.to_string())
});
Self {
base_url: base_url
.unwrap_or("http://localhost:11434")
.trim_end_matches('/')
.to_string(),
api_key,
client: Client::builder()
.timeout(std::time::Duration::from_secs(300))
.connect_timeout(std::time::Duration::from_secs(10))
@@ -77,12 +84,43 @@ impl OllamaProvider {
}
}
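/// Treat localhost-style hosts as a local Ollama instance that never needs bearer auth.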
fn is_local_endpoint(&self) -> bool {
reqwest::Url::parse(&self.base_url)
.ok()
.and_then(|url| url.host_str().map(|host| host.to_string()))
// url::Url keeps IPv6 hosts bracketed, so "[::1]" must be matched as well.
.is_some_and(|host| matches!(host.as_str(), "localhost" | "127.0.0.1" | "::1" | "[::1]"))
}
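/// Strip an optional ":cloud" suffix from the model ID and decide whether the
/// request should carry bearer auth; cloud routing requires a remote endpoint
/// and an API key.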
fn resolve_request_details(&self, model: &str) -> anyhow::Result<(String, bool)> {
let requests_cloud = model.ends_with(":cloud");
let normalized_model = model.strip_suffix(":cloud").unwrap_or(model).to_string();
if requests_cloud && self.is_local_endpoint() {
anyhow::bail!(
"Model '{}' requested cloud routing, but Ollama endpoint is local. Configure api_url with a remote Ollama endpoint.",
model
);
}
if requests_cloud && self.api_key.is_none() {
anyhow::bail!(
"Model '{}' requested cloud routing, but no API key is configured. Set OLLAMA_API_KEY or config api_key.",
model
);
}
let should_auth = self.api_key.is_some() && !self.is_local_endpoint();
Ok((normalized_model, should_auth))
}
/// Send a request to Ollama and get the parsed response
async fn send_request(
&self,
messages: Vec<Message>,
model: &str,
temperature: f64,
should_auth: bool,
) -> anyhow::Result<ApiChatResponse> {
let request = ChatRequest {
model: model.to_string(),
@@ -101,7 +139,15 @@ impl OllamaProvider {
temperature
);
let response = self.client.post(&url).json(&request).send().await?;
let mut request_builder = self.client.post(&url).json(&request);
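// Bearer auth applies only when resolve_request_details opted in (remote endpoint + key).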
if should_auth {
if let Some(key) = self.api_key.as_ref() {
request_builder = request_builder.bearer_auth(key);
}
}
let response = request_builder.send().await?;
let status = response.status();
tracing::debug!("Ollama response status: {}", status);
@@ -220,6 +266,8 @@ impl Provider for OllamaProvider {
model: &str,
temperature: f64,
) -> anyhow::Result<String> {
let (normalized_model, should_auth) = self.resolve_request_details(model)?;
let mut messages = Vec::new();
if let Some(sys) = system_prompt {
@@ -234,7 +282,9 @@ impl Provider for OllamaProvider {
content: message.to_string(),
});
let response = self.send_request(messages, model, temperature).await?;
let response = self
.send_request(messages, &normalized_model, temperature, should_auth)
.await?;
// If model returned tool calls, format them for loop_.rs's parse_tool_calls
if !response.message.tool_calls.is_empty() {
@@ -272,6 +322,8 @@ impl Provider for OllamaProvider {
model: &str,
temperature: f64,
) -> anyhow::Result<String> {
let (normalized_model, should_auth) = self.resolve_request_details(model)?;
let api_messages: Vec<Message> = messages
.iter()
.map(|m| Message {
@ -280,7 +332,9 @@ impl Provider for OllamaProvider {
})
.collect();
let response = self.send_request(api_messages, model, temperature).await?;
let response = self
.send_request(api_messages, &normalized_model, temperature, should_auth)
.await?;
// If model returned tool calls, format them for loop_.rs's parse_tool_calls
if !response.message.tool_calls.is_empty() {
@@ -330,28 +384,72 @@ mod tests {
#[test]
fn default_url() {
let p = OllamaProvider::new(None);
let p = OllamaProvider::new(None, None);
assert_eq!(p.base_url, "http://localhost:11434");
}
#[test]
fn custom_url_trailing_slash() {
let p = OllamaProvider::new(Some("http://192.168.1.100:11434/"));
let p = OllamaProvider::new(Some("http://192.168.1.100:11434/"), None);
assert_eq!(p.base_url, "http://192.168.1.100:11434");
}
#[test]
fn custom_url_no_trailing_slash() {
let p = OllamaProvider::new(Some("http://myserver:11434"));
let p = OllamaProvider::new(Some("http://myserver:11434"), None);
assert_eq!(p.base_url, "http://myserver:11434");
}
#[test]
fn empty_url_uses_empty() {
let p = OllamaProvider::new(Some(""));
let p = OllamaProvider::new(Some(""), None);
assert_eq!(p.base_url, "");
}
#[test]
fn cloud_suffix_strips_model_name() {
let p = OllamaProvider::new(Some("https://ollama.com"), Some("ollama-key"));
let (model, should_auth) = p.resolve_request_details("qwen3:cloud").unwrap();
assert_eq!(model, "qwen3");
assert!(should_auth);
}
#[test]
fn cloud_suffix_with_local_endpoint_errors() {
let p = OllamaProvider::new(None, Some("ollama-key"));
let error = p
.resolve_request_details("qwen3:cloud")
.expect_err("cloud suffix should fail on local endpoint");
assert!(error
.to_string()
.contains("requested cloud routing, but Ollama endpoint is local"));
}
#[test]
fn cloud_suffix_without_api_key_errors() {
let p = OllamaProvider::new(Some("https://ollama.com"), None);
let error = p
.resolve_request_details("qwen3:cloud")
.expect_err("cloud suffix should require API key");
assert!(error
.to_string()
.contains("requested cloud routing, but no API key is configured"));
}
#[test]
fn remote_endpoint_auth_enabled_when_key_present() {
let p = OllamaProvider::new(Some("https://ollama.com"), Some("ollama-key"));
let (_model, should_auth) = p.resolve_request_details("qwen3").unwrap();
assert!(should_auth);
}
#[test]
fn local_endpoint_auth_disabled_even_with_key() {
let p = OllamaProvider::new(None, Some("ollama-key"));
let (_model, should_auth) = p.resolve_request_details("llama3").unwrap();
assert!(!should_auth);
}
#[test]
fn response_deserializes() {
let json = r#"{"message":{"role":"assistant","content":"Hello from Ollama!"}}"#;
@@ -392,7 +490,7 @@ mod tests {
#[test]
fn extract_tool_name_handles_nested_tool_call() {
let provider = OllamaProvider::new(None);
let provider = OllamaProvider::new(None, None);
let tc = OllamaToolCall {
id: Some("call_123".into()),
function: OllamaFunction {
@@ -410,7 +508,7 @@ mod tests {
#[test]
fn extract_tool_name_handles_prefixed_name() {
let provider = OllamaProvider::new(None);
let provider = OllamaProvider::new(None, None);
let tc = OllamaToolCall {
id: Some("call_123".into()),
function: OllamaFunction {
@@ -425,7 +523,7 @@ mod tests {
#[test]
fn extract_tool_name_handles_normal_call() {
let provider = OllamaProvider::new(None);
let provider = OllamaProvider::new(None, None);
let tc = OllamaToolCall {
id: Some("call_123".into()),
function: OllamaFunction {
@@ -440,7 +538,7 @@ mod tests {
#[test]
fn format_tool_calls_produces_valid_json() {
let provider = OllamaProvider::new(None);
let provider = OllamaProvider::new(None, None);
let tool_calls = vec![OllamaToolCall {
id: Some("call_abc".into()),
function: OllamaFunction {