style: cargo fmt — fix all formatting for CI

Ran `cargo fmt` across the entire codebase to pass CI's `cargo fmt --check`.
No logic changes, only whitespace/formatting.
This commit is contained in:
argenis de la rosa 2026-02-13 16:03:50 -05:00
parent a5887ad2dc
commit bc31e4389b
24 changed files with 613 additions and 242 deletions

View file

@ -53,9 +53,7 @@ impl Provider for AnthropicProvider {
temperature: f64,
) -> anyhow::Result<String> {
let api_key = self.api_key.as_ref().ok_or_else(|| {
anyhow::anyhow!(
"Anthropic API key not set. Set ANTHROPIC_API_KEY or edit config.toml."
)
anyhow::anyhow!("Anthropic API key not set. Set ANTHROPIC_API_KEY or edit config.toml.")
})?;
let request = ChatRequest {
@ -122,10 +120,15 @@ mod tests {
#[tokio::test]
async fn chat_fails_without_key() {
let p = AnthropicProvider::new(None);
let result = p.chat_with_system(None, "hello", "claude-3-opus", 0.7).await;
let result = p
.chat_with_system(None, "hello", "claude-3-opus", 0.7)
.await;
assert!(result.is_err());
let err = result.unwrap_err().to_string();
assert!(err.contains("API key not set"), "Expected key error, got: {err}");
assert!(
err.contains("API key not set"),
"Expected key error, got: {err}"
);
}
#[tokio::test]
@ -150,7 +153,10 @@ mod tests {
temperature: 0.7,
};
let json = serde_json::to_string(&req).unwrap();
assert!(!json.contains("system"), "system field should be skipped when None");
assert!(
!json.contains("system"),
"system field should be skipped when None"
);
assert!(json.contains("claude-3-opus"));
assert!(json.contains("hello"));
}
@ -188,7 +194,8 @@ mod tests {
#[test]
fn chat_response_multiple_blocks() {
let json = r#"{"content":[{"type":"text","text":"First"},{"type":"text","text":"Second"}]}"#;
let json =
r#"{"content":[{"type":"text","text":"First"},{"type":"text","text":"Second"}]}"#;
let resp: ChatResponse = serde_json::from_str(json).unwrap();
assert_eq!(resp.content.len(), 2);
assert_eq!(resp.content[0].text, "First");

View file

@ -170,9 +170,14 @@ mod tests {
#[tokio::test]
async fn chat_fails_without_key() {
let p = make_provider("Venice", "https://api.venice.ai", None);
let result = p.chat_with_system(None, "hello", "llama-3.3-70b", 0.7).await;
let result = p
.chat_with_system(None, "hello", "llama-3.3-70b", 0.7)
.await;
assert!(result.is_err());
assert!(result.unwrap_err().to_string().contains("Venice API key not set"));
assert!(result
.unwrap_err()
.to_string()
.contains("Venice API key not set"));
}
#[test]
@ -180,8 +185,14 @@ mod tests {
let req = ChatRequest {
model: "llama-3.3-70b".to_string(),
messages: vec![
Message { role: "system".to_string(), content: "You are ZeroClaw".to_string() },
Message { role: "user".to_string(), content: "hello".to_string() },
Message {
role: "system".to_string(),
content: "You are ZeroClaw".to_string(),
},
Message {
role: "user".to_string(),
content: "hello".to_string(),
},
],
temperature: 0.7,
};
@ -208,7 +219,10 @@ mod tests {
#[test]
fn x_api_key_auth_style() {
let p = OpenAiCompatibleProvider::new(
"moonshot", "https://api.moonshot.cn", Some("ms-key"), AuthStyle::XApiKey,
"moonshot",
"https://api.moonshot.cn",
Some("ms-key"),
AuthStyle::XApiKey,
);
assert!(matches!(p.auth_header, AuthStyle::XApiKey));
}
@ -216,7 +230,10 @@ mod tests {
#[test]
fn custom_auth_style() {
let p = OpenAiCompatibleProvider::new(
"custom", "https://api.example.com", Some("key"), AuthStyle::Custom("X-Custom-Key".into()),
"custom",
"https://api.example.com",
Some("key"),
AuthStyle::Custom("X-Custom-Key".into()),
);
assert!(matches!(p.auth_header, AuthStyle::Custom(_)));
}
@ -238,7 +255,8 @@ mod tests {
assert!(result.is_err(), "{} should fail without key", p.name);
assert!(
result.unwrap_err().to_string().contains("API key not set"),
"{} error should mention key", p.name
"{} error should mention key",
p.name
);
}
}

View file

@ -250,11 +250,29 @@ mod tests {
#[test]
fn factory_all_providers_create_successfully() {
let providers = [
"openrouter", "anthropic", "openai", "ollama",
"venice", "vercel", "cloudflare", "moonshot", "synthetic",
"opencode", "zai", "glm", "minimax", "bedrock", "qianfan",
"groq", "mistral", "xai", "deepseek", "together",
"fireworks", "perplexity", "cohere",
"openrouter",
"anthropic",
"openai",
"ollama",
"venice",
"vercel",
"cloudflare",
"moonshot",
"synthetic",
"opencode",
"zai",
"glm",
"minimax",
"bedrock",
"qianfan",
"groq",
"mistral",
"xai",
"deepseek",
"together",
"fireworks",
"perplexity",
"cohere",
];
for name in providers {
assert!(

View file

@ -85,7 +85,9 @@ impl Provider for OllamaProvider {
if !response.status().is_success() {
let error = response.text().await?;
anyhow::bail!("Ollama error: {error}. Is Ollama running? (brew install ollama && ollama serve)");
anyhow::bail!(
"Ollama error: {error}. Is Ollama running? (brew install ollama && ollama serve)"
);
}
let chat_response: ChatResponse = response.json().await?;
@ -126,8 +128,14 @@ mod tests {
let req = ChatRequest {
model: "llama3".to_string(),
messages: vec![
Message { role: "system".to_string(), content: "You are ZeroClaw".to_string() },
Message { role: "user".to_string(), content: "hello".to_string() },
Message {
role: "system".to_string(),
content: "You are ZeroClaw".to_string(),
},
Message {
role: "user".to_string(),
content: "hello".to_string(),
},
],
stream: false,
options: Options { temperature: 0.7 },
@ -143,9 +151,10 @@ mod tests {
fn request_serializes_without_system() {
let req = ChatRequest {
model: "mistral".to_string(),
messages: vec![
Message { role: "user".to_string(), content: "test".to_string() },
],
messages: vec![Message {
role: "user".to_string(),
content: "test".to_string(),
}],
stream: false,
options: Options { temperature: 0.0 },
};

View file

@ -146,8 +146,14 @@ mod tests {
let req = ChatRequest {
model: "gpt-4o".to_string(),
messages: vec![
Message { role: "system".to_string(), content: "You are ZeroClaw".to_string() },
Message { role: "user".to_string(), content: "hello".to_string() },
Message {
role: "system".to_string(),
content: "You are ZeroClaw".to_string(),
},
Message {
role: "user".to_string(),
content: "hello".to_string(),
},
],
temperature: 0.7,
};
@ -161,9 +167,10 @@ mod tests {
fn request_serializes_without_system() {
let req = ChatRequest {
model: "gpt-4o".to_string(),
messages: vec![
Message { role: "user".to_string(), content: "hello".to_string() },
],
messages: vec![Message {
role: "user".to_string(),
content: "hello".to_string(),
}],
temperature: 0.0,
};
let json = serde_json::to_string(&req).unwrap();

View file

@ -2,12 +2,7 @@ use async_trait::async_trait;
#[async_trait]
pub trait Provider: Send + Sync {
async fn chat(
&self,
message: &str,
model: &str,
temperature: f64,
) -> anyhow::Result<String> {
async fn chat(&self, message: &str, model: &str, temperature: f64) -> anyhow::Result<String> {
self.chat_with_system(None, message, model, temperature)
.await
}