fix: resolve all clippy --all-targets warnings across 15 files

- gateway/mod.rs: move send_json before test module (items_after_test_module)
- memory/vector.rs: fix float_cmp, cast_precision_loss, approx_constant
- memory/chunker.rs: fix format_collect, format_push_string, write_with_newline
- memory/sqlite.rs: fix useless_vec
- heartbeat/engine.rs: fix format_collect, write_with_newline
- config/schema.rs: fix needless_raw_string_hashes
- tools/composio.rs: fix needless_raw_string_hashes
- integrations/registry.rs: fix uninlined_format_args, unused import
- tunnel/mod.rs: fix doc_markdown
- skills/mod.rs: allow similar_names in test module
- channels/cli.rs: fix unreadable_literal
- observability/mod.rs: fix manual_string_new
- runtime/mod.rs: fix manual_string_new
- examples/custom_memory.rs: add Default impl (new_without_default)
- examples/custom_channel.rs: fix needless_borrows_for_generic_args
This commit is contained in:
argenis de la rosa 2026-02-14 03:52:57 -05:00
parent 18582fe9c8
commit 1fd51f1984
15 changed files with 82 additions and 49 deletions

View file

@@ -54,7 +54,7 @@ impl Channel for TelegramChannel {
async fn send(&self, message: &str, chat_id: &str) -> Result<()> { async fn send(&self, message: &str, chat_id: &str) -> Result<()> {
self.client self.client
.post(&self.api_url("sendMessage")) .post(self.api_url("sendMessage"))
.json(&serde_json::json!({ .json(&serde_json::json!({
"chat_id": chat_id, "chat_id": chat_id,
"text": message, "text": message,
@@ -71,7 +71,7 @@ impl Channel for TelegramChannel {
loop { loop {
let resp = self let resp = self
.client .client
.get(&self.api_url("getUpdates")) .get(self.api_url("getUpdates"))
.query(&[("offset", offset.to_string()), ("timeout", "30".into())]) .query(&[("offset", offset.to_string()), ("timeout", "30".into())])
.send() .send()
.await? .await?
@@ -110,7 +110,7 @@ impl Channel for TelegramChannel {
async fn health_check(&self) -> bool { async fn health_check(&self) -> bool {
self.client self.client
.get(&self.api_url("getMe")) .get(self.api_url("getMe"))
.send() .send()
.await .await
.map(|r| r.status().is_success()) .map(|r| r.status().is_success())

View file

@@ -48,14 +48,20 @@ pub struct InMemoryBackend {
store: Mutex<HashMap<String, MemoryEntry>>, store: Mutex<HashMap<String, MemoryEntry>>,
} }
impl InMemoryBackend { impl Default for InMemoryBackend {
pub fn new() -> Self { fn default() -> Self {
Self { Self {
store: Mutex::new(HashMap::new()), store: Mutex::new(HashMap::new()),
} }
} }
} }
impl InMemoryBackend {
pub fn new() -> Self {
Self::default()
}
}
#[async_trait] #[async_trait]
impl Memory for InMemoryBackend { impl Memory for InMemoryBackend {
fn name(&self) -> &str { fn name(&self) -> &str {

View file

@@ -92,13 +92,13 @@ mod tests {
sender: "user".into(), sender: "user".into(),
content: "hello".into(), content: "hello".into(),
channel: "cli".into(), channel: "cli".into(),
timestamp: 1234567890, timestamp: 1_234_567_890,
}; };
assert_eq!(msg.id, "test-id"); assert_eq!(msg.id, "test-id");
assert_eq!(msg.sender, "user"); assert_eq!(msg.sender, "user");
assert_eq!(msg.content, "hello"); assert_eq!(msg.content, "hello");
assert_eq!(msg.channel, "cli"); assert_eq!(msg.channel, "cli");
assert_eq!(msg.timestamp, 1234567890); assert_eq!(msg.timestamp, 1_234_567_890);
} }
#[test] #[test]

View file

@@ -1013,9 +1013,9 @@ default_temperature = 0.7
#[test] #[test]
fn composio_config_partial_toml() { fn composio_config_partial_toml() {
let toml_str = r#" let toml_str = r"
enabled = true enabled = true
"#; ";
let parsed: ComposioConfig = toml::from_str(toml_str).unwrap(); let parsed: ComposioConfig = toml::from_str(toml_str).unwrap();
assert!(parsed.enabled); assert!(parsed.enabled);
assert!(parsed.api_key.is_none()); assert!(parsed.api_key.is_none());

View file

@@ -326,6 +326,26 @@ async fn send_response(
stream.write_all(response.as_bytes()).await stream.write_all(response.as_bytes()).await
} }
async fn send_json(
stream: &mut tokio::net::TcpStream,
status: u16,
body: &serde_json::Value,
) -> std::io::Result<()> {
let reason = match status {
200 => "OK",
400 => "Bad Request",
404 => "Not Found",
500 => "Internal Server Error",
_ => "Unknown",
};
let json = serde_json::to_string(body).unwrap_or_default();
let response = format!(
"HTTP/1.1 {status} {reason}\r\nContent-Type: application/json\r\nContent-Length: {}\r\nConnection: close\r\n\r\n{json}",
json.len()
);
stream.write_all(response.as_bytes()).await
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@@ -502,23 +522,3 @@ mod tests {
assert_eq!(extract_header("\r\n\r\n", "X-Webhook-Secret"), None); assert_eq!(extract_header("\r\n\r\n", "X-Webhook-Secret"), None);
} }
} }
async fn send_json(
stream: &mut tokio::net::TcpStream,
status: u16,
body: &serde_json::Value,
) -> std::io::Result<()> {
let reason = match status {
200 => "OK",
400 => "Bad Request",
404 => "Not Found",
500 => "Internal Server Error",
_ => "Unknown",
};
let json = serde_json::to_string(body).unwrap_or_default();
let response = format!(
"HTTP/1.1 {status} {reason}\r\nContent-Type: application/json\r\nContent-Length: {}\r\nConnection: close\r\n\r\n{json}",
json.len()
);
stream.write_all(response.as_bytes()).await
}

View file

@@ -193,7 +193,11 @@ mod tests {
#[test] #[test]
fn parse_tasks_many_tasks() { fn parse_tasks_many_tasks() {
let content: String = (0..100).map(|i| format!("- Task {i}\n")).collect(); let content: String = (0..100).fold(String::new(), |mut s, i| {
use std::fmt::Write;
let _ = writeln!(s, "- Task {i}");
s
});
let tasks = HeartbeatEngine::parse_tasks(&content); let tasks = HeartbeatEngine::parse_tasks(&content);
assert_eq!(tasks.len(), 100); assert_eq!(tasks.len(), 100);
assert_eq!(tasks[99], "Task 99"); assert_eq!(tasks[99], "Task 99");

View file

@@ -667,7 +667,7 @@ pub fn all_integrations() -> Vec<IntegrationEntry> {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::config::schema::{ChannelsConfig, IMessageConfig, MatrixConfig, TelegramConfig}; use crate::config::schema::{IMessageConfig, MatrixConfig, TelegramConfig};
use crate::config::Config; use crate::config::Config;
#[test] #[test]
@@ -685,7 +685,7 @@ mod tests {
let entries = all_integrations(); let entries = all_integrations();
for cat in IntegrationCategory::all() { for cat in IntegrationCategory::all() {
let count = entries.iter().filter(|e| e.category == *cat).count(); let count = entries.iter().filter(|e| e.category == *cat).count();
assert!(count > 0, "Category {:?} has no entries", cat); assert!(count > 0, "Category {cat:?} has no entries");
} }
} }

View file

@@ -206,9 +206,14 @@ mod tests {
#[test] #[test]
fn respects_max_tokens() { fn respects_max_tokens() {
// Build multi-line text (one sentence per line) to exercise line-level splitting // Build multi-line text (one sentence per line) to exercise line-level splitting
let long_text: String = (0..200) let long_text: String = (0..200).fold(String::new(), |mut s, i| {
.map(|i| format!("This is sentence number {i} with some extra words to fill it up.\n")) use std::fmt::Write;
.collect(); let _ = writeln!(
s,
"This is sentence number {i} with some extra words to fill it up."
);
s
});
let chunks = chunk_markdown(&long_text, 50); // 50 tokens ≈ 200 chars let chunks = chunk_markdown(&long_text, 50); // 50 tokens ≈ 200 chars
assert!( assert!(
chunks.len() > 1, chunks.len() > 1,
@@ -229,7 +234,8 @@ mod tests {
fn preserves_heading_in_split_sections() { fn preserves_heading_in_split_sections() {
let mut text = String::from("## Big Section\n"); let mut text = String::from("## Big Section\n");
for i in 0..100 { for i in 0..100 {
text.push_str(&format!("Line {i} with some content here.\n\n")); use std::fmt::Write;
let _ = write!(text, "Line {i} with some content here.\n\n");
} }
let chunks = chunk_markdown(&text, 50); let chunks = chunk_markdown(&text, 50);
assert!(chunks.len() > 1); assert!(chunks.len() > 1);
@@ -355,7 +361,11 @@ mod tests {
fn no_content_loss() { fn no_content_loss() {
let text = "# A\nContent A line 1\nContent A line 2\n\n## B\nContent B\n\n## C\nContent C"; let text = "# A\nContent A line 1\nContent A line 2\n\n## B\nContent B\n\n## C\nContent C";
let chunks = chunk_markdown(text, 512); let chunks = chunk_markdown(text, 512);
let reassembled: String = chunks.iter().map(|c| format!("{}\n", c.content)).collect(); let reassembled: String = chunks.iter().fold(String::new(), |mut s, c| {
use std::fmt::Write;
let _ = writeln!(s, "{}", c.content);
s
});
// All original content words should appear // All original content words should appear
for word in ["Content", "line", "1", "2"] { for word in ["Content", "line", "1", "2"] {
assert!( assert!(

View file

@@ -763,7 +763,7 @@ mod tests {
#[tokio::test] #[tokio::test]
async fn sqlite_category_roundtrip() { async fn sqlite_category_roundtrip() {
let (_tmp, mem) = temp_sqlite(); let (_tmp, mem) = temp_sqlite();
let categories = vec![ let categories = [
MemoryCategory::Core, MemoryCategory::Core,
MemoryCategory::Daily, MemoryCategory::Daily,
MemoryCategory::Conversation, MemoryCategory::Conversation,

View file

@@ -132,6 +132,12 @@ pub fn hybrid_merge(
} }
#[cfg(test)] #[cfg(test)]
#[allow(
clippy::float_cmp,
clippy::approx_constant,
clippy::cast_precision_loss,
clippy::cast_possible_truncation
)]
mod tests { mod tests {
use super::*; use super::*;
@@ -271,13 +277,15 @@ mod tests {
let b = vec![-1.0, 0.0]; let b = vec![-1.0, 0.0];
// Cosine = -1.0, clamped to 0.0 // Cosine = -1.0, clamped to 0.0
let sim = cosine_similarity(&a, &b); let sim = cosine_similarity(&a, &b);
assert_eq!(sim, 0.0); assert!(sim.abs() < f32::EPSILON);
} }
#[test] #[test]
fn cosine_high_dimensional() { fn cosine_high_dimensional() {
let a: Vec<f32> = (0..1536).map(|i| (i as f32) * 0.001).collect(); let a: Vec<f32> = (0..1536).map(|i| (f64::from(i) * 0.001) as f32).collect();
let b: Vec<f32> = (0..1536).map(|i| (i as f32) * 0.001 + 0.0001).collect(); let b: Vec<f32> = (0..1536)
.map(|i| (f64::from(i) * 0.001 + 0.0001) as f32)
.collect();
let sim = cosine_similarity(&a, &b); let sim = cosine_similarity(&a, &b);
assert!( assert!(
sim > 0.99, sim > 0.99,
@@ -288,14 +296,14 @@ mod tests {
#[test] #[test]
fn cosine_single_element() { fn cosine_single_element() {
assert!((cosine_similarity(&[5.0], &[5.0]) - 1.0).abs() < 0.001); assert!((cosine_similarity(&[5.0], &[5.0]) - 1.0).abs() < 0.001);
assert_eq!(cosine_similarity(&[5.0], &[-5.0]), 0.0); assert!(cosine_similarity(&[5.0], &[-5.0]).abs() < f32::EPSILON);
} }
#[test] #[test]
fn cosine_both_zero_vectors() { fn cosine_both_zero_vectors() {
let a = vec![0.0, 0.0]; let a = vec![0.0, 0.0];
let b = vec![0.0, 0.0]; let b = vec![0.0, 0.0];
assert_eq!(cosine_similarity(&a, &b), 0.0); assert!(cosine_similarity(&a, &b).abs() < f32::EPSILON);
} }
// ── Edge cases: vec↔bytes serialization ────────────────────── // ── Edge cases: vec↔bytes serialization ──────────────────────
@@ -306,7 +314,7 @@ mod tests {
let bytes = vec![0u8, 0, 0, 0, 0xFF]; let bytes = vec![0u8, 0, 0, 0, 0xFF];
let result = bytes_to_vec(&bytes); let result = bytes_to_vec(&bytes);
assert_eq!(result.len(), 1); assert_eq!(result.len(), 1);
assert_eq!(result[0], 0.0); assert!(result[0].abs() < f32::EPSILON);
} }
#[test] #[test]
@@ -351,7 +359,7 @@ mod tests {
let merged = hybrid_merge(&vec_results, &kw_results, 0.0, 0.0, 10); let merged = hybrid_merge(&vec_results, &kw_results, 0.0, 0.0, 10);
// All final scores should be 0.0 // All final scores should be 0.0
for r in &merged { for r in &merged {
assert_eq!(r.final_score, 0.0); assert!(r.final_score.abs() < f32::EPSILON);
} }
} }

View file

@@ -62,7 +62,9 @@ mod tests {
#[test] #[test]
fn factory_empty_string_falls_back_to_noop() { fn factory_empty_string_falls_back_to_noop() {
let cfg = ObservabilityConfig { backend: "".into() }; let cfg = ObservabilityConfig {
backend: String::new(),
};
assert_eq!(create_observer(&cfg).name(), "noop"); assert_eq!(create_observer(&cfg).name(), "noop");
} }

View file

@@ -64,7 +64,9 @@ mod tests {
#[test] #[test]
fn factory_empty_falls_back() { fn factory_empty_falls_back() {
let cfg = RuntimeConfig { kind: "".into() }; let cfg = RuntimeConfig {
kind: String::new(),
};
let rt = create_runtime(&cfg); let rt = create_runtime(&cfg);
assert_eq!(rt.name(), "native"); assert_eq!(rt.name(), "native");
} }

View file

@@ -329,6 +329,7 @@ pub fn handle_command(command: super::SkillCommands, workspace_dir: &Path) -> Re
} }
#[cfg(test)] #[cfg(test)]
#[allow(clippy::similar_names)]
mod tests { mod tests {
use super::*; use super::*;
use std::fs; use std::fs;

View file

@@ -396,7 +396,7 @@ mod tests {
#[test] #[test]
fn composio_actions_response_missing_items_defaults() { fn composio_actions_response_missing_items_defaults() {
let json_str = r#"{}"#; let json_str = r"{}";
let resp: ComposioActionsResponse = serde_json::from_str(json_str).unwrap(); let resp: ComposioActionsResponse = serde_json::from_str(json_str).unwrap();
assert!(resp.items.is_empty()); assert!(resp.items.is_empty());
} }

View file

@@ -129,7 +129,7 @@ mod tests {
CloudflareTunnelConfig, CustomTunnelConfig, NgrokTunnelConfig, TunnelConfig, CloudflareTunnelConfig, CustomTunnelConfig, NgrokTunnelConfig, TunnelConfig,
}; };
/// Helper: assert create_tunnel returns an error containing `needle`. /// Helper: assert `create_tunnel` returns an error containing `needle`.
fn assert_tunnel_err(cfg: &TunnelConfig, needle: &str) { fn assert_tunnel_err(cfg: &TunnelConfig, needle: &str) {
match create_tunnel(cfg) { match create_tunnel(cfg) {
Err(e) => assert!( Err(e) => assert!(