From 2b8547b386a6578e156b06075a6096e0b3c321c1 Mon Sep 17 00:00:01 2001
From: "T. Budiman"
Date: Thu, 19 Feb 2026 09:50:57 +0700
Subject: [PATCH] feat(gateway): enrich webhook and WhatsApp with workspace
 system prompt

Add workspace context (IDENTITY.md, AGENTS.md, etc.) to gateway webhook
and WhatsApp message handlers by using chat_with_system() with a
build_system_prompt()-generated system prompt instead of simple_chat().

This aligns gateway behavior with other channels (Telegram, Discord,
etc.) and the agent loop, which all pass system prompts via structured
ChatMessage::system() or chat_with_system().

Changes:
- handle_webhook: build system prompt and use chat_with_system()
- handle_whatsapp_message: build system prompt and use chat_with_system()

Risk: Low - uses existing build_system_prompt() function, no new
dependencies
Rollback: Revert commit removes system prompt enrichment
---
 src/gateway/mod.rs | 43 ++++++++++++++++++++++++++++++++++++++++++++---
 1 file changed, 40 insertions(+), 3 deletions(-)

diff --git a/src/gateway/mod.rs b/src/gateway/mod.rs
index 36293c2..94d405d 100644
--- a/src/gateway/mod.rs
+++ b/src/gateway/mod.rs
@@ -786,9 +786,28 @@ async fn handle_webhook(
         messages_count: 1,
     });
 
+    // Build system prompt with workspace context (IDENTITY.md, AGENTS.md, etc.)
+    let system_prompt = {
+        let config_guard = state.config.lock();
+        crate::channels::build_system_prompt(
+            &config_guard.workspace_dir,
+            &state.model,
+            &[], // tools - empty for simple chat
+            &[], // skills
+            Some(&config_guard.identity),
+            None, // bootstrap_max_chars - use default
+        )
+    };
+
+    // Call the LLM with separate system prompt
     match state
         .provider
-        .simple_chat(message, &state.model, state.temperature)
+        .chat_with_system(
+            Some(&system_prompt),
+            message,
+            &state.model,
+            state.temperature,
+        )
         .await
     {
         Ok(response) => {
@@ -990,10 +1009,28 @@ async fn handle_whatsapp_message(
             .await;
     }
 
-    // Call the LLM
+    // Build system prompt with workspace context (IDENTITY.md, AGENTS.md, etc.)
+    let system_prompt = {
+        let config_guard = state.config.lock();
+        crate::channels::build_system_prompt(
+            &config_guard.workspace_dir,
+            &state.model,
+            &[], // tools - empty for simple chat
+            &[], // skills
+            Some(&config_guard.identity),
+            None, // bootstrap_max_chars - use default
+        )
+    };
+
+    // Call the LLM with separate system prompt
     match state
         .provider
-        .simple_chat(&msg.content, &state.model, state.temperature)
+        .chat_with_system(
+            Some(&system_prompt),
+            &msg.content,
+            &state.model,
+            state.temperature,
+        )
         .await
     {
         Ok(response) => {