feat(gateway): enrich webhook and WhatsApp with workspace system prompt

Add workspace context (IDENTITY.md, AGENTS.md, etc.) to gateway webhook
and WhatsApp message handlers by using chat_with_system() with a
build_system_prompt()-generated system prompt instead of simple_chat().

This aligns gateway behavior with other channels (Telegram, Discord, etc.)
and the agent loop, which all pass system prompts via structured
ChatMessage::system() or chat_with_system().

Changes:
- handle_webhook: build system prompt and use chat_with_system()
- handle_whatsapp_message: build system prompt and use chat_with_system()

Risk: Low - uses existing build_system_prompt() function, no new dependencies
Rollback: Revert commit removes system prompt enrichment
This commit is contained in:
T. Budiman 2026-02-19 09:50:57 +07:00 committed by Chummy
parent 2016382f42
commit 2b8547b386

View file

@@ -786,9 +786,28 @@ async fn handle_webhook(
         messages_count: 1,
     });

+    // Build system prompt with workspace context (IDENTITY.md, AGENTS.md, etc.)
+    let system_prompt = {
+        let config_guard = state.config.lock();
+        crate::channels::build_system_prompt(
+            &config_guard.workspace_dir,
+            &state.model,
+            &[], // tools - empty for simple chat
+            &[], // skills
+            Some(&config_guard.identity),
+            None, // bootstrap_max_chars - use default
+        )
+    };
+
+    // Call the LLM with separate system prompt
     match state
         .provider
-        .simple_chat(message, &state.model, state.temperature)
+        .chat_with_system(
+            Some(&system_prompt),
+            message,
+            &state.model,
+            state.temperature,
+        )
         .await
     {
         Ok(response) => {
@@ -990,10 +1009,28 @@ async fn handle_whatsapp_message(
             .await;
     }

-    // Call the LLM
+    // Build system prompt with workspace context (IDENTITY.md, AGENTS.md, etc.)
+    let system_prompt = {
+        let config_guard = state.config.lock();
+        crate::channels::build_system_prompt(
+            &config_guard.workspace_dir,
+            &state.model,
+            &[], // tools - empty for simple chat
+            &[], // skills
+            Some(&config_guard.identity),
+            None, // bootstrap_max_chars - use default
+        )
+    };
+
+    // Call the LLM with separate system prompt
     match state
         .provider
-        .simple_chat(&msg.content, &state.model, state.temperature)
+        .chat_with_system(
+            Some(&system_prompt),
+            &msg.content,
+            &state.model,
+            state.temperature,
+        )
        .await
    {
        Ok(response) => {