feat(gateway): enrich webhook and WhatsApp with workspace system prompt
Add workspace context (IDENTITY.md, AGENTS.md, etc.) to the gateway webhook
and WhatsApp message handlers by using chat_with_system() with a
build_system_prompt()-generated system prompt instead of simple_chat().
This aligns gateway behavior with the other channels (Telegram, Discord,
etc.) and the agent loop, which all pass system prompts via structured
ChatMessage::system() or chat_with_system().

Changes:
- handle_webhook: build system prompt and use chat_with_system()
- handle_whatsapp_message: build system prompt and use chat_with_system()

Risk: Low — uses the existing build_system_prompt() function; no new dependencies.
Rollback: Reverting this commit removes the system prompt enrichment.
This commit is contained in:
parent
2016382f42
commit
2b8547b386
1 changed file with 40 additions and 3 deletions
|
|
@ -786,9 +786,28 @@ async fn handle_webhook(
|
|||
messages_count: 1,
|
||||
});
|
||||
|
||||
// Build system prompt with workspace context (IDENTITY.md, AGENTS.md, etc.)
|
||||
let system_prompt = {
|
||||
let config_guard = state.config.lock();
|
||||
crate::channels::build_system_prompt(
|
||||
&config_guard.workspace_dir,
|
||||
&state.model,
|
||||
&[], // tools - empty for simple chat
|
||||
&[], // skills
|
||||
Some(&config_guard.identity),
|
||||
None, // bootstrap_max_chars - use default
|
||||
)
|
||||
};
|
||||
|
||||
// Call the LLM with separate system prompt
|
||||
match state
|
||||
.provider
|
||||
.simple_chat(message, &state.model, state.temperature)
|
||||
.chat_with_system(
|
||||
Some(&system_prompt),
|
||||
message,
|
||||
&state.model,
|
||||
state.temperature,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(response) => {
|
||||
|
|
@ -990,10 +1009,28 @@ async fn handle_whatsapp_message(
|
|||
.await;
|
||||
}
|
||||
|
||||
// Call the LLM
|
||||
// Build system prompt with workspace context (IDENTITY.md, AGENTS.md, etc.)
|
||||
let system_prompt = {
|
||||
let config_guard = state.config.lock();
|
||||
crate::channels::build_system_prompt(
|
||||
&config_guard.workspace_dir,
|
||||
&state.model,
|
||||
&[], // tools - empty for simple chat
|
||||
&[], // skills
|
||||
Some(&config_guard.identity),
|
||||
None, // bootstrap_max_chars - use default
|
||||
)
|
||||
};
|
||||
|
||||
// Call the LLM with separate system prompt
|
||||
match state
|
||||
.provider
|
||||
.simple_chat(&msg.content, &state.model, state.temperature)
|
||||
.chat_with_system(
|
||||
Some(&system_prompt),
|
||||
&msg.content,
|
||||
&state.model,
|
||||
state.temperature,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(response) => {
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue