diff --git a/systems/x86_64-linux/mx/nextcloud-claude-bot/bot.py b/systems/x86_64-linux/mx/nextcloud-claude-bot/bot.py
index 6962f5c..3788d2a 100644
--- a/systems/x86_64-linux/mx/nextcloud-claude-bot/bot.py
+++ b/systems/x86_64-linux/mx/nextcloud-claude-bot/bot.py
@@ -101,7 +101,8 @@ def verify_signature(body: bytes, signature: str, random: Optional[str] = None)
         return False
 
 
-DEFAULT_SYSTEM_PROMPT = """Du bist Claude, ein KI-Assistent im Nextcloud Talk Chat.
+BOT_SYSTEM_PROMPT = """\
+Du bist ein KI-Assistent im Nextcloud Talk Chat.
 Deine Antworten werden direkt in den Chatraum gepostet.
 Halte deine Antworten kurz und prägnant, da es ein Chat ist.
 Nutze Markdown für Formatierung wenn sinnvoll.
@@ -111,18 +112,16 @@ Du erhältst:
 - <current_message>: Die aktuelle Nachricht, auf die du antworten sollst"""
 
 
-def build_prompt(conversation_token: str, current_message: str, current_user: str) -> str:
-    """Build prompt with in-memory conversation history using XML structure."""
-    parts = []
-
-    # Add system prompt (hardcoded + optional custom)
-    parts.append("<system>")
-    parts.append(DEFAULT_SYSTEM_PROMPT)
+def build_system_prompt() -> str:
+    """Build the full system prompt from hardcoded + optional custom parts."""
     if SYSTEM_PROMPT:
-        parts.append("<custom_instructions>")
-        parts.append(SYSTEM_PROMPT.strip())
-        parts.append("</custom_instructions>")
-        parts.append("</system>")
+        return f"{BOT_SYSTEM_PROMPT}\n\n{SYSTEM_PROMPT.strip()}"
+    return BOT_SYSTEM_PROMPT
+
+
+def build_prompt(conversation_token: str, current_message: str, current_user: str) -> str:
+    """Build user prompt with in-memory conversation history using XML structure."""
+    parts = []
 
     # Add chat history if available
     history = conversations.get(conversation_token, [])
@@ -143,9 +142,14 @@ def build_prompt(conversation_token: str, current_message: str, current_user: st
 
 async def call_claude(prompt: str) -> str:
     """Call Claude CLI and return response."""
-    cmd = [CLAUDE_PATH, "--print"]
+    cmd = [
+        CLAUDE_PATH, "--print",
+        "--tools", "WebSearch,WebFetch",
+        "--allowedTools", "WebSearch,WebFetch",
+        "--append-system-prompt", build_system_prompt(),
+    ]
 
-    log.info(f"Calling Claude: {' '.join(cmd)}")
+    log.info(f"Calling Claude: {cmd[0]} --print --append-system-prompt ...")
 
     try:
         proc = await asyncio.create_subprocess_exec(