feat(bot): switch to in-memory conversation history
- Replaced Nextcloud chat history fetching with in-memory storage for conversation history.
- Added limits to history length based on an environment variable (`CONTEXT_MESSAGES`).
- Simplified prompt-building logic by removing async history fetching.
This commit is contained in:
parent
b35373b0ec
commit
9342933987
1 changed file with 24 additions and 56 deletions
|
|
@@ -49,8 +49,10 @@ log = logging.getLogger(__name__)
|
||||||
|
|
||||||
app = FastAPI(title="Nextcloud Claude Bot")
|
app = FastAPI(title="Nextcloud Claude Bot")
|
||||||
|
|
||||||
# Number of recent messages to fetch for context
|
# In-memory conversation history per conversation token
|
||||||
CONTEXT_MESSAGES = int(os.environ.get("CONTEXT_MESSAGES", "6"))
|
# Format: {token: [(user, message), ...]}
|
||||||
|
conversations: dict[str, list[tuple[str, str]]] = {}
|
||||||
|
MAX_HISTORY = int(os.environ.get("CONTEXT_MESSAGES", "6"))
|
||||||
|
|
||||||
|
|
||||||
def generate_bot_auth_headers(body: str = "") -> dict:
|
def generate_bot_auth_headers(body: str = "") -> dict:
|
||||||
|
|
@@ -68,35 +70,6 @@ def generate_bot_auth_headers(body: str = "") -> dict:
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
async def fetch_chat_history(conversation_token: str, limit: int = CONTEXT_MESSAGES) -> list[dict]:
|
|
||||||
"""Fetch recent messages from Nextcloud Talk conversation."""
|
|
||||||
if not NEXTCLOUD_URL:
|
|
||||||
log.warning("NEXTCLOUD_URL not configured, cannot fetch history")
|
|
||||||
return []
|
|
||||||
|
|
||||||
url = f"{NEXTCLOUD_URL}/ocs/v2.php/apps/spreed/api/v1/chat/{conversation_token}"
|
|
||||||
params = {
|
|
||||||
"limit": limit,
|
|
||||||
"lookIntoFuture": 0,
|
|
||||||
}
|
|
||||||
|
|
||||||
headers = generate_bot_auth_headers()
|
|
||||||
headers["Accept"] = "application/json"
|
|
||||||
|
|
||||||
async with httpx.AsyncClient() as client:
|
|
||||||
try:
|
|
||||||
resp = await client.get(url, params=params, headers=headers)
|
|
||||||
if resp.status_code == 200:
|
|
||||||
data = resp.json()
|
|
||||||
messages = data.get("ocs", {}).get("data", [])
|
|
||||||
# Messages come newest first, reverse for chronological order
|
|
||||||
return list(reversed(messages))
|
|
||||||
else:
|
|
||||||
log.warning(f"Failed to fetch chat history: {resp.status_code} {resp.text[:200]}")
|
|
||||||
return []
|
|
||||||
except Exception as e:
|
|
||||||
log.exception("Error fetching chat history")
|
|
||||||
return []
|
|
||||||
|
|
||||||
|
|
||||||
def verify_signature(body: bytes, signature: str, random: Optional[str] = None) -> bool:
|
def verify_signature(body: bytes, signature: str, random: Optional[str] = None) -> bool:
|
||||||
|
|
@@ -128,32 +101,17 @@ def verify_signature(body: bytes, signature: str, random: Optional[str] = None)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
async def build_prompt(conversation_token: str, current_message: str, current_user: str) -> str:
|
def build_prompt(conversation_token: str, current_message: str, current_user: str) -> str:
|
||||||
"""Build prompt with conversation history from Nextcloud."""
|
"""Build prompt with in-memory conversation history."""
|
||||||
parts = []
|
parts = []
|
||||||
|
|
||||||
if SYSTEM_PROMPT:
|
if SYSTEM_PROMPT:
|
||||||
parts.append(f"System: {SYSTEM_PROMPT}\n")
|
parts.append(f"System: {SYSTEM_PROMPT}\n")
|
||||||
|
|
||||||
# Fetch recent history from Nextcloud
|
# Add recent history from memory
|
||||||
history = await fetch_chat_history(conversation_token)
|
history = conversations.get(conversation_token, [])
|
||||||
|
for role, msg in history[-MAX_HISTORY:]:
|
||||||
# Add recent history (excluding the current message which triggered this)
|
parts.append(f"{role}: {msg}")
|
||||||
for msg in history:
|
|
||||||
actor_type = msg.get("actorType", "")
|
|
||||||
actor_id = msg.get("actorId", "")
|
|
||||||
message_text = msg.get("message", "")
|
|
||||||
msg_type = msg.get("messageType", "")
|
|
||||||
|
|
||||||
# Skip system messages
|
|
||||||
if msg_type == "system":
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Determine if this is a user or the bot
|
|
||||||
if actor_type == "bots":
|
|
||||||
parts.append(f"Assistant: {message_text}")
|
|
||||||
elif actor_type == "users":
|
|
||||||
parts.append(f"User ({actor_id}): {message_text}")
|
|
||||||
|
|
||||||
# Add current message
|
# Add current message
|
||||||
parts.append(f"User ({current_user}): {current_message}")
|
parts.append(f"User ({current_user}): {current_message}")
|
||||||
|
|
@@ -331,17 +289,27 @@ Schreib mir einfach eine Nachricht und ich antworte dir.
|
||||||
**Befehle:**
|
**Befehle:**
|
||||||
• `/help` oder `/hilfe` – Diese Hilfe anzeigen
|
• `/help` oder `/hilfe` – Diese Hilfe anzeigen
|
||||||
|
|
||||||
Der Bot nutzt die letzten Nachrichten aus dem Chat als Kontext."""
|
Der Bot merkt sich die letzten Nachrichten pro Raum (bis zum Neustart)."""
|
||||||
await send_reply(conversation_token, help_text, reply_to=message_id)
|
await send_reply(conversation_token, help_text, reply_to=message_id)
|
||||||
return JSONResponse({"status": "ok", "action": "help"})
|
return JSONResponse({"status": "ok", "action": "help"})
|
||||||
|
|
||||||
# Build prompt with chat history and call Claude
|
# Build prompt with chat history and call Claude
|
||||||
prompt = await build_prompt(conversation_token, message_text, actor_id)
|
prompt = build_prompt(conversation_token, message_text, actor_id)
|
||||||
response = await call_claude(prompt)
|
response = await call_claude(prompt)
|
||||||
|
|
||||||
|
# Store in history
|
||||||
|
if conversation_token not in conversations:
|
||||||
|
conversations[conversation_token] = []
|
||||||
|
conversations[conversation_token].append((f"User ({actor_id})", message_text))
|
||||||
|
conversations[conversation_token].append(("Assistant", response))
|
||||||
|
|
||||||
|
# Trim history
|
||||||
|
if len(conversations[conversation_token]) > MAX_HISTORY * 2:
|
||||||
|
conversations[conversation_token] = conversations[conversation_token][-MAX_HISTORY * 2:]
|
||||||
|
|
||||||
# Send response
|
# Send response
|
||||||
await send_reply(conversation_token, response, reply_to=message_id)
|
await send_reply(conversation_token, response, reply_to=message_id)
|
||||||
|
|
||||||
return JSONResponse({"status": "ok"})
|
return JSONResponse({"status": "ok"})
|
||||||
|
|
||||||
|
|
||||||
|
|
@@ -353,7 +321,7 @@ async def health():
|
||||||
"nextcloud_url": NEXTCLOUD_URL,
|
"nextcloud_url": NEXTCLOUD_URL,
|
||||||
"claude_path": CLAUDE_PATH,
|
"claude_path": CLAUDE_PATH,
|
||||||
"allowed_users": ALLOWED_USERS if ALLOWED_USERS else "all",
|
"allowed_users": ALLOWED_USERS if ALLOWED_USERS else "all",
|
||||||
"context_messages": CONTEXT_MESSAGES,
|
"max_history": MAX_HISTORY,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue