fix(provider): implement chat_with_history for OpenAI Codex and Gemini
Both providers only implemented chat_with_system, so the default
chat_with_history trait method was discarding all conversation history
except the last user message. This caused the Telegram bot to lose
context between messages.

Changes:

- OpenAiCodexProvider: extract send_responses_request helper, add
  chat_with_history that maps full ChatMessage history to ResponsesInput
- GeminiProvider: extract send_generate_content helper, add
  chat_with_history that maps ChatMessage history to Gemini Content
  (with assistant→model role mapping)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent 6eec888ff0
commit 1b57be7223
2 changed files with 149 additions and 37 deletions
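For context: the Provider trait itself is not part of this diff. The sketch below is an assumption, inferred from the commit message and from the signatures visible in the hunks, of how a default chat_with_history can discard everything but the system prompt(s) and the last user message. It is illustrative, not the crate's actual code.

use async_trait::async_trait;

// Assumed message shape, inferred from the diff below
// (msg.role.as_str(), msg.content.clone()); field visibility is a guess.
pub struct ChatMessage {
    pub role: String, // "system" | "user" | "assistant"
    pub content: String,
}

#[async_trait]
pub trait Provider {
    async fn chat_with_system(
        &self,
        system_prompt: Option<&str>,
        message: &str,
        model: &str,
        temperature: f64,
    ) -> anyhow::Result<String>;

    // Hypothetical default body reproducing the reported bug: only the
    // joined system prompts and the most recent user message are kept,
    // so assistant turns and earlier user turns never reach the API.
    async fn chat_with_history(
        &self,
        messages: &[ChatMessage],
        model: &str,
        temperature: f64,
    ) -> anyhow::Result<String> {
        let system: Vec<&str> = messages
            .iter()
            .filter(|m| m.role == "system")
            .map(|m| m.content.as_str())
            .collect();
        let last_user = messages
            .iter()
            .rev()
            .find(|m| m.role == "user")
            .map(|m| m.content.as_str())
            .unwrap_or("");
        let system_prompt = (!system.is_empty()).then(|| system.join("\n\n"));
        self.chat_with_system(system_prompt.as_deref(), last_user, model, temperature)
            .await
    }
}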
@@ -3,7 +3,7 @@
 //! - Gemini CLI OAuth tokens (reuse existing ~/.gemini/ authentication)
 //! - Google Cloud ADC (`GOOGLE_APPLICATION_CREDENTIALS`)
 
-use crate::providers::traits::Provider;
+use crate::providers::traits::{ChatMessage, Provider};
 use async_trait::async_trait;
 use directories::UserDirs;
 use reqwest::Client;
@@ -326,12 +326,11 @@ impl GeminiProvider {
     }
 }
 
-#[async_trait]
-impl Provider for GeminiProvider {
-    async fn chat_with_system(
+impl GeminiProvider {
+    async fn send_generate_content(
         &self,
-        system_prompt: Option<&str>,
-        message: &str,
+        contents: Vec<Content>,
+        system_instruction: Option<Content>,
         model: &str,
         temperature: f64,
     ) -> anyhow::Result<String> {
@@ -345,21 +344,8 @@ impl Provider for GeminiProvider {
             )
         })?;
 
-        // Build request
-        let system_instruction = system_prompt.map(|sys| Content {
-            role: None,
-            parts: vec![Part {
-                text: sys.to_string(),
-            }],
-        });
-
         let request = GenerateContentRequest {
-            contents: vec![Content {
-                role: Some("user".to_string()),
-                parts: vec![Part {
-                    text: message.to_string(),
-                }],
-            }],
+            contents,
             system_instruction,
             generation_config: GenerationConfig {
                 temperature,
@@ -382,12 +368,10 @@ impl Provider for GeminiProvider {
 
         let result: GenerateContentResponse = response.json().await?;
 
-        // Check for API error in response body
        if let Some(err) = result.error {
            anyhow::bail!("Gemini API error: {}", err.message);
        }
 
-        // Extract text from response
        result
            .candidates
            .and_then(|c| c.into_iter().next())
@@ -395,6 +379,84 @@
             .and_then(|p| p.text)
             .ok_or_else(|| anyhow::anyhow!("No response from Gemini"))
     }
+}
+
+#[async_trait]
+impl Provider for GeminiProvider {
+    async fn chat_with_system(
+        &self,
+        system_prompt: Option<&str>,
+        message: &str,
+        model: &str,
+        temperature: f64,
+    ) -> anyhow::Result<String> {
+        let system_instruction = system_prompt.map(|sys| Content {
+            role: None,
+            parts: vec![Part {
+                text: sys.to_string(),
+            }],
+        });
+
+        let contents = vec![Content {
+            role: Some("user".to_string()),
+            parts: vec![Part {
+                text: message.to_string(),
+            }],
+        }];
+
+        self.send_generate_content(contents, system_instruction, model, temperature)
+            .await
+    }
+
+    async fn chat_with_history(
+        &self,
+        messages: &[ChatMessage],
+        model: &str,
+        temperature: f64,
+    ) -> anyhow::Result<String> {
+        let mut system_parts: Vec<&str> = Vec::new();
+        let mut contents: Vec<Content> = Vec::new();
+
+        for msg in messages {
+            match msg.role.as_str() {
+                "system" => {
+                    system_parts.push(&msg.content);
+                }
+                "user" => {
+                    contents.push(Content {
+                        role: Some("user".to_string()),
+                        parts: vec![Part {
+                            text: msg.content.clone(),
+                        }],
+                    });
+                }
+                "assistant" => {
+                    // Gemini API uses "model" role instead of "assistant"
+                    contents.push(Content {
+                        role: Some("model".to_string()),
+                        parts: vec![Part {
+                            text: msg.content.clone(),
+                        }],
+                    });
+                }
+                _ => {}
+            }
+        }
+
+        let system_instruction = if system_parts.is_empty() {
+            None
+        } else {
+            Some(Content {
+                role: None,
+                parts: vec![Part {
+                    text: system_parts.join("\n\n"),
+                }],
+            })
+        };
+
+        self.send_generate_content(contents, system_instruction, model, temperature)
+            .await
+    }
+
     async fn warmup(&self) -> anyhow::Result<()> {
         if let Some(auth) = self.auth.as_ref() {
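Illustration (not part of the commit): a hypothetical caller of the new Gemini implementation. The model name and message texts are invented, and public ChatMessage fields are assumed.

// Hypothetical usage sketch; assumes the Provider trait is in scope.
async fn demo_gemini(provider: &GeminiProvider) -> anyhow::Result<String> {
    let history = vec![
        ChatMessage { role: "system".to_string(), content: "Be brief.".to_string() },
        ChatMessage { role: "user".to_string(), content: "My name is Ana.".to_string() },
        ChatMessage { role: "assistant".to_string(), content: "Hi, Ana.".to_string() },
        ChatMessage { role: "user".to_string(), content: "What is my name?".to_string() },
    ];
    // Before this fix, the default trait method forwarded only the last
    // user message; now every turn is mapped, with the "assistant" turn
    // sent to Gemini under its "model" role. Model name is illustrative.
    provider.chat_with_history(&history, "gemini-2.5-flash", 0.7).await
}

The OpenAI Codex provider receives the same restructuring in the second changed file, below.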
@@ -1,6 +1,6 @@
 use crate::auth::openai_oauth::extract_account_id_from_jwt;
 use crate::auth::AuthService;
-use crate::providers::traits::Provider;
+use crate::providers::traits::{ChatMessage, Provider};
 use crate::providers::ProviderRuntimeOptions;
 use async_trait::async_trait;
 use reqwest::Client;
@@ -335,14 +335,12 @@ async fn decode_responses_body(response: reqwest::Response) -> anyhow::Result<String> {
     extract_responses_text(&parsed).ok_or_else(|| anyhow::anyhow!("No response from OpenAI Codex"))
 }
 
-#[async_trait]
-impl Provider for OpenAiCodexProvider {
-    async fn chat_with_system(
+impl OpenAiCodexProvider {
+    async fn send_responses_request(
         &self,
-        system_prompt: Option<&str>,
-        message: &str,
+        input: Vec<ResponsesInput>,
+        instructions: String,
         model: &str,
-        _temperature: f64,
     ) -> anyhow::Result<String> {
         let profile = self
             .auth
@@ -368,14 +366,8 @@ impl Provider for OpenAiCodexProvider {
 
         let request = ResponsesRequest {
             model: normalized_model.to_string(),
-            input: vec![ResponsesInput {
-                role: "user".to_string(),
-                content: vec![ResponsesInputContent {
-                    kind: "input_text".to_string(),
-                    text: message.to_string(),
-                }],
-            }],
-            instructions: resolve_instructions(system_prompt),
+            input,
+            instructions,
             store: false,
             stream: true,
             text: ResponsesTextOptions {
@@ -411,6 +403,64 @@
     }
 }
 
+#[async_trait]
+impl Provider for OpenAiCodexProvider {
+    async fn chat_with_system(
+        &self,
+        system_prompt: Option<&str>,
+        message: &str,
+        model: &str,
+        _temperature: f64,
+    ) -> anyhow::Result<String> {
+        let input = vec![ResponsesInput {
+            role: "user".to_string(),
+            content: vec![ResponsesInputContent {
+                kind: "input_text".to_string(),
+                text: message.to_string(),
+            }],
+        }];
+        self.send_responses_request(input, resolve_instructions(system_prompt), model)
+            .await
+    }
+
+    async fn chat_with_history(
+        &self,
+        messages: &[ChatMessage],
+        model: &str,
+        _temperature: f64,
+    ) -> anyhow::Result<String> {
+        let mut system_parts: Vec<&str> = Vec::new();
+        let mut input: Vec<ResponsesInput> = Vec::new();
+
+        for msg in messages {
+            match msg.role.as_str() {
+                "system" => {
+                    system_parts.push(&msg.content);
+                }
+                "user" | "assistant" => {
+                    input.push(ResponsesInput {
+                        role: msg.role.clone(),
+                        content: vec![ResponsesInputContent {
+                            kind: "input_text".to_string(),
+                            text: msg.content.clone(),
+                        }],
+                    });
+                }
+                _ => {}
+            }
+        }
+
+        let instructions = if system_parts.is_empty() {
+            DEFAULT_CODEX_INSTRUCTIONS.to_string()
+        } else {
+            system_parts.join("\n\n")
+        };
+
+        self.send_responses_request(input, instructions, model)
+            .await
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
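Illustration (not part of the commit): the Codex history mapping restated as a standalone helper. In the diff the loop is inline in chat_with_history; this sketch only isolates that mapping.

// Non-system turns keep their role ("user" or "assistant") and each
// becomes one ResponsesInput carrying a single "input_text" part, as in
// the match arms above; system turns are routed into the instructions
// string separately, and any other role is dropped.
fn history_to_input(messages: &[ChatMessage]) -> Vec<ResponsesInput> {
    messages
        .iter()
        .filter(|m| m.role == "user" || m.role == "assistant")
        .map(|m| ResponsesInput {
            role: m.role.clone(),
            content: vec![ResponsesInputContent {
                kind: "input_text".to_string(),
                text: m.content.clone(),
            }],
        })
        .collect()
}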