feat(providers): add warmup() for OpenAI, Anthropic, Gemini, Compatible, GLM
All five providers have HTTP clients but did not implement warmup(), relying on the trait default no-op. This adds lightweight warmup calls to establish TLS + HTTP/2 connection pools on startup, reducing first-request latency. Each warmup call is skipped when credentials are absent, matching the OpenRouter pattern. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
a85a4a8194
commit
1336c2f03e
5 changed files with 114 additions and 0 deletions
|
|
@ -322,6 +322,18 @@ impl Provider for OpenAiProvider {
|
|||
fn supports_native_tools(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
/// Pre-establishes the TLS + HTTP/2 connection pool by issuing a cheap
/// authenticated GET to the models endpoint.
///
/// When no credential is configured this is a no-op returning `Ok(())`,
/// so providers without keys never fail startup here. Any transport or
/// non-2xx HTTP error is propagated to the caller.
async fn warmup(&self) -> anyhow::Result<()> {
    // Guard clause: nothing to warm up without a credential.
    let Some(credential) = self.credential.as_ref() else {
        return Ok(());
    };

    let response = self
        .client
        .get("https://api.openai.com/v1/models")
        .header("Authorization", format!("Bearer {credential}"))
        .send()
        .await?;

    // Surface 4xx/5xx responses (e.g. a revoked key) as errors.
    response.error_for_status()?;
    Ok(())
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
|
@ -437,4 +449,11 @@ mod tests {
|
|||
let resp: ChatResponse = serde_json::from_str(&json).unwrap();
|
||||
assert_eq!(resp.choices[0].message.content.len(), 100_000);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn warmup_without_key_is_noop() {
|
||||
let provider = OpenAiProvider::new(None);
|
||||
let result = provider.warmup().await;
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue