feat(providers): add warmup() for OpenAI, Anthropic, Gemini, Compatible, GLM

All five providers have HTTP clients but previously did not implement
warmup(), relying on the trait's default no-op. This adds lightweight
warmup calls to establish TLS + HTTP/2 connection pools on startup,
reducing first-request latency. Each warmup is skipped when credentials
are absent, matching the OpenRouter pattern.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Edvard 2026-02-17 18:55:05 -05:00 committed by Chummy
parent a85a4a8194
commit 1336c2f03e
5 changed files with 114 additions and 0 deletions

View file

@ -500,6 +500,20 @@ impl Provider for AnthropicProvider {
fn supports_native_tools(&self) -> bool {
true
}
/// Pre-establishes the TLS + HTTP/2 connection pool to the Anthropic API.
///
/// Returns immediately when no credential is configured. The POST carries
/// no body, so the server will reject it; that is fine — the sole purpose
/// is paying the connection-setup cost before the first real request.
async fn warmup(&self) -> anyhow::Result<()> {
    let Some(credential) = self.credential.as_ref() else {
        return Ok(());
    };
    let builder = self
        .client
        .post(format!("{}/v1/messages", self.base_url))
        .header("anthropic-version", "2023-06-01");
    // Anthropic exposes no lightweight GET endpoint, so any HTTP status is
    // acceptable here; only transport-level errors propagate via `?`.
    let _ = self.apply_auth(builder, credential).send().await?;
    Ok(())
}
}
#[cfg(test)]
@ -1082,4 +1096,11 @@ mod tests {
assert!(!json.contains("cache_control"));
assert!(json.contains(r#""system":"System""#));
}
#[tokio::test]
async fn warmup_without_key_is_noop() {
    // Without a credential, warmup must succeed without performing any I/O.
    assert!(AnthropicProvider::new(None).warmup().await.is_ok());
}
}

View file

@ -775,6 +775,20 @@ impl Provider for OpenAiCompatibleProvider {
})
.boxed()
}
/// Warms the connection pool for the configured OpenAI-compatible endpoint.
///
/// Returns immediately when no credential is configured. A GET against the
/// chat completions URL will typically answer 405 Method Not Allowed, which
/// is acceptable: the goal is the TLS handshake and HTTP/2 negotiation, not
/// a valid response.
async fn warmup(&self) -> anyhow::Result<()> {
    let Some(credential) = self.credential.as_ref() else {
        return Ok(());
    };
    let request = self.client.get(&self.chat_completions_url());
    // Status and body are irrelevant; only transport errors propagate.
    let _ = self.apply_auth_header(request, credential).send().await?;
    Ok(())
}
}
#[cfg(test)]
@ -1129,4 +1143,11 @@ mod tests {
"https://opencode.ai/zen/v1/chat/completions"
);
}
#[tokio::test]
async fn warmup_without_key_is_noop() {
    // A provider built without a credential must warm up as a no-op.
    let provider = make_provider("test", "https://example.com", None);
    assert!(provider.warmup().await.is_ok());
}
}

View file

@ -396,6 +396,27 @@ impl Provider for GeminiProvider {
.and_then(|p| p.text)
.ok_or_else(|| anyhow::anyhow!("No response from Gemini"))
}
/// Warms the TLS + HTTP/2 connection to the Gemini API by listing models.
///
/// Returns immediately when no auth is configured. The models listing is a
/// real, cheap GET, so HTTP error statuses are surfaced through
/// `error_for_status` (e.g. an invalid key fails fast at startup).
async fn warmup(&self) -> anyhow::Result<()> {
    if let Some(auth) = self.auth.as_ref() {
        let mut request = self
            .client
            .get("https://generativelanguage.googleapis.com/v1beta/models");
        // Send the API key via the `x-goog-api-key` header instead of a
        // `?key=` query parameter so the credential cannot leak into URL
        // logs, proxies, or error messages. Google documents the header
        // as an equivalent, supported auth mechanism.
        if auth.is_api_key() {
            request = request.header("x-goog-api-key", auth.credential());
        }
        if let GeminiAuth::OAuthToken(token) = auth {
            request = request.bearer_auth(token);
        }
        request.send().await?.error_for_status()?;
    }
    Ok(())
}
}
#[cfg(test)]
@ -665,4 +686,11 @@ mod tests {
assert!(response.error.is_some());
assert_eq!(response.error.unwrap().message, "Invalid API key");
}
#[tokio::test]
async fn warmup_without_key_is_noop() {
    // With no auth configured, warmup must succeed without any network call.
    assert!(GeminiProvider::new(None).warmup().await.is_ok());
}
}

View file

@ -253,6 +253,24 @@ impl Provider for GlmProvider {
.map(|c| c.message.content)
.ok_or_else(|| anyhow::anyhow!("No response from GLM"))
}
/// Warms the TLS + HTTP/2 connection pool to the GLM API.
///
/// Returns immediately when either credential part is missing. Generating
/// the JWT up front also primes the token path before the first request.
/// A GET on the chat completions URL typically answers 405, which is
/// acceptable — only the connection setup matters, so status is ignored.
async fn warmup(&self) -> anyhow::Result<()> {
    let missing_creds = self.api_key_id.is_empty() || self.api_key_secret.is_empty();
    if missing_creds {
        return Ok(());
    }
    let token = self.generate_token()?;
    let _ = self
        .client
        .get(format!("{}/chat/completions", self.base_url))
        .header("Authorization", format!("Bearer {token}"))
        .send()
        .await?;
    Ok(())
}
}
#[cfg(test)]
@ -335,4 +353,11 @@ mod tests {
assert!(!encoded.contains('+'));
assert!(!encoded.contains('/'));
}
#[tokio::test]
async fn warmup_without_key_is_noop() {
    // Empty credentials must short-circuit warmup with Ok and no I/O.
    assert!(GlmProvider::new(None).warmup().await.is_ok());
}
}

View file

@ -322,6 +322,18 @@ impl Provider for OpenAiProvider {
fn supports_native_tools(&self) -> bool {
true
}
/// Warms the TLS + HTTP/2 connection to the OpenAI API.
///
/// Returns immediately when no credential is configured. Lists models — a
/// cheap authenticated GET — and surfaces HTTP error statuses, so a bad
/// key fails fast during startup rather than on the first chat request.
async fn warmup(&self) -> anyhow::Result<()> {
    let Some(credential) = self.credential.as_ref() else {
        return Ok(());
    };
    self.client
        .get("https://api.openai.com/v1/models")
        // bearer_auth produces the same "Authorization: Bearer <key>" header.
        .bearer_auth(credential)
        .send()
        .await?
        .error_for_status()?;
    Ok(())
}
}
#[cfg(test)]
@ -437,4 +449,11 @@ mod tests {
let resp: ChatResponse = serde_json::from_str(&json).unwrap();
assert_eq!(resp.choices[0].message.content.len(), 100_000);
}
#[tokio::test]
async fn warmup_without_key_is_noop() {
    // Absent credential: warmup must be a successful no-op.
    assert!(OpenAiProvider::new(None).warmup().await.is_ok());
}
}