add Vercel and OpenRouter as OpenAI-compatible LLM providers

This commit is contained in:
Musa Malik 2026-04-20 00:23:23 +00:00
parent 9812540602
commit a72f558165
3 changed files with 97 additions and 4 deletions

View file

@ -191,6 +191,8 @@ properties:
- xiaomi
- gemini
- digitalocean
- vercel
- openrouter
routing_preferences:
type: array
items:
@ -240,6 +242,8 @@ properties:
- xiaomi
- gemini
- digitalocean
- vercel
- openrouter
routing_preferences:
type: array
items:

View file

@ -175,7 +175,9 @@ impl SupportedAPIsFromClient {
match self {
SupportedAPIsFromClient::AnthropicMessagesAPI(AnthropicApi::Messages) => {
match provider_id {
ProviderId::Anthropic => build_endpoint("/v1", "/messages"),
ProviderId::Anthropic | ProviderId::Vercel => {
build_endpoint("/v1", "/messages")
}
ProviderId::AmazonBedrock => {
if request_path.starts_with("/v1/") && !is_streaming {
build_endpoint("", &format!("/model/{}/converse", model_id))
@ -192,7 +194,9 @@ impl SupportedAPIsFromClient {
// For Responses API, check if provider supports it, otherwise translate to chat/completions
match provider_id {
// Providers that support /v1/responses natively
ProviderId::OpenAI | ProviderId::XAI => route_by_provider("/responses"),
ProviderId::OpenAI | ProviderId::XAI | ProviderId::Vercel => {
route_by_provider("/responses")
}
// All other providers: translate to /chat/completions
_ => route_by_provider("/chat/completions"),
}

View file

@ -45,6 +45,8 @@ pub enum ProviderId {
Qwen,
AmazonBedrock,
DigitalOcean,
Vercel,
OpenRouter,
}
impl TryFrom<&str> for ProviderId {
@ -75,6 +77,8 @@ impl TryFrom<&str> for ProviderId {
"digitalocean" => Ok(ProviderId::DigitalOcean),
"do" => Ok(ProviderId::DigitalOcean), // alias
"do_ai" => Ok(ProviderId::DigitalOcean), // alias
"vercel" => Ok(ProviderId::Vercel),
"openrouter" => Ok(ProviderId::OpenRouter),
_ => Err(format!("Unknown provider: {}", value)),
}
}
@ -137,6 +141,17 @@ impl ProviderId {
SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions)
}
// Vercel AI Gateway natively supports all three API types
(ProviderId::Vercel, SupportedAPIsFromClient::AnthropicMessagesAPI(_)) => {
SupportedUpstreamAPIs::AnthropicMessagesAPI(AnthropicApi::Messages)
}
(ProviderId::Vercel, SupportedAPIsFromClient::OpenAIChatCompletions(_)) => {
SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions)
}
(ProviderId::Vercel, SupportedAPIsFromClient::OpenAIResponsesAPI(_)) => {
SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
}
// OpenAI-compatible providers only support OpenAI chat completions
(
ProviderId::OpenAI
@ -154,7 +169,8 @@ impl ProviderId {
| ProviderId::Moonshotai
| ProviderId::Zhipu
| ProviderId::Qwen
| ProviderId::DigitalOcean,
| ProviderId::DigitalOcean
| ProviderId::OpenRouter,
SupportedAPIsFromClient::AnthropicMessagesAPI(_),
) => SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
@ -174,7 +190,8 @@ impl ProviderId {
| ProviderId::Moonshotai
| ProviderId::Zhipu
| ProviderId::Qwen
| ProviderId::DigitalOcean,
| ProviderId::DigitalOcean
| ProviderId::OpenRouter,
SupportedAPIsFromClient::OpenAIChatCompletions(_),
) => SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
@ -242,6 +259,8 @@ impl Display for ProviderId {
ProviderId::Qwen => write!(f, "qwen"),
ProviderId::AmazonBedrock => write!(f, "amazon_bedrock"),
ProviderId::DigitalOcean => write!(f, "digitalocean"),
ProviderId::Vercel => write!(f, "vercel"),
ProviderId::OpenRouter => write!(f, "openrouter"),
}
}
}
@ -344,6 +363,72 @@ mod tests {
);
}
#[test]
fn test_vercel_and_openrouter_parsing() {
    // Canonical identifiers parse to the expected variants.
    assert_eq!(ProviderId::try_from("vercel"), Ok(ProviderId::Vercel));
    assert_eq!(
        ProviderId::try_from("openrouter"),
        Ok(ProviderId::OpenRouter)
    );
    // Neither provider registers any alias spellings.
    assert!(ProviderId::try_from("vercel_ai").is_err());
    assert!(ProviderId::try_from("open_router").is_err());
}
#[test]
fn test_vercel_compatible_api() {
    use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
    // Vercel AI Gateway speaks all three client APIs natively, so each
    // client-side API should be passed straight through to the matching
    // upstream API rather than translated.
    let chat = SupportedAPIsFromClient::OpenAIChatCompletions(OpenAIApi::ChatCompletions);
    assert!(
        matches!(
            ProviderId::Vercel.compatible_api_for_client(&chat, false),
            SupportedUpstreamAPIs::OpenAIChatCompletions(_)
        ),
        "Vercel should map OpenAI client to OpenAIChatCompletions upstream"
    );
    let messages = SupportedAPIsFromClient::AnthropicMessagesAPI(AnthropicApi::Messages);
    assert!(
        matches!(
            ProviderId::Vercel.compatible_api_for_client(&messages, false),
            SupportedUpstreamAPIs::AnthropicMessagesAPI(_)
        ),
        "Vercel should map Anthropic client to AnthropicMessagesAPI upstream natively"
    );
    let responses = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
    assert!(
        matches!(
            ProviderId::Vercel.compatible_api_for_client(&responses, false),
            SupportedUpstreamAPIs::OpenAIResponsesAPI(_)
        ),
        "Vercel should map Responses API client to OpenAIResponsesAPI upstream natively"
    );
}
#[test]
fn test_openrouter_compatible_api() {
    use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
    // OpenRouter is OpenAI-compatible only: whatever API the client speaks,
    // the upstream call must land on /chat/completions.
    let chat = SupportedAPIsFromClient::OpenAIChatCompletions(OpenAIApi::ChatCompletions);
    assert!(
        matches!(
            ProviderId::OpenRouter.compatible_api_for_client(&chat, false),
            SupportedUpstreamAPIs::OpenAIChatCompletions(_)
        ),
        "OpenRouter should map OpenAI client to OpenAIChatCompletions upstream"
    );
    let messages = SupportedAPIsFromClient::AnthropicMessagesAPI(AnthropicApi::Messages);
    assert!(
        matches!(
            ProviderId::OpenRouter.compatible_api_for_client(&messages, false),
            SupportedUpstreamAPIs::OpenAIChatCompletions(_)
        ),
        "OpenRouter should translate Anthropic client to OpenAIChatCompletions upstream"
    );
    let responses = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
    assert!(
        matches!(
            ProviderId::OpenRouter.compatible_api_for_client(&responses, false),
            SupportedUpstreamAPIs::OpenAIChatCompletions(_)
        ),
        "OpenRouter should translate Responses API client to OpenAIChatCompletions upstream"
    );
}
#[test]
fn test_vercel_and_openrouter_empty_models() {
    // Neither gateway provider pins a static model catalogue.
    for provider in [ProviderId::Vercel, ProviderId::OpenRouter] {
        assert!(provider.models().is_empty());
    }
}
#[test]
fn test_xai_uses_responses_api_for_responses_clients() {
use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};