add Vercel and OpenRouter provider support

Author: Spherrrical
Date:   2026-04-23 15:11:15 -07:00
parent d39d7ddd1c
commit f890eb648c
7 changed files with 157 additions and 4 deletions

@@ -28,6 +28,8 @@ SUPPORTED_PROVIDERS_WITHOUT_BASE_URL = [
"xai",
"moonshotai",
"zhipu",
"vercel",
"openrouter",
]
SUPPORTED_PROVIDERS = (

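Both gateways sit at fixed public hostnames (ai-gateway.vercel.sh and openrouter.ai, per the Envoy clusters below), which is what qualifies them for this list: no user-supplied base_url is needed. A hedged sketch of the implied resolution; the default_host helper is hypothetical and not part of this diff:

// Hypothetical helper, for illustration only: providers on the
// no-base-url list resolve to fixed, well-known upstream hosts.
fn default_host(provider: &ProviderId) -> Option<&'static str> {
    match provider {
        ProviderId::Vercel => Some("ai-gateway.vercel.sh"), // see Envoy cluster below
        ProviderId::OpenRouter => Some("openrouter.ai"),
        _ => None, // others have their own clusters or require an explicit base_url
    }
}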
@@ -928,6 +928,60 @@ static_resources:
validation_context:
trusted_ca:
filename: {{ upstream_tls_ca_path | default('/etc/ssl/certs/ca-certificates.crt') }}
- name: vercel
connect_timeout: {{ upstream_connect_timeout | default('5s') }}
type: LOGICAL_DNS
dns_lookup_family: V4_ONLY
lb_policy: ROUND_ROBIN
load_assignment:
cluster_name: vercel
endpoints:
- lb_endpoints:
- endpoint:
address:
socket_address:
address: ai-gateway.vercel.sh
port_value: 443
hostname: "ai-gateway.vercel.sh"
transport_socket:
name: envoy.transport_sockets.tls
typed_config:
"@type": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.UpstreamTlsContext
sni: ai-gateway.vercel.sh
common_tls_context:
tls_params:
tls_minimum_protocol_version: TLSv1_2
tls_maximum_protocol_version: TLSv1_3
validation_context:
trusted_ca:
filename: {{ upstream_tls_ca_path | default('/etc/ssl/certs/ca-certificates.crt') }}
- name: openrouter
connect_timeout: {{ upstream_connect_timeout | default('5s') }}
type: LOGICAL_DNS
dns_lookup_family: V4_ONLY
lb_policy: ROUND_ROBIN
load_assignment:
cluster_name: openrouter
endpoints:
- lb_endpoints:
- endpoint:
address:
socket_address:
address: openrouter.ai
port_value: 443
hostname: "openrouter.ai"
transport_socket:
name: envoy.transport_sockets.tls
typed_config:
"@type": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.UpstreamTlsContext
sni: openrouter.ai
common_tls_context:
tls_params:
tls_minimum_protocol_version: TLSv1_2
tls_maximum_protocol_version: TLSv1_3
validation_context:
trusted_ca:
filename: {{ upstream_tls_ca_path | default('/etc/ssl/certs/ca-certificates.crt') }}
- name: mistral_7b_instruct
connect_timeout: 0.5s
type: STRICT_DNS

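Both new clusters follow the template's existing pattern: LOGICAL_DNS resolution over IPv4, round-robin load balancing, and upstream TLS pinned to versions 1.2 through 1.3 with SNI set to the gateway host. The hostname field on each endpoint exists so Envoy's auto_host_rewrite can set the Host header to match. For traffic to reach a cluster, a route must reference it by name; a hypothetical route entry is sketched below (only cluster: vercel is grounded in this diff, the match condition is illustrative):

# Hypothetical route sketch; the match condition is illustrative.
- match:
    prefix: "/v1/"
  route:
    cluster: vercel
    auto_host_rewrite: true  # rewrites Host using the endpoint's hostname field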
@@ -190,6 +190,8 @@ properties:
- openai
- xiaomi
- gemini
- vercel
- openrouter
routing_preferences:
type: array
items:
@@ -238,6 +240,8 @@ properties:
- openai
- xiaomi
- gemini
- vercel
- openrouter
routing_preferences:
type: array
items:

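The provider enum appears twice in this schema (both hunks above), so the two lists must be kept in sync whenever a provider is added. A fragment the updated schema would now accept might look like the sketch below; the provider key name is inferred from context and should be treated as illustrative:

# Hypothetical fragment; "vercel" and "openrouter" are the newly valid values.
- provider: vercel
  routing_preferences: []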
@@ -391,6 +391,10 @@ pub enum LlmProviderType {
AmazonBedrock,
#[serde(rename = "plano")]
Plano,
#[serde(rename = "vercel")]
Vercel,
#[serde(rename = "openrouter")]
OpenRouter,
}
impl Display for LlmProviderType {
@@ -412,6 +416,8 @@ impl Display for LlmProviderType {
LlmProviderType::Qwen => write!(f, "qwen"),
LlmProviderType::AmazonBedrock => write!(f, "amazon_bedrock"),
LlmProviderType::Plano => write!(f, "plano"),
LlmProviderType::Vercel => write!(f, "vercel"),
LlmProviderType::OpenRouter => write!(f, "openrouter"),
}
}
}

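The serde rename and the Display arm must agree so a provider name round-trips between config and logs unchanged. A hedged check, assuming LlmProviderType also derives serde::Deserialize (the rename attributes imply it does):

#[test]
fn vercel_openrouter_names_round_trip() {
    // Assumes a serde::Deserialize derive on LlmProviderType.
    let v: LlmProviderType = serde_json::from_str("\"vercel\"").unwrap();
    assert_eq!(v.to_string(), "vercel");
    let o: LlmProviderType = serde_json::from_str("\"openrouter\"").unwrap();
    assert_eq!(o.to_string(), "openrouter");
}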
@@ -36,8 +36,10 @@ fn main() {
eprintln!("Error fetching models: {}", e);
eprintln!("\nMake sure required tools are set up:");
eprintln!(" AWS CLI configured for Bedrock (for Amazon models)");
eprintln!(" export OPENAI_API_KEY=your-key-here # Optional");
eprintln!(" export DEEPSEEK_API_KEY=your-key-here # Optional");
eprintln!(" export OPENAI_API_KEY=your-key-here # Optional");
eprintln!(" export DEEPSEEK_API_KEY=your-key-here # Optional");
eprintln!(" export VERCEL_AI_GATEWAY_KEY=your-key-here # Optional");
eprintln!(" export OPENROUTER_API_KEY=your-key-here # Optional");
eprintln!(" cargo run --bin fetch_models");
std::process::exit(1);
}
@@ -322,6 +324,18 @@ fn fetch_all_models() -> Result<ProviderModels, Box<dyn std::error::Error>> {
"https://api.xiaomimimo.com/v1/models",
"xiaomi",
),
(
"vercel",
"VERCEL_AI_GATEWAY_KEY",
"https://ai-gateway.vercel.sh/v1/models",
"vercel",
),
(
"openrouter",
"OPENROUTER_API_KEY",
"https://openrouter.ai/api/v1/models",
"openrouter",
),
];
// Fetch from OpenAI-compatible providers

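Each tuple reads (display name, API-key env var, models endpoint, provider id). Note the asymmetry that resurfaces in the endpoint mapping later in this commit: OpenRouter's models endpoint lives under /api/v1 while Vercel's sits under /v1. A standalone sketch of the fetch these tuples imply, using reqwest for illustration (the binary's actual HTTP client may differ):

// Illustrative only: a bearer-authenticated GET against the models
// endpoint, mirroring what the (env var, URL) tuple implies.
fn fetch_openrouter_models() -> Result<String, Box<dyn std::error::Error>> {
    let key = std::env::var("OPENROUTER_API_KEY")?;
    let body = reqwest::blocking::Client::new()
        .get("https://openrouter.ai/api/v1/models")
        .bearer_auth(key)
        .send()?
        .error_for_status()?
        .text()?;
    Ok(body)
}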
@@ -151,6 +151,13 @@ impl SupportedAPIsFromClient {
build_endpoint("/v1", endpoint_suffix)
}
}
ProviderId::OpenRouter => {
if request_path.starts_with("/v1/") {
build_endpoint("/api/v1", endpoint_suffix)
} else {
build_endpoint("/v1", endpoint_suffix)
}
}
ProviderId::AmazonBedrock => {
if request_path.starts_with("/v1/") {
if !is_streaming {
@@ -703,6 +710,23 @@ mod tests {
);
}
#[test]
fn test_openrouter_endpoint() {
let api = SupportedAPIsFromClient::OpenAIChatCompletions(OpenAIApi::ChatCompletions);
assert_eq!(
api.target_endpoint_for_provider(
&ProviderId::OpenRouter,
"/v1/chat/completions",
"openai/gpt-4o",
false,
None,
false
),
"/api/v1/chat/completions"
);
}
#[test]
fn test_responses_api_targets_xai_native_responses_endpoint() {
let api = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);

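The new OpenRouter arm exists because OpenRouter serves its OpenAI-compatible API under /api/v1 rather than /v1; Vercel gets no arm because its gateway serves /v1 directly and presumably falls through to the default mapping. The rewrite rule, summarized:

// Rewrite rule added for OpenRouter (Vercel is deliberately absent here):
//   /v1/chat/completions   ->  /api/v1/chat/completions   (prefix swapped)
//   /v1/models             ->  /api/v1/models
//   <path not under /v1/>  ->  /v1/<endpoint_suffix>      (else branch)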
@@ -44,6 +44,8 @@ pub enum ProviderId {
Zhipu,
Qwen,
AmazonBedrock,
Vercel,
OpenRouter,
}
impl TryFrom<&str> for ProviderId {
@@ -71,6 +73,8 @@ impl TryFrom<&str> for ProviderId {
"qwen" => Ok(ProviderId::Qwen),
"amazon_bedrock" => Ok(ProviderId::AmazonBedrock),
"amazon" => Ok(ProviderId::AmazonBedrock), // alias
"vercel" => Ok(ProviderId::Vercel),
"openrouter" => Ok(ProviderId::OpenRouter),
_ => Err(format!("Unknown provider: {}", value)),
}
}
@@ -95,6 +99,9 @@ impl ProviderId {
ProviderId::Moonshotai => "moonshotai",
ProviderId::Zhipu => "z-ai",
ProviderId::Qwen => "qwen",
// Vercel and OpenRouter are open-ended gateways; model lists are unbounded.
// Users configure these with wildcards (e.g. vercel/*); no static expansion needed.
ProviderId::Vercel | ProviderId::OpenRouter => return Vec::new(),
_ => return Vec::new(),
};
@@ -148,7 +155,9 @@
| ProviderId::Ollama
| ProviderId::Moonshotai
| ProviderId::Zhipu
| ProviderId::Qwen
| ProviderId::Vercel
| ProviderId::OpenRouter,
SupportedAPIsFromClient::AnthropicMessagesAPI(_),
) => SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
@@ -167,7 +176,9 @@
| ProviderId::Ollama
| ProviderId::Moonshotai
| ProviderId::Zhipu
| ProviderId::Qwen
| ProviderId::Vercel
| ProviderId::OpenRouter,
SupportedAPIsFromClient::OpenAIChatCompletions(_),
) => SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
@@ -234,6 +245,8 @@ impl Display for ProviderId {
ProviderId::Zhipu => write!(f, "zhipu"),
ProviderId::Qwen => write!(f, "qwen"),
ProviderId::AmazonBedrock => write!(f, "amazon_bedrock"),
ProviderId::Vercel => write!(f, "vercel"),
ProviderId::OpenRouter => write!(f, "openrouter"),
}
}
}
@@ -336,6 +349,42 @@ mod tests {
);
}
#[test]
fn test_vercel_openrouter_parsing() {
assert_eq!(ProviderId::try_from("vercel"), Ok(ProviderId::Vercel));
assert_eq!(
ProviderId::try_from("openrouter"),
Ok(ProviderId::OpenRouter)
);
}
#[test]
fn test_vercel_openrouter_models_empty() {
// Vercel and OpenRouter are open-ended gateways; users configure them with wildcards
// (e.g. vercel/*) so no static model list is maintained.
assert!(ProviderId::Vercel.models().is_empty());
assert!(ProviderId::OpenRouter.models().is_empty());
}
#[test]
fn test_vercel_openrouter_use_chat_completions() {
use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
let client_api = SupportedAPIsFromClient::OpenAIChatCompletions(OpenAIApi::ChatCompletions);
let vercel_upstream = ProviderId::Vercel.compatible_api_for_client(&client_api, false);
assert!(matches!(
vercel_upstream,
SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions)
));
let openrouter_upstream =
ProviderId::OpenRouter.compatible_api_for_client(&client_api, false);
assert!(matches!(
openrouter_upstream,
SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions)
));
}
#[test]
fn test_xai_uses_responses_api_for_responses_clients() {
use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};