diff --git a/config/plano_config_schema.yaml b/config/plano_config_schema.yaml
index 3439ebee..bdde05d4 100644
--- a/config/plano_config_schema.yaml
+++ b/config/plano_config_schema.yaml
@@ -191,6 +191,8 @@ properties:
             - xiaomi
             - gemini
             - digitalocean
+            - vercel
+            - openrouter
         routing_preferences:
           type: array
           items:
@@ -240,6 +242,8 @@ properties:
             - xiaomi
             - gemini
            - digitalocean
+            - vercel
+            - openrouter
         routing_preferences:
           type: array
           items:
diff --git a/crates/hermesllm/src/clients/endpoints.rs b/crates/hermesllm/src/clients/endpoints.rs
index 39b34358..67a60def 100644
--- a/crates/hermesllm/src/clients/endpoints.rs
+++ b/crates/hermesllm/src/clients/endpoints.rs
@@ -175,7 +175,9 @@ impl SupportedAPIsFromClient {
         match self {
             SupportedAPIsFromClient::AnthropicMessagesAPI(AnthropicApi::Messages) => {
                 match provider_id {
-                    ProviderId::Anthropic => build_endpoint("/v1", "/messages"),
+                    ProviderId::Anthropic | ProviderId::Vercel => {
+                        build_endpoint("/v1", "/messages")
+                    }
                     ProviderId::AmazonBedrock => {
                         if request_path.starts_with("/v1/") && !is_streaming {
                             build_endpoint("", &format!("/model/{}/converse", model_id))
@@ -192,7 +194,9 @@ impl SupportedAPIsFromClient {
                 // For Responses API, check if provider supports it, otherwise translate to chat/completions
                 match provider_id {
                     // Providers that support /v1/responses natively
-                    ProviderId::OpenAI | ProviderId::XAI => route_by_provider("/responses"),
+                    ProviderId::OpenAI | ProviderId::XAI | ProviderId::Vercel => {
+                        route_by_provider("/responses")
+                    }
                     // All other providers: translate to /chat/completions
                     _ => route_by_provider("/chat/completions"),
                 }
diff --git a/crates/hermesllm/src/providers/id.rs b/crates/hermesllm/src/providers/id.rs
index ee0fcff3..8a47b8c5 100644
--- a/crates/hermesllm/src/providers/id.rs
+++ b/crates/hermesllm/src/providers/id.rs
@@ -45,6 +45,8 @@ pub enum ProviderId {
     Qwen,
     AmazonBedrock,
     DigitalOcean,
+    Vercel,
+    OpenRouter,
 }
 
 impl TryFrom<&str> for ProviderId {
@@ -75,6 +77,8 @@ impl TryFrom<&str> for ProviderId {
             "digitalocean" => Ok(ProviderId::DigitalOcean),
             "do" => Ok(ProviderId::DigitalOcean), // alias
             "do_ai" => Ok(ProviderId::DigitalOcean), // alias
+            "vercel" => Ok(ProviderId::Vercel),
+            "openrouter" => Ok(ProviderId::OpenRouter),
             _ => Err(format!("Unknown provider: {}", value)),
         }
     }
@@ -137,6 +141,17 @@ impl ProviderId {
                 SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions)
             }
 
+            // Vercel AI Gateway natively supports all three API types
+            (ProviderId::Vercel, SupportedAPIsFromClient::AnthropicMessagesAPI(_)) => {
+                SupportedUpstreamAPIs::AnthropicMessagesAPI(AnthropicApi::Messages)
+            }
+            (ProviderId::Vercel, SupportedAPIsFromClient::OpenAIChatCompletions(_)) => {
+                SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions)
+            }
+            (ProviderId::Vercel, SupportedAPIsFromClient::OpenAIResponsesAPI(_)) => {
+                SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
+            }
+
             // OpenAI-compatible providers only support OpenAI chat completions
             (
                 ProviderId::OpenAI
@@ -154,7 +169,8 @@ impl ProviderId {
                 | ProviderId::Moonshotai
                 | ProviderId::Zhipu
                 | ProviderId::Qwen
-                | ProviderId::DigitalOcean,
+                | ProviderId::DigitalOcean
+                | ProviderId::OpenRouter,
                 SupportedAPIsFromClient::AnthropicMessagesAPI(_),
             ) => SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
 
@@ -174,7 +190,8 @@ impl ProviderId {
                 | ProviderId::Moonshotai
                 | ProviderId::Zhipu
                 | ProviderId::Qwen
-                | ProviderId::DigitalOcean,
+                | ProviderId::DigitalOcean
+                | ProviderId::OpenRouter,
                 SupportedAPIsFromClient::OpenAIChatCompletions(_),
             ) => SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
 
@@ -242,6 +259,8 @@ impl Display for ProviderId {
             ProviderId::Qwen => write!(f, "qwen"),
             ProviderId::AmazonBedrock => write!(f, "amazon_bedrock"),
             ProviderId::DigitalOcean => write!(f, "digitalocean"),
+            ProviderId::Vercel => write!(f, "vercel"),
+            ProviderId::OpenRouter => write!(f, "openrouter"),
         }
     }
 }
@@ -344,6 +363,72 @@ mod tests {
         );
     }
 
+    #[test]
+    fn test_vercel_and_openrouter_parsing() {
+        assert_eq!(ProviderId::try_from("vercel"), Ok(ProviderId::Vercel));
+        assert!(ProviderId::try_from("vercel_ai").is_err());
+        assert_eq!(ProviderId::try_from("openrouter"), Ok(ProviderId::OpenRouter));
+        assert!(ProviderId::try_from("open_router").is_err());
+    }
+
+    #[test]
+    fn test_vercel_compatible_api() {
+        use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
+
+        let openai_client = SupportedAPIsFromClient::OpenAIChatCompletions(OpenAIApi::ChatCompletions);
+        let upstream = ProviderId::Vercel.compatible_api_for_client(&openai_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::OpenAIChatCompletions(_)),
+            "Vercel should map OpenAI client to OpenAIChatCompletions upstream"
+        );
+
+        let anthropic_client = SupportedAPIsFromClient::AnthropicMessagesAPI(AnthropicApi::Messages);
+        let upstream = ProviderId::Vercel.compatible_api_for_client(&anthropic_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::AnthropicMessagesAPI(_)),
+            "Vercel should map Anthropic client to AnthropicMessagesAPI upstream natively"
+        );
+
+        let responses_client = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
+        let upstream = ProviderId::Vercel.compatible_api_for_client(&responses_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::OpenAIResponsesAPI(_)),
+            "Vercel should map Responses API client to OpenAIResponsesAPI upstream natively"
+        );
+    }
+
+    #[test]
+    fn test_openrouter_compatible_api() {
+        use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
+
+        let openai_client = SupportedAPIsFromClient::OpenAIChatCompletions(OpenAIApi::ChatCompletions);
+        let upstream = ProviderId::OpenRouter.compatible_api_for_client(&openai_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::OpenAIChatCompletions(_)),
+            "OpenRouter should map OpenAI client to OpenAIChatCompletions upstream"
+        );
+
+        let anthropic_client = SupportedAPIsFromClient::AnthropicMessagesAPI(AnthropicApi::Messages);
+        let upstream = ProviderId::OpenRouter.compatible_api_for_client(&anthropic_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::OpenAIChatCompletions(_)),
+            "OpenRouter should translate Anthropic client to OpenAIChatCompletions upstream"
+        );
+
+        let responses_client = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
+        let upstream = ProviderId::OpenRouter.compatible_api_for_client(&responses_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::OpenAIChatCompletions(_)),
+            "OpenRouter should translate Responses API client to OpenAIChatCompletions upstream"
+        );
+    }
+
+    #[test]
+    fn test_vercel_and_openrouter_empty_models() {
+        assert!(ProviderId::Vercel.models().is_empty());
+        assert!(ProviderId::OpenRouter.models().is_empty());
+    }
+
     #[test]
     fn test_xai_uses_responses_api_for_responses_clients() {
         use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};