Mirror of https://github.com/katanemo/plano.git (synced 2026-05-10 08:12:48 +02:00)
feat(providers): add Vercel AI Gateway and OpenRouter support (#902)
Some checks are pending
CI / pre-commit (push) Waiting to run
CI / plano-tools-tests (push) Waiting to run
CI / native-smoke-test (push) Waiting to run
CI / docker-build (push) Waiting to run
CI / validate-config (push) Waiting to run
CI / security-scan (push) Blocked by required conditions
CI / test-prompt-gateway (push) Blocked by required conditions
CI / test-model-alias-routing (push) Blocked by required conditions
CI / test-responses-api-with-state (push) Blocked by required conditions
CI / e2e-plano-tests (3.10) (push) Blocked by required conditions
CI / e2e-plano-tests (3.11) (push) Blocked by required conditions
CI / e2e-plano-tests (3.12) (push) Blocked by required conditions
CI / e2e-plano-tests (3.13) (push) Blocked by required conditions
CI / e2e-plano-tests (3.14) (push) Blocked by required conditions
CI / e2e-demo-preference (push) Blocked by required conditions
CI / e2e-demo-currency (push) Blocked by required conditions
Publish docker image (latest) / build-arm64 (push) Waiting to run
Publish docker image (latest) / build-amd64 (push) Waiting to run
Publish docker image (latest) / create-manifest (push) Blocked by required conditions
Build and Deploy Documentation / build (push) Waiting to run
* add Vercel and OpenRouter as OpenAI-compatible LLM providers

* fix(fmt): fix cargo fmt line length issues in provider id tests

  Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>

* style(hermesllm): fix rustfmt formatting in provider id tests

* Add Vercel and OpenRouter to zero-config `planoai up` defaults

  Wires `vercel/*` and `openrouter/*` into the synthesized default config so `planoai up`
  with no user config exposes both providers out of the box (env-keyed via
  AI_GATEWAY_API_KEY / OPENROUTER_API_KEY, pass-through otherwise). Registers both in
  SUPPORTED_PROVIDERS_WITHOUT_BASE_URL so wildcard model entries validate without an
  explicit provider_interface.

---------

Co-authored-by: Musa Malik <musam@uw.edu>
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
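The zero-config wiring described in the last bullet can be pictured with a small standalone sketch. This is not the planoai CLI's actual code: the DefaultProvider struct and synthesized_defaults function are hypothetical, and only the provider prefixes (vercel/*, openrouter/*) and the AI_GATEWAY_API_KEY / OPENROUTER_API_KEY environment variables are taken from the commit message above.

// Minimal, illustrative sketch of the zero-config behavior described in the commit
// message -- NOT the actual plano/planoai implementation. Type and function names
// here are hypothetical.
use std::env;

struct DefaultProvider {
    name: &'static str,      // provider prefix used in model names, e.g. "vercel" for vercel/*
    api_key: Option<String>, // None => pass the caller's own credentials through unchanged
}

fn synthesized_defaults() -> Vec<DefaultProvider> {
    [
        ("vercel", "AI_GATEWAY_API_KEY"),
        ("openrouter", "OPENROUTER_API_KEY"),
    ]
    .into_iter()
    .map(|(name, var)| DefaultProvider {
        name,
        // Env-keyed when the variable is set, pass-through otherwise.
        api_key: env::var(var).ok(),
    })
    .collect()
}

fn main() {
    for p in synthesized_defaults() {
        println!("{}/*: env key present = {}", p.name, p.api_key.is_some());
    }
}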
This commit is contained in:
parent 78dc4edad9
commit b81eb7266c
7 changed files with 229 additions and 8 deletions
@@ -175,7 +175,9 @@ impl SupportedAPIsFromClient {
         match self {
             SupportedAPIsFromClient::AnthropicMessagesAPI(AnthropicApi::Messages) => {
                 match provider_id {
-                    ProviderId::Anthropic => build_endpoint("/v1", "/messages"),
+                    ProviderId::Anthropic | ProviderId::Vercel => {
+                        build_endpoint("/v1", "/messages")
+                    }
                     ProviderId::AmazonBedrock => {
                         if request_path.starts_with("/v1/") && !is_streaming {
                             build_endpoint("", &format!("/model/{}/converse", model_id))
@@ -192,9 +194,10 @@
             // For Responses API, check if provider supports it, otherwise translate to chat/completions
             match provider_id {
                 // Providers that support /v1/responses natively
-                ProviderId::OpenAI | ProviderId::XAI | ProviderId::ChatGPT => {
-                    route_by_provider("/responses")
-                }
+                ProviderId::OpenAI
+                | ProviderId::XAI
+                | ProviderId::ChatGPT
+                | ProviderId::Vercel => route_by_provider("/responses"),
                 // All other providers: translate to /chat/completions
                 _ => route_by_provider("/chat/completions"),
             }
@@ -720,4 +723,36 @@ mod tests {
             "/v1/responses"
         );
     }
+
+    #[test]
+    fn test_responses_api_targets_chatgpt_native_responses_endpoint() {
+        let api = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
+        assert_eq!(
+            api.target_endpoint_for_provider(
+                &ProviderId::ChatGPT,
+                "/v1/responses",
+                "gpt-5.4",
+                false,
+                None,
+                false
+            ),
+            "/v1/responses"
+        );
+    }
+
+    #[test]
+    fn test_responses_api_targets_vercel_native_responses_endpoint() {
+        let api = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
+        assert_eq!(
+            api.target_endpoint_for_provider(
+                &ProviderId::Vercel,
+                "/v1/responses",
+                "gpt-5.4",
+                false,
+                None,
+                false
+            ),
+            "/v1/responses"
+        );
+    }
 }
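The endpoint hunks and tests above boil down to a small routing table: Vercel is treated as a native peer of Anthropic for /v1/messages and of OpenAI, xAI, and ChatGPT for /v1/responses, while OpenRouter, like the other OpenAI-compatible providers, is always served via chat completions. The standalone sketch below restates that table; it deliberately avoids the crate's real types (SupportedAPIsFromClient, ProviderId), so the enum and function names here are illustrative only, and provider-specific cases such as Bedrock's converse endpoints are omitted.

// Illustrative routing table only; not the crate's implementation.
#[allow(dead_code)]
enum Provider { OpenAI, Xai, ChatGpt, Vercel, OpenRouter, Anthropic, Other }

#[allow(dead_code)]
enum ClientApi { AnthropicMessages, OpenAIChatCompletions, OpenAIResponses }

fn target_path(provider: &Provider, api: &ClientApi) -> &'static str {
    match api {
        // Anthropic and (after this commit) Vercel expose the Messages API natively.
        ClientApi::AnthropicMessages => match provider {
            Provider::Anthropic | Provider::Vercel => "/v1/messages",
            _ => "/v1/chat/completions", // translated for OpenAI-compatible providers
        },
        ClientApi::OpenAIChatCompletions => "/v1/chat/completions",
        // Only OpenAI, xAI, ChatGPT and (now) Vercel serve /v1/responses natively.
        ClientApi::OpenAIResponses => match provider {
            Provider::OpenAI | Provider::Xai | Provider::ChatGpt | Provider::Vercel => {
                "/v1/responses"
            }
            _ => "/v1/chat/completions", // e.g. OpenRouter gets a translated request
        },
    }
}

fn main() {
    assert_eq!(
        target_path(&Provider::Vercel, &ClientApi::OpenAIResponses),
        "/v1/responses"
    );
    assert_eq!(
        target_path(&Provider::OpenRouter, &ClientApi::OpenAIResponses),
        "/v1/chat/completions"
    );
    assert_eq!(
        target_path(&Provider::Vercel, &ClientApi::AnthropicMessages),
        "/v1/messages"
    );
    println!("routing table matches the diff above");
}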
@@ -46,6 +46,8 @@ pub enum ProviderId {
     AmazonBedrock,
     ChatGPT,
     DigitalOcean,
+    Vercel,
+    OpenRouter,
 }
 
 impl TryFrom<&str> for ProviderId {
@@ -77,6 +79,8 @@ impl TryFrom<&str> for ProviderId {
             "digitalocean" => Ok(ProviderId::DigitalOcean),
             "do" => Ok(ProviderId::DigitalOcean), // alias
             "do_ai" => Ok(ProviderId::DigitalOcean), // alias
+            "vercel" => Ok(ProviderId::Vercel),
+            "openrouter" => Ok(ProviderId::OpenRouter),
             _ => Err(format!("Unknown provider: {}", value)),
         }
     }
@@ -140,6 +144,17 @@ impl ProviderId {
                 SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions)
             }
 
+            // Vercel AI Gateway natively supports all three API types
+            (ProviderId::Vercel, SupportedAPIsFromClient::AnthropicMessagesAPI(_)) => {
+                SupportedUpstreamAPIs::AnthropicMessagesAPI(AnthropicApi::Messages)
+            }
+            (ProviderId::Vercel, SupportedAPIsFromClient::OpenAIChatCompletions(_)) => {
+                SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions)
+            }
+            (ProviderId::Vercel, SupportedAPIsFromClient::OpenAIResponsesAPI(_)) => {
+                SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
+            }
+
             // OpenAI-compatible providers only support OpenAI chat completions
             (
                 ProviderId::OpenAI
@@ -157,8 +172,9 @@
                 | ProviderId::Moonshotai
                 | ProviderId::Zhipu
                 | ProviderId::Qwen
-                | ProviderId::ChatGPT
-                | ProviderId::DigitalOcean,
+                | ProviderId::DigitalOcean
+                | ProviderId::OpenRouter
+                | ProviderId::ChatGPT,
                 SupportedAPIsFromClient::AnthropicMessagesAPI(_),
             ) => SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
 
@@ -178,8 +194,9 @@ impl ProviderId {
                 | ProviderId::Moonshotai
                 | ProviderId::Zhipu
                 | ProviderId::Qwen
-                | ProviderId::ChatGPT
-                | ProviderId::DigitalOcean,
+                | ProviderId::DigitalOcean
+                | ProviderId::OpenRouter
+                | ProviderId::ChatGPT,
                 SupportedAPIsFromClient::OpenAIChatCompletions(_),
             ) => SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
 
@@ -248,6 +265,8 @@ impl Display for ProviderId {
             ProviderId::AmazonBedrock => write!(f, "amazon_bedrock"),
             ProviderId::ChatGPT => write!(f, "chatgpt"),
             ProviderId::DigitalOcean => write!(f, "digitalocean"),
+            ProviderId::Vercel => write!(f, "vercel"),
+            ProviderId::OpenRouter => write!(f, "openrouter"),
         }
     }
 }
@@ -350,6 +369,79 @@ mod tests {
         );
     }
 
+    #[test]
+    fn test_vercel_and_openrouter_parsing() {
+        assert_eq!(ProviderId::try_from("vercel"), Ok(ProviderId::Vercel));
+        assert!(ProviderId::try_from("vercel_ai").is_err());
+        assert_eq!(
+            ProviderId::try_from("openrouter"),
+            Ok(ProviderId::OpenRouter)
+        );
+        assert!(ProviderId::try_from("open_router").is_err());
+    }
+
+    #[test]
+    fn test_vercel_compatible_api() {
+        use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
+
+        let openai_client =
+            SupportedAPIsFromClient::OpenAIChatCompletions(OpenAIApi::ChatCompletions);
+        let upstream = ProviderId::Vercel.compatible_api_for_client(&openai_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::OpenAIChatCompletions(_)),
+            "Vercel should map OpenAI client to OpenAIChatCompletions upstream"
+        );
+
+        let anthropic_client =
+            SupportedAPIsFromClient::AnthropicMessagesAPI(AnthropicApi::Messages);
+        let upstream = ProviderId::Vercel.compatible_api_for_client(&anthropic_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::AnthropicMessagesAPI(_)),
+            "Vercel should map Anthropic client to AnthropicMessagesAPI upstream natively"
+        );
+
+        let responses_client = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
+        let upstream = ProviderId::Vercel.compatible_api_for_client(&responses_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::OpenAIResponsesAPI(_)),
+            "Vercel should map Responses API client to OpenAIResponsesAPI upstream natively"
+        );
+    }
+
+    #[test]
+    fn test_openrouter_compatible_api() {
+        use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
+
+        let openai_client =
+            SupportedAPIsFromClient::OpenAIChatCompletions(OpenAIApi::ChatCompletions);
+        let upstream = ProviderId::OpenRouter.compatible_api_for_client(&openai_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::OpenAIChatCompletions(_)),
+            "OpenRouter should map OpenAI client to OpenAIChatCompletions upstream"
+        );
+
+        let anthropic_client =
+            SupportedAPIsFromClient::AnthropicMessagesAPI(AnthropicApi::Messages);
+        let upstream = ProviderId::OpenRouter.compatible_api_for_client(&anthropic_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::OpenAIChatCompletions(_)),
+            "OpenRouter should translate Anthropic client to OpenAIChatCompletions upstream"
+        );
+
+        let responses_client = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
+        let upstream = ProviderId::OpenRouter.compatible_api_for_client(&responses_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::OpenAIChatCompletions(_)),
+            "OpenRouter should translate Responses API client to OpenAIChatCompletions upstream"
+        );
+    }
+
+    #[test]
+    fn test_vercel_and_openrouter_empty_models() {
+        assert!(ProviderId::Vercel.models().is_empty());
+        assert!(ProviderId::OpenRouter.models().is_empty());
+    }
+
     #[test]
     fn test_xai_uses_responses_api_for_responses_clients() {
         use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
@@ -361,4 +453,16 @@ mod tests {
             SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
         ));
     }
+
+    #[test]
+    fn test_chatgpt_uses_responses_api_for_responses_clients() {
+        use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
+
+        let client_api = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
+        let upstream = ProviderId::ChatGPT.compatible_api_for_client(&client_api, false);
+        assert!(matches!(
+            upstream,
+            SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
+        ));
+    }
 }
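End to end, a caller addresses the two new providers through the gateway's OpenAI-compatible surface by prefixing the model name, as the zero-config defaults imply. The sketch below is hypothetical on every point this commit does not pin down: the listen address and the concrete model id are placeholders, and the request body is simply the standard chat-completions payload; only the openrouter/ (or vercel/) prefix and the chat-completions path come from the changes above. It assumes the reqwest (blocking, json) and serde_json crates.

// Hypothetical client-side call; adjust the base URL and model name to your deployment.
use serde_json::json;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let base = "http://localhost:12000"; // placeholder gateway address, not from this commit
    let body = json!({
        // The provider prefix selects the upstream: "vercel/..." or "openrouter/...".
        "model": "openrouter/some-model-id", // placeholder model id
        "messages": [{ "role": "user", "content": "Hello through the gateway" }]
    });

    let resp = reqwest::blocking::Client::new()
        .post(format!("{base}/v1/chat/completions"))
        .json(&body)
        .send()?;

    println!("status: {}", resp.status());
    println!("{}", resp.text()?);
    Ok(())
}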