feat(providers): add Vercel AI Gateway and OpenRouter support (#902)
Some checks are pending
CI / pre-commit (push) Waiting to run
CI / plano-tools-tests (push) Waiting to run
CI / native-smoke-test (push) Waiting to run
CI / docker-build (push) Waiting to run
CI / validate-config (push) Waiting to run
CI / security-scan (push) Blocked by required conditions
CI / test-prompt-gateway (push) Blocked by required conditions
CI / test-model-alias-routing (push) Blocked by required conditions
CI / test-responses-api-with-state (push) Blocked by required conditions
CI / e2e-plano-tests (3.10) (push) Blocked by required conditions
CI / e2e-plano-tests (3.11) (push) Blocked by required conditions
CI / e2e-plano-tests (3.12) (push) Blocked by required conditions
CI / e2e-plano-tests (3.13) (push) Blocked by required conditions
CI / e2e-plano-tests (3.14) (push) Blocked by required conditions
CI / e2e-demo-preference (push) Blocked by required conditions
CI / e2e-demo-currency (push) Blocked by required conditions
Publish docker image (latest) / build-arm64 (push) Waiting to run
Publish docker image (latest) / build-amd64 (push) Waiting to run
Publish docker image (latest) / create-manifest (push) Blocked by required conditions
Build and Deploy Documentation / build (push) Waiting to run

* add Vercel and OpenRouter as OpenAI-compatible LLM providers

* fix(fmt): fix cargo fmt line length issues in provider id tests

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>

* style(hermesllm): fix rustfmt formatting in provider id tests

* Add Vercel and OpenRouter to zero-config planoai up defaults

Wires `vercel/*` and `openrouter/*` into the synthesized default config so
`planoai up` with no user config exposes both providers out of the box
(env-keyed via AI_GATEWAY_API_KEY / OPENROUTER_API_KEY, pass-through
otherwise). Registers both in SUPPORTED_PROVIDERS_WITHOUT_BASE_URL so
wildcard model entries validate without an explicit provider_interface.

---------

Co-authored-by: Musa Malik <musam@uw.edu>
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Musa 2026-04-23 15:54:39 -07:00 committed by GitHub
parent 78dc4edad9
commit b81eb7266c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 229 additions and 8 deletions

View file

@@ -31,6 +31,8 @@ SUPPORTED_PROVIDERS_WITHOUT_BASE_URL = [
     "zhipu",
     "chatgpt",
     "digitalocean",
+    "vercel",
+    "openrouter",
 ]
 CHATGPT_API_BASE = "https://chatgpt.com/backend-api/codex"

View file

@@ -81,6 +81,21 @@ PROVIDER_DEFAULTS: list[ProviderDefault] = [
         base_url="https://inference.do-ai.run/v1",
         model_pattern="digitalocean/*",
     ),
+    ProviderDefault(
+        name="vercel",
+        env_var="AI_GATEWAY_API_KEY",
+        base_url="https://ai-gateway.vercel.sh/v1",
+        model_pattern="vercel/*",
+    ),
+    # OpenRouter is a first-class provider — the `openrouter/` model prefix is
+    # accepted by the schema and brightstaff's ProviderId parser, so no
+    # provider_interface override is needed.
+    ProviderDefault(
+        name="openrouter",
+        env_var="OPENROUTER_API_KEY",
+        base_url="https://openrouter.ai/api/v1",
+        model_pattern="openrouter/*",
+    ),
 ]

View file

@@ -253,6 +253,42 @@ llm_providers:
     base_url: "http://custom.com/api/v2"
     provider_interface: openai
 """,
     },
+    {
+        "id": "vercel_is_supported_provider",
+        "expected_error": None,
+        "plano_config": """
+version: v0.4.0
+listeners:
+  - name: llm
+    type: model
+    port: 12000
+model_providers:
+  - model: vercel/*
+    base_url: https://ai-gateway.vercel.sh/v1
+    passthrough_auth: true
+""",
+    },
+    {
+        "id": "openrouter_is_supported_provider",
+        "expected_error": None,
+        "plano_config": """
+version: v0.4.0
+listeners:
+  - name: llm
+    type: model
+    port: 12000
+model_providers:
+  - model: openrouter/*
+    base_url: https://openrouter.ai/api/v1
+    passthrough_auth: true
+""",
+    },
     {

View file

@@ -28,6 +28,8 @@ def test_zero_env_vars_produces_pure_passthrough():
     # All known providers should be listed.
     names = {p["name"] for p in cfg["model_providers"]}
     assert "digitalocean" in names
+    assert "vercel" in names
+    assert "openrouter" in names
     assert "openai" in names
     assert "anthropic" in names
@@ -84,3 +86,26 @@ def test_provider_defaults_digitalocean_is_configured():
     assert by_name["digitalocean"].env_var == "DO_API_KEY"
     assert by_name["digitalocean"].base_url == "https://inference.do-ai.run/v1"
     assert by_name["digitalocean"].model_pattern == "digitalocean/*"
+
+
+def test_provider_defaults_vercel_is_configured():
+    by_name = {p.name: p for p in PROVIDER_DEFAULTS}
+    assert "vercel" in by_name
+    assert by_name["vercel"].env_var == "AI_GATEWAY_API_KEY"
+    assert by_name["vercel"].base_url == "https://ai-gateway.vercel.sh/v1"
+    assert by_name["vercel"].model_pattern == "vercel/*"
+
+
+def test_provider_defaults_openrouter_is_configured():
+    by_name = {p.name: p for p in PROVIDER_DEFAULTS}
+    assert "openrouter" in by_name
+    assert by_name["openrouter"].env_var == "OPENROUTER_API_KEY"
+    assert by_name["openrouter"].base_url == "https://openrouter.ai/api/v1"
+    assert by_name["openrouter"].model_pattern == "openrouter/*"
+
+
+def test_openrouter_env_key_promotes_to_env_keyed():
+    cfg = synthesize_default_config(env={"OPENROUTER_API_KEY": "or-1"})
+    by_name = {p["name"]: p for p in cfg["model_providers"]}
+    assert by_name["openrouter"].get("access_key") == "$OPENROUTER_API_KEY"
+    assert by_name["openrouter"].get("passthrough_auth") is None

View file

@@ -192,6 +192,8 @@ properties:
           - gemini
           - chatgpt
          - digitalocean
+          - vercel
+          - openrouter
         headers:
           type: object
           additionalProperties:
@@ -247,6 +249,8 @@ properties:
           - gemini
           - chatgpt
           - digitalocean
+          - vercel
+          - openrouter
         headers:
           type: object
           additionalProperties:

View file

@@ -175,7 +175,9 @@ impl SupportedAPIsFromClient {
         match self {
             SupportedAPIsFromClient::AnthropicMessagesAPI(AnthropicApi::Messages) => {
                 match provider_id {
-                    ProviderId::Anthropic => build_endpoint("/v1", "/messages"),
+                    ProviderId::Anthropic | ProviderId::Vercel => {
+                        build_endpoint("/v1", "/messages")
+                    }
                     ProviderId::AmazonBedrock => {
                         if request_path.starts_with("/v1/") && !is_streaming {
                             build_endpoint("", &format!("/model/{}/converse", model_id))
@@ -192,9 +194,10 @@ impl SupportedAPIsFromClient {
                 // For Responses API, check if provider supports it, otherwise translate to chat/completions
                 match provider_id {
                     // Providers that support /v1/responses natively
-                    ProviderId::OpenAI | ProviderId::XAI | ProviderId::ChatGPT => {
-                        route_by_provider("/responses")
-                    }
+                    ProviderId::OpenAI
+                    | ProviderId::XAI
+                    | ProviderId::ChatGPT
+                    | ProviderId::Vercel => route_by_provider("/responses"),
                     // All other providers: translate to /chat/completions
                     _ => route_by_provider("/chat/completions"),
                 }
@@ -720,4 +723,36 @@ mod tests {
             "/v1/responses"
         );
     }
+
+    #[test]
+    fn test_responses_api_targets_chatgpt_native_responses_endpoint() {
+        let api = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
+        assert_eq!(
+            api.target_endpoint_for_provider(
+                &ProviderId::ChatGPT,
+                "/v1/responses",
+                "gpt-5.4",
+                false,
+                None,
+                false
+            ),
+            "/v1/responses"
+        );
+    }
+
+    #[test]
+    fn test_responses_api_targets_vercel_native_responses_endpoint() {
+        let api = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
+        assert_eq!(
+            api.target_endpoint_for_provider(
+                &ProviderId::Vercel,
+                "/v1/responses",
+                "gpt-5.4",
+                false,
+                None,
+                false
+            ),
+            "/v1/responses"
+        );
+    }
 }

View file

@@ -46,6 +46,8 @@ pub enum ProviderId {
     AmazonBedrock,
     ChatGPT,
     DigitalOcean,
+    Vercel,
+    OpenRouter,
 }

 impl TryFrom<&str> for ProviderId {
@@ -77,6 +79,8 @@ impl TryFrom<&str> for ProviderId {
             "digitalocean" => Ok(ProviderId::DigitalOcean),
             "do" => Ok(ProviderId::DigitalOcean), // alias
             "do_ai" => Ok(ProviderId::DigitalOcean), // alias
+            "vercel" => Ok(ProviderId::Vercel),
+            "openrouter" => Ok(ProviderId::OpenRouter),
             _ => Err(format!("Unknown provider: {}", value)),
         }
     }
@@ -140,6 +144,17 @@ impl ProviderId {
                 SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions)
             }
+
+            // Vercel AI Gateway natively supports all three API types
+            (ProviderId::Vercel, SupportedAPIsFromClient::AnthropicMessagesAPI(_)) => {
+                SupportedUpstreamAPIs::AnthropicMessagesAPI(AnthropicApi::Messages)
+            }
+            (ProviderId::Vercel, SupportedAPIsFromClient::OpenAIChatCompletions(_)) => {
+                SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions)
+            }
+            (ProviderId::Vercel, SupportedAPIsFromClient::OpenAIResponsesAPI(_)) => {
+                SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
+            }
             // OpenAI-compatible providers only support OpenAI chat completions
             (
                 ProviderId::OpenAI
@@ -157,8 +172,9 @@ impl ProviderId {
                 | ProviderId::Moonshotai
                 | ProviderId::Zhipu
                 | ProviderId::Qwen
-                | ProviderId::ChatGPT
-                | ProviderId::DigitalOcean,
+                | ProviderId::DigitalOcean
+                | ProviderId::OpenRouter
+                | ProviderId::ChatGPT,
                 SupportedAPIsFromClient::AnthropicMessagesAPI(_),
             ) => SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
@@ -178,8 +194,9 @@ impl ProviderId {
                 | ProviderId::Moonshotai
                 | ProviderId::Zhipu
                 | ProviderId::Qwen
-                | ProviderId::ChatGPT
-                | ProviderId::DigitalOcean,
+                | ProviderId::DigitalOcean
+                | ProviderId::OpenRouter
+                | ProviderId::ChatGPT,
                 SupportedAPIsFromClient::OpenAIChatCompletions(_),
             ) => SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
@@ -248,6 +265,8 @@ impl Display for ProviderId {
             ProviderId::AmazonBedrock => write!(f, "amazon_bedrock"),
             ProviderId::ChatGPT => write!(f, "chatgpt"),
             ProviderId::DigitalOcean => write!(f, "digitalocean"),
+            ProviderId::Vercel => write!(f, "vercel"),
+            ProviderId::OpenRouter => write!(f, "openrouter"),
         }
     }
 }
@@ -350,6 +369,79 @@ mod tests {
         );
     }
+
+    #[test]
+    fn test_vercel_and_openrouter_parsing() {
+        assert_eq!(ProviderId::try_from("vercel"), Ok(ProviderId::Vercel));
+        assert!(ProviderId::try_from("vercel_ai").is_err());
+        assert_eq!(
+            ProviderId::try_from("openrouter"),
+            Ok(ProviderId::OpenRouter)
+        );
+        assert!(ProviderId::try_from("open_router").is_err());
+    }
+
+    #[test]
+    fn test_vercel_compatible_api() {
+        use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
+
+        let openai_client =
+            SupportedAPIsFromClient::OpenAIChatCompletions(OpenAIApi::ChatCompletions);
+        let upstream = ProviderId::Vercel.compatible_api_for_client(&openai_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::OpenAIChatCompletions(_)),
+            "Vercel should map OpenAI client to OpenAIChatCompletions upstream"
+        );
+
+        let anthropic_client =
+            SupportedAPIsFromClient::AnthropicMessagesAPI(AnthropicApi::Messages);
+        let upstream = ProviderId::Vercel.compatible_api_for_client(&anthropic_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::AnthropicMessagesAPI(_)),
+            "Vercel should map Anthropic client to AnthropicMessagesAPI upstream natively"
+        );
+
+        let responses_client = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
+        let upstream = ProviderId::Vercel.compatible_api_for_client(&responses_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::OpenAIResponsesAPI(_)),
+            "Vercel should map Responses API client to OpenAIResponsesAPI upstream natively"
+        );
+    }
+
+    #[test]
+    fn test_openrouter_compatible_api() {
+        use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
+
+        let openai_client =
+            SupportedAPIsFromClient::OpenAIChatCompletions(OpenAIApi::ChatCompletions);
+        let upstream = ProviderId::OpenRouter.compatible_api_for_client(&openai_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::OpenAIChatCompletions(_)),
+            "OpenRouter should map OpenAI client to OpenAIChatCompletions upstream"
+        );
+
+        let anthropic_client =
+            SupportedAPIsFromClient::AnthropicMessagesAPI(AnthropicApi::Messages);
+        let upstream = ProviderId::OpenRouter.compatible_api_for_client(&anthropic_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::OpenAIChatCompletions(_)),
+            "OpenRouter should translate Anthropic client to OpenAIChatCompletions upstream"
+        );
+
+        let responses_client = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
+        let upstream = ProviderId::OpenRouter.compatible_api_for_client(&responses_client, false);
+        assert!(
+            matches!(upstream, SupportedUpstreamAPIs::OpenAIChatCompletions(_)),
+            "OpenRouter should translate Responses API client to OpenAIChatCompletions upstream"
+        );
+    }
+
+    #[test]
+    fn test_vercel_and_openrouter_empty_models() {
+        assert!(ProviderId::Vercel.models().is_empty());
+        assert!(ProviderId::OpenRouter.models().is_empty());
+    }
+
     #[test]
     fn test_xai_uses_responses_api_for_responses_clients() {
         use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
@@ -361,4 +453,16 @@ mod tests {
             SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
         ));
     }
+
+    #[test]
+    fn test_chatgpt_uses_responses_api_for_responses_clients() {
+        use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
+
+        let client_api = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
+        let upstream = ProviderId::ChatGPT.compatible_api_for_client(&client_api, false);
+        assert!(matches!(
+            upstream,
+            SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
+        ));
+    }
 }