Add Vercel and OpenRouter to zero-config planoai up defaults

Wires `vercel/*` and `openrouter/*` into the synthesized default config so
`planoai up` with no user config exposes both providers out of the box
(env-keyed via AI_GATEWAY_API_KEY / OPENROUTER_API_KEY, pass-through
otherwise). Registers both in SUPPORTED_PROVIDERS_WITHOUT_BASE_URL so
wildcard model entries validate without an explicit provider_interface.
This commit is contained in:
Spherrrical 2026-04-23 15:46:22 -07:00
parent 5cdfab2bf9
commit eb6ccd2689
4 changed files with 78 additions and 0 deletions

View file

@ -31,6 +31,8 @@ SUPPORTED_PROVIDERS_WITHOUT_BASE_URL = [
"zhipu",
"chatgpt",
"digitalocean",
"vercel",
"openrouter",
]
CHATGPT_API_BASE = "https://chatgpt.com/backend-api/codex"

View file

@ -81,6 +81,21 @@ PROVIDER_DEFAULTS: list[ProviderDefault] = [
base_url="https://inference.do-ai.run/v1",
model_pattern="digitalocean/*",
),
ProviderDefault(
name="vercel",
env_var="AI_GATEWAY_API_KEY",
base_url="https://ai-gateway.vercel.sh/v1",
model_pattern="vercel/*",
),
# OpenRouter is a first-class provider — the `openrouter/` model prefix is
# accepted by the schema and brightstaff's ProviderId parser, so no
# provider_interface override is needed.
ProviderDefault(
name="openrouter",
env_var="OPENROUTER_API_KEY",
base_url="https://openrouter.ai/api/v1",
model_pattern="openrouter/*",
),
]

View file

@ -253,6 +253,42 @@ llm_providers:
base_url: "http://custom.com/api/v2"
provider_interface: openai
""",
},
{
"id": "vercel_is_supported_provider",
"expected_error": None,
"plano_config": """
version: v0.4.0
listeners:
- name: llm
type: model
port: 12000
model_providers:
- model: vercel/*
base_url: https://ai-gateway.vercel.sh/v1
passthrough_auth: true
""",
},
{
"id": "openrouter_is_supported_provider",
"expected_error": None,
"plano_config": """
version: v0.4.0
listeners:
- name: llm
type: model
port: 12000
model_providers:
- model: openrouter/*
base_url: https://openrouter.ai/api/v1
passthrough_auth: true
""",
},
{

View file

@ -28,6 +28,8 @@ def test_zero_env_vars_produces_pure_passthrough():
# All known providers should be listed.
names = {p["name"] for p in cfg["model_providers"]}
assert "digitalocean" in names
assert "vercel" in names
assert "openrouter" in names
assert "openai" in names
assert "anthropic" in names
@ -84,3 +86,26 @@ def test_provider_defaults_digitalocean_is_configured():
assert by_name["digitalocean"].env_var == "DO_API_KEY"
assert by_name["digitalocean"].base_url == "https://inference.do-ai.run/v1"
assert by_name["digitalocean"].model_pattern == "digitalocean/*"
def test_provider_defaults_vercel_is_configured():
    """Vercel must ship in PROVIDER_DEFAULTS wired to the AI Gateway.

    Checks the env var, base URL, and wildcard model pattern that the
    zero-config `planoai up` path relies on.
    """
    vercel = next((p for p in PROVIDER_DEFAULTS if p.name == "vercel"), None)
    assert vercel is not None
    assert vercel.env_var == "AI_GATEWAY_API_KEY"
    assert vercel.base_url == "https://ai-gateway.vercel.sh/v1"
    assert vercel.model_pattern == "vercel/*"
def test_provider_defaults_openrouter_is_configured():
    """OpenRouter must ship in PROVIDER_DEFAULTS with its API settings.

    Checks the env var, base URL, and wildcard model pattern that the
    zero-config `planoai up` path relies on.
    """
    openrouter = next(
        (p for p in PROVIDER_DEFAULTS if p.name == "openrouter"), None
    )
    assert openrouter is not None
    assert openrouter.env_var == "OPENROUTER_API_KEY"
    assert openrouter.base_url == "https://openrouter.ai/api/v1"
    assert openrouter.model_pattern == "openrouter/*"
def test_openrouter_env_key_promotes_to_env_keyed():
    """Setting OPENROUTER_API_KEY switches openrouter from pass-through to env-keyed.

    The synthesized provider entry should carry an `access_key` referencing
    the env var, and `passthrough_auth` should no longer be set.
    """
    cfg = synthesize_default_config(env={"OPENROUTER_API_KEY": "or-1"})
    providers = {entry["name"]: entry for entry in cfg["model_providers"]}
    openrouter = providers["openrouter"]
    assert openrouter.get("access_key") == "$OPENROUTER_API_KEY"
    assert openrouter.get("passthrough_auth") is None