diff --git a/cli/planoai/config_generator.py b/cli/planoai/config_generator.py
index 5eaae3c6..b372810d 100644
--- a/cli/planoai/config_generator.py
+++ b/cli/planoai/config_generator.py
@@ -31,6 +31,8 @@ SUPPORTED_PROVIDERS_WITHOUT_BASE_URL = [
     "zhipu",
     "chatgpt",
     "digitalocean",
+    "vercel",
+    "openrouter",
 ]
 
 CHATGPT_API_BASE = "https://chatgpt.com/backend-api/codex"
diff --git a/cli/planoai/defaults.py b/cli/planoai/defaults.py
index 110d0f3b..1d9468ff 100644
--- a/cli/planoai/defaults.py
+++ b/cli/planoai/defaults.py
@@ -81,6 +81,21 @@ PROVIDER_DEFAULTS: list[ProviderDefault] = [
         base_url="https://inference.do-ai.run/v1",
         model_pattern="digitalocean/*",
     ),
+    ProviderDefault(
+        name="vercel",
+        env_var="AI_GATEWAY_API_KEY",
+        base_url="https://ai-gateway.vercel.sh/v1",
+        model_pattern="vercel/*",
+    ),
+    # OpenRouter is a first-class provider — the `openrouter/` model prefix is
+    # accepted by the schema and brightstaff's ProviderId parser, so no
+    # provider_interface override is needed.
+    ProviderDefault(
+        name="openrouter",
+        env_var="OPENROUTER_API_KEY",
+        base_url="https://openrouter.ai/api/v1",
+        model_pattern="openrouter/*",
+    ),
 ]
 
 
diff --git a/cli/test/test_config_generator.py b/cli/test/test_config_generator.py
index 17fa56cc..3aec2390 100644
--- a/cli/test/test_config_generator.py
+++ b/cli/test/test_config_generator.py
@@ -253,6 +253,42 @@ llm_providers:
     base_url: "http://custom.com/api/v2"
     provider_interface: openai
 
+""",
+    },
+    {
+        "id": "vercel_is_supported_provider",
+        "expected_error": None,
+        "plano_config": """
+version: v0.4.0
+
+listeners:
+  - name: llm
+    type: model
+    port: 12000
+
+model_providers:
+  - model: vercel/*
+    base_url: https://ai-gateway.vercel.sh/v1
+    passthrough_auth: true
+
+""",
+    },
+    {
+        "id": "openrouter_is_supported_provider",
+        "expected_error": None,
+        "plano_config": """
+version: v0.4.0
+
+listeners:
+  - name: llm
+    type: model
+    port: 12000
+
+model_providers:
+  - model: openrouter/*
+    base_url: https://openrouter.ai/api/v1
+    passthrough_auth: true
+
 """,
     },
     {
diff --git a/cli/test/test_defaults.py b/cli/test/test_defaults.py
index bb16a573..7017a70c 100644
--- a/cli/test/test_defaults.py
+++ b/cli/test/test_defaults.py
@@ -28,6 +28,8 @@ def test_zero_env_vars_produces_pure_passthrough():
     # All known providers should be listed.
     names = {p["name"] for p in cfg["model_providers"]}
     assert "digitalocean" in names
+    assert "vercel" in names
+    assert "openrouter" in names
     assert "openai" in names
     assert "anthropic" in names
 
@@ -84,3 +86,26 @@ def test_provider_defaults_digitalocean_is_configured():
     assert by_name["digitalocean"].env_var == "DO_API_KEY"
     assert by_name["digitalocean"].base_url == "https://inference.do-ai.run/v1"
     assert by_name["digitalocean"].model_pattern == "digitalocean/*"
+
+
+def test_provider_defaults_vercel_is_configured():
+    by_name = {p.name: p for p in PROVIDER_DEFAULTS}
+    assert "vercel" in by_name
+    assert by_name["vercel"].env_var == "AI_GATEWAY_API_KEY"
+    assert by_name["vercel"].base_url == "https://ai-gateway.vercel.sh/v1"
+    assert by_name["vercel"].model_pattern == "vercel/*"
+
+
+def test_provider_defaults_openrouter_is_configured():
+    by_name = {p.name: p for p in PROVIDER_DEFAULTS}
+    assert "openrouter" in by_name
+    assert by_name["openrouter"].env_var == "OPENROUTER_API_KEY"
+    assert by_name["openrouter"].base_url == "https://openrouter.ai/api/v1"
+    assert by_name["openrouter"].model_pattern == "openrouter/*"
+
+
+def test_openrouter_env_key_promotes_to_env_keyed():
+    cfg = synthesize_default_config(env={"OPENROUTER_API_KEY": "or-1"})
+    by_name = {p["name"]: p for p in cfg["model_providers"]}
+    assert by_name["openrouter"].get("access_key") == "$OPENROUTER_API_KEY"
+    assert by_name["openrouter"].get("passthrough_auth") is None