# plano/demos/llm_routing/codex_router/config.yaml
---
# Router config: one model listener dispatching to the providers below,
# selected by routing_preferences (the `default: true` provider is presumably
# the fallback when no preference matches — confirm against router docs).
version: v0.3.0

listeners:
  - type: model
    name: model_listener
    port: 12000

model_providers:
  # OpenAI Models
  - model: openai/gpt-5-2025-08-07
    default: true
    access_key: $OPENAI_API_KEY
    routing_preferences:
      - name: code generation
        description: >-
          generating new code snippets, functions, or boilerplate based on
          user prompts or requirements
  - model: openai/gpt-4.1-2025-04-14
    access_key: $OPENAI_API_KEY
    routing_preferences:
      - name: code understanding
        description: >-
          understand and explain existing code snippets, functions, or
          libraries

  # Anthropic Model
  - model: anthropic/claude-sonnet-4-6
    access_key: $ANTHROPIC_API_KEY

  # Ollama Model (optional local fallback)
  - model: ollama/llama3.1
    base_url: http://host.docker.internal:11434

# Model aliases for Codex sessions
model_aliases:
  # Default model Codex should request when launched by `planoai cli-agent codex`
  arch.codex.default:
    target: gpt-5-2025-08-07

tracing:
  random_sampling: 100