---
# Sample NOMYO Router Configuration

# Basic single endpoint configuration
endpoints:
  - http://localhost:11434

# Maximum number of simultaneous proxied connections per endpoint
max_concurrent_connections: 2

# Optional router-level API key to secure the router and dashboard (leave blank to disable)
# NOTE(review): key name is kebab-case while the rest of the file is snake_case;
# kept as-is because the consuming application reads this exact key.
nomyo-router-api-key: ""

# Multi-endpoint configuration with local Ollama instances
# endpoints:
#   - http://ollama-worker1:11434
#   - http://ollama-worker2:11434
#   - http://ollama-worker3:11434

# Mixed configuration with Ollama and OpenAI endpoints
# endpoints:
#   - http://localhost:11434
#   - https://api.openai.com/v1

# API keys for remote endpoints
# Use ${VAR_NAME} syntax to reference environment variables
api_keys:
  # Local Ollama instances typically don't require authentication
  "http://localhost:11434": "ollama"

  # Remote Ollama instances
  # "http://remote-ollama:11434": "ollama"

  # OpenAI API
  # "https://api.openai.com/v1": "${OPENAI_KEY}"

  # Anthropic API
  # "https://api.anthropic.com/v1": "${ANTHROPIC_KEY}"

  # Other OpenAI-compatible endpoints
  # "https://api.mistral.ai/v1": "${MISTRAL_KEY}"