Salmanap/fix docs new providers model alias (#571)

* fixed docs and added ollama as a first-class LLM provider

* matching the LLM routing section on the README.md to the docs

* updated the section on preference-based routing

---------

Co-authored-by: Salman Paracha <salmanparacha@MacBook-Pro-167.local>
This commit is contained in:
Salman Paracha 2025-09-19 10:19:57 -07:00 committed by GitHub
parent 8d0b468345
commit fbe82351c0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
16 changed files with 1696 additions and 150 deletions

View file

@@ -17,6 +17,7 @@ SUPPORTED_PROVIDERS = [
"together_ai",
"azure_openai",
"xai",
"ollama",
]
@@ -124,10 +125,12 @@ def validate_and_render_schema():
f"Invalid model name {model_name}. Please provide model name in the format <provider>/<model_id>."
)
provider = model_name_tokens[0]
# Validate azure_openai provider requires base_url
if provider == "azure_openai" and llm_provider.get("base_url") is None:
# Validate azure_openai and ollama provider requires base_url
if (provider == "azure_openai" or provider == "ollama") and llm_provider.get(
"base_url"
) is None:
raise Exception(
f"Provider 'azure_openai' requires 'base_url' to be set for model {model_name}"
f"Provider '{provider}' requires 'base_url' to be set for model {model_name}"
)
model_id = "/".join(model_name_tokens[1:])