feat(noxa-9fw.2): make gemini cli the primary llm backend

- ProviderChain::default() order: Gemini CLI -> OpenAI -> Ollama -> Anthropic
- Add --llm-provider gemini arm to build_llm_provider() in noxa-cli (sketched below)
- Update unknown-provider error to mention gemini
- Update empty-chain error messages in CLI and MCP to mention gemini CLI
- Update MCP startup warn! to list gemini CLI as first option
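
The noxa-cli side of this change is not shown in the diff below, so here is a standalone sketch of the provider-selection logic behind the new --llm-provider gemini arm. The Provider enum, the parse_llm_provider name, and the exact error text are hypothetical; only the set of accepted values (gemini, openai, ollama, anthropic) and the intent of the unknown-provider error come from the commit message.

    // Hypothetical sketch: the real build_llm_provider() returns a boxed
    // LlmProvider, but the selection logic it needs is the same shape as this.
    #[derive(Debug, PartialEq)]
    enum Provider {
        GeminiCli,
        OpenAi,
        Ollama,
        Anthropic,
    }

    fn parse_llm_provider(name: &str) -> Result<Provider, String> {
        match name {
            // New arm: route --llm-provider gemini to the subprocess-backed
            // Gemini CLI provider.
            "gemini" => Ok(Provider::GeminiCli),
            "openai" => Ok(Provider::OpenAi),
            "ollama" => Ok(Provider::Ollama),
            "anthropic" => Ok(Provider::Anthropic),
            // Error text is assumed; the commit only says it now mentions gemini.
            other => Err(format!(
                "unknown provider '{other}' (expected gemini, openai, ollama, or anthropic)"
            )),
        }
    }

    fn main() {
        assert_eq!(parse_llm_provider("gemini"), Ok(Provider::GeminiCli));
        assert!(parse_llm_provider("bogus").is_err());
    }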
Author: Jacob Magar
Date:   2026-04-11 07:32:24 -04:00
parent d800c37bfd
commit 420a1d7522
3 changed files with 79 additions and 14 deletions

@@ -7,7 +7,10 @@ use tracing::{debug, warn};
use crate::error::LlmError;
use crate::provider::{CompletionRequest, LlmProvider};
use crate::providers::{
anthropic::AnthropicProvider, ollama::OllamaProvider, openai::OpenAiProvider,
anthropic::AnthropicProvider,
gemini_cli::GeminiCliProvider,
ollama::OllamaProvider,
openai::OpenAiProvider,
};
pub struct ProviderChain {
@@ -15,12 +18,26 @@ pub struct ProviderChain {
}
impl ProviderChain {
/// Build the default chain: Ollama -> OpenAI -> Anthropic.
/// Ollama is always added (availability checked at call time).
/// Build the default chain: Gemini CLI -> OpenAI -> Ollama -> Anthropic.
/// Gemini CLI is the primary backend (subprocess-based, requires `gemini` on PATH).
/// Cloud providers are only added if their API keys are configured.
/// Ollama is added if reachable at call time.
pub async fn default() -> Self {
let mut providers: Vec<Box<dyn LlmProvider>> = Vec::new();
let gemini = GeminiCliProvider::new(None);
if gemini.is_available().await {
debug!("gemini cli available, adding as primary provider");
providers.push(Box::new(gemini));
} else {
debug!("gemini cli not found on PATH, skipping");
}
if let Some(openai) = OpenAiProvider::new(None, None, None) {
debug!("openai configured, adding to chain");
providers.push(Box::new(openai));
}
let ollama = OllamaProvider::new(None, None);
if ollama.is_available().await {
debug!("ollama is available, adding to chain");
@@ -29,11 +46,6 @@ impl ProviderChain {
debug!("ollama not available, skipping");
}
if let Some(openai) = OpenAiProvider::new(None, None, None) {
debug!("openai configured, adding to chain");
providers.push(Box::new(openai));
}
if let Some(anthropic) = AnthropicProvider::new(None, None) {
debug!("anthropic configured, adding to chain");
providers.push(Box::new(anthropic));
@@ -202,4 +214,46 @@ mod tests {
assert_eq!(chain.len(), 2);
assert!(!chain.is_empty());
}
// ── Gemini-first chain ordering ───────────────────────────────────────────
#[tokio::test]
async fn gemini_first_in_single_provider_chain() {
// When we build a chain with a mock "gemini" provider first, it should
// be used before any fallback.
let chain = ProviderChain::from_providers(vec![
Box::new(MockProvider {
name: "gemini",
response: Ok("from gemini".into()),
available: true,
}),
Box::new(MockProvider {
name: "openai",
response: Ok("from openai".into()),
available: true,
}),
]);
let result = chain.complete(&test_request()).await.unwrap();
assert_eq!(result, "from gemini");
// Confirm order: first provider name is "gemini"
assert_eq!(chain.providers[0].name(), "gemini");
}
#[tokio::test]
async fn gemini_failure_falls_back_to_openai() {
let chain = ProviderChain::from_providers(vec![
Box::new(MockProvider {
name: "gemini",
response: Err("subprocess timed out".into()),
available: true,
}),
Box::new(MockProvider {
name: "openai",
response: Ok("from openai".into()),
available: true,
}),
]);
let result = chain.complete(&test_request()).await.unwrap();
assert_eq!(result, "from openai");
}
}
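
As a usage note, here is a minimal sketch of how a caller might drive the new default chain. The module paths and the CompletionRequest constructor are assumptions; only ProviderChain::default() and complete() appear in the diff above. The chain swallows an individual provider's error and falls through to the next entry, which is what gemini_failure_falls_back_to_openai exercises.

    // Hypothetical usage sketch; the use paths and the CompletionRequest
    // constructor are assumptions, not part of this diff.
    use crate::chain::ProviderChain;        // path assumed
    use crate::error::LlmError;
    use crate::provider::CompletionRequest; // named in the diff's imports

    async fn ask(prompt: &str) -> Result<String, LlmError> {
        // Gemini CLI is tried first when the `gemini` binary is on PATH, then
        // OpenAI, Ollama, and Anthropic, each only if configured or reachable.
        let chain = ProviderChain::default().await;
        let request = CompletionRequest::new(prompt); // constructor assumed
        chain.complete(&request).await
    }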