feat(noxa-9fw.2): make gemini cli the primary llm backend

- ProviderChain::default() order: Gemini CLI -> OpenAI -> Ollama -> Anthropic
- Add --llm-provider gemini arm to build_llm_provider() in noxa-cli
- Update unknown-provider error to mention gemini
- Update empty-chain error messages in CLI and MCP to mention gemini CLI
- Update MCP startup warn! to list gemini CLI as first option
Author: Jacob Magar
Date:   2026-04-11 07:32:24 -04:00
Parent: d800c37bfd
Commit: 420a1d7522
3 changed files with 79 additions and 14 deletions
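
For orientation, here is a minimal, self-contained Rust sketch of the probe order the reordered ProviderChain::default() implies: gemini CLI first, then OpenAI, Ollama, Anthropic. The Provider enum, the `gemini --version` probe, and the OLLAMA_HOST check are illustrative assumptions, not noxa_llm's actual API.

```rust
use std::env;
use std::process::Command;

// Hypothetical stand-in for the providers noxa_llm would construct.
#[derive(Debug)]
enum Provider {
    GeminiCli,
    OpenAi,
    Ollama,
    Anthropic,
}

// Sketch of the new default ordering; each backend is added only if it
// looks usable on this machine.
fn default_chain() -> Vec<Provider> {
    let mut chain = Vec::new();

    // 1. Gemini CLI: usable whenever a `gemini` binary can be spawned.
    if Command::new("gemini").arg("--version").output().is_ok() {
        chain.push(Provider::GeminiCli);
    }
    // 2. OpenAI, gated on OPENAI_API_KEY.
    if env::var("OPENAI_API_KEY").is_ok() {
        chain.push(Provider::OpenAi);
    }
    // 3. Ollama: assumed reachable if OLLAMA_HOST is set (the real crate
    //    likely probes the local daemon instead).
    if env::var("OLLAMA_HOST").is_ok() {
        chain.push(Provider::Ollama);
    }
    // 4. Anthropic, gated on ANTHROPIC_API_KEY.
    if env::var("ANTHROPIC_API_KEY").is_ok() {
        chain.push(Provider::Anthropic);
    }
    chain
}

fn main() {
    let chain = default_chain();
    if chain.is_empty() {
        eprintln!("no LLM providers available");
    } else {
        println!("provider chain: {chain:?}");
    }
}
```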

@@ -89,7 +89,7 @@ impl NoxaMcp {
         let chain = noxa_llm::ProviderChain::default().await;
         let llm_chain = if chain.is_empty() {
-            warn!("no LLM providers available -- extract/summarize tools will fail");
+            warn!("no LLM providers available (gemini CLI, OPENAI_API_KEY, ANTHROPIC_API_KEY) -- extract/summarize tools will fail");
             None
         } else {
             info!(providers = chain.len(), "LLM provider chain ready");
@@ -334,7 +334,7 @@ impl NoxaMcp {
         // No local LLM — fall back to cloud API directly
         if self.llm_chain.is_none() {
             let cloud = self.cloud.as_ref().ok_or(
-                "No LLM providers available. Set OPENAI_API_KEY, ANTHROPIC_API_KEY, or NOXA_API_KEY for cloud fallback.",
+                "No LLM providers available. Install the gemini CLI, set OPENAI_API_KEY, ANTHROPIC_API_KEY, or NOXA_API_KEY for cloud fallback.",
             )?;
             let mut body = json!({"url": params.url});
             if let Some(ref schema) = params.schema {
@@ -387,7 +387,7 @@ impl NoxaMcp {
         // No local LLM — fall back to cloud API directly
         if self.llm_chain.is_none() {
             let cloud = self.cloud.as_ref().ok_or(
-                "No LLM providers available. Set OPENAI_API_KEY, ANTHROPIC_API_KEY, or NOXA_API_KEY for cloud fallback.",
+                "No LLM providers available. Install the gemini CLI, set OPENAI_API_KEY, ANTHROPIC_API_KEY, or NOXA_API_KEY for cloud fallback.",
            )?;
             let mut body = json!({"url": params.url});
             if let Some(sentences) = params.max_sentences {
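
The noxa-cli half of the change (the new `gemini` arm in build_llm_provider() and the updated unknown-provider error) is not shown in this excerpt. The sketch below is a rough, self-contained illustration of the shape that dispatch likely takes; the ProviderKind enum and the exact error wording are assumptions, not the committed code.

```rust
// Hypothetical sketch of the --llm-provider dispatch; the real
// build_llm_provider() in noxa-cli constructs concrete noxa_llm providers.
#[derive(Debug)]
enum ProviderKind {
    GeminiCli,
    OpenAi,
    Ollama,
    Anthropic,
}

fn build_llm_provider(name: &str) -> Result<ProviderKind, String> {
    match name {
        // New arm: route --llm-provider gemini to the gemini CLI backend.
        "gemini" => Ok(ProviderKind::GeminiCli),
        "openai" => Ok(ProviderKind::OpenAi),
        "ollama" => Ok(ProviderKind::Ollama),
        "anthropic" => Ok(ProviderKind::Anthropic),
        // Unknown-provider error now mentions gemini as well.
        other => Err(format!(
            "unknown LLM provider '{other}': expected gemini, openai, ollama, or anthropic"
        )),
    }
}

fn main() {
    // e.g. `noxa --llm-provider gemini ...`
    match build_llm_provider("gemini") {
        Ok(kind) => println!("selected provider: {kind:?}"),
        Err(err) => eprintln!("{err}"),
    }
}
```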