Add Google Gemini AI support (v1.0.6)

- Add Gemini as new AI provider option alongside Anthropic, OpenAI, Ollama
- Support models: Gemini 2.5 Flash Lite (default), Flash, Pro, 3 Flash Preview
- Add test API key endpoint to verify connection before saving
- Full support for extraction, verification, stock status, and arbitration
- Update all version files to 1.0.6

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
clucraft 2026-01-26 21:02:14 -05:00
parent fb00f5b5b2
commit 617953dcf5
12 changed files with 341 additions and 17 deletions

View file

@ -5,6 +5,18 @@ All notable changes to PriceGhost will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [1.0.6] - 2026-01-26
### Added
- **Google Gemini AI Support** - New AI provider option alongside Anthropic, OpenAI, and Ollama
- Supported models: Gemini 2.5 Flash Lite (default), Gemini 2.5 Flash, Gemini 2.5 Pro, Gemini 3 Flash Preview
- Test API key button to verify connection before saving
- Full support for AI extraction, verification, stock status checking, and price arbitration
- Get API key from [Google AI Studio](https://aistudio.google.com/apikey)
---
## [1.0.5] - 2026-01-25
### Added
@ -203,6 +215,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
| Version | Date | Description |
|---------|------|-------------|
| 1.0.6 | 2026-01-26 | Google Gemini AI support as new provider option |
| 1.0.5 | 2026-01-25 | AI model selector, per-product AI controls, Gotify support, Ollama fixes |
| 1.0.4 | 2026-01-24 | Multi-strategy price voting system with user selection for ambiguous prices |
| 1.0.3 | 2026-01-24 | Notification history with bell icon, clear button, and full history page |

View file

@ -1,14 +1,15 @@
{
"name": "priceghost-backend",
"version": "1.0.2",
"version": "1.0.5",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "priceghost-backend",
"version": "1.0.2",
"version": "1.0.5",
"dependencies": {
"@anthropic-ai/sdk": "^0.24.0",
"@google/generative-ai": "^0.24.1",
"axios": "^1.6.0",
"bcrypt": "^5.1.1",
"cheerio": "^1.0.0-rc.12",
@ -531,6 +532,15 @@
"node": ">=18"
}
},
"node_modules/@google/generative-ai": {
"version": "0.24.1",
"resolved": "https://registry.npmjs.org/@google/generative-ai/-/generative-ai-0.24.1.tgz",
"integrity": "sha512-MqO+MLfM6kjxcKoy0p1wRzG3b4ZZXtPI+z2IE26UogS2Cm/XHO+7gGRBh6gcJsOiIVoH93UwKvW4HdgiOZCy9Q==",
"license": "Apache-2.0",
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/@mapbox/node-pre-gyp": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz",

View file

@ -1,6 +1,6 @@
{
"name": "priceghost-backend",
"version": "1.0.5",
"version": "1.0.6",
"description": "PriceGhost price tracking API",
"main": "dist/index.js",
"scripts": {
@ -11,6 +11,7 @@
},
"dependencies": {
"@anthropic-ai/sdk": "^0.24.0",
"@google/generative-ai": "^0.24.1",
"axios": "^1.6.0",
"bcrypt": "^5.1.1",
"cheerio": "^1.0.0-rc.12",

View file

@ -137,6 +137,12 @@ async function runMigrations() {
IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'openai_model') THEN
ALTER TABLE users ADD COLUMN openai_model TEXT;
END IF;
IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'gemini_api_key') THEN
ALTER TABLE users ADD COLUMN gemini_api_key TEXT;
END IF;
IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'gemini_model') THEN
ALTER TABLE users ADD COLUMN gemini_model TEXT;
END IF;
IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'notifications_cleared_at') THEN
ALTER TABLE users ADD COLUMN notifications_cleared_at TIMESTAMP;
END IF;

View file

@ -40,13 +40,15 @@ export interface NotificationSettings {
export interface AISettings {
ai_enabled: boolean;
ai_verification_enabled: boolean;
ai_provider: 'anthropic' | 'openai' | 'ollama' | null;
ai_provider: 'anthropic' | 'openai' | 'ollama' | 'gemini' | null;
anthropic_api_key: string | null;
anthropic_model: string | null;
openai_api_key: string | null;
openai_model: string | null;
ollama_base_url: string | null;
ollama_model: string | null;
gemini_api_key: string | null;
gemini_model: string | null;
}
export const userQueries = {
@ -231,7 +233,7 @@ export const userQueries = {
const result = await pool.query(
`SELECT ai_enabled, COALESCE(ai_verification_enabled, false) as ai_verification_enabled,
ai_provider, anthropic_api_key, anthropic_model, openai_api_key, openai_model,
ollama_base_url, ollama_model
ollama_base_url, ollama_model, gemini_api_key, gemini_model
FROM users WHERE id = $1`,
[id]
);
@ -282,6 +284,14 @@ export const userQueries = {
fields.push(`ollama_model = $${paramIndex++}`);
values.push(settings.ollama_model);
}
if (settings.gemini_api_key !== undefined) {
fields.push(`gemini_api_key = $${paramIndex++}`);
values.push(settings.gemini_api_key);
}
if (settings.gemini_model !== undefined) {
fields.push(`gemini_model = $${paramIndex++}`);
values.push(settings.gemini_model);
}
if (fields.length === 0) return null;
@ -290,7 +300,7 @@ export const userQueries = {
`UPDATE users SET ${fields.join(', ')} WHERE id = $${paramIndex}
RETURNING ai_enabled, COALESCE(ai_verification_enabled, false) as ai_verification_enabled,
ai_provider, anthropic_api_key, anthropic_model, openai_api_key, openai_model,
ollama_base_url, ollama_model`,
ollama_base_url, ollama_model, gemini_api_key, gemini_model`,
values
);
return result.rows[0] || null;

View file

@ -314,9 +314,13 @@ router.get('/ai', async (req: AuthRequest, res: Response) => {
ai_verification_enabled: settings.ai_verification_enabled || false,
ai_provider: settings.ai_provider || null,
anthropic_api_key: settings.anthropic_api_key || null,
anthropic_model: settings.anthropic_model || null,
openai_api_key: settings.openai_api_key || null,
openai_model: settings.openai_model || null,
ollama_base_url: settings.ollama_base_url || null,
ollama_model: settings.ollama_model || null,
gemini_api_key: settings.gemini_api_key || null,
gemini_model: settings.gemini_model || null,
});
} catch (error) {
console.error('Error fetching AI settings:', error);
@ -328,16 +332,32 @@ router.get('/ai', async (req: AuthRequest, res: Response) => {
router.put('/ai', async (req: AuthRequest, res: Response) => {
try {
const userId = req.userId!;
const { ai_enabled, ai_verification_enabled, ai_provider, anthropic_api_key, openai_api_key, ollama_base_url, ollama_model } = req.body;
const {
ai_enabled,
ai_verification_enabled,
ai_provider,
anthropic_api_key,
anthropic_model,
openai_api_key,
openai_model,
ollama_base_url,
ollama_model,
gemini_api_key,
gemini_model,
} = req.body;
const settings = await userQueries.updateAISettings(userId, {
ai_enabled,
ai_verification_enabled,
ai_provider,
anthropic_api_key,
anthropic_model,
openai_api_key,
openai_model,
ollama_base_url,
ollama_model,
gemini_api_key,
gemini_model,
});
if (!settings) {
@ -350,9 +370,13 @@ router.put('/ai', async (req: AuthRequest, res: Response) => {
ai_verification_enabled: settings.ai_verification_enabled || false,
ai_provider: settings.ai_provider || null,
anthropic_api_key: settings.anthropic_api_key || null,
anthropic_model: settings.anthropic_model || null,
openai_api_key: settings.openai_api_key || null,
openai_model: settings.openai_model || null,
ollama_base_url: settings.ollama_base_url || null,
ollama_model: settings.ollama_model || null,
gemini_api_key: settings.gemini_api_key || null,
gemini_model: settings.gemini_model || null,
message: 'AI settings updated successfully',
});
} catch (error) {
@ -438,4 +462,44 @@ router.post('/ai/test-ollama', async (req: AuthRequest, res: Response) => {
}
});
// Test Gemini API key
// Verifies a user-supplied Gemini API key by issuing one tiny generation
// request; responds 200 on success, 400 for an invalid key, 500 otherwise.
router.post('/ai/test-gemini', async (req: AuthRequest, res: Response) => {
  try {
    const { api_key } = req.body;
    if (!api_key) {
      res.status(400).json({ error: 'API key is required' });
      return;
    }

    // Lazy-load the SDK so the module is only pulled in when needed, then
    // run a minimal prompt to confirm the key is accepted by the API.
    const { GoogleGenerativeAI } = await import('@google/generative-ai');
    const client = new GoogleGenerativeAI(api_key);
    const probeModel = client.getGenerativeModel({ model: 'gemini-2.5-flash-lite' });
    await probeModel.generateContent('Say "API key valid" in 3 words or less');

    res.json({
      success: true,
      message: 'Successfully connected to Gemini API',
    });
  } catch (error) {
    console.error('Error testing Gemini connection:', error);
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    // Gemini reports bad keys via an API_KEY_INVALID error string.
    const looksLikeBadKey =
      errorMessage.includes('API_KEY_INVALID') || errorMessage.includes('API key');
    if (looksLikeBadKey) {
      res.status(400).json({
        error: 'Invalid API key. Please check your Gemini API key.',
        success: false,
      });
    } else {
      res.status(500).json({
        error: `Failed to connect to Gemini: ${errorMessage}`,
        success: false,
      });
    }
  }
});
export default router;

View file

@ -1,5 +1,6 @@
import Anthropic from '@anthropic-ai/sdk';
import OpenAI from 'openai';
import { GoogleGenerativeAI } from '@google/generative-ai';
import axios from 'axios';
import { load } from 'cheerio';
import { AISettings } from '../models';
@ -218,6 +219,7 @@ function prepareHtmlForAI(html: string): string {
// Default models to use if user hasn't selected one
const DEFAULT_ANTHROPIC_MODEL = 'claude-haiku-4-5-20251001';
const DEFAULT_OPENAI_MODEL = 'gpt-4.1-nano-2025-04-14';
const DEFAULT_GEMINI_MODEL = 'gemini-2.5-flash-lite';
async function extractWithAnthropic(
html: string,
@ -324,6 +326,28 @@ async function extractWithOllama(
return parseAIResponse(content);
}
async function extractWithGemini(
html: string,
apiKey: string,
model?: string | null
): Promise<AIExtractionResult> {
const genAI = new GoogleGenerativeAI(apiKey);
const modelToUse = model || DEFAULT_GEMINI_MODEL;
const geminiModel = genAI.getGenerativeModel({ model: modelToUse });
const preparedHtml = prepareHtmlForAI(html);
const result = await geminiModel.generateContent(EXTRACTION_PROMPT + preparedHtml);
const response = result.response;
const content = response.text();
if (!content) {
throw new Error('No response from Gemini');
}
return parseAIResponse(content);
}
// Verification functions for each provider
async function verifyWithAnthropic(
html: string,
@ -423,6 +447,33 @@ async function verifyWithOllama(
return parseVerificationResponse(content, scrapedPrice, currency);
}
/**
 * Asks Gemini to verify a previously scraped price against the page HTML.
 *
 * @param html         Raw page HTML; trimmed/sanitized by prepareHtmlForAI.
 * @param scrapedPrice Price found by the scraper, to be confirmed.
 * @param currency     Currency code of the scraped price.
 * @param apiKey       User's Gemini API key.
 * @param model        Optional model id; falls back to DEFAULT_GEMINI_MODEL.
 * @returns Parsed verification result.
 * @throws If Gemini returns an empty response.
 */
async function verifyWithGemini(
  html: string,
  scrapedPrice: number,
  currency: string,
  apiKey: string,
  model?: string | null
): Promise<AIVerificationResult> {
  const genAI = new GoogleGenerativeAI(apiKey);
  const modelToUse = model || DEFAULT_GEMINI_MODEL;
  const geminiModel = genAI.getGenerativeModel({ model: modelToUse });

  const preparedHtml = prepareHtmlForAI(html);
  // Global regex replacement so EVERY occurrence of the placeholders is
  // substituted — String.replace with a string pattern only replaces the
  // first match. Mirrors verifyStockStatusWithGemini's substitution style.
  const prompt = VERIFICATION_PROMPT
    .replace(/\$SCRAPED_PRICE\$/g, scrapedPrice.toString())
    .replace(/\$CURRENCY\$/g, currency) + preparedHtml;

  const result = await geminiModel.generateContent(prompt);
  const content = result.response.text();
  if (!content) {
    throw new Error('No response from Gemini');
  }
  return parseVerificationResponse(content, scrapedPrice, currency);
}
// Stock status verification functions (for variant products with anchor price)
async function verifyStockStatusWithAnthropic(
html: string,
@ -522,6 +573,33 @@ async function verifyStockStatusWithOllama(
return parseStockStatusResponse(content);
}
async function verifyStockStatusWithGemini(
html: string,
variantPrice: number,
currency: string,
apiKey: string,
model?: string | null
): Promise<AIStockStatusResult> {
const genAI = new GoogleGenerativeAI(apiKey);
const modelToUse = model || DEFAULT_GEMINI_MODEL;
const geminiModel = genAI.getGenerativeModel({ model: modelToUse });
const preparedHtml = prepareHtmlForAI(html);
const prompt = STOCK_STATUS_PROMPT
.replace(/\$VARIANT_PRICE\$/g, variantPrice.toString())
.replace(/\$CURRENCY\$/g, currency) + preparedHtml;
const result = await geminiModel.generateContent(prompt);
const response = result.response;
const content = response.text();
if (!content) {
throw new Error('No response from Gemini');
}
return parseStockStatusResponse(content);
}
function parseStockStatusResponse(responseText: string): AIStockStatusResult {
console.log(`[AI Stock] Raw response: ${responseText.substring(0, 500)}...`);
@ -733,6 +811,8 @@ export async function extractWithAI(
return extractWithOpenAI(html, settings.openai_api_key, settings.openai_model);
} else if (settings.ai_provider === 'ollama' && settings.ollama_base_url && settings.ollama_model) {
return extractWithOllama(html, settings.ollama_base_url, settings.ollama_model);
} else if (settings.ai_provider === 'gemini' && settings.gemini_api_key) {
return extractWithGemini(html, settings.gemini_api_key, settings.gemini_model);
}
throw new Error('No valid AI provider configured');
@ -765,6 +845,10 @@ export async function tryAIExtraction(
} else if (settings.ai_provider === 'ollama' && settings.ollama_base_url && settings.ollama_model) {
console.log(`[AI] Using Ollama (${settings.ollama_model}) for ${url}`);
return await extractWithOllama(html, settings.ollama_base_url, settings.ollama_model);
} else if (settings.ai_provider === 'gemini' && settings.gemini_api_key) {
const modelToUse = settings.gemini_model || DEFAULT_GEMINI_MODEL;
console.log(`[AI] Using Gemini (${modelToUse}) for ${url}`);
return await extractWithGemini(html, settings.gemini_api_key, settings.gemini_model);
}
return null;
@ -803,6 +887,10 @@ export async function tryAIVerification(
} else if (settings.ai_provider === 'ollama' && settings.ollama_base_url && settings.ollama_model) {
console.log(`[AI Verify] Using Ollama (${settings.ollama_model}) to verify $${scrapedPrice} for ${url}`);
return await verifyWithOllama(html, scrapedPrice, currency, settings.ollama_base_url, settings.ollama_model);
} else if (settings.ai_provider === 'gemini' && settings.gemini_api_key) {
const modelToUse = settings.gemini_model || DEFAULT_GEMINI_MODEL;
console.log(`[AI Verify] Using Gemini (${modelToUse}) to verify $${scrapedPrice} for ${url}`);
return await verifyWithGemini(html, scrapedPrice, currency, settings.gemini_api_key, settings.gemini_model);
}
console.log(`[AI Verify] Verification enabled but no provider configured`);
@ -842,6 +930,10 @@ export async function tryAIStockStatusVerification(
} else if (settings.ai_provider === 'ollama' && settings.ollama_base_url && settings.ollama_model) {
console.log(`[AI Stock] Using Ollama (${settings.ollama_model}) to verify stock status for $${variantPrice} variant at ${url}`);
return await verifyStockStatusWithOllama(html, variantPrice, currency, settings.ollama_base_url, settings.ollama_model);
} else if (settings.ai_provider === 'gemini' && settings.gemini_api_key) {
const modelToUse = settings.gemini_model || DEFAULT_GEMINI_MODEL;
console.log(`[AI Stock] Using Gemini (${modelToUse}) to verify stock status for $${variantPrice} variant at ${url}`);
return await verifyStockStatusWithGemini(html, variantPrice, currency, settings.gemini_api_key, settings.gemini_model);
}
console.log(`[AI Stock] No AI provider configured for stock status verification`);
@ -984,6 +1076,34 @@ async function arbitrateWithOllama(
return parseArbitrationResponse(content, candidates);
}
async function arbitrateWithGemini(
html: string,
candidates: PriceCandidate[],
apiKey: string,
model?: string | null
): Promise<AIArbitrationResult> {
const genAI = new GoogleGenerativeAI(apiKey);
const modelToUse = model || DEFAULT_GEMINI_MODEL;
const geminiModel = genAI.getGenerativeModel({ model: modelToUse });
const candidatesList = candidates.map((c, i) =>
`${i}. ${c.price} ${c.currency} (method: ${c.method}, context: ${c.context || 'none'})`
).join('\n');
const preparedHtml = prepareHtmlForAI(html);
const prompt = ARBITRATION_PROMPT.replace('$CANDIDATES$', candidatesList) + preparedHtml;
const result = await geminiModel.generateContent(prompt);
const response = result.response;
const content = response.text();
if (!content) {
throw new Error('No response from Gemini');
}
return parseArbitrationResponse(content, candidates);
}
function parseArbitrationResponse(
responseText: string,
candidates: PriceCandidate[]
@ -1064,6 +1184,10 @@ export async function tryAIArbitration(
} else if (settings.ai_provider === 'ollama' && settings.ollama_base_url && settings.ollama_model) {
console.log(`[AI Arbitrate] Using Ollama (${settings.ollama_model}) to arbitrate ${candidates.length} prices for ${url}`);
return await arbitrateWithOllama(html, candidates, settings.ollama_base_url, settings.ollama_model);
} else if (settings.ai_provider === 'gemini' && settings.gemini_api_key) {
const modelToUse = settings.gemini_model || DEFAULT_GEMINI_MODEL;
console.log(`[AI Arbitrate] Using Gemini (${modelToUse}) to arbitrate ${candidates.length} prices for ${url}`);
return await arbitrateWithGemini(html, candidates, settings.gemini_api_key, settings.gemini_model);
}
console.log(`[AI Arbitrate] No provider configured`);

View file

@ -1,7 +1,7 @@
{
"name": "priceghost-frontend",
"private": true,
"version": "1.0.5",
"version": "1.0.6",
"type": "module",
"scripts": {
"dev": "vite",

View file

@ -1,4 +1,4 @@
{
"version": "1.0.3",
"releaseDate": "2026-01-23"
"version": "1.0.6",
"releaseDate": "2026-01-26"
}

View file

@ -248,13 +248,15 @@ export const settingsApi = {
updateAI: (data: {
ai_enabled?: boolean;
ai_verification_enabled?: boolean;
ai_provider?: 'anthropic' | 'openai' | 'ollama' | null;
ai_provider?: 'anthropic' | 'openai' | 'ollama' | 'gemini' | null;
anthropic_api_key?: string | null;
anthropic_model?: string | null;
openai_api_key?: string | null;
openai_model?: string | null;
ollama_base_url?: string | null;
ollama_model?: string | null;
gemini_api_key?: string | null;
gemini_model?: string | null;
}) => api.put<AISettings & { message: string }>('/settings/ai', data),
testAI: (url: string) =>
@ -262,19 +264,24 @@ export const settingsApi = {
testOllama: (baseUrl: string) =>
api.post<OllamaTestResult>('/settings/ai/test-ollama', { base_url: baseUrl }),
testGemini: (apiKey: string) =>
api.post<{ success: boolean; message?: string; error?: string }>('/settings/ai/test-gemini', { api_key: apiKey }),
};
// AI Settings types
export interface AISettings {
ai_enabled: boolean;
ai_verification_enabled: boolean;
ai_provider: 'anthropic' | 'openai' | 'ollama' | null;
ai_provider: 'anthropic' | 'openai' | 'ollama' | 'gemini' | null;
anthropic_api_key: string | null;
anthropic_model: string | null;
openai_api_key: string | null;
openai_model: string | null;
ollama_base_url: string | null;
ollama_model: string | null;
gemini_api_key: string | null;
gemini_model: string | null;
}
export interface OllamaTestResult {

View file

@ -58,7 +58,7 @@ export default function Settings() {
const [aiSettings, setAISettings] = useState<AISettings | null>(null);
const [aiEnabled, setAIEnabled] = useState(false);
const [aiVerificationEnabled, setAIVerificationEnabled] = useState(false);
const [aiProvider, setAIProvider] = useState<'anthropic' | 'openai' | 'ollama'>('anthropic');
const [aiProvider, setAIProvider] = useState<'anthropic' | 'openai' | 'ollama' | 'gemini'>('anthropic');
const [anthropicApiKey, setAnthropicApiKey] = useState('');
const [anthropicModel, setAnthropicModel] = useState('');
const [openaiApiKey, setOpenaiApiKey] = useState('');
@ -67,6 +67,9 @@ export default function Settings() {
const [ollamaModel, setOllamaModel] = useState('');
const [availableOllamaModels, setAvailableOllamaModels] = useState<string[]>([]);
const [isTestingOllama, setIsTestingOllama] = useState(false);
const [geminiApiKey, setGeminiApiKey] = useState('');
const [geminiModel, setGeminiModel] = useState('');
const [isTestingGemini, setIsTestingGemini] = useState(false);
const [isSavingAI, setIsSavingAI] = useState(false);
const [isTestingAI, setIsTestingAI] = useState(false);
const [testUrl, setTestUrl] = useState('');
@ -128,6 +131,8 @@ export default function Settings() {
setOpenaiModel(aiRes.data.openai_model || '');
setOllamaBaseUrl(aiRes.data.ollama_base_url || '');
setOllamaModel(aiRes.data.ollama_model || '');
setGeminiApiKey(aiRes.data.gemini_api_key || '');
setGeminiModel(aiRes.data.gemini_model || '');
} catch {
setError('Failed to load settings');
} finally {
@ -448,13 +453,17 @@ export default function Settings() {
openai_model: aiProvider === 'openai' ? openaiModel || null : undefined,
ollama_base_url: aiProvider === 'ollama' ? ollamaBaseUrl || null : undefined,
ollama_model: aiProvider === 'ollama' ? ollamaModel || null : undefined,
gemini_api_key: geminiApiKey || undefined,
gemini_model: aiProvider === 'gemini' ? geminiModel || null : undefined,
});
setAISettings(response.data);
setAIVerificationEnabled(response.data.ai_verification_enabled ?? false);
setAnthropicModel(response.data.anthropic_model || '');
setOpenaiModel(response.data.openai_model || '');
setGeminiModel(response.data.gemini_model || '');
setAnthropicApiKey('');
setOpenaiApiKey('');
setGeminiApiKey('');
setSuccess('AI settings saved successfully');
} catch {
setError('Failed to save AI settings');
@ -485,6 +494,27 @@ export default function Settings() {
}
};
// Tests the entered Gemini API key against the backend probe endpoint,
// surfacing the outcome via the shared success/error banners.
const handleTestGemini = async () => {
  clearMessages();
  if (!geminiApiKey) {
    setError('Please enter your Gemini API key');
    return;
  }
  setIsTestingGemini(true);
  try {
    const result = await settingsApi.testGemini(geminiApiKey);
    if (!result.data.success) {
      setError(result.data.error || 'Failed to connect to Gemini');
    } else {
      setSuccess('Successfully connected to Gemini API!');
    }
  } catch {
    // Network/server failure — the backend never answered with a verdict.
    setError('Failed to connect to Gemini. Check your API key.');
  } finally {
    setIsTestingGemini(false);
  }
};
const handleTestAI = async () => {
clearMessages();
if (!testUrl) {
@ -1572,7 +1602,7 @@ export default function Settings() {
<label>AI Provider</label>
<select
value={aiProvider}
onChange={(e) => setAIProvider(e.target.value as 'anthropic' | 'openai' | 'ollama')}
onChange={(e) => setAIProvider(e.target.value as 'anthropic' | 'openai' | 'ollama' | 'gemini')}
style={{
width: '100%',
padding: '0.625rem 0.75rem',
@ -1585,6 +1615,7 @@ export default function Settings() {
>
<option value="anthropic">Anthropic (Claude)</option>
<option value="openai">OpenAI (GPT)</option>
<option value="gemini">Google (Gemini)</option>
<option value="ollama">Ollama (Local)</option>
</select>
</div>
@ -1750,6 +1781,64 @@ export default function Settings() {
</div>
</>
)}
{aiProvider === 'gemini' && (
<>
<div className="settings-form-group">
<label>Gemini API Key</label>
<div style={{ display: 'flex', gap: '0.5rem' }}>
<div style={{ flex: 1 }}>
<PasswordInput
value={geminiApiKey}
onChange={(e) => setGeminiApiKey(e.target.value)}
placeholder="AIza..."
/>
</div>
<button
className="btn btn-secondary"
onClick={handleTestGemini}
disabled={isTestingGemini || !geminiApiKey}
style={{ whiteSpace: 'nowrap' }}
>
{isTestingGemini ? 'Testing...' : 'Test Key'}
</button>
</div>
<p className="hint">
Get your API key from{' '}
<a href="https://aistudio.google.com/apikey" target="_blank" rel="noopener noreferrer">
aistudio.google.com
</a>
</p>
</div>
<div className="settings-form-group">
<label>Model</label>
<select
value={geminiModel}
onChange={(e) => setGeminiModel(e.target.value)}
style={{
width: '100%',
padding: '0.625rem 0.75rem',
border: '1px solid var(--border)',
borderRadius: '0.375rem',
background: 'var(--background)',
color: 'var(--text)',
fontSize: '0.875rem'
}}
>
<option value="">Default (Gemini 2.5 Flash Lite)</option>
<option value="gemini-2.5-flash-lite">Gemini 2.5 Flash Lite (Fast, cheap)</option>
<option value="gemini-2.5-flash">Gemini 2.5 Flash (Balanced)</option>
<option value="gemini-2.5-pro">Gemini 2.5 Pro (High accuracy)</option>
<option value="gemini-3-flash-preview">Gemini 3 Flash Preview (Latest)</option>
</select>
<p className="hint">
Choose a model based on your cost/accuracy needs. Flash Lite is fastest and cheapest.
{aiSettings?.gemini_model && ` (currently: ${aiSettings.gemini_model})`}
</p>
</div>
</>
)}
</>
)}
@ -1764,7 +1853,7 @@ export default function Settings() {
</div>
</div>
{aiSettings?.ai_enabled && (aiSettings.anthropic_api_key || aiSettings.openai_api_key || (aiSettings.ollama_base_url && aiSettings.ollama_model)) && (
{aiSettings?.ai_enabled && (aiSettings.anthropic_api_key || aiSettings.openai_api_key || (aiSettings.ollama_base_url && aiSettings.ollama_model) || aiSettings.gemini_api_key) && (
<div className="settings-section">
<div className="settings-section-header">
<span className="settings-section-icon">🧪</span>

View file

@ -1,4 +1,4 @@
{
"version": "1.0.1",
"releaseDate": "2026-01-23"
"version": "1.0.6",
"releaseDate": "2026-01-26"
}