This commit is contained in:
Federico Liva 2026-03-04 11:10:53 +00:00 committed by GitHub
commit 265fbb2019
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
9 changed files with 351 additions and 11 deletions

View file

@ -1,12 +1,12 @@
{ {
"name": "priceghost-backend", "name": "priceghost-backend",
"version": "1.0.5", "version": "1.0.6",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "priceghost-backend", "name": "priceghost-backend",
"version": "1.0.5", "version": "1.0.6",
"dependencies": { "dependencies": {
"@anthropic-ai/sdk": "^0.24.0", "@anthropic-ai/sdk": "^0.24.0",
"@google/generative-ai": "^0.24.1", "@google/generative-ai": "^0.24.1",
@ -16,6 +16,7 @@
"cors": "^2.8.5", "cors": "^2.8.5",
"dotenv": "^16.3.1", "dotenv": "^16.3.1",
"express": "^4.18.2", "express": "^4.18.2",
"groq-sdk": "^0.5.0",
"jsonwebtoken": "^9.0.2", "jsonwebtoken": "^9.0.2",
"node-cron": "^3.0.3", "node-cron": "^3.0.3",
"openai": "^4.47.0", "openai": "^4.47.0",
@ -2425,6 +2426,37 @@
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==",
"license": "ISC" "license": "ISC"
}, },
"node_modules/groq-sdk": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/groq-sdk/-/groq-sdk-0.5.0.tgz",
"integrity": "sha512-RVmhW7qZ+XZoy5fIuSdx/LGQJONpL8MHgZEW7dFwTdgkzStub2XQx6OKv28CHogijdwH41J+Npj/z2jBPu3vmw==",
"license": "Apache-2.0",
"dependencies": {
"@types/node": "^18.11.18",
"@types/node-fetch": "^2.6.4",
"abort-controller": "^3.0.0",
"agentkeepalive": "^4.2.1",
"form-data-encoder": "1.7.2",
"formdata-node": "^4.3.2",
"node-fetch": "^2.6.7",
"web-streams-polyfill": "^3.2.1"
}
},
"node_modules/groq-sdk/node_modules/@types/node": {
"version": "18.19.130",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.130.tgz",
"integrity": "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==",
"license": "MIT",
"dependencies": {
"undici-types": "~5.26.4"
}
},
"node_modules/groq-sdk/node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
"license": "MIT"
},
"node_modules/has-symbols": { "node_modules/has-symbols": {
"version": "1.1.0", "version": "1.1.0",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",

View file

@ -18,6 +18,7 @@
"cors": "^2.8.5", "cors": "^2.8.5",
"dotenv": "^16.3.1", "dotenv": "^16.3.1",
"express": "^4.18.2", "express": "^4.18.2",
"groq-sdk": "^0.5.0",
"jsonwebtoken": "^9.0.2", "jsonwebtoken": "^9.0.2",
"node-cron": "^3.0.3", "node-cron": "^3.0.3",
"openai": "^4.47.0", "openai": "^4.47.0",

View file

@ -43,7 +43,7 @@ export interface NotificationSettings {
export interface AISettings { export interface AISettings {
ai_enabled: boolean; ai_enabled: boolean;
ai_verification_enabled: boolean; ai_verification_enabled: boolean;
ai_provider: 'anthropic' | 'openai' | 'ollama' | 'gemini' | null; ai_provider: 'anthropic' | 'openai' | 'ollama' | 'gemini' | 'groq' | null;
anthropic_api_key: string | null; anthropic_api_key: string | null;
anthropic_model: string | null; anthropic_model: string | null;
openai_api_key: string | null; openai_api_key: string | null;
@ -52,6 +52,8 @@ export interface AISettings {
ollama_model: string | null; ollama_model: string | null;
gemini_api_key: string | null; gemini_api_key: string | null;
gemini_model: string | null; gemini_model: string | null;
groq_api_key: string | null;
groq_model: string | null;
} }
export const userQueries = { export const userQueries = {
@ -248,7 +250,8 @@ export const userQueries = {
const result = await pool.query( const result = await pool.query(
`SELECT ai_enabled, COALESCE(ai_verification_enabled, false) as ai_verification_enabled, `SELECT ai_enabled, COALESCE(ai_verification_enabled, false) as ai_verification_enabled,
ai_provider, anthropic_api_key, anthropic_model, openai_api_key, openai_model, ai_provider, anthropic_api_key, anthropic_model, openai_api_key, openai_model,
ollama_base_url, ollama_model, gemini_api_key, gemini_model ollama_base_url, ollama_model, gemini_api_key, gemini_model,
groq_api_key, groq_model
FROM users WHERE id = $1`, FROM users WHERE id = $1`,
[id] [id]
); );
@ -307,6 +310,14 @@ export const userQueries = {
fields.push(`gemini_model = $${paramIndex++}`); fields.push(`gemini_model = $${paramIndex++}`);
values.push(settings.gemini_model); values.push(settings.gemini_model);
} }
if (settings.groq_api_key !== undefined) {
fields.push(`groq_api_key = $${paramIndex++}`);
values.push(settings.groq_api_key);
}
if (settings.groq_model !== undefined) {
fields.push(`groq_model = $${paramIndex++}`);
values.push(settings.groq_model);
}
if (fields.length === 0) return null; if (fields.length === 0) return null;
@ -315,7 +326,8 @@ export const userQueries = {
`UPDATE users SET ${fields.join(', ')} WHERE id = $${paramIndex} `UPDATE users SET ${fields.join(', ')} WHERE id = $${paramIndex}
RETURNING ai_enabled, COALESCE(ai_verification_enabled, false) as ai_verification_enabled, RETURNING ai_enabled, COALESCE(ai_verification_enabled, false) as ai_verification_enabled,
ai_provider, anthropic_api_key, anthropic_model, openai_api_key, openai_model, ai_provider, anthropic_api_key, anthropic_model, openai_api_key, openai_model,
ollama_base_url, ollama_model, gemini_api_key, gemini_model`, ollama_base_url, ollama_model, gemini_api_key, gemini_model,
groq_api_key, groq_model`,
values values
); );
return result.rows[0] || null; return result.rows[0] || null;

View file

@ -339,6 +339,8 @@ router.get('/ai', async (req: AuthRequest, res: Response) => {
ollama_model: settings.ollama_model || null, ollama_model: settings.ollama_model || null,
gemini_api_key: settings.gemini_api_key || null, gemini_api_key: settings.gemini_api_key || null,
gemini_model: settings.gemini_model || null, gemini_model: settings.gemini_model || null,
groq_api_key: settings.groq_api_key || null,
groq_model: settings.groq_model || null,
}); });
} catch (error) { } catch (error) {
console.error('Error fetching AI settings:', error); console.error('Error fetching AI settings:', error);
@ -362,6 +364,8 @@ router.put('/ai', async (req: AuthRequest, res: Response) => {
ollama_model, ollama_model,
gemini_api_key, gemini_api_key,
gemini_model, gemini_model,
groq_api_key,
groq_model,
} = req.body; } = req.body;
const settings = await userQueries.updateAISettings(userId, { const settings = await userQueries.updateAISettings(userId, {
@ -376,6 +380,8 @@ router.put('/ai', async (req: AuthRequest, res: Response) => {
ollama_model, ollama_model,
gemini_api_key, gemini_api_key,
gemini_model, gemini_model,
groq_api_key,
groq_model,
}); });
if (!settings) { if (!settings) {
@ -395,6 +401,8 @@ router.put('/ai', async (req: AuthRequest, res: Response) => {
ollama_model: settings.ollama_model || null, ollama_model: settings.ollama_model || null,
gemini_api_key: settings.gemini_api_key || null, gemini_api_key: settings.gemini_api_key || null,
gemini_model: settings.gemini_model || null, gemini_model: settings.gemini_model || null,
groq_api_key: settings.groq_api_key || null,
groq_model: settings.groq_model || null,
message: 'AI settings updated successfully', message: 'AI settings updated successfully',
}); });
} catch (error) { } catch (error) {
@ -520,4 +528,46 @@ router.post('/ai/test-gemini', async (req: AuthRequest, res: Response) => {
} }
}); });
// Test Groq API key
router.post('/ai/test-groq', async (req: AuthRequest, res: Response) => {
  try {
    const { api_key } = req.body;

    if (!api_key) {
      res.status(400).json({ error: 'API key is required' });
      return;
    }

    // Lazy-load the SDK so it is only pulled in when this route is exercised.
    const Groq = (await import('groq-sdk')).default;
    const groq = new Groq({ apiKey: api_key });

    // Issue a minimal completion to verify the key actually authenticates.
    await groq.chat.completions.create({
      model: 'llama-3.3-70b-versatile',
      max_tokens: 10,
      messages: [{ role: 'user', content: 'Say "API key valid" in 3 words or less' }],
    });

    res.json({
      success: true,
      message: 'Successfully connected to Groq API',
    });
  } catch (error) {
    console.error('Error testing Groq connection:', error);
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    // Match case-insensitively: Groq auth failures surface as e.g. "401 Invalid API Key",
    // which case-sensitive includes('invalid') / includes('API key') would miss.
    const lowered = errorMessage.toLowerCase();
    if (lowered.includes('401') || lowered.includes('invalid') || lowered.includes('api key')) {
      res.status(400).json({
        error: 'Invalid API key. Please check your Groq API key.',
        success: false,
      });
    } else {
      res.status(500).json({
        error: `Failed to connect to Groq: ${errorMessage}`,
        success: false,
      });
    }
  }
});
export default router; export default router;

View file

@ -1,5 +1,6 @@
import Anthropic from '@anthropic-ai/sdk'; import Anthropic from '@anthropic-ai/sdk';
import OpenAI from 'openai'; import OpenAI from 'openai';
import Groq from 'groq-sdk';
import { GoogleGenerativeAI } from '@google/generative-ai'; import { GoogleGenerativeAI } from '@google/generative-ai';
import axios from 'axios'; import axios from 'axios';
import { load } from 'cheerio'; import { load } from 'cheerio';
@ -220,6 +221,7 @@ function prepareHtmlForAI(html: string): string {
const DEFAULT_ANTHROPIC_MODEL = 'claude-haiku-4-5-20251001'; const DEFAULT_ANTHROPIC_MODEL = 'claude-haiku-4-5-20251001';
const DEFAULT_OPENAI_MODEL = 'gpt-4.1-nano-2025-04-14'; const DEFAULT_OPENAI_MODEL = 'gpt-4.1-nano-2025-04-14';
const DEFAULT_GEMINI_MODEL = 'gemini-2.5-flash-lite'; const DEFAULT_GEMINI_MODEL = 'gemini-2.5-flash-lite';
const DEFAULT_GROQ_MODEL = 'llama-3.3-70b-versatile';
async function extractWithAnthropic( async function extractWithAnthropic(
html: string, html: string,
@ -348,6 +350,35 @@ async function extractWithGemini(
return parseAIResponse(content); return parseAIResponse(content);
} }
async function extractWithGroq(
html: string,
apiKey: string,
model?: string | null
): Promise<AIExtractionResult> {
const groq = new Groq({ apiKey });
const preparedHtml = prepareHtmlForAI(html);
const modelToUse = model || DEFAULT_GROQ_MODEL;
const response = await groq.chat.completions.create({
model: modelToUse,
max_tokens: 1024,
messages: [
{
role: 'user',
content: EXTRACTION_PROMPT + preparedHtml,
},
],
});
const content = response.choices[0]?.message?.content;
if (!content) {
throw new Error('No response from Groq');
}
return parseAIResponse(content);
}
// Verification functions for each provider // Verification functions for each provider
async function verifyWithAnthropic( async function verifyWithAnthropic(
html: string, html: string,
@ -474,6 +505,35 @@ async function verifyWithGemini(
return parseVerificationResponse(content, scrapedPrice, currency); return parseVerificationResponse(content, scrapedPrice, currency);
} }
/**
 * Ask Groq to verify an already-scraped price against the page HTML.
 *
 * @param html         Raw page HTML; prepared via prepareHtmlForAI.
 * @param scrapedPrice Price the scraper extracted, to be confirmed by the model.
 * @param currency     Currency code of the scraped price.
 * @param apiKey       Groq API key.
 * @param model        Optional model override; DEFAULT_GROQ_MODEL when absent.
 * @returns Parsed verification verdict.
 * @throws Error when the API returns no message content.
 */
async function verifyWithGroq(
  html: string,
  scrapedPrice: number,
  currency: string,
  apiKey: string,
  model?: string | null
): Promise<AIVerificationResult> {
  const groq = new Groq({ apiKey });
  const preparedHtml = prepareHtmlForAI(html);

  // Global regex replace so EVERY occurrence of the placeholders is substituted,
  // matching verifyStockStatusWithGroq; plain .replace(string, …) only swaps the
  // first hit, which silently leaves later placeholders in the prompt.
  const prompt = VERIFICATION_PROMPT
    .replace(/\$SCRAPED_PRICE\$/g, scrapedPrice.toString())
    .replace(/\$CURRENCY\$/g, currency) + preparedHtml;

  const modelToUse = model || DEFAULT_GROQ_MODEL;

  const response = await groq.chat.completions.create({
    model: modelToUse,
    max_tokens: 512,
    messages: [{ role: 'user', content: prompt }],
  });

  const content = response.choices[0]?.message?.content;
  if (!content) {
    throw new Error('No response from Groq');
  }

  return parseVerificationResponse(content, scrapedPrice, currency);
}
// Stock status verification functions (for variant products with anchor price) // Stock status verification functions (for variant products with anchor price)
async function verifyStockStatusWithAnthropic( async function verifyStockStatusWithAnthropic(
html: string, html: string,
@ -600,6 +660,35 @@ async function verifyStockStatusWithGemini(
return parseStockStatusResponse(content); return parseStockStatusResponse(content);
} }
async function verifyStockStatusWithGroq(
html: string,
variantPrice: number,
currency: string,
apiKey: string,
model?: string | null
): Promise<AIStockStatusResult> {
const groq = new Groq({ apiKey });
const preparedHtml = prepareHtmlForAI(html);
const prompt = STOCK_STATUS_PROMPT
.replace(/\$VARIANT_PRICE\$/g, variantPrice.toString())
.replace(/\$CURRENCY\$/g, currency) + preparedHtml;
const modelToUse = model || DEFAULT_GROQ_MODEL;
const response = await groq.chat.completions.create({
model: modelToUse,
max_tokens: 256,
messages: [{ role: 'user', content: prompt }],
});
const content = response.choices[0]?.message?.content;
if (!content) {
throw new Error('No response from Groq');
}
return parseStockStatusResponse(content);
}
function parseStockStatusResponse(responseText: string): AIStockStatusResult { function parseStockStatusResponse(responseText: string): AIStockStatusResult {
console.log(`[AI Stock] Raw response: ${responseText.substring(0, 500)}...`); console.log(`[AI Stock] Raw response: ${responseText.substring(0, 500)}...`);
@ -813,6 +902,8 @@ export async function extractWithAI(
return extractWithOllama(html, settings.ollama_base_url, settings.ollama_model); return extractWithOllama(html, settings.ollama_base_url, settings.ollama_model);
} else if (settings.ai_provider === 'gemini' && settings.gemini_api_key) { } else if (settings.ai_provider === 'gemini' && settings.gemini_api_key) {
return extractWithGemini(html, settings.gemini_api_key, settings.gemini_model); return extractWithGemini(html, settings.gemini_api_key, settings.gemini_model);
} else if (settings.ai_provider === 'groq' && settings.groq_api_key) {
return extractWithGroq(html, settings.groq_api_key, settings.groq_model);
} }
throw new Error('No valid AI provider configured'); throw new Error('No valid AI provider configured');
@ -849,6 +940,10 @@ export async function tryAIExtraction(
const modelToUse = settings.gemini_model || DEFAULT_GEMINI_MODEL; const modelToUse = settings.gemini_model || DEFAULT_GEMINI_MODEL;
console.log(`[AI] Using Gemini (${modelToUse}) for ${url}`); console.log(`[AI] Using Gemini (${modelToUse}) for ${url}`);
return await extractWithGemini(html, settings.gemini_api_key, settings.gemini_model); return await extractWithGemini(html, settings.gemini_api_key, settings.gemini_model);
} else if (settings.ai_provider === 'groq' && settings.groq_api_key) {
const modelToUse = settings.groq_model || DEFAULT_GROQ_MODEL;
console.log(`[AI] Using Groq (${modelToUse}) for ${url}`);
return await extractWithGroq(html, settings.groq_api_key, settings.groq_model);
} }
return null; return null;
@ -891,6 +986,10 @@ export async function tryAIVerification(
const modelToUse = settings.gemini_model || DEFAULT_GEMINI_MODEL; const modelToUse = settings.gemini_model || DEFAULT_GEMINI_MODEL;
console.log(`[AI Verify] Using Gemini (${modelToUse}) to verify $${scrapedPrice} for ${url}`); console.log(`[AI Verify] Using Gemini (${modelToUse}) to verify $${scrapedPrice} for ${url}`);
return await verifyWithGemini(html, scrapedPrice, currency, settings.gemini_api_key, settings.gemini_model); return await verifyWithGemini(html, scrapedPrice, currency, settings.gemini_api_key, settings.gemini_model);
} else if (settings.ai_provider === 'groq' && settings.groq_api_key) {
const modelToUse = settings.groq_model || DEFAULT_GROQ_MODEL;
console.log(`[AI Verify] Using Groq (${modelToUse}) to verify $${scrapedPrice} for ${url}`);
return await verifyWithGroq(html, scrapedPrice, currency, settings.groq_api_key, settings.groq_model);
} }
console.log(`[AI Verify] Verification enabled but no provider configured`); console.log(`[AI Verify] Verification enabled but no provider configured`);
@ -934,6 +1033,10 @@ export async function tryAIStockStatusVerification(
const modelToUse = settings.gemini_model || DEFAULT_GEMINI_MODEL; const modelToUse = settings.gemini_model || DEFAULT_GEMINI_MODEL;
console.log(`[AI Stock] Using Gemini (${modelToUse}) to verify stock status for $${variantPrice} variant at ${url}`); console.log(`[AI Stock] Using Gemini (${modelToUse}) to verify stock status for $${variantPrice} variant at ${url}`);
return await verifyStockStatusWithGemini(html, variantPrice, currency, settings.gemini_api_key, settings.gemini_model); return await verifyStockStatusWithGemini(html, variantPrice, currency, settings.gemini_api_key, settings.gemini_model);
} else if (settings.ai_provider === 'groq' && settings.groq_api_key) {
const modelToUse = settings.groq_model || DEFAULT_GROQ_MODEL;
console.log(`[AI Stock] Using Groq (${modelToUse}) to verify stock status for $${variantPrice} variant at ${url}`);
return await verifyStockStatusWithGroq(html, variantPrice, currency, settings.groq_api_key, settings.groq_model);
} }
console.log(`[AI Stock] No AI provider configured for stock status verification`); console.log(`[AI Stock] No AI provider configured for stock status verification`);
@ -1104,6 +1207,36 @@ async function arbitrateWithGemini(
return parseArbitrationResponse(content, candidates); return parseArbitrationResponse(content, candidates);
} }
async function arbitrateWithGroq(
html: string,
candidates: PriceCandidate[],
apiKey: string,
model?: string | null
): Promise<AIArbitrationResult> {
const groq = new Groq({ apiKey });
const candidatesList = candidates.map((c, i) =>
`${i}. ${c.price} ${c.currency} (method: ${c.method}, context: ${c.context || 'none'})`
).join('\n');
const preparedHtml = prepareHtmlForAI(html);
const prompt = ARBITRATION_PROMPT.replace('$CANDIDATES$', candidatesList) + preparedHtml;
const modelToUse = model || DEFAULT_GROQ_MODEL;
const response = await groq.chat.completions.create({
model: modelToUse,
max_tokens: 512,
messages: [{ role: 'user', content: prompt }],
});
const content = response.choices[0]?.message?.content;
if (!content) {
throw new Error('No response from Groq');
}
return parseArbitrationResponse(content, candidates);
}
function parseArbitrationResponse( function parseArbitrationResponse(
responseText: string, responseText: string,
candidates: PriceCandidate[] candidates: PriceCandidate[]
@ -1188,6 +1321,10 @@ export async function tryAIArbitration(
const modelToUse = settings.gemini_model || DEFAULT_GEMINI_MODEL; const modelToUse = settings.gemini_model || DEFAULT_GEMINI_MODEL;
console.log(`[AI Arbitrate] Using Gemini (${modelToUse}) to arbitrate ${candidates.length} prices for ${url}`); console.log(`[AI Arbitrate] Using Gemini (${modelToUse}) to arbitrate ${candidates.length} prices for ${url}`);
return await arbitrateWithGemini(html, candidates, settings.gemini_api_key, settings.gemini_model); return await arbitrateWithGemini(html, candidates, settings.gemini_api_key, settings.gemini_model);
} else if (settings.ai_provider === 'groq' && settings.groq_api_key) {
const modelToUse = settings.groq_model || DEFAULT_GROQ_MODEL;
console.log(`[AI Arbitrate] Using Groq (${modelToUse}) to arbitrate ${candidates.length} prices for ${url}`);
return await arbitrateWithGroq(html, candidates, settings.groq_api_key, settings.groq_model);
} }
console.log(`[AI Arbitrate] No provider configured`); console.log(`[AI Arbitrate] No provider configured`);

View file

@ -128,6 +128,18 @@ CREATE TABLE IF NOT EXISTS price_history (
recorded_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP recorded_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
); );
-- Migration: Add Groq AI columns to users if they don't exist.
-- Per-column ADD COLUMN IF NOT EXISTS is idempotent for EACH column, unlike the
-- single information_schema guard keyed on groq_api_key alone: that guard skipped
-- the groq_model ALTER entirely whenever groq_api_key already existed, so a
-- partially-applied state could never be repaired.
ALTER TABLE users ADD COLUMN IF NOT EXISTS groq_api_key VARCHAR(255);
ALTER TABLE users ADD COLUMN IF NOT EXISTS groq_model VARCHAR(255);
-- Index for faster price history queries -- Index for faster price history queries
CREATE INDEX IF NOT EXISTS idx_price_history_product_date CREATE INDEX IF NOT EXISTS idx_price_history_product_date
ON price_history(product_id, recorded_at); ON price_history(product_id, recorded_at);

View file

@ -1,12 +1,12 @@
{ {
"name": "priceghost-frontend", "name": "priceghost-frontend",
"version": "1.0.2", "version": "1.0.6",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "priceghost-frontend", "name": "priceghost-frontend",
"version": "1.0.2", "version": "1.0.6",
"dependencies": { "dependencies": {
"axios": "^1.6.0", "axios": "^1.6.0",
"react": "^18.2.0", "react": "^18.2.0",

View file

@ -254,7 +254,7 @@ export const settingsApi = {
updateAI: (data: { updateAI: (data: {
ai_enabled?: boolean; ai_enabled?: boolean;
ai_verification_enabled?: boolean; ai_verification_enabled?: boolean;
ai_provider?: 'anthropic' | 'openai' | 'ollama' | 'gemini' | null; ai_provider?: 'anthropic' | 'openai' | 'ollama' | 'gemini' | 'groq' | null;
anthropic_api_key?: string | null; anthropic_api_key?: string | null;
anthropic_model?: string | null; anthropic_model?: string | null;
openai_api_key?: string | null; openai_api_key?: string | null;
@ -263,6 +263,8 @@ export const settingsApi = {
ollama_model?: string | null; ollama_model?: string | null;
gemini_api_key?: string | null; gemini_api_key?: string | null;
gemini_model?: string | null; gemini_model?: string | null;
groq_api_key?: string | null;
groq_model?: string | null;
}) => api.put<AISettings & { message: string }>('/settings/ai', data), }) => api.put<AISettings & { message: string }>('/settings/ai', data),
testAI: (url: string) => testAI: (url: string) =>
@ -273,13 +275,16 @@ export const settingsApi = {
testGemini: (apiKey: string) => testGemini: (apiKey: string) =>
api.post<{ success: boolean; message?: string; error?: string }>('/settings/ai/test-gemini', { api_key: apiKey }), api.post<{ success: boolean; message?: string; error?: string }>('/settings/ai/test-gemini', { api_key: apiKey }),
testGroq: (apiKey: string) =>
api.post<{ success: boolean; message?: string; error?: string }>('/settings/ai/test-groq', { api_key: apiKey }),
}; };
// AI Settings types // AI Settings types
export interface AISettings { export interface AISettings {
ai_enabled: boolean; ai_enabled: boolean;
ai_verification_enabled: boolean; ai_verification_enabled: boolean;
ai_provider: 'anthropic' | 'openai' | 'ollama' | 'gemini' | null; ai_provider: 'anthropic' | 'openai' | 'ollama' | 'gemini' | 'groq' | null;
anthropic_api_key: string | null; anthropic_api_key: string | null;
anthropic_model: string | null; anthropic_model: string | null;
openai_api_key: string | null; openai_api_key: string | null;
@ -288,6 +293,8 @@ export interface AISettings {
ollama_model: string | null; ollama_model: string | null;
gemini_api_key: string | null; gemini_api_key: string | null;
gemini_model: string | null; gemini_model: string | null;
groq_api_key: string | null;
groq_model: string | null;
} }
export interface OllamaTestResult { export interface OllamaTestResult {

View file

@ -61,7 +61,7 @@ export default function Settings() {
const [aiSettings, setAISettings] = useState<AISettings | null>(null); const [aiSettings, setAISettings] = useState<AISettings | null>(null);
const [aiEnabled, setAIEnabled] = useState(false); const [aiEnabled, setAIEnabled] = useState(false);
const [aiVerificationEnabled, setAIVerificationEnabled] = useState(false); const [aiVerificationEnabled, setAIVerificationEnabled] = useState(false);
const [aiProvider, setAIProvider] = useState<'anthropic' | 'openai' | 'ollama' | 'gemini'>('anthropic'); const [aiProvider, setAIProvider] = useState<'anthropic' | 'openai' | 'ollama' | 'gemini' | 'groq'>('anthropic');
const [anthropicApiKey, setAnthropicApiKey] = useState(''); const [anthropicApiKey, setAnthropicApiKey] = useState('');
const [anthropicModel, setAnthropicModel] = useState(''); const [anthropicModel, setAnthropicModel] = useState('');
const [openaiApiKey, setOpenaiApiKey] = useState(''); const [openaiApiKey, setOpenaiApiKey] = useState('');
@ -73,6 +73,9 @@ export default function Settings() {
const [geminiApiKey, setGeminiApiKey] = useState(''); const [geminiApiKey, setGeminiApiKey] = useState('');
const [geminiModel, setGeminiModel] = useState(''); const [geminiModel, setGeminiModel] = useState('');
const [isTestingGemini, setIsTestingGemini] = useState(false); const [isTestingGemini, setIsTestingGemini] = useState(false);
const [groqApiKey, setGroqApiKey] = useState('');
const [groqModel, setGroqModel] = useState('');
const [isTestingGroq, setIsTestingGroq] = useState(false);
const [isSavingAI, setIsSavingAI] = useState(false); const [isSavingAI, setIsSavingAI] = useState(false);
const [isTestingAI, setIsTestingAI] = useState(false); const [isTestingAI, setIsTestingAI] = useState(false);
const [testUrl, setTestUrl] = useState(''); const [testUrl, setTestUrl] = useState('');
@ -139,6 +142,8 @@ export default function Settings() {
setOllamaModel(aiRes.data.ollama_model || ''); setOllamaModel(aiRes.data.ollama_model || '');
setGeminiApiKey(aiRes.data.gemini_api_key || ''); setGeminiApiKey(aiRes.data.gemini_api_key || '');
setGeminiModel(aiRes.data.gemini_model || ''); setGeminiModel(aiRes.data.gemini_model || '');
setGroqApiKey(aiRes.data.groq_api_key || '');
setGroqModel(aiRes.data.groq_model || '');
} catch { } catch {
setError('Failed to load settings'); setError('Failed to load settings');
} finally { } finally {
@ -466,15 +471,19 @@ export default function Settings() {
ollama_model: aiProvider === 'ollama' ? ollamaModel || null : undefined, ollama_model: aiProvider === 'ollama' ? ollamaModel || null : undefined,
gemini_api_key: geminiApiKey || undefined, gemini_api_key: geminiApiKey || undefined,
gemini_model: aiProvider === 'gemini' ? geminiModel || null : undefined, gemini_model: aiProvider === 'gemini' ? geminiModel || null : undefined,
groq_api_key: groqApiKey || undefined,
groq_model: aiProvider === 'groq' ? groqModel || null : undefined,
}); });
setAISettings(response.data); setAISettings(response.data);
setAIVerificationEnabled(response.data.ai_verification_enabled ?? false); setAIVerificationEnabled(response.data.ai_verification_enabled ?? false);
setAnthropicModel(response.data.anthropic_model || ''); setAnthropicModel(response.data.anthropic_model || '');
setOpenaiModel(response.data.openai_model || ''); setOpenaiModel(response.data.openai_model || '');
setGeminiModel(response.data.gemini_model || ''); setGeminiModel(response.data.gemini_model || '');
setGroqModel(response.data.groq_model || '');
setAnthropicApiKey(''); setAnthropicApiKey('');
setOpenaiApiKey(''); setOpenaiApiKey('');
setGeminiApiKey(''); setGeminiApiKey('');
setGroqApiKey('');
setSuccess('AI settings saved successfully'); setSuccess('AI settings saved successfully');
} catch { } catch {
setError('Failed to save AI settings'); setError('Failed to save AI settings');
@ -526,6 +535,27 @@ export default function Settings() {
} }
}; };
// Validate the entered Groq API key by calling the backend test endpoint.
// Surfaces the outcome through the shared success/error banners and keeps the
// button disabled (isTestingGroq) while the request is in flight.
const handleTestGroq = async () => {
  clearMessages();

  if (!groqApiKey) {
    setError('Please enter your Groq API key');
    return;
  }

  setIsTestingGroq(true);
  try {
    const { data } = await settingsApi.testGroq(groqApiKey);
    if (data.success) {
      setSuccess('Successfully connected to Groq API!');
    } else {
      setError(data.error || 'Failed to connect to Groq');
    }
  } catch {
    setError('Failed to connect to Groq. Check your API key.');
  } finally {
    setIsTestingGroq(false);
  }
};
const handleTestAI = async () => { const handleTestAI = async () => {
clearMessages(); clearMessages();
if (!testUrl) { if (!testUrl) {
@ -1666,6 +1696,7 @@ export default function Settings() {
<option value="anthropic">Anthropic (Claude)</option> <option value="anthropic">Anthropic (Claude)</option>
<option value="openai">OpenAI (GPT)</option> <option value="openai">OpenAI (GPT)</option>
<option value="gemini">Google (Gemini)</option> <option value="gemini">Google (Gemini)</option>
<option value="groq">Groq (Free Tier)</option>
<option value="ollama">Ollama (Local)</option> <option value="ollama">Ollama (Local)</option>
</select> </select>
</div> </div>
@ -1889,6 +1920,64 @@ export default function Settings() {
</div> </div>
</> </>
)} )}
{/* Groq provider settings — rendered only when 'groq' is the selected AI provider */}
{aiProvider === 'groq' && (
<>
{/* API key entry with an inline "Test Key" button wired to handleTestGroq */}
<div className="settings-form-group">
<label>Groq API Key</label>
<div style={{ display: 'flex', gap: '0.5rem' }}>
<div style={{ flex: 1 }}>
<PasswordInput
value={groqApiKey}
onChange={(e) => setGroqApiKey(e.target.value)}
placeholder="gsk_..."
/>
</div>
<button
className="btn btn-secondary"
onClick={handleTestGroq}
disabled={isTestingGroq || !groqApiKey}
style={{ whiteSpace: 'nowrap' }}
>
{isTestingGroq ? 'Testing...' : 'Test Key'}
</button>
</div>
<p className="hint">
Get your free API key from{' '}
<a href="https://console.groq.com/keys" target="_blank" rel="noopener noreferrer">
console.groq.com
</a>
</p>
</div>
{/* Model picker — empty value means "use the backend default model" */}
<div className="settings-form-group">
<label>Model</label>
<select
value={groqModel}
onChange={(e) => setGroqModel(e.target.value)}
style={{
width: '100%',
padding: '0.625rem 0.75rem',
border: '1px solid var(--border)',
borderRadius: '0.375rem',
background: 'var(--background)',
color: 'var(--text)',
fontSize: '0.875rem'
}}
>
<option value="">Default (Llama 3.3 70B)</option>
<option value="llama-3.3-70b-versatile">Llama 3.3 70B Versatile (Best accuracy)</option>
<option value="llama-3.1-8b-instant">Llama 3.1 8B Instant (Fastest)</option>
<option value="mixtral-8x7b-32768">Mixtral 8x7B (Good at structured output)</option>
<option value="gemma2-9b-it">Gemma 2 9B (Lightweight)</option>
</select>
<p className="hint">
Groq offers free API access with fast inference. Llama 3.3 70B is recommended for best accuracy.
{/* aiSettings holds the last-saved settings, so this shows the persisted model */}
{aiSettings?.groq_model && ` (currently: ${aiSettings.groq_model})`}
</p>
</div>
</>
)}
</> </>
)} )}
@ -1903,7 +1992,7 @@ export default function Settings() {
</div> </div>
</div> </div>
{aiSettings?.ai_enabled && (aiSettings.anthropic_api_key || aiSettings.openai_api_key || (aiSettings.ollama_base_url && aiSettings.ollama_model) || aiSettings.gemini_api_key) && ( {aiSettings?.ai_enabled && (aiSettings.anthropic_api_key || aiSettings.openai_api_key || (aiSettings.ollama_base_url && aiSettings.ollama_model) || aiSettings.gemini_api_key || aiSettings.groq_api_key) && (
<div className="settings-section"> <div className="settings-section">
<div className="settings-section-header"> <div className="settings-section-header">
<span className="settings-section-icon">🧪</span> <span className="settings-section-icon">🧪</span>