Add Ollama support for local AI-powered price extraction

- Add database migration for ollama_base_url and ollama_model columns
- Update backend models and queries for Ollama settings
- Add extractWithOllama function using Ollama's /api/chat endpoint
- Add /api/settings/ai/test-ollama endpoint to test connection and list models
- Update frontend Settings page with Ollama configuration UI
- Support model selection from dropdown after testing connection

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
clucraft 2026-01-22 20:14:14 -05:00
parent 6c2aece1e8
commit 082aae8789
7 changed files with 233 additions and 10 deletions

View file

@@ -46,6 +46,12 @@ async function runMigrations() {
IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'pushover_enabled') THEN
ALTER TABLE users ADD COLUMN pushover_enabled BOOLEAN DEFAULT true;
END IF;
IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'ollama_base_url') THEN
ALTER TABLE users ADD COLUMN ollama_base_url TEXT;
END IF;
IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'ollama_model') THEN
ALTER TABLE users ADD COLUMN ollama_model TEXT;
END IF;
END $$;
`);

View file

@@ -34,9 +34,11 @@ export interface NotificationSettings {
// Per-user AI extraction settings as stored on the users table.
export interface AISettings {
  // Master switch for AI-powered price extraction.
  ai_enabled: boolean;
  // Selected provider; null when the user has not chosen one yet.
  // (Previously 'anthropic' | 'openai' — 'ollama' adds local-model support.)
  ai_provider: 'anthropic' | 'openai' | 'ollama' | null;
  anthropic_api_key: string | null;
  openai_api_key: string | null;
  // Base URL of the user's Ollama server, e.g. "http://localhost:11434".
  ollama_base_url: string | null;
  // Name of the installed Ollama model to run; null until one is selected.
  ollama_model: string | null;
}
export const userQueries = {
@@ -195,7 +197,7 @@ export const userQueries = {
getAISettings: async (id: number): Promise<AISettings | null> => {
const result = await pool.query(
'SELECT ai_enabled, ai_provider, anthropic_api_key, openai_api_key FROM users WHERE id = $1',
'SELECT ai_enabled, ai_provider, anthropic_api_key, openai_api_key, ollama_base_url, ollama_model FROM users WHERE id = $1',
[id]
);
return result.rows[0] || null;
@@ -225,13 +227,21 @@ export const userQueries = {
fields.push(`openai_api_key = $${paramIndex++}`);
values.push(settings.openai_api_key);
}
if (settings.ollama_base_url !== undefined) {
fields.push(`ollama_base_url = $${paramIndex++}`);
values.push(settings.ollama_base_url);
}
if (settings.ollama_model !== undefined) {
fields.push(`ollama_model = $${paramIndex++}`);
values.push(settings.ollama_model);
}
if (fields.length === 0) return null;
values.push(id.toString());
const result = await pool.query(
`UPDATE users SET ${fields.join(', ')} WHERE id = $${paramIndex}
RETURNING ai_enabled, ai_provider, anthropic_api_key, openai_api_key`,
RETURNING ai_enabled, ai_provider, anthropic_api_key, openai_api_key, ollama_base_url, ollama_model`,
values
);
return result.rows[0] || null;

View file

@@ -202,6 +202,9 @@ router.get('/ai', async (req: AuthRequest, res: Response) => {
ai_provider: settings.ai_provider || null,
anthropic_configured: !!settings.anthropic_api_key,
openai_configured: !!settings.openai_api_key,
ollama_configured: !!(settings.ollama_base_url && settings.ollama_model),
ollama_base_url: settings.ollama_base_url || null,
ollama_model: settings.ollama_model || null,
});
} catch (error) {
console.error('Error fetching AI settings:', error);
@@ -213,13 +216,15 @@ router.get('/ai', async (req: AuthRequest, res: Response) => {
router.put('/ai', async (req: AuthRequest, res: Response) => {
try {
const userId = req.userId!;
const { ai_enabled, ai_provider, anthropic_api_key, openai_api_key } = req.body;
const { ai_enabled, ai_provider, anthropic_api_key, openai_api_key, ollama_base_url, ollama_model } = req.body;
const settings = await userQueries.updateAISettings(userId, {
ai_enabled,
ai_provider,
anthropic_api_key,
openai_api_key,
ollama_base_url,
ollama_model,
});
if (!settings) {
@@ -232,6 +237,9 @@ router.put('/ai', async (req: AuthRequest, res: Response) => {
ai_provider: settings.ai_provider || null,
anthropic_configured: !!settings.anthropic_api_key,
openai_configured: !!settings.openai_api_key,
ollama_configured: !!(settings.ollama_base_url && settings.ollama_model),
ollama_base_url: settings.ollama_base_url || null,
ollama_model: settings.ollama_model || null,
message: 'AI settings updated successfully',
});
} catch (error) {
@@ -275,4 +283,46 @@ router.post('/ai/test', async (req: AuthRequest, res: Response) => {
}
});
// Test Ollama connection and list available models.
// POST /ai/test-ollama  body: { base_url: string }
// Success: { success: true, message, models: string[] }
router.post('/ai/test-ollama', async (req: AuthRequest, res: Response) => {
  try {
    const { base_url } = req.body;
    // Reject missing AND non-string values — req.body is untrusted input and
    // a non-string would otherwise be interpolated straight into the URL.
    if (!base_url || typeof base_url !== 'string') {
      res.status(400).json({ error: 'Base URL is required' });
      return;
    }
    // Strip trailing slashes so "http://host:11434/" doesn't become "//api/tags".
    const baseUrl = base_url.replace(/\/+$/, '');
    // Ollama lists its installed models at GET /api/tags.
    const axios = (await import('axios')).default;
    const response = await axios.get(`${baseUrl}/api/tags`, {
      timeout: 10000, // 10s: generous for a LAN host, bounded for a bad URL
    });
    const models = response.data?.models || [];
    const modelNames = models.map((m: { name: string }) => m.name);
    res.json({
      success: true,
      message: 'Successfully connected to Ollama',
      models: modelNames,
    });
  } catch (error) {
    console.error('Error testing Ollama connection:', error);
    const errorMessage = error instanceof Error ? error.message : 'Unknown error';
    // Connection refused is the common "Ollama isn't running" case; surface a
    // friendlier 400 for it and treat everything else as a server-side failure.
    if (errorMessage.includes('ECONNREFUSED')) {
      res.status(400).json({
        error: 'Cannot connect to Ollama. Make sure Ollama is running.',
        success: false,
      });
    } else {
      res.status(500).json({
        error: `Failed to connect to Ollama: ${errorMessage}`,
        success: false,
      });
    }
  }
});
export default router;

View file

@@ -144,6 +144,42 @@ async function extractWithOpenAI(
return parseAIResponse(content);
}
/**
 * Extract price data from raw page HTML using a locally hosted Ollama model.
 *
 * Ollama's /api/chat endpoint accepts an OpenAI-style message list; with
 * `stream: false` the complete reply arrives in `response.data.message.content`
 * rather than as an NDJSON stream.
 *
 * @param html    Raw page HTML; reduced via prepareHtmlForAI before sending.
 * @param baseUrl Ollama server base URL (e.g. "http://localhost:11434").
 * @param model   Name of the installed Ollama model to run.
 * @returns Parsed extraction result from the model's reply.
 * @throws Error when Ollama returns no message content (or on HTTP failure).
 */
async function extractWithOllama(
  html: string,
  baseUrl: string,
  model: string
): Promise<AIExtractionResult> {
  const preparedHtml = prepareHtmlForAI(html);
  // Tolerate a trailing slash in the stored base URL ("http://host:11434/")
  // so we never request "//api/chat".
  const endpoint = `${baseUrl.replace(/\/+$/, '')}/api/chat`;
  const response = await axios.post(
    endpoint,
    {
      model,
      messages: [
        {
          role: 'user',
          content: EXTRACTION_PROMPT + preparedHtml,
        },
      ],
      stream: false, // single JSON body instead of a streamed response
    },
    {
      headers: {
        'Content-Type': 'application/json',
      },
      timeout: 120000, // local models can be slow; allow up to 2 minutes
    }
  );
  const content = response.data?.message?.content;
  if (!content) {
    throw new Error('No response from Ollama');
  }
  return parseAIResponse(content);
}
function parseAIResponse(responseText: string): AIExtractionResult {
console.log(`[AI] Raw response: ${responseText.substring(0, 500)}...`);
@@ -231,6 +267,8 @@ export async function extractWithAI(
return extractWithAnthropic(html, settings.anthropic_api_key);
} else if (settings.ai_provider === 'openai' && settings.openai_api_key) {
return extractWithOpenAI(html, settings.openai_api_key);
} else if (settings.ai_provider === 'ollama' && settings.ollama_base_url && settings.ollama_model) {
return extractWithOllama(html, settings.ollama_base_url, settings.ollama_model);
}
throw new Error('No valid AI provider configured');
@@ -258,6 +296,9 @@ export async function tryAIExtraction(
} else if (settings.ai_provider === 'openai' && settings.openai_api_key) {
console.log(`[AI] Using OpenAI for ${url}`);
return await extractWithOpenAI(html, settings.openai_api_key);
} else if (settings.ai_provider === 'ollama' && settings.ollama_base_url && settings.ollama_model) {
console.log(`[AI] Using Ollama (${settings.ollama_model}) for ${url}`);
return await extractWithOllama(html, settings.ollama_base_url, settings.ollama_model);
}
return null;