mirror of
https://github.com/clucraft/PriceGhost.git
synced 2026-05-12 09:12:40 +02:00
Add Ollama support for local AI-powered price extraction
- Add database migration for ollama_base_url and ollama_model columns - Update backend models and queries for Ollama settings - Add extractWithOllama function using Ollama's /api/chat endpoint - Add /api/settings/ai/test-ollama endpoint to test connection and list models - Update frontend Settings page with Ollama configuration UI - Support model selection from dropdown after testing connection Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
6c2aece1e8
commit
082aae8789
7 changed files with 233 additions and 10 deletions
|
|
@@ -202,6 +202,9 @@ router.get('/ai', async (req: AuthRequest, res: Response) => {
|
|||
ai_provider: settings.ai_provider || null,
|
||||
anthropic_configured: !!settings.anthropic_api_key,
|
||||
openai_configured: !!settings.openai_api_key,
|
||||
ollama_configured: !!(settings.ollama_base_url && settings.ollama_model),
|
||||
ollama_base_url: settings.ollama_base_url || null,
|
||||
ollama_model: settings.ollama_model || null,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error fetching AI settings:', error);
|
||||
|
|
@@ -213,13 +216,15 @@ router.get('/ai', async (req: AuthRequest, res: Response) => {
|
|||
router.put('/ai', async (req: AuthRequest, res: Response) => {
|
||||
try {
|
||||
const userId = req.userId!;
|
||||
const { ai_enabled, ai_provider, anthropic_api_key, openai_api_key } = req.body;
|
||||
const { ai_enabled, ai_provider, anthropic_api_key, openai_api_key, ollama_base_url, ollama_model } = req.body;
|
||||
|
||||
const settings = await userQueries.updateAISettings(userId, {
|
||||
ai_enabled,
|
||||
ai_provider,
|
||||
anthropic_api_key,
|
||||
openai_api_key,
|
||||
ollama_base_url,
|
||||
ollama_model,
|
||||
});
|
||||
|
||||
if (!settings) {
|
||||
|
|
@@ -232,6 +237,9 @@ router.put('/ai', async (req: AuthRequest, res: Response) => {
|
|||
ai_provider: settings.ai_provider || null,
|
||||
anthropic_configured: !!settings.anthropic_api_key,
|
||||
openai_configured: !!settings.openai_api_key,
|
||||
ollama_configured: !!(settings.ollama_base_url && settings.ollama_model),
|
||||
ollama_base_url: settings.ollama_base_url || null,
|
||||
ollama_model: settings.ollama_model || null,
|
||||
message: 'AI settings updated successfully',
|
||||
});
|
||||
} catch (error) {
|
||||
|
|
@@ -275,4 +283,46 @@ router.post('/ai/test', async (req: AuthRequest, res: Response) => {
|
|||
}
|
||||
});
|
||||
|
||||
// Test Ollama connection and list available models
|
||||
router.post('/ai/test-ollama', async (req: AuthRequest, res: Response) => {
|
||||
try {
|
||||
const { base_url } = req.body;
|
||||
|
||||
if (!base_url) {
|
||||
res.status(400).json({ error: 'Base URL is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
// Try to fetch list of models from Ollama
|
||||
const axios = (await import('axios')).default;
|
||||
const response = await axios.get(`${base_url}/api/tags`, {
|
||||
timeout: 10000,
|
||||
});
|
||||
|
||||
const models = response.data?.models || [];
|
||||
const modelNames = models.map((m: { name: string }) => m.name);
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
message: 'Successfully connected to Ollama',
|
||||
models: modelNames,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Error testing Ollama connection:', error);
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||
|
||||
if (errorMessage.includes('ECONNREFUSED')) {
|
||||
res.status(400).json({
|
||||
error: 'Cannot connect to Ollama. Make sure Ollama is running.',
|
||||
success: false,
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
error: `Failed to connect to Ollama: ${errorMessage}`,
|
||||
success: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
export default router;
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue