Add Ollama support for local AI-powered price extraction

- Add database migration for ollama_base_url and ollama_model columns
- Update backend models and queries for Ollama settings
- Add extractWithOllama function using Ollama's /api/chat endpoint
- Add /api/settings/ai/test-ollama endpoint to test connection and list models
- Update frontend Settings page with Ollama configuration UI
- Support model selection from dropdown after testing connection

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
clucraft 2026-01-22 20:14:14 -05:00
parent 6c2aece1e8
commit 082aae8789
7 changed files with 233 additions and 10 deletions

View file

@ -188,21 +188,36 @@ export const settingsApi = {
/**
 * Update the AI extraction settings.
 * All fields are optional (partial update); passing null clears a value.
 * NOTE: the original carried both the old and new `ai_provider` signature
 * lines (diff residue) — a duplicate property key; only the widened union
 * including 'ollama' is kept.
 */
updateAI: (data: {
  ai_enabled?: boolean;
  ai_provider?: 'anthropic' | 'openai' | 'ollama' | null;
  anthropic_api_key?: string | null;
  openai_api_key?: string | null;
  ollama_base_url?: string | null;
  ollama_model?: string | null;
}) => api.put<AISettings & { message: string }>('/settings/ai', data),
/** Run a test extraction against the given product URL. */
testAI: (url: string) => {
  const payload = { url };
  return api.post<AITestResult>('/settings/ai/test', payload);
},
/** Verify connectivity to an Ollama server and fetch its model list. */
testOllama: (baseUrl: string) => {
  const payload = { base_url: baseUrl };
  return api.post<OllamaTestResult>('/settings/ai/test-ollama', payload);
},
};
// AI Settings types
/**
 * Server-side AI configuration as returned by GET/PUT /settings/ai.
 * `*_configured` flags indicate whether credentials/connection details
 * are stored server-side (keys themselves are never echoed back).
 * NOTE: the original carried both the old and new `ai_provider` lines
 * (diff residue) — a duplicate identifier; only the widened union
 * including 'ollama' is kept.
 */
export interface AISettings {
  ai_enabled: boolean;
  ai_provider: 'anthropic' | 'openai' | 'ollama' | null;
  anthropic_configured: boolean;
  openai_configured: boolean;
  ollama_configured: boolean;
  ollama_base_url: string | null;
  ollama_model: string | null;
}
/**
 * Response shape of POST /settings/ai/test-ollama.
 * On success, `models` presumably lists model names available on the
 * server (TODO confirm against the backend endpoint); on failure,
 * `error` carries a human-readable reason.
 */
export interface OllamaTestResult {
  success: boolean;
  message?: string;
  error?: string;
  models?: string[];
}
export interface AITestResult {