diff --git a/backend/src/index.ts b/backend/src/index.ts
index 131a572..1a1346e 100644
--- a/backend/src/index.ts
+++ b/backend/src/index.ts
@@ -46,6 +46,12 @@ async function runMigrations() {
IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'pushover_enabled') THEN
ALTER TABLE users ADD COLUMN pushover_enabled BOOLEAN DEFAULT true;
END IF;
+ IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'ollama_base_url') THEN
+ ALTER TABLE users ADD COLUMN ollama_base_url TEXT;
+ END IF;
+ IF NOT EXISTS (SELECT 1 FROM information_schema.columns WHERE table_name = 'users' AND column_name = 'ollama_model') THEN
+ ALTER TABLE users ADD COLUMN ollama_model TEXT;
+ END IF;
END $$;
`);
diff --git a/backend/src/models/index.ts b/backend/src/models/index.ts
index a8aacdf..2ef9505 100644
--- a/backend/src/models/index.ts
+++ b/backend/src/models/index.ts
@@ -34,9 +34,11 @@ export interface NotificationSettings {
export interface AISettings {
ai_enabled: boolean;
- ai_provider: 'anthropic' | 'openai' | null;
+ ai_provider: 'anthropic' | 'openai' | 'ollama' | null;
anthropic_api_key: string | null;
openai_api_key: string | null;
+ ollama_base_url: string | null;
+ ollama_model: string | null;
}
export const userQueries = {
@@ -195,7 +197,7 @@ export const userQueries = {
  getAISettings: async (id: number): Promise<AISettings | null> => {
const result = await pool.query(
- 'SELECT ai_enabled, ai_provider, anthropic_api_key, openai_api_key FROM users WHERE id = $1',
+ 'SELECT ai_enabled, ai_provider, anthropic_api_key, openai_api_key, ollama_base_url, ollama_model FROM users WHERE id = $1',
[id]
);
return result.rows[0] || null;
@@ -225,13 +227,21 @@ export const userQueries = {
fields.push(`openai_api_key = $${paramIndex++}`);
values.push(settings.openai_api_key);
}
+ if (settings.ollama_base_url !== undefined) {
+ fields.push(`ollama_base_url = $${paramIndex++}`);
+ values.push(settings.ollama_base_url);
+ }
+ if (settings.ollama_model !== undefined) {
+ fields.push(`ollama_model = $${paramIndex++}`);
+ values.push(settings.ollama_model);
+ }
if (fields.length === 0) return null;
values.push(id.toString());
const result = await pool.query(
`UPDATE users SET ${fields.join(', ')} WHERE id = $${paramIndex}
- RETURNING ai_enabled, ai_provider, anthropic_api_key, openai_api_key`,
+ RETURNING ai_enabled, ai_provider, anthropic_api_key, openai_api_key, ollama_base_url, ollama_model`,
values
);
return result.rows[0] || null;
diff --git a/backend/src/routes/settings.ts b/backend/src/routes/settings.ts
index 4ac680e..bcaa445 100644
--- a/backend/src/routes/settings.ts
+++ b/backend/src/routes/settings.ts
@@ -202,6 +202,9 @@ router.get('/ai', async (req: AuthRequest, res: Response) => {
ai_provider: settings.ai_provider || null,
anthropic_configured: !!settings.anthropic_api_key,
openai_configured: !!settings.openai_api_key,
+ ollama_configured: !!(settings.ollama_base_url && settings.ollama_model),
+ ollama_base_url: settings.ollama_base_url || null,
+ ollama_model: settings.ollama_model || null,
});
} catch (error) {
console.error('Error fetching AI settings:', error);
@@ -213,13 +216,15 @@ router.get('/ai', async (req: AuthRequest, res: Response) => {
router.put('/ai', async (req: AuthRequest, res: Response) => {
try {
const userId = req.userId!;
- const { ai_enabled, ai_provider, anthropic_api_key, openai_api_key } = req.body;
+ const { ai_enabled, ai_provider, anthropic_api_key, openai_api_key, ollama_base_url, ollama_model } = req.body;
const settings = await userQueries.updateAISettings(userId, {
ai_enabled,
ai_provider,
anthropic_api_key,
openai_api_key,
+ ollama_base_url,
+ ollama_model,
});
if (!settings) {
@@ -232,6 +237,9 @@ router.put('/ai', async (req: AuthRequest, res: Response) => {
ai_provider: settings.ai_provider || null,
anthropic_configured: !!settings.anthropic_api_key,
openai_configured: !!settings.openai_api_key,
+ ollama_configured: !!(settings.ollama_base_url && settings.ollama_model),
+ ollama_base_url: settings.ollama_base_url || null,
+ ollama_model: settings.ollama_model || null,
message: 'AI settings updated successfully',
});
} catch (error) {
@@ -275,4 +283,46 @@ router.post('/ai/test', async (req: AuthRequest, res: Response) => {
}
});
+// Test Ollama connection and list available models
+router.post('/ai/test-ollama', async (req: AuthRequest, res: Response) => {
+ try {
+ const { base_url } = req.body;
+
+ if (!base_url) {
+ res.status(400).json({ error: 'Base URL is required' });
+ return;
+ }
+
+ // Try to fetch list of models from Ollama
+ const axios = (await import('axios')).default;
+ const response = await axios.get(`${base_url}/api/tags`, {
+ timeout: 10000,
+ });
+
+ const models = response.data?.models || [];
+ const modelNames = models.map((m: { name: string }) => m.name);
+
+ res.json({
+ success: true,
+ message: 'Successfully connected to Ollama',
+ models: modelNames,
+ });
+ } catch (error) {
+ console.error('Error testing Ollama connection:', error);
+ const errorMessage = error instanceof Error ? error.message : 'Unknown error';
+
+ if (errorMessage.includes('ECONNREFUSED')) {
+ res.status(400).json({
+ error: 'Cannot connect to Ollama. Make sure Ollama is running.',
+ success: false,
+ });
+ } else {
+ res.status(500).json({
+ error: `Failed to connect to Ollama: ${errorMessage}`,
+ success: false,
+ });
+ }
+ }
+});
+
export default router;
diff --git a/backend/src/services/ai-extractor.ts b/backend/src/services/ai-extractor.ts
index 99cf814..6e2cea2 100644
--- a/backend/src/services/ai-extractor.ts
+++ b/backend/src/services/ai-extractor.ts
@@ -144,6 +144,42 @@ async function extractWithOpenAI(
return parseAIResponse(content);
}
+async function extractWithOllama(
+ html: string,
+ baseUrl: string,
+ model: string
+): Promise<AIExtractionResult> {
+ const preparedHtml = prepareHtmlForAI(html);
+
+ // Ollama uses a chat completions API similar to OpenAI
+ const response = await axios.post(
+ `${baseUrl}/api/chat`,
+ {
+ model: model,
+ messages: [
+ {
+ role: 'user',
+ content: EXTRACTION_PROMPT + preparedHtml,
+ },
+ ],
+ stream: false,
+ },
+ {
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ timeout: 120000, // Longer timeout for local models
+ }
+ );
+
+ const content = response.data?.message?.content;
+ if (!content) {
+ throw new Error('No response from Ollama');
+ }
+
+ return parseAIResponse(content);
+}
+
function parseAIResponse(responseText: string): AIExtractionResult {
console.log(`[AI] Raw response: ${responseText.substring(0, 500)}...`);
@@ -231,6 +267,8 @@ export async function extractWithAI(
return extractWithAnthropic(html, settings.anthropic_api_key);
} else if (settings.ai_provider === 'openai' && settings.openai_api_key) {
return extractWithOpenAI(html, settings.openai_api_key);
+ } else if (settings.ai_provider === 'ollama' && settings.ollama_base_url && settings.ollama_model) {
+ return extractWithOllama(html, settings.ollama_base_url, settings.ollama_model);
}
throw new Error('No valid AI provider configured');
@@ -258,6 +296,9 @@ export async function tryAIExtraction(
} else if (settings.ai_provider === 'openai' && settings.openai_api_key) {
console.log(`[AI] Using OpenAI for ${url}`);
return await extractWithOpenAI(html, settings.openai_api_key);
+ } else if (settings.ai_provider === 'ollama' && settings.ollama_base_url && settings.ollama_model) {
+ console.log(`[AI] Using Ollama (${settings.ollama_model}) for ${url}`);
+ return await extractWithOllama(html, settings.ollama_base_url, settings.ollama_model);
}
return null;
diff --git a/frontend/src/api/client.ts b/frontend/src/api/client.ts
index 411bef1..01861f1 100644
--- a/frontend/src/api/client.ts
+++ b/frontend/src/api/client.ts
@@ -188,21 +188,36 @@ export const settingsApi = {
updateAI: (data: {
ai_enabled?: boolean;
- ai_provider?: 'anthropic' | 'openai' | null;
+ ai_provider?: 'anthropic' | 'openai' | 'ollama' | null;
anthropic_api_key?: string | null;
openai_api_key?: string | null;
+ ollama_base_url?: string | null;
+ ollama_model?: string | null;
}) => api.put('/settings/ai', data),
testAI: (url: string) =>
api.post('/settings/ai/test', { url }),
+
+ testOllama: (baseUrl: string) =>
+ api.post('/settings/ai/test-ollama', { base_url: baseUrl }),
};
// AI Settings types
export interface AISettings {
ai_enabled: boolean;
- ai_provider: 'anthropic' | 'openai' | null;
+ ai_provider: 'anthropic' | 'openai' | 'ollama' | null;
anthropic_configured: boolean;
openai_configured: boolean;
+ ollama_configured: boolean;
+ ollama_base_url: string | null;
+ ollama_model: string | null;
+}
+
+export interface OllamaTestResult {
+ success: boolean;
+ message?: string;
+ error?: string;
+ models?: string[];
}
export interface AITestResult {
diff --git a/frontend/src/pages/Settings.tsx b/frontend/src/pages/Settings.tsx
index a5a11fe..195e632 100644
--- a/frontend/src/pages/Settings.tsx
+++ b/frontend/src/pages/Settings.tsx
@@ -44,9 +44,13 @@ export default function Settings() {
// AI state
  const [aiSettings, setAISettings] = useState<AISettings | null>(null);
const [aiEnabled, setAIEnabled] = useState(false);
- const [aiProvider, setAIProvider] = useState<'anthropic' | 'openai'>('anthropic');
+ const [aiProvider, setAIProvider] = useState<'anthropic' | 'openai' | 'ollama'>('anthropic');
const [anthropicApiKey, setAnthropicApiKey] = useState('');
const [openaiApiKey, setOpenaiApiKey] = useState('');
+ const [ollamaBaseUrl, setOllamaBaseUrl] = useState('');
+ const [ollamaModel, setOllamaModel] = useState('');
+  const [availableOllamaModels, setAvailableOllamaModels] = useState<string[]>([]);
+ const [isTestingOllama, setIsTestingOllama] = useState(false);
const [isSavingAI, setIsSavingAI] = useState(false);
const [isTestingAI, setIsTestingAI] = useState(false);
const [testUrl, setTestUrl] = useState('');
@@ -87,6 +91,12 @@ export default function Settings() {
if (aiRes.data.ai_provider) {
setAIProvider(aiRes.data.ai_provider);
}
+ if (aiRes.data.ollama_base_url) {
+ setOllamaBaseUrl(aiRes.data.ollama_base_url);
+ }
+ if (aiRes.data.ollama_model) {
+ setOllamaModel(aiRes.data.ollama_model);
+ }
} catch {
setError('Failed to load settings');
} finally {
@@ -299,6 +309,8 @@ export default function Settings() {
ai_provider: aiProvider,
anthropic_api_key: anthropicApiKey || undefined,
openai_api_key: openaiApiKey || undefined,
+ ollama_base_url: aiProvider === 'ollama' ? ollamaBaseUrl || null : undefined,
+ ollama_model: aiProvider === 'ollama' ? ollamaModel || null : undefined,
});
setAISettings(response.data);
setAnthropicApiKey('');
@@ -311,6 +323,28 @@ export default function Settings() {
}
};
+ const handleTestOllama = async () => {
+ clearMessages();
+ if (!ollamaBaseUrl) {
+ setError('Please enter the Ollama base URL');
+ return;
+ }
+ setIsTestingOllama(true);
+ try {
+ const response = await settingsApi.testOllama(ollamaBaseUrl);
+ if (response.data.success) {
+ setAvailableOllamaModels(response.data.models || []);
+ setSuccess(`Connected to Ollama! Found ${response.data.models?.length || 0} models.`);
+ } else {
+ setError(response.data.error || 'Failed to connect to Ollama');
+ }
+ } catch {
+ setError('Failed to connect to Ollama. Make sure it is running.');
+ } finally {
+ setIsTestingOllama(false);
+ }
+ };
+
const handleTestAI = async () => {
clearMessages();
if (!testUrl) {
@@ -1154,7 +1188,7 @@ export default function Settings() {
AI Provider
setAIProvider(e.target.value as 'anthropic' | 'openai')}
+ onChange={(e) => setAIProvider(e.target.value as 'anthropic' | 'openai' | 'ollama')}
style={{
width: '100%',
padding: '0.625rem 0.75rem',
@@ -1167,6 +1201,7 @@ export default function Settings() {
>
                <option value="anthropic">Anthropic (Claude)</option>
                <option value="openai">OpenAI (GPT)</option>
+               <option value="ollama">Ollama (Local)</option>
@@ -1207,6 +1242,72 @@ export default function Settings() {
)}
+
+ {aiProvider === 'ollama' && (
+ <>
+
+
Ollama Base URL
+
+ setOllamaBaseUrl(e.target.value)}
+ placeholder="http://localhost:11434"
+ style={{ flex: 1 }}
+ />
+
+ {isTestingOllama ? 'Testing...' : 'Test Connection'}
+
+
+
+ The URL where Ollama is running. Default is http://localhost:11434
+
+
+
+
+
Model
+ {availableOllamaModels.length > 0 ? (
+
setOllamaModel(e.target.value)}
+ style={{
+ width: '100%',
+ padding: '0.625rem 0.75rem',
+ border: '1px solid var(--border)',
+ borderRadius: '0.375rem',
+ background: 'var(--background)',
+ color: 'var(--text)',
+ fontSize: '0.875rem'
+ }}
+ >
+ Select a model...
+                  {availableOllamaModels.map((model) => (
+                    <option key={model} value={model}>{model}</option>
+                  ))}
+
+ ) : (
+
setOllamaModel(e.target.value)}
+ placeholder={aiSettings?.ollama_model || 'llama3.2, mistral, etc.'}
+ />
+ )}
+
+ {availableOllamaModels.length > 0
+ ? 'Select from available models or test connection to refresh list'
+ : 'Enter model name or test connection to see available models'
+ }
+ {aiSettings?.ollama_configured && ` (currently: ${aiSettings.ollama_model})`}
+
+
+ >
+ )}
>
)}
@@ -1221,7 +1322,7 @@ export default function Settings() {
- {aiSettings?.ai_enabled && (aiSettings.anthropic_configured || aiSettings.openai_configured) && (
+ {aiSettings?.ai_enabled && (aiSettings.anthropic_configured || aiSettings.openai_configured || aiSettings.ollama_configured) && (
🧪
diff --git a/frontend/tsconfig.tsbuildinfo b/frontend/tsconfig.tsbuildinfo
index f5436da..4954d8a 100644
--- a/frontend/tsconfig.tsbuildinfo
+++ b/frontend/tsconfig.tsbuildinfo
@@ -1 +1 @@
-{"root":["./src/app.tsx","./src/main.tsx","./src/vite-env.d.ts","./src/api/client.ts","./src/components/authform.tsx","./src/components/layout.tsx","./src/components/pricechart.tsx","./src/components/productcard.tsx","./src/components/productform.tsx","./src/components/sparkline.tsx","./src/context/authcontext.tsx","./src/context/toastcontext.tsx","./src/hooks/useauth.ts","./src/pages/dashboard.tsx","./src/pages/login.tsx","./src/pages/productdetail.tsx","./src/pages/register.tsx","./src/pages/settings.tsx"],"version":"5.9.3"}
\ No newline at end of file
+{"root":["./src/app.tsx","./src/main.tsx","./src/vite-env.d.ts","./src/api/client.ts","./src/components/authform.tsx","./src/components/layout.tsx","./src/components/pricechart.tsx","./src/components/productcard.tsx","./src/components/productform.tsx","./src/components/sparkline.tsx","./src/components/stocktimeline.tsx","./src/context/authcontext.tsx","./src/context/toastcontext.tsx","./src/hooks/useauth.ts","./src/pages/dashboard.tsx","./src/pages/login.tsx","./src/pages/productdetail.tsx","./src/pages/register.tsx","./src/pages/settings.tsx"],"version":"5.9.3"}
\ No newline at end of file