This commit is contained in:
Anish Sarkar 2026-04-22 06:40:42 +05:30 committed by GitHub
commit f523264c13
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
67 changed files with 11368 additions and 1574 deletions

View file

@ -0,0 +1,39 @@
---
name: Obsidian Plugin Lint

# Lints + type-checks + builds the Obsidian plugin on every push/PR that
# touches its sources. The official obsidian-sample-plugin template ships
# its own ESLint+esbuild setup; we run that here instead of folding the
# plugin into the monorepo's Biome-based code-quality.yml so the tooling
# stays aligned with what `obsidianmd/eslint-plugin-obsidianmd` checks
# against.
on:
  push:
    branches: ["**"]
    paths:
      - "surfsense_obsidian/**"
      - ".github/workflows/obsidian-plugin-lint.yml"
  pull_request:
    branches: ["**"]
    paths:
      - "surfsense_obsidian/**"
      - ".github/workflows/obsidian-plugin-lint.yml"

jobs:
  lint:
    runs-on: ubuntu-latest
    defaults:
      run:
        # All `run` steps execute inside the plugin package.
        working-directory: surfsense_obsidian
    steps:
      - uses: actions/checkout@v6
      - uses: actions/setup-node@v6
        with:
          node-version: 22.x
          cache: npm
          # Cache key path is repo-relative (not affected by the job's
          # working-directory default).
          cache-dependency-path: surfsense_obsidian/package-lock.json
      - run: npm ci
      - run: npm run lint
      - run: npm run build

View file

@ -0,0 +1,117 @@
---
name: Release Obsidian Plugin

# Tag format: `obsidian-v<version>` and `<version>` must match
# `surfsense_obsidian/manifest.json` exactly.
on:
  push:
    tags:
      - "obsidian-v*"
  workflow_dispatch:
    inputs:
      publish:
        description: "Publish to GitHub Releases"
        required: true
        type: choice
        options:
          - never
          - always
        default: "never"

permissions:
  contents: write

jobs:
  build-and-release:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: surfsense_obsidian
    steps:
      - uses: actions/checkout@v6
        with:
          # Need write access for the manifest/versions.json mirror commit
          # back to main further down.
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}
      - uses: actions/setup-node@v6
        with:
          node-version: 22.x
          cache: npm
          cache-dependency-path: surfsense_obsidian/package-lock.json
      - name: Resolve plugin version
        id: version
        run: |
          manifest_version=$(node -p "require('./manifest.json').version")
          if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
            # Manual runs derive the release version from manifest.json.
            version="$manifest_version"
            tag="obsidian-v$version"
          else
            tag="${GITHUB_REF_NAME}"
            if [ -z "$tag" ] || [[ "$tag" != obsidian-v* ]]; then
              echo "::error::Invalid tag '$tag'. Expected format: obsidian-v<version>"
              exit 1
            fi
            version="${tag#obsidian-v}"
            if [ "$version" != "$manifest_version" ]; then
              echo "::error::Tag version '$version' does not match manifest version '$manifest_version'"
              exit 1
            fi
          fi
          echo "tag=$tag" >> "$GITHUB_OUTPUT"
          echo "version=$version" >> "$GITHUB_OUTPUT"
      - name: Resolve publish mode
        id: release_mode
        run: |
          if [ "${{ github.event_name }}" = "push" ] || [ "${{ inputs.publish }}" = "always" ]; then
            echo "should_publish=true" >> "$GITHUB_OUTPUT"
          else
            echo "should_publish=false" >> "$GITHUB_OUTPUT"
          fi
      - run: npm ci
      - run: npm run lint
      - run: npm run build
      - name: Verify build artifacts
        run: |
          # Use a `{ ...; exit 1; }` group, not a `( ... )` subshell: a
          # subshell's `exit 1` only ends the subshell, so a missing file
          # would have depended on errexit being active to fail the step.
          for f in main.js manifest.json styles.css; do
            test -f "$f" || { echo "::error::Missing release artifact: $f"; exit 1; }
          done
      - name: Mirror manifest.json + versions.json to repo root
        if: steps.release_mode.outputs.should_publish == 'true'
        working-directory: ${{ github.workspace }}
        env:
          # Pass expression values through env instead of inlining `${{ }}`
          # into the script body, so tag/branch names can never be expanded
          # as shell syntax (script-injection hardening).
          RELEASE_TAG: ${{ steps.version.outputs.tag }}
          DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
        run: |
          cp surfsense_obsidian/manifest.json manifest.json
          cp surfsense_obsidian/versions.json versions.json
          if git diff --quiet manifest.json versions.json; then
            echo "Root manifest/versions already up to date."
            exit 0
          fi
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git add manifest.json versions.json
          git commit -m "chore(obsidian-plugin): mirror manifest+versions for ${RELEASE_TAG}"
          # Push to the default branch so Obsidian can fetch raw files from HEAD.
          if ! git push origin "HEAD:${DEFAULT_BRANCH}"; then
            echo "::warning::Failed to push mirrored manifest/versions to default branch (likely branch protection). Continuing release."
          fi
      # Publish release under bare `manifest.json` version (no `obsidian-v`
      # prefix) for BRAT/store compatibility.
      - name: Create GitHub release
        if: steps.release_mode.outputs.should_publish == 'true'
        uses: softprops/action-gh-release@v3
        with:
          tag_name: ${{ steps.version.outputs.version }}
          name: SurfSense Obsidian Plugin ${{ steps.version.outputs.version }}
          generate_release_notes: true
          files: |
            surfsense_obsidian/main.js
            surfsense_obsidian/manifest.json
            surfsense_obsidian/styles.css

View file

@ -71,6 +71,7 @@ EMBEDDING_MODEL=sentence-transformers/all-MiniLM-L6-v2
# BACKEND_URL=https://api.yourdomain.com
# NEXT_PUBLIC_FASTAPI_BACKEND_URL=https://api.yourdomain.com
# NEXT_PUBLIC_ZERO_CACHE_URL=https://zero.yourdomain.com
# FASTAPI_BACKEND_INTERNAL_URL=http://backend:8000
# ------------------------------------------------------------------------------
# Zero-cache (real-time sync)

View file

@ -198,6 +198,7 @@ services:
NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE: ${AUTH_TYPE:-LOCAL}
NEXT_PUBLIC_ETL_SERVICE: ${ETL_SERVICE:-DOCLING}
NEXT_PUBLIC_DEPLOYMENT_MODE: ${DEPLOYMENT_MODE:-self-hosted}
FASTAPI_BACKEND_INTERNAL_URL: ${FASTAPI_BACKEND_INTERNAL_URL:-http://backend:8000}
labels:
- "com.centurylinklabs.watchtower.enable=true"
depends_on:

10
manifest.json Normal file
View file

@ -0,0 +1,10 @@
{
"id": "surfsense",
"name": "SurfSense",
"version": "0.1.1",
"minAppVersion": "1.5.4",
"description": "Turn your vault into a searchable second brain with SurfSense.",
"author": "SurfSense",
"authorUrl": "https://github.com/MODSetter/SurfSense",
"isDesktopOnly": false
}

View file

@ -0,0 +1,106 @@
"""129_obsidian_plugin_vault_identity
Revision ID: 129
Revises: 128
Create Date: 2026-04-21
Locks down vault identity for the Obsidian plugin connector:
- Deactivates pre-plugin OBSIDIAN_CONNECTOR rows.
- Partial unique index on ``(user_id, (config->>'vault_id'))`` for the
``/obsidian/connect`` upsert fast path.
- Partial unique index on ``(user_id, (config->>'vault_fingerprint'))``
so two devices observing the same vault content can never produce
two connector rows. Collisions are caught by the route handler and
routed through the merge path.
"""
from __future__ import annotations
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
revision: str = "129"
down_revision: str | None = "128"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Lock down vault identity for the Obsidian plugin connector.

    Executes three raw-SQL statements on the migration connection:
    1. Deactivates pre-plugin OBSIDIAN_CONNECTOR rows (anything whose
       config does not carry ``source = 'plugin'``).
    2. Creates a partial unique index on ``(user_id, config->>'vault_id')``.
    3. Creates a partial unique index on
       ``(user_id, config->>'vault_fingerprint')``.

    Raw SQL (rather than ``op.create_index``) is used because the config
    column is core JSON and the predicates/expressions are
    PostgreSQL-specific.
    """
    conn = op.get_bind()
    # 1. Legacy deactivation: switch off indexing, clear the schedule, and
    #    stamp the config with legacy=true + a UTC deactivation timestamp.
    #    config may be NULL, hence COALESCE to an empty object first.
    conn.execute(
        sa.text(
            """
            UPDATE search_source_connectors
            SET
                is_indexable = false,
                periodic_indexing_enabled = false,
                next_scheduled_at = NULL,
                config = COALESCE(config, '{}'::json)::jsonb
                    || jsonb_build_object(
                        'legacy', true,
                        'deactivated_at', to_char(
                            now() AT TIME ZONE 'UTC',
                            'YYYY-MM-DD"T"HH24:MI:SS"Z"'
                        )
                    )
            WHERE connector_type = 'OBSIDIAN_CONNECTOR'
              AND COALESCE((config::jsonb)->>'source', '') <> 'plugin'
            """
        )
    )
    # 2. One connector row per (user, vault_id); backs the
    #    /obsidian/connect upsert fast path. Partial so only live
    #    plugin-sourced rows participate.
    conn.execute(
        sa.text(
            """
            CREATE UNIQUE INDEX IF NOT EXISTS
                search_source_connectors_obsidian_plugin_vault_uniq
            ON search_source_connectors (user_id, ((config->>'vault_id')))
            WHERE connector_type = 'OBSIDIAN_CONNECTOR'
              AND config->>'source' = 'plugin'
              AND config->>'vault_id' IS NOT NULL
            """
        )
    )
    # 3. Cross-device dedup: two devices observing the same vault content
    #    (same fingerprint) can never create two rows for one user.
    conn.execute(
        sa.text(
            """
            CREATE UNIQUE INDEX IF NOT EXISTS
                search_source_connectors_obsidian_plugin_fingerprint_uniq
            ON search_source_connectors (user_id, ((config->>'vault_fingerprint')))
            WHERE connector_type = 'OBSIDIAN_CONNECTOR'
              AND config->>'source' = 'plugin'
              AND config->>'vault_fingerprint' IS NOT NULL
            """
        )
    )
def downgrade() -> None:
    """Reverse migration 129.

    Drops both partial unique indexes, then strips the ``legacy`` /
    ``deactivated_at`` markers added by :func:`upgrade`. Note the flipped
    ``is_indexable`` / ``periodic_indexing_enabled`` / ``next_scheduled_at``
    values are NOT restored — the original per-row values were not
    recorded, so only the config markers can be undone.
    """
    conn = op.get_bind()
    conn.execute(
        sa.text(
            "DROP INDEX IF EXISTS "
            "search_source_connectors_obsidian_plugin_fingerprint_uniq"
        )
    )
    conn.execute(
        sa.text(
            "DROP INDEX IF EXISTS search_source_connectors_obsidian_plugin_vault_uniq"
        )
    )
    # Only rows actually stamped by upgrade() (jsonb `?` key-exists test)
    # are touched; config round-trips json -> jsonb -> json for the key
    # removal operator.
    conn.execute(
        sa.text(
            """
            UPDATE search_source_connectors
            SET config = (config::jsonb - 'legacy' - 'deactivated_at')::json
            WHERE connector_type = 'OBSIDIAN_CONNECTOR'
              AND (config::jsonb) ? 'legacy'
            """
        )
    )

View file

@ -152,7 +152,6 @@ celery_app.conf.update(
"index_elasticsearch_documents": {"queue": CONNECTORS_QUEUE},
"index_crawled_urls": {"queue": CONNECTORS_QUEUE},
"index_bookstack_pages": {"queue": CONNECTORS_QUEUE},
"index_obsidian_vault": {"queue": CONNECTORS_QUEUE},
"index_composio_connector": {"queue": CONNECTORS_QUEUE},
# Everything else (document processing, podcasts, reindexing,
# schedule checker, cleanup) stays on the default fast queue.

View file

@ -1510,6 +1510,31 @@ class SearchSourceConnector(BaseModel, TimestampMixin):
"name",
name="uq_searchspace_user_connector_type_name",
),
# Mirrors migration 129; backs the ``/obsidian/connect`` upsert.
Index(
"search_source_connectors_obsidian_plugin_vault_uniq",
"user_id",
text("(config->>'vault_id')"),
unique=True,
postgresql_where=text(
"connector_type = 'OBSIDIAN_CONNECTOR' "
"AND config->>'source' = 'plugin' "
"AND config->>'vault_id' IS NOT NULL"
),
),
# Cross-device dedup: same vault content from different devices
# cannot produce two connector rows.
Index(
"search_source_connectors_obsidian_plugin_fingerprint_uniq",
"user_id",
text("(config->>'vault_fingerprint')"),
unique=True,
postgresql_where=text(
"connector_type = 'OBSIDIAN_CONNECTOR' "
"AND config->>'source' = 'plugin' "
"AND config->>'vault_fingerprint' IS NOT NULL"
),
),
)
name = Column(String(100), nullable=False, index=True)

View file

@ -37,6 +37,7 @@ from .new_llm_config_routes import router as new_llm_config_router
from .notes_routes import router as notes_router
from .notifications_routes import router as notifications_router
from .notion_add_connector_route import router as notion_add_connector_router
from .obsidian_plugin_routes import router as obsidian_plugin_router
from .onedrive_add_connector_route import router as onedrive_add_connector_router
from .podcasts_routes import router as podcasts_router
from .prompts_routes import router as prompts_router
@ -84,6 +85,7 @@ router.include_router(notion_add_connector_router)
router.include_router(slack_add_connector_router)
router.include_router(teams_add_connector_router)
router.include_router(onedrive_add_connector_router)
router.include_router(obsidian_plugin_router) # Obsidian plugin push API
router.include_router(discord_add_connector_router)
router.include_router(jira_add_connector_router)
router.include_router(confluence_add_connector_router)

View file

@ -0,0 +1,652 @@
"""Obsidian plugin ingestion routes (``/api/v1/obsidian/*``).
Wire surface for the ``surfsense_obsidian/`` plugin. Versioning anchor is
the ``/api/v1/`` URL prefix; additive feature detection rides the
``capabilities`` array on /health and /connect.
"""
from __future__ import annotations
import logging
from datetime import UTC, datetime
from fastapi import APIRouter, Depends, HTTPException, Query, status
from sqlalchemy import and_, case, func
from sqlalchemy.dialects.postgresql import insert as pg_insert
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.db import (
Document,
DocumentType,
SearchSourceConnector,
SearchSourceConnectorType,
SearchSpace,
User,
get_async_session,
)
from app.schemas.obsidian_plugin import (
ConnectRequest,
ConnectResponse,
DeleteAck,
DeleteAckItem,
DeleteBatchRequest,
HealthResponse,
ManifestResponse,
RenameAck,
RenameAckItem,
RenameBatchRequest,
StatsResponse,
SyncAck,
SyncAckItem,
SyncBatchRequest,
)
from app.services.notification_service import NotificationService
from app.services.obsidian_plugin_indexer import (
delete_note,
get_manifest,
merge_obsidian_connectors,
rename_note,
upsert_note,
)
from app.users import current_active_user
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/obsidian", tags=["obsidian-plugin"])
# Plugins feature-gate on these. Add entries, never rename or remove.
OBSIDIAN_CAPABILITIES: list[str] = ["sync", "rename", "delete", "manifest", "stats"]
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
def _build_handshake() -> dict[str, object]:
return {"capabilities": list(OBSIDIAN_CAPABILITIES)}
def _connector_type_value(connector: SearchSourceConnector) -> str:
connector_type = connector.connector_type
if hasattr(connector_type, "value"):
return str(connector_type.value)
return str(connector_type)
async def _start_obsidian_sync_notification(
    session: AsyncSession,
    *,
    user: User,
    connector: SearchSourceConnector,
    total_count: int,
):
    """Create/update the rolling inbox item for Obsidian plugin sync.

    Obsidian sync is continuous and batched, so we keep one stable
    operation_id per connector instead of creating a new notification per batch.

    Args:
        session: Async DB session the notification service writes through.
        user: Owner of the notification.
        connector: OBSIDIAN_CONNECTOR row the incoming batch belongs to.
        total_count: Number of notes in the batch (shown as progress total).

    Returns:
        Whatever ``update_notification`` returns (the refreshed
        notification; exact type lives in NotificationService).
    """
    handler = NotificationService.connector_indexing
    # Stable per-connector operation id -> successive batches fold into
    # one inbox item rather than spamming a new one per request.
    operation_id = f"obsidian_sync_connector_{connector.id}"
    connector_name = connector.name or "Obsidian"
    notification = await handler.find_or_create_notification(
        session=session,
        user_id=user.id,
        operation_id=operation_id,
        title=f"Syncing: {connector_name}",
        message="Syncing from Obsidian plugin",
        search_space_id=connector.search_space_id,
        initial_metadata={
            "connector_id": connector.id,
            "connector_name": connector_name,
            "connector_type": _connector_type_value(connector),
            "sync_stage": "processing",
            "indexed_count": 0,
            "failed_count": 0,
            "total_count": total_count,
            "source": "obsidian_plugin",
        },
    )
    # Found-or-created either way: reset stage/total so a re-used item
    # reflects the batch that is starting now.
    return await handler.update_notification(
        session=session,
        notification=notification,
        status="in_progress",
        metadata_updates={
            "sync_stage": "processing",
            "total_count": total_count,
        },
    )
async def _finish_obsidian_sync_notification(
    session: AsyncSession,
    *,
    notification,
    indexed: int,
    failed: int,
):
    """Mark the rolling Obsidian sync inbox item complete or failed.

    Outcome mapping:
    - every note failed (``failed > 0 and indexed == 0``) -> status "failed";
    - anything else (all ok, partial, or empty batch) -> status "completed",
      with a message distinguishing partial / no-op / singular / plural.

    Args:
        session: Async DB session for the notification update.
        notification: The item returned by
            :func:`_start_obsidian_sync_notification`.
        indexed: Notes upserted successfully in this batch.
        failed: Notes that raised during upsert.
    """
    handler = NotificationService.connector_indexing
    # Name was stashed in metadata at start time; fall back if absent.
    connector_name = notification.notification_metadata.get(
        "connector_name", "Obsidian"
    )
    if failed > 0 and indexed == 0:
        # Total failure: nothing in the batch landed.
        title = f"Failed: {connector_name}"
        message = (
            f"Sync failed: {failed} file(s) failed"
            if failed > 1
            else "Sync failed: 1 file failed"
        )
        status_value = "failed"
        stage = "failed"
    else:
        # Completed (possibly partially); pick a human message per case.
        title = f"Ready: {connector_name}"
        if failed > 0:
            message = f"Partially synced: {indexed} file(s) synced, {failed} failed."
        elif indexed == 0:
            # Empty batch / everything already current.
            message = "Already up to date!"
        elif indexed == 1:
            message = "Now searchable! 1 file synced."
        else:
            message = f"Now searchable! {indexed} files synced."
        status_value = "completed"
        stage = "completed"
    await handler.update_notification(
        session=session,
        notification=notification,
        title=title,
        message=message,
        status=status_value,
        metadata_updates={
            "indexed_count": indexed,
            "failed_count": failed,
            "sync_stage": stage,
        },
    )
async def _resolve_vault_connector(
    session: AsyncSession,
    *,
    user: User,
    vault_id: str,
) -> SearchSourceConnector:
    """Find the OBSIDIAN_CONNECTOR row that owns ``vault_id`` for this user.

    Args:
        session: Async DB session.
        user: Authenticated caller; rows are scoped to ``user.id``.
        vault_id: Plugin-generated stable vault UUID.

    Returns:
        The matching plugin-sourced connector row.

    Raises:
        HTTPException: 404 with code ``VAULT_NOT_REGISTERED`` when no
            plugin connector exists for this (user, vault_id).
    """
    # ``config`` is core ``JSON`` (not ``JSONB``); ``as_string()`` is the
    # cross-dialect equivalent of ``.astext`` and compiles to ``->>``.
    stmt = select(SearchSourceConnector).where(
        and_(
            SearchSourceConnector.user_id == user.id,
            SearchSourceConnector.connector_type
            == SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
            SearchSourceConnector.config["vault_id"].as_string() == vault_id,
            # Only plugin-registered rows count; legacy rows are excluded.
            SearchSourceConnector.config["source"].as_string() == "plugin",
        )
    )
    connector = (await session.execute(stmt)).scalars().first()
    if connector is not None:
        return connector
    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail={
            "code": "VAULT_NOT_REGISTERED",
            "message": (
                "No Obsidian plugin connector found for this vault. "
                "Call POST /obsidian/connect first."
            ),
            "vault_id": vault_id,
        },
    )
async def _ensure_search_space_access(
    session: AsyncSession,
    *,
    user: User,
    search_space_id: int,
) -> SearchSpace:
    """Owner-only access to the search space (shared spaces are a follow-up).

    Args:
        session: Async DB session.
        user: Authenticated caller.
        search_space_id: Space the plugin wants to register/sync into.

    Returns:
        The SearchSpace row owned by ``user``.

    Raises:
        HTTPException: 403 with code ``SEARCH_SPACE_FORBIDDEN`` when the
            space does not exist or is not owned by the caller (the two
            cases are deliberately indistinguishable to the client).
    """
    result = await session.execute(
        select(SearchSpace).where(
            and_(SearchSpace.id == search_space_id, SearchSpace.user_id == user.id)
        )
    )
    space = result.scalars().first()
    if space is None:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail={
                "code": "SEARCH_SPACE_FORBIDDEN",
                "message": "You don't own that search space.",
            },
        )
    return space
# ---------------------------------------------------------------------------
# Endpoints
# ---------------------------------------------------------------------------
@router.get("/health", response_model=HealthResponse)
async def obsidian_health(
    user: User = Depends(current_active_user),
) -> HealthResponse:
    """Return the API contract handshake; plugin caches it per onload.

    Auth is required (current_active_user) so this doubles as a token
    check; the body carries the capability list plus the server's current
    UTC time.
    """
    return HealthResponse(
        **_build_handshake(),
        server_time_utc=datetime.now(UTC),
    )
async def _find_by_vault_id(
    session: AsyncSession, *, user_id, vault_id: str
) -> SearchSourceConnector | None:
    """Fetch the plugin-sourced OBSIDIAN_CONNECTOR row keyed by vault_id.

    Unlike :func:`_resolve_vault_connector` this returns ``None`` instead
    of raising, so /connect can branch on existence.
    """
    stmt = select(SearchSourceConnector).where(
        and_(
            SearchSourceConnector.user_id == user_id,
            SearchSourceConnector.connector_type
            == SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
            SearchSourceConnector.config["source"].as_string() == "plugin",
            SearchSourceConnector.config["vault_id"].as_string() == vault_id,
        )
    )
    return (await session.execute(stmt)).scalars().first()
async def _find_by_fingerprint(
    session: AsyncSession, *, user_id, vault_fingerprint: str
) -> SearchSourceConnector | None:
    """Fetch the plugin-sourced OBSIDIAN_CONNECTOR row keyed by fingerprint.

    The fingerprint identifies vault *content* across devices, so this is
    the cross-device lookup used by /connect adoption and merge paths.
    Returns ``None`` when no row matches.
    """
    stmt = select(SearchSourceConnector).where(
        and_(
            SearchSourceConnector.user_id == user_id,
            SearchSourceConnector.connector_type
            == SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
            SearchSourceConnector.config["source"].as_string() == "plugin",
            SearchSourceConnector.config["vault_fingerprint"].as_string()
            == vault_fingerprint,
        )
    )
    return (await session.execute(stmt)).scalars().first()
def _build_config(payload: ConnectRequest, *, now_iso: str) -> dict[str, object]:
return {
"vault_id": payload.vault_id,
"vault_name": payload.vault_name,
"vault_fingerprint": payload.vault_fingerprint,
"source": "plugin",
"last_connect_at": now_iso,
}
def _display_name(vault_name: str) -> str:
return f"Obsidian - {vault_name}"
@router.post("/connect", response_model=ConnectResponse)
async def obsidian_connect(
    payload: ConnectRequest,
    user: User = Depends(current_active_user),
    session: AsyncSession = Depends(get_async_session),
) -> ConnectResponse:
    """Register a vault, refresh an existing one, or adopt another device's row.

    Resolution order:
    1. ``(user_id, vault_id)`` known device, refresh metadata.
    2. ``(user_id, vault_fingerprint)`` another device of the same vault,
       caller adopts the surviving ``vault_id``.
    3. Insert a new row.

    Fingerprint collisions on (1) trigger ``merge_obsidian_connectors`` so
    the partial unique index can never produce two live rows for one vault.
    """
    # 403 early if the caller does not own the target search space.
    await _ensure_search_space_access(
        session, user=user, search_space_id=payload.search_space_id
    )
    now_iso = datetime.now(UTC).isoformat()
    cfg = _build_config(payload, now_iso=now_iso)
    display_name = _display_name(payload.vault_name)
    # --- Path 1: row already exists for this exact vault_id. -------------
    existing_by_vid = await _find_by_vault_id(
        session, user_id=user.id, vault_id=payload.vault_id
    )
    if existing_by_vid is not None:
        # A *different* row holding the same fingerprint means two rows
        # describe one vault; merge ours into the collision row before the
        # unique fingerprint index could ever bite.
        collision = await _find_by_fingerprint(
            session, user_id=user.id, vault_fingerprint=payload.vault_fingerprint
        )
        if collision is not None and collision.id != existing_by_vid.id:
            await merge_obsidian_connectors(
                session, source=existing_by_vid, target=collision
            )
            # Refresh survivor metadata; caller adopts the survivor's
            # vault_id (taken from the collision row's config, not ours).
            collision_cfg = dict(collision.config or {})
            collision_cfg["vault_name"] = payload.vault_name
            collision_cfg["last_connect_at"] = now_iso
            collision.config = collision_cfg
            collision.name = _display_name(payload.vault_name)
            # Build the response BEFORE commit: commit expires ORM
            # attributes, so reading them afterwards would re-query.
            response = ConnectResponse(
                connector_id=collision.id,
                vault_id=collision_cfg["vault_id"],
                search_space_id=collision.search_space_id,
                **_build_handshake(),
            )
            await session.commit()
            return response
        # No collision: plain metadata refresh of the known row.
        existing_by_vid.name = display_name
        existing_by_vid.config = cfg
        existing_by_vid.search_space_id = payload.search_space_id
        # Plugin pushes content itself; server-side indexing stays off.
        existing_by_vid.is_indexable = False
        response = ConnectResponse(
            connector_id=existing_by_vid.id,
            vault_id=payload.vault_id,
            search_space_id=existing_by_vid.search_space_id,
            **_build_handshake(),
        )
        await session.commit()
        return response
    # --- Path 2: unknown vault_id but known content fingerprint ----------
    # (a second device of an already-registered vault). The caller adopts
    # the survivor's vault_id from the response.
    existing_by_fp = await _find_by_fingerprint(
        session, user_id=user.id, vault_fingerprint=payload.vault_fingerprint
    )
    if existing_by_fp is not None:
        survivor_cfg = dict(existing_by_fp.config or {})
        survivor_cfg["vault_name"] = payload.vault_name
        survivor_cfg["last_connect_at"] = now_iso
        existing_by_fp.config = survivor_cfg
        existing_by_fp.name = display_name
        response = ConnectResponse(
            connector_id=existing_by_fp.id,
            vault_id=survivor_cfg["vault_id"],
            search_space_id=existing_by_fp.search_space_id,
            **_build_handshake(),
        )
        await session.commit()
        return response
    # --- Path 3: brand-new vault -> insert. -------------------------------
    # ON CONFLICT DO NOTHING matches any unique index (vault_id OR
    # fingerprint), so concurrent first-time connects from two devices
    # of the same vault never raise IntegrityError — the loser just
    # gets an empty RETURNING and falls through to re-fetch the winner.
    insert_stmt = (
        pg_insert(SearchSourceConnector)
        .values(
            name=display_name,
            connector_type=SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
            is_indexable=False,
            config=cfg,
            user_id=user.id,
            search_space_id=payload.search_space_id,
        )
        .on_conflict_do_nothing()
        .returning(
            SearchSourceConnector.id,
            SearchSourceConnector.search_space_id,
        )
    )
    inserted = (await session.execute(insert_stmt)).first()
    if inserted is not None:
        # We won the race (or there was none).
        response = ConnectResponse(
            connector_id=inserted.id,
            vault_id=payload.vault_id,
            search_space_id=inserted.search_space_id,
            **_build_handshake(),
        )
        await session.commit()
        return response
    # Lost the race: locate the winner — by fingerprint first (same vault,
    # other device), then by vault_id (exact duplicate request).
    winner = await _find_by_fingerprint(
        session, user_id=user.id, vault_fingerprint=payload.vault_fingerprint
    )
    if winner is None:
        winner = await _find_by_vault_id(
            session, user_id=user.id, vault_id=payload.vault_id
        )
    if winner is None:
        # Should be unreachable: the conflict implies a matching row exists.
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="vault registration conflicted but winning row could not be located",
        )
    response = ConnectResponse(
        connector_id=winner.id,
        vault_id=(winner.config or {})["vault_id"],
        search_space_id=winner.search_space_id,
        **_build_handshake(),
    )
    await session.commit()
    return response
@router.post("/sync", response_model=SyncAck)
async def obsidian_sync(
    payload: SyncBatchRequest,
    user: User = Depends(current_active_user),
    session: AsyncSession = Depends(get_async_session),
) -> SyncAck:
    """Batch-upsert notes; returns per-note ack so the plugin can dequeue/retry.

    Notification handling is strictly best-effort: failures there are
    logged and never fail the sync itself. Per-note upsert failures are
    collected into the ack; only HTTPException (auth/ownership-level
    errors) propagates and aborts the whole batch.
    """
    # 404 VAULT_NOT_REGISTERED if the vault has not called /connect.
    connector = await _resolve_vault_connector(
        session, user=user, vault_id=payload.vault_id
    )
    notification = None
    try:
        notification = await _start_obsidian_sync_notification(
            session, user=user, connector=connector, total_count=len(payload.notes)
        )
    except Exception:
        # Best-effort: a broken inbox must not block ingestion.
        logger.warning(
            "obsidian sync notification start failed connector=%s user=%s",
            connector.id,
            user.id,
            exc_info=True,
        )
    items: list[SyncAckItem] = []
    indexed = 0
    failed = 0
    for note in payload.notes:
        try:
            doc = await upsert_note(
                session, connector=connector, payload=note, user_id=str(user.id)
            )
            indexed += 1
            items.append(SyncAckItem(path=note.path, status="ok", document_id=doc.id))
        except HTTPException:
            # Deliberate API errors bubble up unchanged.
            raise
        except Exception as exc:
            # Per-note failure: record and continue with the rest of the
            # batch; error text is truncated to keep the ack bounded.
            failed += 1
            logger.exception(
                "obsidian /sync failed for path=%s vault=%s",
                note.path,
                payload.vault_id,
            )
            items.append(
                SyncAckItem(path=note.path, status="error", error=str(exc)[:300])
            )
    if notification is not None:
        try:
            await _finish_obsidian_sync_notification(
                session,
                notification=notification,
                indexed=indexed,
                failed=failed,
            )
        except Exception:
            # Same best-effort stance as the start call.
            logger.warning(
                "obsidian sync notification finish failed connector=%s user=%s",
                connector.id,
                user.id,
                exc_info=True,
            )
    return SyncAck(
        vault_id=payload.vault_id,
        indexed=indexed,
        failed=failed,
        items=items,
    )
@router.post("/rename", response_model=RenameAck)
async def obsidian_rename(
    payload: RenameBatchRequest,
    user: User = Depends(current_active_user),
    session: AsyncSession = Depends(get_async_session),
) -> RenameAck:
    """Apply a batch of vault rename events.

    Each rename is acked individually as ``ok`` (document moved),
    ``missing`` (no document at old_path — counted but not fatal), or
    ``error`` (exception during the move; batch continues). Note that,
    unlike /sync, HTTPException from rename_note is not re-raised here —
    it is caught by the generic handler and reported as an error item.
    """
    connector = await _resolve_vault_connector(
        session, user=user, vault_id=payload.vault_id
    )
    items: list[RenameAckItem] = []
    renamed = 0
    missing = 0
    for item in payload.renames:
        try:
            # rename_note returns the moved Document, or None when
            # old_path is unknown to the server.
            doc = await rename_note(
                session,
                connector=connector,
                old_path=item.old_path,
                new_path=item.new_path,
                vault_id=payload.vault_id,
            )
            if doc is None:
                missing += 1
                items.append(
                    RenameAckItem(
                        old_path=item.old_path,
                        new_path=item.new_path,
                        status="missing",
                    )
                )
            else:
                renamed += 1
                items.append(
                    RenameAckItem(
                        old_path=item.old_path,
                        new_path=item.new_path,
                        status="ok",
                        document_id=doc.id,
                    )
                )
        except Exception as exc:
            # Per-item failure: log and continue; error text truncated.
            logger.exception(
                "obsidian /rename failed for old=%s new=%s vault=%s",
                item.old_path,
                item.new_path,
                payload.vault_id,
            )
            items.append(
                RenameAckItem(
                    old_path=item.old_path,
                    new_path=item.new_path,
                    status="error",
                    error=str(exc)[:300],
                )
            )
    return RenameAck(
        vault_id=payload.vault_id,
        renamed=renamed,
        missing=missing,
        items=items,
    )
@router.delete("/notes", response_model=DeleteAck)
async def obsidian_delete_notes(
    payload: DeleteBatchRequest,
    user: User = Depends(current_active_user),
    session: AsyncSession = Depends(get_async_session),
) -> DeleteAck:
    """Soft-delete a batch of notes by vault-relative path.

    Per-path ack statuses: ``ok`` (tombstoned), ``missing`` (no active
    document at that path), ``error`` (exception; batch continues). Note
    error items increment neither ``deleted`` nor ``missing`` — the caller
    should retry them.
    """
    connector = await _resolve_vault_connector(
        session, user=user, vault_id=payload.vault_id
    )
    deleted = 0
    missing = 0
    items: list[DeleteAckItem] = []
    for path in payload.paths:
        try:
            # delete_note returns True when a document was tombstoned,
            # False when nothing matched the path.
            ok = await delete_note(
                session,
                connector=connector,
                vault_id=payload.vault_id,
                path=path,
            )
            if ok:
                deleted += 1
                items.append(DeleteAckItem(path=path, status="ok"))
            else:
                missing += 1
                items.append(DeleteAckItem(path=path, status="missing"))
        except Exception as exc:
            # Per-path failure: log and continue; error text truncated.
            logger.exception(
                "obsidian DELETE /notes failed for path=%s vault=%s",
                path,
                payload.vault_id,
            )
            items.append(DeleteAckItem(path=path, status="error", error=str(exc)[:300]))
    return DeleteAck(
        vault_id=payload.vault_id,
        deleted=deleted,
        missing=missing,
        items=items,
    )
@router.get("/manifest", response_model=ManifestResponse)
async def obsidian_manifest(
    vault_id: str = Query(..., description="Plugin-side stable vault UUID"),
    user: User = Depends(current_active_user),
    session: AsyncSession = Depends(get_async_session),
) -> ManifestResponse:
    """Return ``{path: {hash, mtime}}`` for the plugin's onload reconcile diff.

    Raises 404 VAULT_NOT_REGISTERED (via _resolve_vault_connector) for
    unknown vaults; the manifest itself is assembled by the indexer
    service's get_manifest.
    """
    connector = await _resolve_vault_connector(session, user=user, vault_id=vault_id)
    return await get_manifest(session, connector=connector, vault_id=vault_id)
@router.get("/stats", response_model=StatsResponse)
async def obsidian_stats(
    vault_id: str = Query(..., description="Plugin-side stable vault UUID"),
    user: User = Depends(current_active_user),
    session: AsyncSession = Depends(get_async_session),
) -> StatsResponse:
    """Active-note count + last sync time for the web tile.

    ``files_synced`` excludes tombstones so it matches ``/manifest``;
    ``last_sync_at`` includes them so deletes advance the freshness signal.
    """
    connector = await _resolve_vault_connector(session, user=user, vault_id=vault_id)
    # "Active" = no deleted_at marker in the document metadata JSON
    # (``->>`` yields NULL both when the key is absent and when it is null).
    is_active = Document.document_metadata["deleted_at"].as_string().is_(None)
    # Single aggregate query: conditional COUNT over active rows, MAX
    # updated_at over ALL rows (tombstones included, per the docstring).
    row = (
        await session.execute(
            select(
                func.count(case((is_active, 1))).label("files_synced"),
                func.max(Document.updated_at).label("last_sync_at"),
            ).where(
                and_(
                    Document.connector_id == connector.id,
                    Document.document_type == DocumentType.OBSIDIAN_CONNECTOR,
                )
            )
        )
    ).first()
    return StatsResponse(
        vault_id=vault_id,
        files_synced=int(row[0] or 0),
        # NULL (never synced) passes through as None.
        last_sync_at=row[1],
    )

View file

@ -1157,25 +1157,6 @@ async def index_connector_content(
)
response_message = "Web page indexing started in the background."
elif connector.connector_type == SearchSourceConnectorType.OBSIDIAN_CONNECTOR:
from app.config import config as app_config
from app.tasks.celery_tasks.connector_tasks import index_obsidian_vault_task
# Obsidian connector only available in self-hosted mode
if not app_config.is_self_hosted():
raise HTTPException(
status_code=400,
detail="Obsidian connector is only available in self-hosted mode",
)
logger.info(
f"Triggering Obsidian vault indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}"
)
index_obsidian_vault_task.delay(
connector_id, search_space_id, str(user.id), indexing_from, indexing_to
)
response_message = "Obsidian vault indexing started in the background."
elif (
connector.connector_type
== SearchSourceConnectorType.COMPOSIO_GOOGLE_DRIVE_CONNECTOR
@ -3048,59 +3029,6 @@ async def run_bookstack_indexing(
)
# Add new helper functions for Obsidian indexing
async def run_obsidian_indexing_with_new_session(
connector_id: int,
search_space_id: int,
user_id: str,
start_date: str,
end_date: str,
):
"""Wrapper to run Obsidian indexing with its own database session."""
logger.info(
f"Background task started: Indexing Obsidian connector {connector_id} into space {search_space_id} from {start_date} to {end_date}"
)
async with async_session_maker() as session:
await run_obsidian_indexing(
session, connector_id, search_space_id, user_id, start_date, end_date
)
logger.info(f"Background task finished: Indexing Obsidian connector {connector_id}")
async def run_obsidian_indexing(
session: AsyncSession,
connector_id: int,
search_space_id: int,
user_id: str,
start_date: str,
end_date: str,
):
"""
Background task to run Obsidian vault indexing.
Args:
session: Database session
connector_id: ID of the Obsidian connector
search_space_id: ID of the search space
user_id: ID of the user
start_date: Start date for indexing
end_date: End date for indexing
"""
from app.tasks.connector_indexers import index_obsidian_vault
await _run_indexing_with_notifications(
session=session,
connector_id=connector_id,
search_space_id=search_space_id,
user_id=user_id,
start_date=start_date,
end_date=end_date,
indexing_function=index_obsidian_vault,
update_timestamp_func=_update_connector_timestamp_by_id,
supports_heartbeat_callback=True,
)
async def run_composio_indexing_with_new_session(
connector_id: int,
search_space_id: int,

View file

@ -1,59 +0,0 @@
"""
Obsidian Connector Credentials Schema.
Obsidian is a local-first note-taking app that stores notes as markdown files.
This connector supports indexing from local file system (self-hosted only).
"""
from pydantic import BaseModel, field_validator
class ObsidianAuthCredentialsBase(BaseModel):
    """
    Credentials/configuration for the Obsidian connector.

    Since Obsidian vaults are local directories, this schema primarily
    holds the vault path and configuration options rather than API tokens.
    """

    # Absolute/local path to the vault directory (required, validated below).
    vault_path: str
    # Optional human-readable vault name.
    vault_name: str | None = None
    # Folders skipped during indexing; None triggers the default set below.
    exclude_folders: list[str] | None = None
    # Whether non-markdown attachments are indexed too.
    include_attachments: bool = False

    @field_validator("vault_path")
    @classmethod
    def validate_vault_path(cls, v: str) -> str:
        """Ensure vault path is provided and stripped of whitespace."""
        if not v or not v.strip():
            raise ValueError("Vault path is required")
        return v.strip()

    @field_validator("exclude_folders", mode="before")
    @classmethod
    def parse_exclude_folders(cls, v):
        """Parse exclude_folders from string if needed.

        None -> default exclusions; comma-separated string -> list of
        trimmed non-empty entries; anything else passes through unchanged.
        """
        if v is None:
            return [".trash", ".obsidian", "templates"]
        if isinstance(v, str):
            return [f.strip() for f in v.split(",") if f.strip()]
        return v

    def to_dict(self) -> dict:
        """Convert credentials to dictionary for storage."""
        return {
            "vault_path": self.vault_path,
            "vault_name": self.vault_name,
            "exclude_folders": self.exclude_folders,
            "include_attachments": self.include_attachments,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "ObsidianAuthCredentialsBase":
        """Create credentials from dictionary.

        Missing keys fall back to the field defaults (empty vault_path
        will then fail validate_vault_path).
        """
        return cls(
            vault_path=data.get("vault_path", ""),
            vault_name=data.get("vault_name"),
            exclude_folders=data.get("exclude_folders"),
            include_attachments=data.get("include_attachments", False),
        )

View file

@ -0,0 +1,180 @@
"""Wire schemas spoken between the SurfSense Obsidian plugin and the backend.
All schemas inherit ``extra='ignore'`` from :class:`_PluginBase` so additive
field changes never break either side; hard breaks live behind a new URL
prefix (``/api/v2/...``).
"""
from __future__ import annotations
from datetime import datetime
from typing import Any, Literal
from pydantic import BaseModel, ConfigDict, Field
# Shared Pydantic config: unknown fields sent by a newer peer are silently
# dropped instead of raising, keeping the plugin/backend contract additive.
_PLUGIN_MODEL_CONFIG = ConfigDict(extra="ignore")


class _PluginBase(BaseModel):
    """Base schema carrying the shared forward-compatibility config."""

    model_config = _PLUGIN_MODEL_CONFIG
class NotePayload(_PluginBase):
    """One Obsidian note as pushed by the plugin (the source of truth)."""

    vault_id: str = Field(
        ..., description="Stable plugin-generated UUID for this vault"
    )
    path: str = Field(..., description="Vault-relative path, e.g. 'notes/foo.md'")
    name: str = Field(..., description="File stem (no extension)")
    extension: str = Field(
        default="md", description="File extension without leading dot"
    )
    content: str = Field(default="", description="Raw markdown body (post-frontmatter)")
    # Structured metadata extracted client-side by the plugin.
    frontmatter: dict[str, Any] = Field(default_factory=dict)
    tags: list[str] = Field(default_factory=list)
    headings: list[str] = Field(default_factory=list)
    resolved_links: list[str] = Field(default_factory=list)
    unresolved_links: list[str] = Field(default_factory=list)
    embeds: list[str] = Field(default_factory=list)
    aliases: list[str] = Field(default_factory=list)
    content_hash: str = Field(
        ..., description="Plugin-computed SHA-256 of the raw content"
    )
    size: int | None = Field(
        default=None,
        ge=0,
        description="Byte size of the local file (mtime+size short-circuit signal). Optional for forward compatibility.",
    )
    # File timestamps as reported by the plugin's filesystem view.
    mtime: datetime
    ctime: datetime
class SyncBatchRequest(_PluginBase):
    """Batch upsert; plugin sends 10-20 notes per request."""

    vault_id: str
    # Hard server-side cap of 100 notes per batch.
    notes: list[NotePayload] = Field(default_factory=list, max_length=100)
class RenameItem(_PluginBase):
    """A single old-path to new-path rename (paths are vault-relative)."""

    old_path: str
    new_path: str
class RenameBatchRequest(_PluginBase):
    """Batch of renames for one vault (capped at 200 per request)."""

    vault_id: str
    renames: list[RenameItem] = Field(default_factory=list, max_length=200)
class DeleteBatchRequest(_PluginBase):
    """Batch of vault-relative paths to delete (capped at 500 per request)."""

    vault_id: str
    paths: list[str] = Field(default_factory=list, max_length=500)
class ManifestEntry(_PluginBase):
    """Per-path sync state as returned by the manifest endpoint."""

    # Plugin-computed content hash last seen by the server.
    hash: str
    mtime: datetime
    size: int | None = Field(
        default=None,
        description="Byte size last seen by the server. Enables mtime+size short-circuit; absent when not yet recorded.",
    )
class ManifestResponse(_PluginBase):
    """Path-keyed manifest of every non-deleted note for a vault."""

    vault_id: str
    # Keyed by vault-relative path, e.g. "notes/foo.md".
    items: dict[str, ManifestEntry] = Field(default_factory=dict)
class ConnectRequest(_PluginBase):
    """Vault registration / heartbeat. Replayed on every plugin onload."""

    vault_id: str
    vault_name: str
    # Target search space the vault's documents land in.
    search_space_id: int
    vault_fingerprint: str = Field(
        ...,
        description=(
            "Deterministic SHA-256 over the sorted markdown paths in the vault "
            "(plus vault_name). Same vault content on any device produces the "
            "same value; the server uses it to dedup connectors across devices."
        ),
    )
class ConnectResponse(_PluginBase):
    """Carries the same handshake fields as ``HealthResponse`` so the plugin
    learns the contract without a separate ``GET /health`` round-trip."""

    connector_id: int
    vault_id: str
    search_space_id: int
    # Additive-only server feature flags (same semantics as HealthResponse).
    capabilities: list[str]
class HealthResponse(_PluginBase):
    """API contract handshake. ``capabilities`` is additive-only string list."""

    capabilities: list[str]
    # Server clock in UTC — presumably for client-side skew checks; confirm
    # against the plugin's consumer code.
    server_time_utc: datetime
# Per-item batch ack schemas — wire shape is load-bearing for the plugin
# queue (see api-client.ts / sync-engine.ts:processBatch).
class SyncAckItem(_PluginBase):
    """Per-note result for one entry of a ``SyncBatchRequest``."""

    path: str
    status: Literal["ok", "error"]
    # Backend document id when status == "ok".
    document_id: int | None = None
    # Failure reason when status == "error".
    error: str | None = None
class SyncAck(_PluginBase):
    """Batch-level sync ack: counters plus per-item results."""

    vault_id: str
    indexed: int
    failed: int
    items: list[SyncAckItem] = Field(default_factory=list)
class RenameAckItem(_PluginBase):
    """Per-rename result for one entry of a ``RenameBatchRequest``."""

    old_path: str
    new_path: str
    # ``missing`` is treated as success client-side (end state reached).
    status: Literal["ok", "error", "missing"]
    document_id: int | None = None
    error: str | None = None
class RenameAck(_PluginBase):
    """Batch-level rename ack: counters plus per-item results."""

    vault_id: str
    renamed: int
    missing: int
    items: list[RenameAckItem] = Field(default_factory=list)
class DeleteAckItem(_PluginBase):
    """Per-path result for one entry of a ``DeleteBatchRequest``."""

    path: str
    # ``missing`` means no matching row existed — an acceptable end state.
    status: Literal["ok", "error", "missing"]
    error: str | None = None
class DeleteAck(_PluginBase):
    """Batch-level delete ack: counters plus per-item results."""

    vault_id: str
    deleted: int
    missing: int
    items: list[DeleteAckItem] = Field(default_factory=list)
class StatsResponse(_PluginBase):
    """Backs the Obsidian connector tile in the web UI."""

    vault_id: str
    files_synced: int
    # None until a sync timestamp has been recorded.
    last_sync_at: datetime | None = None

View file

@ -0,0 +1,471 @@
"""
Obsidian plugin indexer service.
Bridges the SurfSense Obsidian plugin's HTTP payloads
(see ``app/schemas/obsidian_plugin.py``) into the shared
``IndexingPipelineService``.
Responsibilities:
- ``upsert_note`` push one note through the indexing pipeline; respects
unchanged content (skip) and version-snapshots existing rows before
rewrite.
- ``rename_note`` rewrite path-derived fields (path metadata,
``unique_identifier_hash``, ``source_url``) without re-indexing content.
- ``delete_note`` soft delete with a tombstone in ``document_metadata``
so reconciliation can distinguish "user explicitly killed this in the UI"
from "plugin hasn't synced yet".
- ``get_manifest`` return ``{path: {hash, mtime, size}}`` for every
non-deleted note belonging to a vault, used by the plugin's reconcile
pass on ``onload``.
Design notes
------------
The plugin's content hash and the backend's ``content_hash`` are computed
differently (plugin uses raw SHA-256 of the markdown body; backend salts
with ``search_space_id``). We persist the plugin's hash in
``document_metadata['plugin_content_hash']`` so the manifest endpoint can
return what the plugin sent — that's the only number the plugin can
compare without re-downloading content.
"""
from __future__ import annotations
import logging
from datetime import UTC, datetime
from typing import Any
from urllib.parse import quote
from sqlalchemy import and_, select
from sqlalchemy.ext.asyncio import AsyncSession
from app.db import (
Document,
DocumentStatus,
DocumentType,
SearchSourceConnector,
)
from app.indexing_pipeline.connector_document import ConnectorDocument
from app.indexing_pipeline.indexing_pipeline_service import IndexingPipelineService
from app.schemas.obsidian_plugin import (
ManifestEntry,
ManifestResponse,
NotePayload,
)
from app.services.llm_service import get_user_long_context_llm
from app.utils.document_converters import generate_unique_identifier_hash
from app.utils.document_versioning import create_version_snapshot
logger = logging.getLogger(__name__)
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
def _vault_path_unique_id(vault_id: str, path: str) -> str:
"""Stable identifier for a note. Vault-scoped so the same path under two
different vaults doesn't collide."""
return f"{vault_id}:{path}"
def _build_source_url(vault_name: str, path: str) -> str:
"""Build the ``obsidian://`` deep link for the web UI's "Open in Obsidian"
button. Both segments are URL-encoded because vault names and paths can
contain spaces, ``#``, ``?``, etc.
"""
return (
"obsidian://open"
f"?vault={quote(vault_name, safe='')}"
f"&file={quote(path, safe='')}"
)
def _build_metadata(
    payload: NotePayload,
    *,
    vault_name: str,
    connector_id: int,
    extra: dict[str, Any] | None = None,
) -> dict[str, Any]:
    """Flatten the rich plugin payload into the JSONB ``document_metadata``
    column. Keys here are what the chat UI / search UI surface to users.
    """
    base: dict[str, Any] = {"source": "plugin"}
    base.update(
        vault_id=payload.vault_id,
        vault_name=vault_name,
        file_path=payload.path,
        file_name=payload.name,
        extension=payload.extension,
        frontmatter=payload.frontmatter,
        tags=payload.tags,
        headings=payload.headings,
        outgoing_links=payload.resolved_links,
        unresolved_links=payload.unresolved_links,
        embeds=payload.embeds,
        aliases=payload.aliases,
        plugin_content_hash=payload.content_hash,
        plugin_file_size=payload.size,
        mtime=payload.mtime.isoformat(),
        ctime=payload.ctime.isoformat(),
        connector_id=connector_id,
        url=_build_source_url(vault_name, payload.path),
    )
    # Caller-supplied overrides win over the standard keys.
    if extra:
        base.update(extra)
    return base
def _build_document_string(payload: NotePayload, vault_name: str) -> str:
"""Compose the indexable string the pipeline embeds and chunks.
Mirrors the legacy obsidian indexer's METADATA + CONTENT framing so
existing search relevance heuristics keep working unchanged.
"""
tags_line = ", ".join(payload.tags) if payload.tags else "None"
links_line = ", ".join(payload.resolved_links) if payload.resolved_links else "None"
return (
"<METADATA>\n"
f"Title: {payload.name}\n"
f"Vault: {vault_name}\n"
f"Path: {payload.path}\n"
f"Tags: {tags_line}\n"
f"Links to: {links_line}\n"
"</METADATA>\n\n"
"<CONTENT>\n"
f"{payload.content}\n"
"</CONTENT>\n"
)
async def _find_existing_document(
    session: AsyncSession,
    *,
    search_space_id: int,
    vault_id: str,
    path: str,
) -> Document | None:
    """Look up the vault-scoped document row for ``vault_id``/``path``.

    Returns ``None`` when the note was never indexed in this search space.
    """
    uid_hash = generate_unique_identifier_hash(
        DocumentType.OBSIDIAN_CONNECTOR,
        _vault_path_unique_id(vault_id, path),
        search_space_id,
    )
    rows = await session.execute(
        select(Document).where(Document.unique_identifier_hash == uid_hash)
    )
    return rows.scalars().first()
# ---------------------------------------------------------------------------
# Public API
# ---------------------------------------------------------------------------
async def upsert_note(
    session: AsyncSession,
    *,
    connector: SearchSourceConnector,
    payload: NotePayload,
    user_id: str,
) -> Document:
    """Index or refresh a single note pushed by the plugin.

    Args:
        session: Async DB session, passed through to the indexing pipeline.
        connector: The plugin's connector row (supplies ``vault_name``,
            ``search_space_id``, and the summary toggle).
        payload: The note as sent by the plugin.
        user_id: Owner recorded on the resulting document.

    Returns the resulting ``Document`` (whether newly created, updated, or
    a skip-because-unchanged hit).

    Raises:
        RuntimeError: If the pipeline rejects the note and no prior row exists.
    """
    vault_name: str = (connector.config or {}).get("vault_name") or "Vault"
    search_space_id = connector.search_space_id
    existing = await _find_existing_document(
        session,
        search_space_id=search_space_id,
        vault_id=payload.vault_id,
        path=payload.path,
    )
    plugin_hash = payload.content_hash
    if existing is not None:
        existing_meta = existing.document_metadata or {}
        was_tombstoned = bool(existing_meta.get("deleted_at"))
        # Skip only when the row is live, READY, and already carries the same
        # plugin-side hash. A tombstoned row falls through and is re-indexed
        # (the plugin pushing it again means it exists locally again).
        if (
            not was_tombstoned
            and existing_meta.get("plugin_content_hash") == plugin_hash
            and DocumentStatus.is_state(existing.status, DocumentStatus.READY)
        ):
            return existing
        # Snapshot the current row before it is rewritten; best-effort only —
        # a failed snapshot must not block the re-index.
        try:
            await create_version_snapshot(session, existing)
        except Exception:
            logger.debug(
                "version snapshot failed for obsidian doc %s",
                existing.id,
                exc_info=True,
            )
    document_string = _build_document_string(payload, vault_name)
    metadata = _build_metadata(
        payload,
        vault_name=vault_name,
        connector_id=connector.id,
    )
    connector_doc = ConnectorDocument(
        title=payload.name,
        source_markdown=document_string,
        unique_id=_vault_path_unique_id(payload.vault_id, payload.path),
        document_type=DocumentType.OBSIDIAN_CONNECTOR,
        search_space_id=search_space_id,
        connector_id=connector.id,
        created_by_id=str(user_id),
        should_summarize=connector.enable_summary,
        fallback_summary=f"Obsidian Note: {payload.name}\n\n{payload.content}",
        metadata=metadata,
    )
    pipeline = IndexingPipelineService(session)
    prepared = await pipeline.prepare_for_indexing([connector_doc])
    # An empty result means the pipeline filtered the note out; keep the old
    # row if we have one, otherwise surface the rejection to the caller.
    if not prepared:
        if existing is not None:
            return existing
        raise RuntimeError(f"Indexing pipeline rejected obsidian note {payload.path}")
    document = prepared[0]
    llm = await get_user_long_context_llm(session, str(user_id), search_space_id)
    return await pipeline.index(document, connector_doc, llm)
async def rename_note(
    session: AsyncSession,
    *,
    connector: SearchSourceConnector,
    old_path: str,
    new_path: str,
    vault_id: str,
) -> Document | None:
    """Rewrite path-derived columns without re-indexing content.

    Updates ``unique_identifier_hash``, title, and the path/url fields in
    ``document_metadata``; chunks and embeddings are left untouched.

    Returns the updated document, or ``None`` if no row matched the
    ``old_path`` (this happens when the plugin is renaming a file that was
    never synced — safe to ignore, the next ``sync`` will create it under
    the new path).
    """
    vault_name: str = (connector.config or {}).get("vault_name") or "Vault"
    search_space_id = connector.search_space_id
    existing = await _find_existing_document(
        session,
        search_space_id=search_space_id,
        vault_id=vault_id,
        path=old_path,
    )
    if existing is None:
        return None
    new_unique_id = _vault_path_unique_id(vault_id, new_path)
    new_uid_hash = generate_unique_identifier_hash(
        DocumentType.OBSIDIAN_CONNECTOR,
        new_unique_id,
        search_space_id,
    )
    # If another row already owns the target hash, renaming would violate the
    # unique constraint — back off and let the next /sync reconcile instead.
    collision = await session.execute(
        select(Document).where(
            and_(
                Document.unique_identifier_hash == new_uid_hash,
                Document.id != existing.id,
            )
        )
    )
    collision_row = collision.scalars().first()
    if collision_row is not None:
        logger.warning(
            "obsidian rename target already exists "
            "(vault=%s old=%s new=%s); skipping rename so the next /sync "
            "can resolve the conflict via content_hash",
            vault_id,
            old_path,
            new_path,
        )
        return existing
    # Derive the new title from the file stem (last path segment, extension
    # stripped when present).
    new_filename = new_path.rsplit("/", 1)[-1]
    new_stem = new_filename.rsplit(".", 1)[0] if "." in new_filename else new_filename
    existing.unique_identifier_hash = new_uid_hash
    existing.title = new_stem
    # Copy-on-write: JSONB column mutation is only detected via reassignment.
    meta = dict(existing.document_metadata or {})
    meta["file_path"] = new_path
    meta["file_name"] = new_stem
    meta["url"] = _build_source_url(vault_name, new_path)
    existing.document_metadata = meta
    existing.updated_at = datetime.now(UTC)
    await session.commit()
    return existing
async def delete_note(
    session: AsyncSession,
    *,
    connector: SearchSourceConnector,
    vault_id: str,
    path: str,
) -> bool:
    """Soft-delete a note by stamping a tombstone into ``document_metadata``.

    The row is *not* removed and chunks are *not* dropped, so existing
    citations in chat threads remain resolvable. The manifest endpoint
    filters tombstoned rows out, so the plugin's reconcile pass will not
    see this path and won't try to "resurrect" a note the user deleted in
    the SurfSense UI.

    Returns:
        True when a matching row exists (tombstoned now or previously);
        False when no row matched.
    """
    doc = await _find_existing_document(
        session,
        search_space_id=connector.search_space_id,
        vault_id=vault_id,
        path=path,
    )
    if doc is None:
        return False

    meta = dict(doc.document_metadata or {})
    if not meta.get("deleted_at"):
        # First deletion: record when and by whom, then persist. Repeat
        # deletions are idempotent no-ops.
        meta["deleted_at"] = datetime.now(UTC).isoformat()
        meta["deleted_by_source"] = "plugin"
        doc.document_metadata = meta
        doc.updated_at = datetime.now(UTC)
        await session.commit()
    return True
async def merge_obsidian_connectors(
    session: AsyncSession,
    *,
    source: SearchSourceConnector,
    target: SearchSourceConnector,
) -> None:
    """Fold ``source``'s documents into ``target`` and delete ``source``.

    Triggered when the fingerprint dedup detects two plugin connectors
    pointing at the same vault (e.g. a mobile install raced with iCloud
    hydration and got a partial fingerprint, then caught up). Path
    collisions resolve in favour of ``target`` (the surviving row);
    ``source``'s duplicate documents are hard-deleted along with their
    chunks via the ``cascade='all, delete-orphan'`` on ``Document.chunks``.

    Raises:
        RuntimeError: If ``target``'s config lacks a ``vault_id``.
    """
    # Self-merge is a no-op guard, not an error.
    if source.id == target.id:
        return
    target_vault_id = (target.config or {}).get("vault_id")
    target_search_space_id = target.search_space_id
    if not target_vault_id:
        raise RuntimeError("merge target is missing vault_id")
    # Collect every file path the target already owns so source duplicates
    # can be detected below.
    target_paths_result = await session.execute(
        select(Document).where(
            and_(
                Document.connector_id == target.id,
                Document.document_type == DocumentType.OBSIDIAN_CONNECTOR,
            )
        )
    )
    target_paths: set[str] = set()
    for doc in target_paths_result.scalars().all():
        meta = doc.document_metadata or {}
        path = meta.get("file_path")
        if path:
            target_paths.add(path)
    source_docs_result = await session.execute(
        select(Document).where(
            and_(
                Document.connector_id == source.id,
                Document.document_type == DocumentType.OBSIDIAN_CONNECTOR,
            )
        )
    )
    for doc in source_docs_result.scalars().all():
        meta = dict(doc.document_metadata or {})
        path = meta.get("file_path")
        # Pathless or duplicate rows lose to the target and are hard-deleted.
        if not path or path in target_paths:
            await session.delete(doc)
            continue
        # Re-home the document: recompute the vault-scoped identity hash
        # under the target's vault and search space, then repoint ownership.
        new_unique_id = _vault_path_unique_id(target_vault_id, path)
        new_uid_hash = generate_unique_identifier_hash(
            DocumentType.OBSIDIAN_CONNECTOR,
            new_unique_id,
            target_search_space_id,
        )
        meta["vault_id"] = target_vault_id
        meta["connector_id"] = target.id
        doc.document_metadata = meta
        doc.connector_id = target.id
        doc.search_space_id = target_search_space_id
        doc.unique_identifier_hash = new_uid_hash
        # Track the newly claimed path so later source duplicates collide.
        target_paths.add(path)
    # Flush the re-homed rows before deleting the source connector; the
    # caller is responsible for the final commit.
    await session.flush()
    await session.delete(source)
async def get_manifest(
    session: AsyncSession,
    *,
    connector: SearchSourceConnector,
    vault_id: str,
) -> ManifestResponse:
    """Return ``{path: {hash, mtime, size}}`` for every non-deleted note in
    this vault.

    The plugin compares this against its local vault on every ``onload`` to
    catch up edits made while offline. Rows missing ``plugin_content_hash``
    (e.g. tombstoned, or somehow indexed without going through this
    service) are excluded so the plugin doesn't get confused by partial
    data.
    """
    rows = await session.execute(
        select(Document).where(
            and_(
                Document.search_space_id == connector.search_space_id,
                Document.connector_id == connector.id,
                Document.document_type == DocumentType.OBSIDIAN_CONNECTOR,
            )
        )
    )
    entries: dict[str, ManifestEntry] = {}
    for doc in rows.scalars().all():
        meta = doc.document_metadata or {}
        # Tombstoned rows and rows belonging to a different vault are hidden.
        if meta.get("deleted_at") or meta.get("vault_id") != vault_id:
            continue
        path = meta.get("file_path")
        plugin_hash = meta.get("plugin_content_hash")
        mtime_raw = meta.get("mtime")
        if not (path and plugin_hash and mtime_raw):
            continue
        try:
            mtime = datetime.fromisoformat(mtime_raw)
        except ValueError:
            # Unparseable timestamp — treat the row as partial data and skip.
            continue
        raw_size = meta.get("plugin_file_size")
        entries[path] = ManifestEntry(
            hash=plugin_hash,
            mtime=mtime,
            size=int(raw_size) if isinstance(raw_size, int) else None,
        )
    return ManifestResponse(vault_id=vault_id, items=entries)

View file

@ -883,49 +883,6 @@ async def _index_bookstack_pages(
)
@celery_app.task(name="index_obsidian_vault", bind=True)
def index_obsidian_vault_task(
    self,
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str,
    end_date: str,
):
    """Celery task to index Obsidian vault notes.

    Args mirror ``_index_obsidian_vault``; dates are ``YYYY-MM-DD`` strings.
    """
    import asyncio

    # asyncio.run() replaces the manual new_event_loop/set_event_loop/close
    # dance: it additionally cancels leftover tasks and shuts down async
    # generators on exit, which the manual pattern silently skipped.
    asyncio.run(
        _index_obsidian_vault(
            connector_id, search_space_id, user_id, start_date, end_date
        )
    )
async def _index_obsidian_vault(
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str,
    end_date: str,
):
    """Index Obsidian vault with a fresh, task-scoped database session.

    Dates are ``YYYY-MM-DD`` strings forwarded to ``run_obsidian_indexing``.
    """
    # Imported lazily — presumably to avoid a circular import with the
    # routes module; confirm before hoisting to module level.
    from app.routes.search_source_connectors_routes import (
        run_obsidian_indexing,
    )
    # get_celery_session_maker() returns a sessionmaker; the second call
    # opens the async session context used for this run only.
    async with get_celery_session_maker()() as session:
        await run_obsidian_indexing(
            session, connector_id, search_space_id, user_id, start_date, end_date
        )
@celery_app.task(name="index_composio_connector", bind=True)
def index_composio_connector_task(
self,

View file

@ -46,7 +46,6 @@ from .linear_indexer import index_linear_issues
# Documentation and knowledge management
from .luma_indexer import index_luma_events
from .notion_indexer import index_notion_pages
from .obsidian_indexer import index_obsidian_vault
from .slack_indexer import index_slack_messages
from .webcrawler_indexer import index_crawled_urls
@ -69,7 +68,6 @@ __all__ = [ # noqa: RUF022
"index_linear_issues",
# Documentation and knowledge management
"index_notion_pages",
"index_obsidian_vault",
"index_crawled_urls",
# Communication platforms
"index_slack_messages",

View file

@ -1,676 +0,0 @@
"""
Obsidian connector indexer.
Indexes markdown notes from a local Obsidian vault.
This connector is only available in self-hosted mode.
Implements 2-phase document status updates for real-time UI feedback:
- Phase 1: Create all documents with 'pending' status (visible in UI immediately)
- Phase 2: Process each document: pending → processing → ready/failed
"""
import os
import re
import time
from collections.abc import Awaitable, Callable
from datetime import UTC, datetime
from pathlib import Path
import yaml
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
from app.db import Document, DocumentStatus, DocumentType, SearchSourceConnectorType
from app.services.llm_service import get_user_long_context_llm
from app.services.task_logging_service import TaskLoggingService
from app.utils.document_converters import (
create_document_chunks,
embed_text,
generate_content_hash,
generate_document_summary,
generate_unique_identifier_hash,
)
from .base import (
build_document_metadata_string,
check_document_by_unique_identifier,
check_duplicate_document_by_hash,
get_connector_by_id,
get_current_timestamp,
logger,
safe_set_chunks,
update_connector_last_indexed,
)
# Type hint for heartbeat callback
HeartbeatCallbackType = Callable[[int], Awaitable[None]]
# Heartbeat interval in seconds
HEARTBEAT_INTERVAL_SECONDS = 30
def parse_frontmatter(content: str) -> tuple[dict | None, str]:
"""
Parse YAML frontmatter from markdown content.
Args:
content: The full markdown content
Returns:
Tuple of (frontmatter dict or None, content without frontmatter)
"""
if not content.startswith("---"):
return None, content
# Find the closing ---
end_match = re.search(r"\n---\n", content[3:])
if not end_match:
return None, content
frontmatter_str = content[3 : end_match.start() + 3]
remaining_content = content[end_match.end() + 3 :]
try:
frontmatter = yaml.safe_load(frontmatter_str)
return frontmatter, remaining_content.strip()
except yaml.YAMLError:
return None, content
def extract_wiki_links(content: str) -> list[str]:
    """
    Extract [[wiki-style links]] from content.

    Args:
        content: Markdown content

    Returns:
        List of unique linked note names (order unspecified)
    """
    # [[target]] or [[target|alias]] — capture the target, drop the alias.
    found = re.findall(r"\[\[([^\]|]+)(?:\|[^\]]+)?\]\]", content)
    return list(set(found))
def extract_tags(content: str) -> list[str]:
    """
    Extract #tags from content.

    Args:
        content: Markdown content

    Returns:
        List of unique tags without the ``#`` prefix (order unspecified)
    """
    # "#tag" must follow whitespace or line start, so "## heading" is
    # skipped; tags start with a letter and may contain digits, _, /, -.
    hits = re.findall(r"(?<!\S)#([a-zA-Z][a-zA-Z0-9_/-]*)", content)
    return list(set(hits))
def scan_vault(
vault_path: str,
exclude_folders: list[str] | None = None,
) -> list[dict]:
"""
Scan an Obsidian vault for markdown files.
Args:
vault_path: Path to the Obsidian vault
exclude_folders: List of folder names to exclude
Returns:
List of file info dicts with path, name, modified time
"""
if exclude_folders is None:
exclude_folders = [".trash", ".obsidian", "templates"]
vault = Path(vault_path)
if not vault.exists():
raise ValueError(f"Vault path does not exist: {vault_path}")
files = []
for md_file in vault.rglob("*.md"):
# Check if file is in an excluded folder
relative_path = md_file.relative_to(vault)
parts = relative_path.parts
if any(excluded in parts for excluded in exclude_folders):
continue
try:
stat = md_file.stat()
files.append(
{
"path": str(md_file),
"relative_path": str(relative_path),
"name": md_file.stem,
"modified_at": datetime.fromtimestamp(stat.st_mtime, tz=UTC),
"created_at": datetime.fromtimestamp(stat.st_ctime, tz=UTC),
"size": stat.st_size,
}
)
except OSError as e:
logger.warning(f"Could not stat file {md_file}: {e}")
return files
async def index_obsidian_vault(
session: AsyncSession,
connector_id: int,
search_space_id: int,
user_id: str,
start_date: str | None = None,
end_date: str | None = None,
update_last_indexed: bool = True,
on_heartbeat_callback: HeartbeatCallbackType | None = None,
) -> tuple[int, str | None]:
"""
Index notes from a local Obsidian vault.
This indexer is only available in self-hosted mode as it requires
direct file system access to the user's Obsidian vault.
Args:
session: Database session
connector_id: ID of the Obsidian connector
search_space_id: ID of the search space to store documents in
user_id: ID of the user
start_date: Start date for filtering (YYYY-MM-DD format) - optional
end_date: End date for filtering (YYYY-MM-DD format) - optional
update_last_indexed: Whether to update the last_indexed_at timestamp
on_heartbeat_callback: Optional callback to update notification during long-running indexing.
Returns:
Tuple containing (number of documents indexed, error message or None)
"""
task_logger = TaskLoggingService(session, search_space_id)
# Check if self-hosted mode
if not config.is_self_hosted():
return 0, "Obsidian connector is only available in self-hosted mode"
# Log task start
log_entry = await task_logger.log_task_start(
task_name="obsidian_vault_indexing",
source="connector_indexing_task",
message=f"Starting Obsidian vault indexing for connector {connector_id}",
metadata={
"connector_id": connector_id,
"user_id": str(user_id),
"start_date": start_date,
"end_date": end_date,
},
)
try:
# Get the connector
await task_logger.log_task_progress(
log_entry,
f"Retrieving Obsidian connector {connector_id} from database",
{"stage": "connector_retrieval"},
)
connector = await get_connector_by_id(
session, connector_id, SearchSourceConnectorType.OBSIDIAN_CONNECTOR
)
if not connector:
await task_logger.log_task_failure(
log_entry,
f"Connector with ID {connector_id} not found or is not an Obsidian connector",
"Connector not found",
{"error_type": "ConnectorNotFound"},
)
return (
0,
f"Connector with ID {connector_id} not found or is not an Obsidian connector",
)
# Get vault path from connector config
vault_path = connector.config.get("vault_path")
if not vault_path:
await task_logger.log_task_failure(
log_entry,
"Vault path not configured for this connector",
"Missing vault path",
{"error_type": "MissingVaultPath"},
)
return 0, "Vault path not configured for this connector"
# Validate vault path exists
if not os.path.exists(vault_path):
await task_logger.log_task_failure(
log_entry,
f"Vault path does not exist: {vault_path}",
"Vault path not found",
{"error_type": "VaultNotFound", "vault_path": vault_path},
)
return 0, f"Vault path does not exist: {vault_path}"
# Get configuration options
exclude_folders = connector.config.get(
"exclude_folders", [".trash", ".obsidian", "templates"]
)
vault_name = connector.config.get("vault_name") or os.path.basename(vault_path)
await task_logger.log_task_progress(
log_entry,
f"Scanning Obsidian vault: {vault_name}",
{"stage": "vault_scan", "vault_path": vault_path},
)
# Scan vault for markdown files
try:
files = scan_vault(vault_path, exclude_folders)
except Exception as e:
await task_logger.log_task_failure(
log_entry,
f"Failed to scan vault: {e}",
"Vault scan error",
{"error_type": "VaultScanError"},
)
return 0, f"Failed to scan vault: {e}"
logger.info(f"Found {len(files)} markdown files in vault")
await task_logger.log_task_progress(
log_entry,
f"Found {len(files)} markdown files to process",
{"stage": "files_discovered", "file_count": len(files)},
)
# Filter by date if provided (handle "undefined" string from frontend)
# Also handle inverted dates (start > end) by skipping filtering
start_dt = None
end_dt = None
if start_date and start_date != "undefined":
start_dt = datetime.strptime(start_date, "%Y-%m-%d").replace(tzinfo=UTC)
if end_date and end_date != "undefined":
# Make end_date inclusive (end of day)
end_dt = datetime.strptime(end_date, "%Y-%m-%d").replace(tzinfo=UTC)
end_dt = end_dt.replace(hour=23, minute=59, second=59)
# Only apply date filtering if dates are valid and in correct order
if start_dt and end_dt and start_dt > end_dt:
logger.warning(
f"start_date ({start_date}) is after end_date ({end_date}), skipping date filter"
)
else:
if start_dt:
files = [f for f in files if f["modified_at"] >= start_dt]
logger.info(
f"After start_date filter ({start_date}): {len(files)} files"
)
if end_dt:
files = [f for f in files if f["modified_at"] <= end_dt]
logger.info(f"After end_date filter ({end_date}): {len(files)} files")
logger.info(f"Processing {len(files)} files after date filtering")
indexed_count = 0
skipped_count = 0
failed_count = 0
duplicate_content_count = 0
# Heartbeat tracking - update notification periodically to prevent appearing stuck
last_heartbeat_time = time.time()
# =======================================================================
# PHASE 1: Analyze all files, create pending documents
# This makes ALL documents visible in the UI immediately with pending status
# =======================================================================
files_to_process = [] # List of dicts with document and file data
new_documents_created = False
for file_info in files:
try:
file_path = file_info["path"]
relative_path = file_info["relative_path"]
# Read file content
try:
with open(file_path, encoding="utf-8") as f:
content = f.read()
except UnicodeDecodeError:
logger.warning(f"Could not decode file {file_path}, skipping")
skipped_count += 1
continue
if not content.strip():
logger.debug(f"Empty file {file_path}, skipping")
skipped_count += 1
continue
# Parse frontmatter and extract metadata
frontmatter, body_content = parse_frontmatter(content)
wiki_links = extract_wiki_links(content)
tags = extract_tags(content)
# Get title from frontmatter or filename
title = file_info["name"]
if frontmatter:
title = frontmatter.get("title", title)
# Also extract tags from frontmatter
fm_tags = frontmatter.get("tags", [])
if isinstance(fm_tags, list):
tags = list({*tags, *fm_tags})
elif isinstance(fm_tags, str):
tags = list({*tags, fm_tags})
# Generate unique identifier using vault name and relative path
unique_identifier = f"{vault_name}:{relative_path}"
unique_identifier_hash = generate_unique_identifier_hash(
DocumentType.OBSIDIAN_CONNECTOR,
unique_identifier,
search_space_id,
)
# Generate content hash
content_hash = generate_content_hash(content, search_space_id)
# Check for existing document
existing_document = await check_document_by_unique_identifier(
session, unique_identifier_hash
)
if existing_document:
# Document exists - check if content has changed
if existing_document.content_hash == content_hash:
# Ensure status is ready (might have been stuck in processing/pending)
if not DocumentStatus.is_state(
existing_document.status, DocumentStatus.READY
):
existing_document.status = DocumentStatus.ready()
logger.debug(f"Note {title} unchanged, skipping")
skipped_count += 1
continue
# Queue existing document for update (will be set to processing in Phase 2)
files_to_process.append(
{
"document": existing_document,
"is_new": False,
"file_info": file_info,
"content": content,
"body_content": body_content,
"frontmatter": frontmatter,
"wiki_links": wiki_links,
"tags": tags,
"title": title,
"relative_path": relative_path,
"content_hash": content_hash,
"unique_identifier_hash": unique_identifier_hash,
}
)
continue
# Document doesn't exist by unique_identifier_hash
# Check if a document with the same content_hash exists (from another connector)
with session.no_autoflush:
duplicate_by_content = await check_duplicate_document_by_hash(
session, content_hash
)
if duplicate_by_content:
logger.info(
f"Obsidian note {title} already indexed by another connector "
f"(existing document ID: {duplicate_by_content.id}, "
f"type: {duplicate_by_content.document_type}). Skipping."
)
duplicate_content_count += 1
skipped_count += 1
continue
# Create new document with PENDING status (visible in UI immediately)
document = Document(
search_space_id=search_space_id,
title=title,
document_type=DocumentType.OBSIDIAN_CONNECTOR,
document_metadata={
"vault_name": vault_name,
"file_path": relative_path,
"connector_id": connector_id,
},
content="Pending...", # Placeholder until processed
content_hash=unique_identifier_hash, # Temporary unique value - updated when ready
unique_identifier_hash=unique_identifier_hash,
embedding=None,
chunks=[], # Empty at creation - safe for async
status=DocumentStatus.pending(), # Pending until processing starts
updated_at=get_current_timestamp(),
created_by_id=user_id,
connector_id=connector_id,
)
session.add(document)
new_documents_created = True
files_to_process.append(
{
"document": document,
"is_new": True,
"file_info": file_info,
"content": content,
"body_content": body_content,
"frontmatter": frontmatter,
"wiki_links": wiki_links,
"tags": tags,
"title": title,
"relative_path": relative_path,
"content_hash": content_hash,
"unique_identifier_hash": unique_identifier_hash,
}
)
except Exception as e:
logger.exception(
f"Error in Phase 1 for file {file_info.get('path', 'unknown')}: {e}"
)
failed_count += 1
continue
# Commit all pending documents - they all appear in UI now
if new_documents_created:
logger.info(
f"Phase 1: Committing {len([f for f in files_to_process if f['is_new']])} pending documents"
)
await session.commit()
# =======================================================================
# PHASE 2: Process each document one by one
# Each document transitions: pending → processing → ready/failed
# =======================================================================
logger.info(f"Phase 2: Processing {len(files_to_process)} documents")
# Get LLM for summarization
long_context_llm = await get_user_long_context_llm(
session, user_id, search_space_id
)
for item in files_to_process:
# Send heartbeat periodically
if on_heartbeat_callback:
current_time = time.time()
if current_time - last_heartbeat_time >= HEARTBEAT_INTERVAL_SECONDS:
await on_heartbeat_callback(indexed_count)
last_heartbeat_time = current_time
document = item["document"]
try:
# Set to PROCESSING and commit - shows "processing" in UI for THIS document only
document.status = DocumentStatus.processing()
await session.commit()
# Extract data from item
title = item["title"]
relative_path = item["relative_path"]
content = item["content"]
body_content = item["body_content"]
frontmatter = item["frontmatter"]
wiki_links = item["wiki_links"]
tags = item["tags"]
content_hash = item["content_hash"]
file_info = item["file_info"]
# Build metadata
document_metadata = {
"vault_name": vault_name,
"file_path": relative_path,
"tags": tags,
"outgoing_links": wiki_links,
"frontmatter": frontmatter,
"modified_at": file_info["modified_at"].isoformat(),
"created_at": file_info["created_at"].isoformat(),
"word_count": len(body_content.split()),
}
# Build document content with metadata
metadata_sections = [
(
"METADATA",
[
f"Title: {title}",
f"Vault: {vault_name}",
f"Path: {relative_path}",
f"Tags: {', '.join(tags) if tags else 'None'}",
f"Links to: {', '.join(wiki_links) if wiki_links else 'None'}",
],
),
("CONTENT", [body_content]),
]
document_string = build_document_metadata_string(metadata_sections)
# Generate summary
summary_content = ""
if long_context_llm and connector.enable_summary:
summary_content, _ = await generate_document_summary(
document_string,
long_context_llm,
document_metadata,
)
# Generate embedding
embedding = embed_text(document_string)
# Add URL and summary to metadata
document_metadata["url"] = f"obsidian://{vault_name}/{relative_path}"
document_metadata["summary"] = summary_content
document_metadata["connector_id"] = connector_id
# Create chunks
chunks = await create_document_chunks(document_string)
# Update document to READY with actual content
document.title = title
document.content = document_string
document.content_hash = content_hash
document.embedding = embedding
document.document_metadata = document_metadata
await safe_set_chunks(session, document, chunks)
document.updated_at = get_current_timestamp()
document.status = DocumentStatus.ready()
indexed_count += 1
# Batch commit every 10 documents (for ready status updates)
if indexed_count % 10 == 0:
logger.info(
f"Committing batch: {indexed_count} Obsidian notes processed so far"
)
await session.commit()
except Exception as e:
logger.exception(
f"Error processing file {item.get('file_info', {}).get('path', 'unknown')}: {e}"
)
# Mark document as failed with reason (visible in UI)
try:
document.status = DocumentStatus.failed(str(e))
document.updated_at = get_current_timestamp()
except Exception as status_error:
logger.error(
f"Failed to update document status to failed: {status_error}"
)
failed_count += 1
continue
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
await update_connector_last_indexed(session, connector, update_last_indexed)
# Final commit for any remaining documents not yet committed in batches
logger.info(f"Final commit: Total {indexed_count} Obsidian notes processed")
try:
await session.commit()
logger.info(
"Successfully committed all Obsidian document changes to database"
)
except Exception as e:
# Handle any remaining integrity errors gracefully (race conditions, etc.)
if (
"duplicate key value violates unique constraint" in str(e).lower()
or "uniqueviolationerror" in str(e).lower()
):
logger.warning(
f"Duplicate content_hash detected during final commit. "
f"This may occur if the same note was indexed by multiple connectors. "
f"Rolling back and continuing. Error: {e!s}"
)
await session.rollback()
# Don't fail the entire task - some documents may have been successfully indexed
else:
raise
# Build warning message if there were issues
warning_parts = []
if duplicate_content_count > 0:
warning_parts.append(f"{duplicate_content_count} duplicate")
if failed_count > 0:
warning_parts.append(f"{failed_count} failed")
warning_message = ", ".join(warning_parts) if warning_parts else None
total_processed = indexed_count
await task_logger.log_task_success(
log_entry,
f"Successfully completed Obsidian vault indexing for connector {connector_id}",
{
"notes_processed": total_processed,
"documents_indexed": indexed_count,
"documents_skipped": skipped_count,
"documents_failed": failed_count,
"duplicate_content_count": duplicate_content_count,
},
)
logger.info(
f"Obsidian vault indexing completed: {indexed_count} ready, "
f"{skipped_count} skipped, {failed_count} failed "
f"({duplicate_content_count} duplicate content)"
)
return total_processed, warning_message
except SQLAlchemyError as e:
logger.exception(f"Database error during Obsidian indexing: {e}")
await session.rollback()
await task_logger.log_task_failure(
log_entry,
f"Database error during Obsidian indexing: {e}",
"Database error",
{"error_type": "SQLAlchemyError"},
)
return 0, f"Database error: {e}"
except Exception as e:
logger.exception(f"Error during Obsidian indexing: {e}")
await task_logger.log_task_failure(
log_entry,
f"Error during Obsidian indexing: {e}",
"Unexpected error",
{"error_type": type(e).__name__},
)
return 0, str(e)

View file

@ -34,7 +34,6 @@ CONNECTOR_TASK_MAP = {
SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR: "index_elasticsearch_documents",
SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: "index_crawled_urls",
SearchSourceConnectorType.BOOKSTACK_CONNECTOR: "index_bookstack_pages",
SearchSourceConnectorType.OBSIDIAN_CONNECTOR: "index_obsidian_vault",
}
@ -100,7 +99,6 @@ def create_periodic_schedule(
index_linear_issues_task,
index_luma_events_task,
index_notion_pages_task,
index_obsidian_vault_task,
index_slack_messages_task,
)
@ -121,7 +119,6 @@ def create_periodic_schedule(
SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR: index_elasticsearch_documents_task,
SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: index_crawled_urls_task,
SearchSourceConnectorType.BOOKSTACK_CONNECTOR: index_bookstack_pages_task,
SearchSourceConnectorType.OBSIDIAN_CONNECTOR: index_obsidian_vault_task,
}
# Trigger the first run immediately

View file

@ -0,0 +1,469 @@
"""Integration tests for the Obsidian plugin HTTP wire contract.
Three concerns:
1. The /connect upsert really collapses concurrent first-time connects to
exactly one row. This locks the partial unique index from migration 129
to its purpose.
2. The fingerprint dedup path: a second device connecting with a fresh
``vault_id`` but the same ``vault_fingerprint`` adopts the existing
connector instead of creating a duplicate.
3. The end-to-end response shapes returned by /connect, /sync, /rename,
/notes, /manifest, and /stats match the schemas the plugin's TypeScript
decoders expect. Each renamed field is a contract change, and a smoke
pass like this is the cheapest way to catch a future drift before it
ships.
"""
from __future__ import annotations
import asyncio
import uuid
from datetime import UTC, datetime
from unittest.mock import AsyncMock, patch
import pytest
import pytest_asyncio
from sqlalchemy import func, select, text
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from app.db import (
SearchSourceConnector,
SearchSourceConnectorType,
SearchSpace,
User,
)
from app.routes.obsidian_plugin_routes import (
obsidian_connect,
obsidian_delete_notes,
obsidian_manifest,
obsidian_rename,
obsidian_stats,
obsidian_sync,
)
from app.schemas.obsidian_plugin import (
ConnectRequest,
DeleteAck,
DeleteBatchRequest,
ManifestResponse,
NotePayload,
RenameAck,
RenameBatchRequest,
RenameItem,
StatsResponse,
SyncAck,
SyncBatchRequest,
)
pytestmark = pytest.mark.integration
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
def _make_note_payload(vault_id: str, path: str, content_hash: str) -> NotePayload:
    """Build the smallest NotePayload the schema will accept.

    The indexer is mocked in these tests, so the field values never have
    to survive the real ingestion pipeline — they only need to validate.
    """
    timestamp = datetime.now(UTC)
    # Derive the note name from the final path segment, minus extension.
    filename = path.rsplit("/", 1)[-1]
    stem = filename.rsplit(".", 1)[0]
    return NotePayload(
        vault_id=vault_id,
        path=path,
        name=stem,
        extension="md",
        content="# Test\n\nbody",
        content_hash=content_hash,
        mtime=timestamp,
        ctime=timestamp,
    )
@pytest_asyncio.fixture
async def race_user_and_space(async_engine):
    """User + SearchSpace committed via the live engine so the two
    concurrent /connect sessions in the race test can both see them.

    We can't use the savepoint-trapped ``db_session`` fixture here
    because the concurrent sessions need to see committed rows.

    Yields:
        (user_id, space_id): IDs of the committed User and SearchSpace.
    """
    user_id = uuid.uuid4()
    # Setup: commit a real user + search space so independent sessions
    # opened against async_engine can observe them.
    async with AsyncSession(async_engine) as setup:
        user = User(
            id=user_id,
            email=f"obsidian-race-{uuid.uuid4()}@surfsense.test",
            hashed_password="x",
            is_active=True,
            is_superuser=False,
            is_verified=True,
        )
        space = SearchSpace(name="Race Space", user_id=user_id)
        setup.add_all([user, space])
        await setup.commit()
        # Refresh to pick up the DB-generated primary key for the space.
        await setup.refresh(space)
        space_id = space.id
    yield user_id, space_id
    # Teardown: remove everything the tests committed via raw SQL.
    async with AsyncSession(async_engine) as cleanup:
        # Order matters: connectors -> documents -> space -> user. The
        # connectors test creates documents, so we wipe them too. The
        # CASCADE on user_id catches anything we missed.
        await cleanup.execute(
            text("DELETE FROM search_source_connectors WHERE user_id = :uid"),
            {"uid": user_id},
        )
        await cleanup.execute(
            text("DELETE FROM searchspaces WHERE id = :id"),
            {"id": space_id},
        )
        await cleanup.execute(
            text('DELETE FROM "user" WHERE id = :uid'),
            {"uid": user_id},
        )
        await cleanup.commit()
# ---------------------------------------------------------------------------
# /connect race + index enforcement
# ---------------------------------------------------------------------------
class TestConnectRace:
    """DB-level tests for /connect deduplication.

    All tests here open their own sessions against the live engine so
    committed rows and the partial unique indexes are genuinely
    exercised (the savepoint-trapped fixtures would hide both).
    """

    async def test_concurrent_first_connects_collapse_to_one_row(
        self, async_engine, race_user_and_space
    ):
        """Two simultaneous /connect calls for the same vault should
        produce exactly one row, not two. Same vault_id + same
        fingerprint funnels through both partial unique indexes; the
        loser falls back to the survivor row via the IntegrityError
        branch in obsidian_connect."""
        user_id, space_id = race_user_and_space
        vault_id = str(uuid.uuid4())
        fingerprint = "fp-" + uuid.uuid4().hex

        async def _call(name_suffix: str) -> None:
            # One session per call: simulates two independent HTTP
            # requests racing on the first-ever connect for this vault.
            async with AsyncSession(async_engine) as s:
                fresh_user = await s.get(User, user_id)
                payload = ConnectRequest(
                    vault_id=vault_id,
                    vault_name=f"My Vault {name_suffix}",
                    search_space_id=space_id,
                    vault_fingerprint=fingerprint,
                )
                await obsidian_connect(payload, user=fresh_user, session=s)

        # return_exceptions=True lets us report whichever call raised
        # instead of aborting the gather on the first failure.
        results = await asyncio.gather(_call("a"), _call("b"), return_exceptions=True)
        for r in results:
            assert not isinstance(r, Exception), f"Connect raised: {r!r}"

        # Verify in a fresh session so we read committed state only.
        async with AsyncSession(async_engine) as verify:
            count = (
                await verify.execute(
                    select(func.count(SearchSourceConnector.id)).where(
                        SearchSourceConnector.user_id == user_id,
                    )
                )
            ).scalar_one()
            assert count == 1

    async def test_partial_unique_index_blocks_raw_duplicate(
        self, async_engine, race_user_and_space
    ):
        """Raw INSERTs that bypass the route must still be blocked by
        the partial unique indexes from migration 129."""
        user_id, space_id = race_user_and_space
        vault_id = str(uuid.uuid4())
        # First insert succeeds.
        async with AsyncSession(async_engine) as s:
            s.add(
                SearchSourceConnector(
                    name="Obsidian - First",
                    connector_type=SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
                    is_indexable=False,
                    config={
                        "vault_id": vault_id,
                        "vault_name": "First",
                        "source": "plugin",
                        "vault_fingerprint": "fp-1",
                    },
                    user_id=user_id,
                    search_space_id=space_id,
                )
            )
            await s.commit()
        # Second insert reuses vault_id (fingerprint differs) and must
        # trip the vault_id partial unique index at commit time.
        with pytest.raises(IntegrityError):
            async with AsyncSession(async_engine) as s:
                s.add(
                    SearchSourceConnector(
                        name="Obsidian - Second",
                        connector_type=SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
                        is_indexable=False,
                        config={
                            "vault_id": vault_id,
                            "vault_name": "Second",
                            "source": "plugin",
                            "vault_fingerprint": "fp-2",
                        },
                        user_id=user_id,
                        search_space_id=space_id,
                    )
                )
                await s.commit()

    async def test_fingerprint_blocks_raw_cross_device_duplicate(
        self, async_engine, race_user_and_space
    ):
        """Two connectors for the same user with different vault_ids but
        the same fingerprint cannot coexist."""
        user_id, space_id = race_user_and_space
        fingerprint = "fp-" + uuid.uuid4().hex
        # First device's connector commits fine.
        async with AsyncSession(async_engine) as s:
            s.add(
                SearchSourceConnector(
                    name="Obsidian - Desktop",
                    connector_type=SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
                    is_indexable=False,
                    config={
                        "vault_id": str(uuid.uuid4()),
                        "vault_name": "Vault",
                        "source": "plugin",
                        "vault_fingerprint": fingerprint,
                    },
                    user_id=user_id,
                    search_space_id=space_id,
                )
            )
            await s.commit()
        # Fresh vault_id but identical fingerprint -> unique index fires.
        with pytest.raises(IntegrityError):
            async with AsyncSession(async_engine) as s:
                s.add(
                    SearchSourceConnector(
                        name="Obsidian - Mobile",
                        connector_type=SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
                        is_indexable=False,
                        config={
                            "vault_id": str(uuid.uuid4()),
                            "vault_name": "Vault",
                            "source": "plugin",
                            "vault_fingerprint": fingerprint,
                        },
                        user_id=user_id,
                        search_space_id=space_id,
                    )
                )
                await s.commit()

    async def test_second_device_adopts_existing_connector_via_fingerprint(
        self, async_engine, race_user_and_space
    ):
        """Device A connects with vault_id=A. Device B then connects with
        a fresh vault_id=B but the same fingerprint. The route must
        return A's identity (not create a B row), proving cross-device
        dedup happens transparently to the plugin."""
        user_id, space_id = race_user_and_space
        vault_id_a = str(uuid.uuid4())
        vault_id_b = str(uuid.uuid4())
        fingerprint = "fp-" + uuid.uuid4().hex

        # Device A: first connect creates the connector row.
        async with AsyncSession(async_engine) as s:
            fresh_user = await s.get(User, user_id)
            resp_a = await obsidian_connect(
                ConnectRequest(
                    vault_id=vault_id_a,
                    vault_name="Shared Vault",
                    search_space_id=space_id,
                    vault_fingerprint=fingerprint,
                ),
                user=fresh_user,
                session=s,
            )

        # Device B: new vault_id, same fingerprint.
        async with AsyncSession(async_engine) as s:
            fresh_user = await s.get(User, user_id)
            resp_b = await obsidian_connect(
                ConnectRequest(
                    vault_id=vault_id_b,
                    vault_name="Shared Vault",
                    search_space_id=space_id,
                    vault_fingerprint=fingerprint,
                ),
                user=fresh_user,
                session=s,
            )

        # B inherits A's identity rather than minting its own row.
        assert resp_b.vault_id == vault_id_a
        assert resp_b.connector_id == resp_a.connector_id

        # Exactly one connector row for the user after both connects.
        async with AsyncSession(async_engine) as verify:
            count = (
                await verify.execute(
                    select(func.count(SearchSourceConnector.id)).where(
                        SearchSourceConnector.user_id == user_id,
                    )
                )
            ).scalar_one()
            assert count == 1
# ---------------------------------------------------------------------------
# Combined wire-shape smoke test
# ---------------------------------------------------------------------------
class TestWireContractSmoke:
    """Walks /connect -> /sync -> /rename -> /notes -> /manifest -> /stats
    sequentially and asserts each response matches the new schema. With
    `response_model=` on every route, FastAPI is already validating the
    shape on real traffic; this test mainly guards against accidental
    field renames the way the TypeScript decoder would catch them."""

    async def test_full_flow_returns_typed_payloads(
        self, db_session: AsyncSession, db_user: User, db_search_space: SearchSpace
    ):
        """Single sequential pass over every plugin route's response shape."""
        vault_id = str(uuid.uuid4())

        # 1. /connect
        connect_resp = await obsidian_connect(
            ConnectRequest(
                vault_id=vault_id,
                vault_name="Smoke Vault",
                search_space_id=db_search_space.id,
                vault_fingerprint="fp-" + uuid.uuid4().hex,
            ),
            user=db_user,
            session=db_session,
        )
        assert connect_resp.connector_id > 0
        assert connect_resp.vault_id == vault_id
        assert "sync" in connect_resp.capabilities

        # 2. /sync — stub the indexer so the call doesn't drag the LLM /
        # embedding pipeline in. We're testing the wire contract, not the
        # indexer itself.
        fake_doc = type("FakeDoc", (), {"id": 12345})()
        with patch(
            "app.routes.obsidian_plugin_routes.upsert_note",
            new=AsyncMock(return_value=fake_doc),
        ):
            sync_resp = await obsidian_sync(
                SyncBatchRequest(
                    vault_id=vault_id,
                    notes=[
                        _make_note_payload(vault_id, "ok.md", "hash-ok"),
                        _make_note_payload(vault_id, "fail.md", "hash-fail"),
                    ],
                ),
                user=db_user,
                session=db_session,
            )
        assert isinstance(sync_resp, SyncAck)
        assert sync_resp.vault_id == vault_id
        assert sync_resp.indexed == 2
        assert sync_resp.failed == 0
        assert len(sync_resp.items) == 2
        assert all(it.status == "ok" for it in sync_resp.items)
        # The TypeScript decoder filters on items[].status === "error" and
        # extracts .path, so confirm both fields are present and named.
        assert {it.path for it in sync_resp.items} == {"ok.md", "fail.md"}

        # 2b. Re-run /sync but force the indexer to raise on one note so
        # the per-item failure decoder gets exercised end-to-end.
        async def _selective_upsert(session, *, connector, payload, user_id):
            # Fail only the designated note; the other still succeeds.
            if payload.path == "fail.md":
                raise RuntimeError("simulated indexing failure")
            return fake_doc

        with patch(
            "app.routes.obsidian_plugin_routes.upsert_note",
            new=AsyncMock(side_effect=_selective_upsert),
        ):
            sync_resp = await obsidian_sync(
                SyncBatchRequest(
                    vault_id=vault_id,
                    notes=[
                        _make_note_payload(vault_id, "ok.md", "h1"),
                        _make_note_payload(vault_id, "fail.md", "h2"),
                    ],
                ),
                user=db_user,
                session=db_session,
            )
        assert sync_resp.indexed == 1
        assert sync_resp.failed == 1
        statuses = {it.path: it.status for it in sync_resp.items}
        assert statuses == {"ok.md": "ok", "fail.md": "error"}

        # 3. /rename — patch rename_note so we don't need a real Document.
        async def _rename(*args, **kwargs) -> object:
            # None signals "document not found" for the missing path.
            if kwargs.get("old_path") == "missing.md":
                return None
            return fake_doc

        with patch(
            "app.routes.obsidian_plugin_routes.rename_note",
            new=AsyncMock(side_effect=_rename),
        ):
            rename_resp = await obsidian_rename(
                RenameBatchRequest(
                    vault_id=vault_id,
                    renames=[
                        RenameItem(old_path="a.md", new_path="b.md"),
                        RenameItem(old_path="missing.md", new_path="x.md"),
                    ],
                ),
                user=db_user,
                session=db_session,
            )
        assert isinstance(rename_resp, RenameAck)
        assert rename_resp.renamed == 1
        assert rename_resp.missing == 1
        assert {it.status for it in rename_resp.items} == {"ok", "missing"}
        # snake_case fields are deliberate — the plugin decoder maps them
        # to camelCase explicitly.
        assert all(it.old_path and it.new_path for it in rename_resp.items)

        # 4. /notes DELETE
        async def _delete(*args, **kwargs) -> bool:
            # False signals "nothing deleted" for the ghost path.
            return kwargs.get("path") != "ghost.md"

        with patch(
            "app.routes.obsidian_plugin_routes.delete_note",
            new=AsyncMock(side_effect=_delete),
        ):
            delete_resp = await obsidian_delete_notes(
                DeleteBatchRequest(vault_id=vault_id, paths=["b.md", "ghost.md"]),
                user=db_user,
                session=db_session,
            )
        assert isinstance(delete_resp, DeleteAck)
        assert delete_resp.deleted == 1
        assert delete_resp.missing == 1
        assert {it.path: it.status for it in delete_resp.items} == {
            "b.md": "ok",
            "ghost.md": "missing",
        }

        # 5. /manifest — empty (no real Documents were created because
        # upsert_note was mocked) but the response shape is what we care
        # about.
        manifest_resp = await obsidian_manifest(
            vault_id=vault_id, user=db_user, session=db_session
        )
        assert isinstance(manifest_resp, ManifestResponse)
        assert manifest_resp.vault_id == vault_id
        assert manifest_resp.items == {}

        # 6. /stats — same; row count is 0 because upsert_note was mocked.
        stats_resp = await obsidian_stats(
            vault_id=vault_id, user=db_user, session=db_session
        )
        assert isinstance(stats_resp, StatsResponse)
        assert stats_resp.vault_id == vault_id
        assert stats_resp.files_synced == 0
        assert stats_resp.last_sync_at is None

View file

@ -202,9 +202,7 @@ class TestHTTPExceptionHandler:
# Intentional 503s (e.g. feature flag off) must surface the developer
# message so the frontend can render actionable copy.
body = _assert_envelope(client.get("/http-503"), 503)
assert (
body["error"]["message"] == "Page purchases are temporarily unavailable."
)
assert body["error"]["message"] == "Page purchases are temporarily unavailable."
assert body["error"]["message"] != GENERIC_5XX_MESSAGE
def test_502_preserves_detail(self, client):

View file

@ -0,0 +1,10 @@
# top-most EditorConfig file
root = true
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
indent_style = tab
indent_size = 4
tab_width = 4

22
surfsense_obsidian/.gitignore vendored Normal file
View file

@ -0,0 +1,22 @@
# vscode
.vscode
# Intellij
*.iml
.idea
# npm
node_modules
# Don't include the compiled main.js file in the repo.
# They should be uploaded to GitHub releases instead.
main.js
# Exclude sourcemaps
*.map
# obsidian
data.json
# Exclude macOS Finder (System Explorer) View States
.DS_Store

View file

@ -0,0 +1 @@
tag-version-prefix=""

View file

@ -0,0 +1,251 @@
# Obsidian community plugin
## Project overview
- Target: Obsidian Community Plugin (TypeScript → bundled JavaScript).
- Entry point: `main.ts` compiled to `main.js` and loaded by Obsidian.
- Required release artifacts: `main.js`, `manifest.json`, and optional `styles.css`.
## Environment & tooling
- Node.js: use current LTS (Node 18+ recommended).
- **Package manager: npm** (required for this sample - `package.json` defines npm scripts and dependencies).
- **Bundler: esbuild** (required for this sample - `esbuild.config.mjs` and build scripts depend on it). Alternative bundlers like Rollup or webpack are acceptable for other projects if they bundle all external dependencies into `main.js`.
- Types: `obsidian` type definitions.
**Note**: This sample project has specific technical dependencies on npm and esbuild. If you're creating a plugin from scratch, you can choose different tools, but you'll need to replace the build configuration accordingly.
### Install
```bash
npm install
```
### Dev (watch)
```bash
npm run dev
```
### Production build
```bash
npm run build
```
## Linting
- To use eslint, install it from the terminal: `npm install -g eslint`
- To analyze this project with eslint, use this command: `eslint main.ts`
- eslint will then create a report with suggestions for code improvement by file and line number.
- If your source code is in a folder, such as `src`, you can use eslint with this command to analyze all files in that folder: `eslint ./src/`
## File & folder conventions
- **Organize code into multiple files**: Split functionality across separate modules rather than putting everything in `main.ts`.
- Source lives in `src/`. Keep `main.ts` small and focused on plugin lifecycle (loading, unloading, registering commands).
- **Example file structure**:
```
src/
main.ts # Plugin entry point, lifecycle management
settings.ts # Settings interface and defaults
commands/ # Command implementations
command1.ts
command2.ts
ui/ # UI components, modals, views
modal.ts
view.ts
utils/ # Utility functions, helpers
helpers.ts
constants.ts
types.ts # TypeScript interfaces and types
```
- **Do not commit build artifacts**: Never commit `node_modules/`, `main.js`, or other generated files to version control.
- Keep the plugin small. Avoid large dependencies. Prefer browser-compatible packages.
- Generated output should be placed at the plugin root or `dist/` depending on your build setup. Release artifacts must end up at the top level of the plugin folder in the vault (`main.js`, `manifest.json`, `styles.css`).
## Manifest rules (`manifest.json`)
- Must include (non-exhaustive):
- `id` (plugin ID; for local dev it should match the folder name)
- `name`
- `version` (Semantic Versioning `x.y.z`)
- `minAppVersion`
- `description`
- `isDesktopOnly` (boolean)
- Optional: `author`, `authorUrl`, `fundingUrl` (string or map)
- Never change `id` after release. Treat it as stable API.
- Keep `minAppVersion` accurate when using newer APIs.
- The canonical requirements are codified here: https://github.com/obsidianmd/obsidian-releases/blob/master/.github/workflows/validate-plugin-entry.yml
## Testing
- Manual install for testing: copy `main.js`, `manifest.json`, `styles.css` (if any) to:
```
<Vault>/.obsidian/plugins/<plugin-id>/
```
- Reload Obsidian and enable the plugin in **Settings → Community plugins**.
## Commands & settings
- Any user-facing commands should be added via `this.addCommand(...)`.
- If the plugin has configuration, provide a settings tab and sensible defaults.
- Persist settings using `this.loadData()` / `this.saveData()`.
- Use stable command IDs; avoid renaming once released.
## Versioning & releases
- Bump `version` in `manifest.json` (SemVer) and update `versions.json` to map plugin version → minimum app version.
- Create a GitHub release whose tag exactly matches `manifest.json`'s `version`. Do not use a leading `v`.
- Attach `manifest.json`, `main.js`, and `styles.css` (if present) to the release as individual assets.
- After the initial release, follow the process to add/update your plugin in the community catalog as required.
## Security, privacy, and compliance
Follow Obsidian's **Developer Policies** and **Plugin Guidelines**. In particular:
- Default to local/offline operation. Only make network requests when essential to the feature.
- No hidden telemetry. If you collect optional analytics or call third-party services, require explicit opt-in and document clearly in `README.md` and in settings.
- Never execute remote code, fetch and eval scripts, or auto-update plugin code outside of normal releases.
- Minimize scope: read/write only what's necessary inside the vault. Do not access files outside the vault.
- Clearly disclose any external services used, data sent, and risks.
- Respect user privacy. Do not collect vault contents, filenames, or personal information unless absolutely necessary and explicitly consented.
- Avoid deceptive patterns, ads, or spammy notifications.
- Register and clean up all DOM, app, and interval listeners using the provided `register*` helpers so the plugin unloads safely.
## UX & copy guidelines (for UI text, commands, settings)
- Prefer sentence case for headings, buttons, and titles.
- Use clear, action-oriented imperatives in step-by-step copy.
- Use **bold** to indicate literal UI labels. Prefer "select" for interactions.
- Use arrow notation for navigation: **Settings → Community plugins**.
- Keep in-app strings short, consistent, and free of jargon.
## Performance
- Keep startup light. Defer heavy work until needed.
- Avoid long-running tasks during `onload`; use lazy initialization.
- Batch disk access and avoid excessive vault scans.
- Debounce/throttle expensive operations in response to file system events.
## Coding conventions
- TypeScript with `"strict": true` preferred.
- **Keep `main.ts` minimal**: Focus only on plugin lifecycle (onload, onunload, addCommand calls). Delegate all feature logic to separate modules.
- **Split large files**: If any file exceeds ~200-300 lines, consider breaking it into smaller, focused modules.
- **Use clear module boundaries**: Each file should have a single, well-defined responsibility.
- Bundle everything into `main.js` (no unbundled runtime deps).
- Avoid Node/Electron APIs if you want mobile compatibility; set `isDesktopOnly` accordingly.
- Prefer `async/await` over promise chains; handle errors gracefully.
## Mobile
- Where feasible, test on iOS and Android.
- Don't assume desktop-only behavior unless `isDesktopOnly` is `true`.
- Avoid large in-memory structures; be mindful of memory and storage constraints.
## Agent do/don't
**Do**
- Add commands with stable IDs (don't rename once released).
- Provide defaults and validation in settings.
- Write idempotent code paths so reload/unload doesn't leak listeners or intervals.
- Use `this.register*` helpers for everything that needs cleanup.
**Don't**
- Introduce network calls without an obvious user-facing reason and documentation.
- Ship features that require cloud services without clear disclosure and explicit opt-in.
- Store or transmit vault contents unless essential and consented.
## Common tasks
### Organize code across multiple files
**main.ts** (minimal, lifecycle only):
```ts
import { Plugin } from "obsidian";
import { MySettings, DEFAULT_SETTINGS } from "./settings";
import { registerCommands } from "./commands";
export default class MyPlugin extends Plugin {
settings: MySettings;
async onload() {
this.settings = Object.assign({}, DEFAULT_SETTINGS, await this.loadData());
registerCommands(this);
}
}
```
**settings.ts**:
```ts
export interface MySettings {
enabled: boolean;
apiKey: string;
}
export const DEFAULT_SETTINGS: MySettings = {
enabled: true,
apiKey: "",
};
```
**commands/index.ts**:
```ts
import { Plugin } from "obsidian";
import { doSomething } from "./my-command";
export function registerCommands(plugin: Plugin) {
plugin.addCommand({
id: "do-something",
name: "Do something",
callback: () => doSomething(plugin),
});
}
```
### Add a command
```ts
this.addCommand({
id: "your-command-id",
name: "Do the thing",
callback: () => this.doTheThing(),
});
```
### Persist settings
```ts
interface MySettings { enabled: boolean }
const DEFAULT_SETTINGS: MySettings = { enabled: true };
async onload() {
this.settings = Object.assign({}, DEFAULT_SETTINGS, await this.loadData());
await this.saveData(this.settings);
}
```
### Register listeners safely
```ts
this.registerEvent(this.app.workspace.on("file-open", f => { /* ... */ }));
this.registerDomEvent(window, "resize", () => { /* ... */ });
this.registerInterval(window.setInterval(() => { /* ... */ }, 1000));
```
## Troubleshooting
- Plugin doesn't load after build: ensure `main.js` and `manifest.json` are at the top level of the plugin folder under `<Vault>/.obsidian/plugins/<plugin-id>/`.
- Build issues: if `main.js` is missing, run `npm run build` or `npm run dev` to compile your TypeScript source code.
- Commands not appearing: verify `addCommand` runs after `onload` and IDs are unique.
- Settings not persisting: ensure `loadData`/`saveData` are awaited and you re-render the UI after changes.
- Mobile-only issues: confirm you're not using desktop-only APIs; check `isDesktopOnly` and adjust.
## References
- Obsidian sample plugin: https://github.com/obsidianmd/obsidian-sample-plugin
- API documentation: https://docs.obsidian.md
- Developer policies: https://docs.obsidian.md/Developer+policies
- Plugin guidelines: https://docs.obsidian.md/Plugins/Releasing/Plugin+guidelines
- Style guide: https://help.obsidian.md/style-guide

201
surfsense_obsidian/LICENSE Normal file
View file

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -0,0 +1,150 @@
# SurfSense for Obsidian
Sync your Obsidian vault to [SurfSense](https://github.com/MODSetter/SurfSense)
so your notes become searchable alongside the rest of your knowledge sources
(GitHub, Slack, Linear, Drive, web pages, etc.) from any SurfSense chat.
The plugin runs inside Obsidian itself, on desktop and mobile, so it works
the same way for SurfSense Cloud and self-hosted deployments. There is no
server-side vault mount and no Electron-only path; everything goes over HTTPS.
## What it does
- Realtime sync as you create, edit, rename, or delete notes
- Initial scan + reconciliation against the server manifest on startup,
so vault edits made while the plugin was offline still show up
- Persistent upload queue, so a crash or offline window never loses changes
- Frontmatter, `[[wiki links]]`, `#tags`, headings, and resolved/unresolved
links are extracted and indexed
- Each chat citation links straight back into Obsidian via the
`obsidian://open?vault=…&file=…` deep link
- Multi-vault aware: each vault you enable the plugin in becomes its own
connector row in SurfSense, named after the vault
## Install
### Via [BRAT](https://github.com/TfTHacker/obsidian42-brat) (current)
1. Install the BRAT community plugin.
2. Run **BRAT: Add a beta plugin for testing**.
3. Paste `MODSetter/SurfSense` and pick the latest release.
4. Enable **SurfSense** in *Settings → Community plugins*.
### Manual sideload
1. Download `main.js`, `manifest.json`, and `styles.css` from the latest
GitHub release tagged with the plugin version (e.g. `0.1.0`, with no `v`
prefix, matching the `version` field in `manifest.json`).
2. Copy them into `<vault>/.obsidian/plugins/surfsense/`.
3. Restart Obsidian and enable the plugin.
### Community plugin store
Submission to the official Obsidian community plugin store is in progress.
Once approved you will be able to install from *Settings → Community plugins*
inside Obsidian.
## Configure
Open **Settings → SurfSense** in Obsidian and fill in:
| Setting | Value |
| --- | --- |
| Server URL | `https://surfsense.com` for SurfSense Cloud, or your self-hosted URL |
| API token | Copy from the *Connectors → Obsidian* dialog in the SurfSense web app |
| Search space | Pick the search space this vault should sync into |
| Vault name | Defaults to your Obsidian vault name; rename if you have multiple vaults |
| Sync mode | *Auto* (recommended) or *Manual* |
| Exclude patterns | Glob patterns of folders/files to skip (e.g. `.trash`, `_attachments`, `templates/**`) |
| Include attachments | Off by default; enable to sync non-`.md` files |
The connector row appears automatically inside SurfSense the first time the
plugin successfully calls `/obsidian/connect`. You can manage or delete it
from *Connectors → Obsidian* in the web app.
> **Token lifetime.** The web app currently issues 24-hour JWTs. If you see
> *"token expired"* in the plugin status bar, paste a fresh token from the
> SurfSense web app. Long-lived personal access tokens are coming in a future
> release.
## Mobile
The plugin works on Obsidian for iOS and Android. Sync runs whenever the
app is in the foreground and once more on app close. Mobile OSes
aggressively suspend background apps, so mobile sync is near-realtime rather
than instant. Desktop is the source of truth for live editing.
## Privacy & safety
The SurfSense backend qualifies as server-side telemetry under Obsidian's
[Developer policies](https://github.com/obsidianmd/obsidian-developer-docs/blob/main/en/Developer%20policies.md),
so here is the full list of what the plugin sends and stores. The
canonical SurfSense privacy policy lives at
<https://surfsense.com/privacy>; this section is the plugin-specific
addendum.
**Sent on `/connect` (once per onload):**
- `vault_id`: a random UUID minted in the plugin's `data.json` on first run
- `vault_name`: the Obsidian vault folder name
- `search_space_id`: the SurfSense search space you picked
**Sent per note on `/sync`, `/rename`, `/delete`:**
- `path`, `name`, `extension`
- `content` (plain text of the note)
- `frontmatter`, `tags`, `headings`, resolved and unresolved links,
`embeds`, `aliases`
- `content_hash` (SHA-256 of the note body), `mtime`, `ctime`
**Stored server-side per vault:**
- One connector row keyed by `vault_id` with `{vault_name, source: "plugin",
last_connect_at}`. Nothing per-device, no plugin version, no analytics.
- One `documents` row per note (soft-deleted rather than hard-deleted so
existing chat citations remain valid).
**What never leaves the plugin:**
- No remote code loading, no `eval`, no analytics.
- All network traffic goes to your configured **Server URL** only.
- The `Authorization: Bearer …` header is set per-request with the token
you paste; the plugin never reads cookies or other Obsidian state.
- The plugin uses Obsidian's `requestUrl` (no `fetch`, no `node:http`,
no `node:https`) and Web Crypto for hashing, per Obsidian's mobile guidance.
For retention, deletion, and contact details see
<https://surfsense.com/privacy>.
## Development
This plugin lives in [`surfsense_obsidian/`](.) inside the SurfSense
monorepo. To work on it locally:
```sh
cd surfsense_obsidian
npm install
npm run dev # esbuild in watch mode → main.js
```
Symlink the folder into a test vault's `.obsidian/plugins/surfsense/`,
enable the plugin, then **Cmd+R** in Obsidian whenever `main.js` rebuilds.
Lint:
```sh
npm run lint
```
The release pipeline lives at
[`.github/workflows/release-obsidian-plugin.yml`](../.github/workflows/release-obsidian-plugin.yml)
in the repo root and is triggered by tags of the form `obsidian-v0.1.0`.
It verifies the tag matches `manifest.json`, builds the plugin, attaches
`main.js` + `manifest.json` + `styles.css` to a GitHub release tagged with
the bare version (e.g. `0.1.0`, the form BRAT and the Obsidian community
store look for), and mirrors `manifest.json` + `versions.json` to the repo
root so Obsidian's community plugin browser can discover them.
## License
[Apache-2.0](LICENSE), same as the rest of SurfSense.

View file

@ -0,0 +1,49 @@
import esbuild from "esbuild";
import process from "process";
import { builtinModules } from 'node:module';
// Comment banner prepended to the bundled main.js so readers know it is
// generated output, not source.
const banner = `/*
THIS IS A GENERATED/BUNDLED FILE BY ESBUILD
if you want to view the source, please visit the github repository of this plugin
*/
`;

const isProduction = process.argv[2] === "production";

// Modules resolved at runtime rather than bundled: the "obsidian" API,
// Electron, the CodeMirror/Lezer packages Obsidian ships, and all Node
// built-ins.
const context = await esbuild.context({
	banner: {
		js: banner,
	},
	entryPoints: ["src/main.ts"],
	bundle: true,
	external: [
		"obsidian",
		"electron",
		"@codemirror/autocomplete",
		"@codemirror/collab",
		"@codemirror/commands",
		"@codemirror/language",
		"@codemirror/lint",
		"@codemirror/search",
		"@codemirror/state",
		"@codemirror/view",
		"@lezer/common",
		"@lezer/highlight",
		"@lezer/lr",
		...builtinModules,
	],
	format: "cjs",
	target: "es2018",
	logLevel: "info",
	// Inline source maps in dev builds only; none in release artifacts.
	sourcemap: isProduction ? false : "inline",
	treeShaking: true,
	outfile: "main.js",
	minify: isProduction,
});

if (isProduction) {
	// One-shot build for CI/release, then exit explicitly.
	await context.rebuild();
	process.exit(0);
} else {
	// Dev mode: rebuild on every source change until interrupted.
	await context.watch();
}

View file

@ -0,0 +1,55 @@
import tseslint from 'typescript-eslint';
import obsidianmd from "eslint-plugin-obsidianmd";
import globals from "globals";
import { globalIgnores } from "eslint/config";
/**
 * Flat ESLint config for the SurfSense Obsidian plugin.
 *
 * Layers, in order: type-aware parser options, the obsidianmd
 * recommended preset, a sentence-case override with a brand allowlist,
 * and global ignores for generated/config files.
 */
export default tseslint.config(
	{
		languageOptions: {
			globals: {
				...globals.browser,
			},
			parserOptions: {
				// Files outside the tsconfig project still get linted via
				// the default project.
				projectService: {
					allowDefaultProject: ["eslint.config.js", "manifest.json"],
				},
				tsconfigRootDir: import.meta.dirname,
				extraFileExtensions: [".json"],
			},
		},
	},
	...obsidianmd.configs.recommended,
	{
		plugins: { obsidianmd },
		rules: {
			"obsidianmd/ui/sentence-case": [
				"error",
				{
					// Proper nouns exempt from sentence-casing in UI strings.
					brands: [
						"Surfsense",
						"iOS",
						"iPadOS",
						"macOS",
						"Windows",
						"Android",
						"Linux",
						"Obsidian",
						"Markdown",
					],
				},
			],
		},
	},
	// Generated output and tooling files are not lint targets.
	globalIgnores([
		"node_modules",
		"dist",
		"esbuild.config.mjs",
		"eslint.config.js",
		"version-bump.mjs",
		"versions.json",
		"main.js",
	]),
);

View file

@ -0,0 +1,10 @@
{
"id": "surfsense",
"name": "SurfSense",
"version": "0.1.1",
"minAppVersion": "1.5.4",
"description": "Turn your vault into a searchable second brain with SurfSense.",
"author": "SurfSense",
"authorUrl": "https://github.com/MODSetter/SurfSense",
"isDesktopOnly": false
}

5170
surfsense_obsidian/package-lock.json generated Normal file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,34 @@
{
"name": "surfsense-obsidian",
"version": "0.1.0",
"description": "SurfSense plugin for Obsidian: sync your vault to SurfSense for AI-powered search.",
"main": "main.js",
"type": "module",
"scripts": {
"dev": "node esbuild.config.mjs",
"build": "tsc -noEmit -skipLibCheck && node esbuild.config.mjs production",
"version": "node version-bump.mjs && git add manifest.json versions.json",
"lint": "eslint ."
},
"keywords": [
"obsidian",
"surfsense",
"sync",
"search"
],
"license": "Apache-2.0",
"devDependencies": {
"@eslint/js": "9.30.1",
"@types/node": "^20.19.39",
"esbuild": "0.25.5",
"eslint-plugin-obsidianmd": "0.1.9",
"globals": "14.0.0",
"jiti": "2.6.1",
"tslib": "2.4.0",
"typescript": "^5.8.3",
"typescript-eslint": "8.35.1"
},
"dependencies": {
"obsidian": "latest"
}
}

View file

@ -0,0 +1,286 @@
import { Notice, requestUrl, type RequestUrlParam, type RequestUrlResponse } from "obsidian";
import type {
ConnectResponse,
DeleteAck,
HealthResponse,
ManifestResponse,
NotePayload,
RenameAck,
RenameItem,
SearchSpace,
SyncAck,
} from "./types";
/**
* SurfSense backend client used by the Obsidian plugin.
*
* Mobile-safety contract (must hold for every transitive import):
 * - Use Obsidian `requestUrl` only — no `fetch`, no `axios`, no
* `node:http`, no `node:https`. CORS is bypassed and mobile works.
* - No top-level `node:*` imports anywhere reachable from this file.
* - Hashing happens elsewhere via Web Crypto, not `node:crypto`.
*
* Auth + wire contract:
* - Every request carries `Authorization: Bearer <token>` only. No
 *   custom headers — the backend identifies the caller from the JWT
* and feature-detects the API via the `capabilities` array on
* `/health` and `/connect`.
* - 401 surfaces as `AuthError` so the orchestrator can show the
* "token expired, paste a fresh one" UX.
* - HealthResponse / ConnectResponse use index signatures so any
* additive backend field (e.g. new capabilities) parses without
* breaking the decoder. This mirrors `ConfigDict(extra='ignore')`
* on the server side.
*/
/** Thrown on HTTP 401: the bearer token is missing, expired, or invalid. */
export class AuthError extends Error {
	constructor(message: string) {
		super(message);
		// Explicit name so `err.name` checks and log lines stay readable
		// even after bundling/minification mangles class names.
		this.name = "AuthError";
	}
}

/** Retryable failure: network error (status 0), HTTP 429, or any 5xx. */
export class TransientError extends Error {
	// HTTP status that triggered the error; 0 means the request never
	// produced an HTTP response (network failure).
	readonly status: number;

	constructor(status: number, message: string) {
		super(message);
		this.name = "TransientError";
		this.status = status;
	}
}

/** Non-retryable failure: any other non-2xx status. */
export class PermanentError extends Error {
	// HTTP status that triggered the error.
	readonly status: number;

	constructor(status: number, message: string) {
		super(message);
		this.name = "PermanentError";
		this.status = status;
	}
}

/** 404 `VAULT_NOT_REGISTERED` — `/connect` hasn't committed yet; retry after reconnect. */
export class VaultNotRegisteredError extends TransientError {
	constructor(message: string) {
		// Modeled as transient (fixed status 404) so callers' retry
		// handling covers it like any other retryable failure.
		super(404, message);
		this.name = "VaultNotRegisteredError";
	}
}

/** Callbacks the API client reads lazily on every request. */
export interface ApiClientOptions {
	// Returns the configured base URL; trailing slashes are stripped per request.
	getServerUrl: () => string;
	// Returns the current bearer token; empty string means "not configured yet".
	getToken: () => string;
	// Invoked on HTTP 401 before AuthError is thrown (e.g. to flag the UI).
	onAuthError?: () => void;
}
/**
 * Typed wrapper around the SurfSense Obsidian endpoints.
 *
 * Error mapping (see `request`): 401 → AuthError; network failure,
 * 429, and 5xx → TransientError; 404 with code VAULT_NOT_REGISTERED →
 * VaultNotRegisteredError; every other non-2xx → PermanentError.
 */
export class SurfSenseApiClient {
	// Options hold callbacks rather than values, so settings edits take
	// effect on the next request without rebuilding the client.
	private readonly opts: ApiClientOptions;

	constructor(opts: ApiClientOptions) {
		this.opts = opts;
	}

	/** Merge new option callbacks in place (used after settings changes). */
	updateOptions(partial: Partial<ApiClientOptions>): void {
		Object.assign(this.opts, partial);
	}

	/** GET /obsidian/health. */
	async health(): Promise<HealthResponse> {
		return await this.request<HealthResponse>("GET", "/api/v1/obsidian/health");
	}

	/**
	 * GET /searchspaces/. Accepts either a bare array or an
	 * `{ items: [...] }` envelope; any other shape yields [].
	 */
	async listSearchSpaces(): Promise<SearchSpace[]> {
		const resp = await this.request<SearchSpace[] | { items: SearchSpace[] }>(
			"GET",
			"/api/v1/searchspaces/"
		);
		if (Array.isArray(resp)) return resp;
		if (resp && Array.isArray((resp as { items?: SearchSpace[] }).items)) {
			return (resp as { items: SearchSpace[] }).items;
		}
		return [];
	}

	/** Prove the configured token works by hitting the auth-gated /health. */
	async verifyToken(): Promise<{ ok: true; health: HealthResponse }> {
		// /health is gated by current_active_user, so a successful response
		// transitively proves the token works. Cheaper than fetching a list.
		const health = await this.health();
		return { ok: true, health };
	}

	/**
	 * POST /obsidian/connect — register (or re-register) this vault under
	 * the chosen search space. Camel-cased inputs are converted to the
	 * backend's snake_case wire format here.
	 */
	async connect(input: {
		searchSpaceId: number;
		vaultId: string;
		vaultName: string;
		vaultFingerprint: string;
	}): Promise<ConnectResponse> {
		return await this.request<ConnectResponse>(
			"POST",
			"/api/v1/obsidian/connect",
			{
				vault_id: input.vaultId,
				vault_name: input.vaultName,
				search_space_id: input.searchSpaceId,
				vault_fingerprint: input.vaultFingerprint,
			}
		);
	}

	/** POST /sync — `failed[]` are paths whose `status === "error"` for retry. */
	async syncBatch(input: {
		vaultId: string;
		notes: NotePayload[];
	}): Promise<{ indexed: number; failed: string[] }> {
		const resp = await this.request<SyncAck>(
			"POST",
			"/api/v1/obsidian/sync",
			{ vault_id: input.vaultId, notes: input.notes }
		);
		// Only per-item "error" statuses feed the retry list.
		const failed = resp.items
			.filter((it) => it.status === "error")
			.map((it) => it.path);
		return { indexed: resp.indexed, failed };
	}

	/** POST /rename — `"missing"` counts as success; only `"error"` is retried. */
	async renameBatch(input: {
		vaultId: string;
		renames: Pick<RenameItem, "oldPath" | "newPath">[];
	}): Promise<{
		renamed: number;
		failed: Array<{ oldPath: string; newPath: string }>;
	}> {
		const resp = await this.request<RenameAck>(
			"POST",
			"/api/v1/obsidian/rename",
			{
				vault_id: input.vaultId,
				// camelCase → snake_case for the wire format.
				renames: input.renames.map((r) => ({
					old_path: r.oldPath,
					new_path: r.newPath,
				})),
			}
		);
		const failed = resp.items
			.filter((it) => it.status === "error")
			.map((it) => ({ oldPath: it.old_path, newPath: it.new_path }));
		return { renamed: resp.renamed, failed };
	}

	/** DELETE /notes — `"missing"` counts as success; only `"error"` is retried. */
	async deleteBatch(input: {
		vaultId: string;
		paths: string[];
	}): Promise<{ deleted: number; failed: string[] }> {
		const resp = await this.request<DeleteAck>(
			"DELETE",
			"/api/v1/obsidian/notes",
			{ vault_id: input.vaultId, paths: input.paths }
		);
		const failed = resp.items
			.filter((it) => it.status === "error")
			.map((it) => it.path);
		return { deleted: resp.deleted, failed };
	}

	/** GET /obsidian/manifest — the server-side view of this vault's notes. */
	async getManifest(vaultId: string): Promise<ManifestResponse> {
		return await this.request<ManifestResponse>(
			"GET",
			`/api/v1/obsidian/manifest?vault_id=${encodeURIComponent(vaultId)}`
		);
	}

	/**
	 * Shared request path: builds URL and auth headers, performs the call
	 * via Obsidian's `requestUrl`, and maps the response status onto the
	 * typed error hierarchy (see class doc).
	 *
	 * @throws AuthError when no token is configured or the server returns 401.
	 * @throws TransientError on network failure (status 0), 429, or 5xx.
	 * @throws VaultNotRegisteredError on 404 with code VAULT_NOT_REGISTERED.
	 * @throws PermanentError on any other non-2xx status.
	 */
	private async request<T>(
		method: RequestUrlParam["method"],
		path: string,
		body?: unknown
	): Promise<T> {
		// Strip trailing slashes so `${baseUrl}${path}` never doubles "/".
		const baseUrl = this.opts.getServerUrl().replace(/\/+$/, "");
		const token = this.opts.getToken();
		if (!token) {
			throw new AuthError("Missing API token. Open SurfSense settings to paste one.");
		}
		const headers: Record<string, string> = {
			Authorization: `Bearer ${token}`,
			Accept: "application/json",
		};
		// Only bodied requests carry a Content-Type.
		if (body !== undefined) headers["Content-Type"] = "application/json";

		let resp: RequestUrlResponse;
		try {
			// throw: false — non-2xx statuses are mapped manually below.
			resp = await requestUrl({
				url: `${baseUrl}${path}`,
				method,
				headers,
				body: body === undefined ? undefined : JSON.stringify(body),
				throw: false,
			});
		} catch (err) {
			// NOTE(review): presumably this rejection path means the request
			// never produced an HTTP response (DNS/TLS/offline) — status 0
			// marks "no HTTP status".
			throw new TransientError(0, `Network error: ${(err as Error).message}`);
		}

		if (resp.status >= 200 && resp.status < 300) {
			return parseJson<T>(resp);
		}
		const detail = extractDetail(resp);
		if (resp.status === 401) {
			// Notify the host plugin first so the UI can react, then throw.
			this.opts.onAuthError?.();
			new Notice("Surfsense: token expired or invalid. Paste a fresh token in settings.");
			throw new AuthError(detail || "Unauthorized");
		}
		if (resp.status >= 500 || resp.status === 429) {
			throw new TransientError(resp.status, detail || `HTTP ${resp.status}`);
		}
		if (resp.status === 404 && extractCode(resp) === "VAULT_NOT_REGISTERED") {
			throw new VaultNotRegisteredError(detail || "Vault not registered yet");
		}
		throw new PermanentError(resp.status, detail || `HTTP ${resp.status}`);
	}
}
/**
 * Decode a 2xx body as JSON.
 *
 * Plugin endpoints always return JSON; a non-JSON 2xx is usually a
 * captive portal or CDN page, so it surfaces as a TransientError to
 * trigger backoff rather than a hard failure.
 */
function parseJson<T>(resp: RequestUrlResponse): T {
	const raw = resp.text ?? "";
	try {
		return JSON.parse(raw) as T;
	} catch {
		const preview = raw.slice(0, 80);
		throw new TransientError(resp.status, `Invalid JSON from server (got: ${preview})`);
	}
}
/** Best-effort JSON decode of a response body; {} when empty or unparsable. */
function safeJson(resp: RequestUrlResponse): Record<string, unknown> {
	if (!resp.text) return {};
	try {
		return JSON.parse(resp.text) as Record<string, unknown>;
	} catch {
		return {};
	}
}
/**
 * Pull a human-readable error message from a structured error body.
 * Checks, in order: string `detail`, string `message`, then
 * `detail.message`; falls back to the first 200 chars of the raw body.
 */
function extractDetail(resp: RequestUrlResponse): string {
	const payload = safeJson(resp);
	if (typeof payload.detail === "string") return payload.detail;
	if (typeof payload.message === "string") return payload.message;
	const nested = payload.detail;
	if (nested && typeof nested === "object") {
		const message = (nested as Record<string, unknown>).message;
		if (typeof message === "string") return message;
	}
	return resp.text?.slice(0, 200) ?? "";
}
/** Read the machine-readable `detail.code` field, if the body carries one. */
function extractCode(resp: RequestUrlResponse): string | undefined {
	const nested = safeJson(resp).detail;
	if (!nested || typeof nested !== "object") return undefined;
	const code = (nested as Record<string, unknown>).code;
	return typeof code === "string" ? code : undefined;
}

View file

@ -0,0 +1,94 @@
/**
* Tiny glob matcher for exclude patterns.
*
* Supports `*` (any chars except `/`), `**` (any chars including `/`), and
* literal segments. Patterns without a slash are matched against any path
* segment (so `templates` excludes `templates/foo.md` and `notes/templates/x.md`).
*
 * Intentionally not a full minimatch — Obsidian users overwhelmingly type
* folder names ("templates", ".trash") and the obvious wildcards. Avoiding
* the dependency keeps the bundle small and the mobile attack surface tiny.
*/
// Compiled patterns are memoized: the same patterns are re-checked for
// every file event, so each one is translated to a RegExp only once.
const cache = new Map<string, RegExp>();

/** Translate one glob pattern into an anchored RegExp (memoized). */
function compile(pattern: string): RegExp {
	const cached = cache.get(pattern);
	if (cached) return cached;

	// Regex metacharacters that must be escaped when copied literally.
	const specials = ".+^${}()|[]\\";
	let body = "";
	let i = 0;
	while (i < pattern.length) {
		const ch = pattern[i] ?? "";
		if (ch === "*" && pattern[i + 1] === "*") {
			// `**` matches across "/" boundaries; swallow one following
			// separator so "a/**/b" also matches "a/b".
			body += ".*";
			i += 2;
			if (pattern[i] === "/") i += 1;
		} else if (ch === "*") {
			// A single `*` stops at path separators.
			body += "[^/]*";
			i += 1;
		} else {
			body += specials.includes(ch) ? "\\" + ch : ch;
			i += 1;
		}
	}

	// Patterns containing "/" anchor at the path root; bare names match
	// any segment. Either way a match also covers everything below it.
	const source = pattern.includes("/")
		? `^${body}(/.*)?$`
		: `(^|/)${body}(/.*)?$`;
	const compiled = new RegExp(source);
	cache.set(pattern, compiled);
	return compiled;
}

/** True when `path` matches any non-empty, non-comment pattern. */
export function isExcluded(path: string, patterns: string[]): boolean {
	if (patterns.length === 0) return false;
	return patterns.some((raw) => {
		const pattern = raw.trim();
		if (!pattern || pattern.startsWith("#")) return false;
		return compile(pattern).test(path);
	});
}
/** Split a settings textarea into patterns, dropping blanks and `#` comments. */
export function parseExcludePatterns(raw: string): string[] {
	const patterns: string[] = [];
	for (const line of raw.split(/\r?\n/)) {
		const trimmed = line.trim();
		if (trimmed && !trimmed.startsWith("#")) patterns.push(trimmed);
	}
	return patterns;
}
/** Normalize a folder path: strip leading/trailing slashes; "" or "/" means vault root. */
export function normalizeFolder(folder: string): string {
	return folder.replace(/^\/+|\/+$/g, "");
}

/** True if `path` lives inside `folder` (or `folder` is the vault root). */
export function isInFolder(path: string, folder: string): boolean {
	const root = normalizeFolder(folder);
	return root === "" || path === root || path.startsWith(`${root}/`);
}

/** Exclude wins over include. Empty includeFolders means "include everything". */
export function isFolderFiltered(
	path: string,
	includeFolders: string[],
	excludeFolders: string[],
): boolean {
	// Any matching exclude filters the path out immediately.
	if (excludeFolders.some((folder) => isInFolder(path, folder))) return true;
	// No include list configured: everything not excluded passes.
	if (includeFolders.length === 0) return false;
	// Otherwise the path must fall under at least one include folder.
	return !includeFolders.some((folder) => isInFolder(path, folder));
}

View file

@ -0,0 +1,32 @@
import { type App, FuzzySuggestModal, type TFolder } from "obsidian";
/** Folder picker built on Obsidian's stock {@link FuzzySuggestModal}. */
export class FolderSuggestModal extends FuzzySuggestModal<TFolder> {
	private readonly onPick: (path: string) => void;
	private readonly excluded: Set<string>;

	constructor(app: App, onPick: (path: string) => void, excluded: string[] = []) {
		super(app);
		this.onPick = onPick;
		// Normalize to the same representation `folderPath` produces
		// ("" for root, no edge slashes) so membership checks line up.
		this.excluded = new Set(excluded.map((p) => p.replace(/^\/+|\/+$/g, "")));
		this.setPlaceholder("Type to filter folders…");
	}

	/** All vault folders (root included) minus the excluded set. */
	getItems(): TFolder[] {
		const folders = this.app.vault.getAllFolders(true);
		return folders.filter((folder) => !this.excluded.has(this.folderPath(folder)));
	}

	/** Display text for a folder; the root shows as "/". */
	getItemText(folder: TFolder): string {
		const path = this.folderPath(folder);
		return path === "" ? "/" : path;
	}

	onChooseItem(folder: TFolder): void {
		this.onPick(this.folderPath(folder));
	}

	/** Canonical path for a folder: "" for the vault root. */
	private folderPath(folder: TFolder): string {
		return folder.isRoot() ? "" : folder.path;
	}
}

View file

@ -0,0 +1,279 @@
import { Notice, Platform, Plugin } from "obsidian";
import { SurfSenseApiClient } from "./api-client";
import { PersistentQueue } from "./queue";
import { SurfSenseSettingTab } from "./settings";
import { StatusBar } from "./status-bar";
import { StatusModal } from "./status-modal";
import { SyncEngine } from "./sync-engine";
import {
DEFAULT_SETTINGS,
type QueueItem,
type StatusState,
type SurfsensePluginSettings,
} from "./types";
import { generateVaultUuid } from "./vault-identity";
/** SurfSense plugin entry point. */
export default class SurfSensePlugin extends Plugin {
	// Populated by loadSettings() at the top of onload(); non-null thereafter.
	settings!: SurfsensePluginSettings;
	// Backend HTTP client; reads server URL/token lazily from settings.
	api!: SurfSenseApiClient;
	// Durable upload queue; items are persisted inside the settings JSON.
	queue!: PersistentQueue;
	// Orchestrates connect / flush / reconcile against the server.
	engine!: SyncEngine;
	private statusBar: StatusBar | null = null;
	// Last engine-reported status; mirrored into the status bar and modal.
	lastStatus: StatusState = { kind: "idle", queueDepth: 0 };
	// Capability strings from the most recent server handshake.
	serverCapabilities: string[] = [];
	private settingTab: SurfSenseSettingTab | null = null;
	// Subscribers (e.g. the open status modal) notified on any status change.
	private statusListeners = new Set<() => void>();
	// Active periodic-reconcile interval id, or null when the timer is off.
	private reconcileTimerId: number | null = null;
	/**
	 * Plugin startup: load settings, wire api/queue/engine, register UI,
	 * vault events, commands, and network listeners, then kick off the
	 * first sync once the workspace layout is ready. Order matters here:
	 * the queue's flush handler and the engine must exist before any
	 * vault event can fire into them.
	 */
	async onload() {
		await this.loadSettings();
		this.seedIdentity();
		await this.saveSettings();
		this.api = new SurfSenseApiClient({
			getServerUrl: () => this.settings.serverUrl,
			getToken: () => this.settings.apiToken,
		});
		// Queue persistence writes straight through to data.json so a crash
		// mid-flight loses nothing.
		this.queue = new PersistentQueue(this.settings.queue ?? [], {
			persist: async (items) => {
				this.settings.queue = items;
				await this.saveData(this.settings);
			},
		});
		this.engine = new SyncEngine({
			app: this.app,
			apiClient: this.api,
			queue: this.queue,
			getSettings: () => this.settings,
			saveSettings: async (mut) => {
				mut(this.settings);
				await this.saveSettings();
				this.notifyStatusChange();
			},
			setStatus: (s) => {
				this.lastStatus = s;
				this.statusBar?.update(s);
				this.notifyStatusChange();
			},
			onCapabilities: (caps) => {
				this.serverCapabilities = [...caps];
				this.notifyStatusChange();
			},
			onReconcileBackoffChanged: () => {
				this.restartReconcileTimer();
			},
		});
		// Debounced flush: fires after edits settle, gated on auto-sync policy.
		this.queue.setFlushHandler(() => {
			if (!this.shouldAutoSync()) return;
			void this.engine.flushQueue();
		});
		this.settingTab = new SurfSenseSettingTab(this.app, this);
		this.addSettingTab(this.settingTab);
		const statusHost = this.addStatusBarItem();
		this.statusBar = new StatusBar(statusHost, () => this.openStatusModal());
		this.statusBar.update(this.lastStatus);
		// Forward all vault mutations to the sync engine.
		this.registerEvent(
			this.app.vault.on("create", (file) => this.engine.onCreate(file)),
		);
		this.registerEvent(
			this.app.vault.on("modify", (file) => this.engine.onModify(file)),
		);
		this.registerEvent(
			this.app.vault.on("delete", (file) => this.engine.onDelete(file)),
		);
		this.registerEvent(
			this.app.vault.on("rename", (file, oldPath) =>
				this.engine.onRename(file, oldPath),
			),
		);
		this.registerEvent(
			this.app.metadataCache.on("changed", (file, data, cache) =>
				this.engine.onMetadataChanged(file, data, cache),
			),
		);
		this.addCommand({
			id: "resync-vault",
			name: "Re-sync entire vault",
			callback: async () => {
				try {
					await this.engine.maybeReconcile(true);
					new Notice("Surfsense: re-sync started.");
				} catch (err) {
					new Notice(`Surfsense: re-sync failed — ${(err as Error).message}`);
				}
			},
		});
		this.addCommand({
			id: "sync-current-note",
			name: "Sync current note",
			// checkCallback: only available when the active file is markdown.
			checkCallback: (checking) => {
				const file = this.app.workspace.getActiveFile();
				if (!file || file.extension.toLowerCase() !== "md") return false;
				if (checking) return true;
				this.queue.enqueueUpsert(file.path);
				void this.engine.flushQueue();
				return true;
			},
		});
		this.addCommand({
			id: "open-status",
			name: "Open sync status",
			callback: () => this.openStatusModal(),
		});
		this.addCommand({
			id: "open-settings",
			name: "Open settings",
			callback: () => {
				// `app.setting` isn't in the d.ts; fall back silently if it moves.
				type SettingHost = {
					open?: () => void;
					openTabById?: (id: string) => void;
				};
				const setting = (this.app as unknown as { setting?: SettingHost }).setting;
				if (setting?.open) setting.open();
				if (setting?.openTabById) setting.openTabById(this.manifest.id);
			},
		});
		// Flush when connectivity returns or the connection type changes.
		const onNetChange = () => {
			if (this.shouldAutoSync()) void this.engine.flushQueue();
		};
		this.registerDomEvent(window, "online", onNetChange);
		const conn = (navigator as unknown as { connection?: NetworkConnection }).connection;
		if (conn && typeof conn.addEventListener === "function") {
			conn.addEventListener("change", onNetChange);
			this.register(() => conn.removeEventListener?.("change", onNetChange));
		}
		// Wait for layout so the metadataCache is warm before reconcile.
		this.app.workspace.onLayoutReady(() => {
			void this.engine.start();
			this.restartReconcileTimer();
		});
	}
	/** Stop any pending flush and signal the drain loop to halt. */
	onunload() {
		this.queue?.cancelFlush();
		this.queue?.requestStop();
	}
	/**
	 * Obsidian fires this when another device rewrites our data.json.
	 * If the synced vault_id differs from ours, adopt it and
	 * re-handshake so the server routes us to the right row.
	 */
	async onExternalSettingsChange(): Promise<void> {
		const previousVaultId = this.settings.vaultId;
		const previousConnectorId = this.settings.connectorId;
		await this.loadSettings();
		const changed =
			this.settings.vaultId !== previousVaultId ||
			this.settings.connectorId !== previousConnectorId;
		if (!changed) return;
		this.notifyStatusChange();
		if (this.settings.searchSpaceId !== null) {
			void this.engine.ensureConnected();
		}
	}
	/** Number of items waiting in the upload queue (0 before onload). */
	get queueDepth(): number {
		return this.queue?.size ?? 0;
	}
	/** Show the live sync-status modal. */
	openStatusModal(): void {
		new StatusModal(this.app, this).open();
	}
	/**
	 * (Re)arm the periodic reconcile timer from the configured interval,
	 * scaled by the engine's idle backoff. A value of 0 disables it.
	 */
	restartReconcileTimer(): void {
		if (this.reconcileTimerId !== null) {
			window.clearInterval(this.reconcileTimerId);
			this.reconcileTimerId = null;
		}
		const minutes = this.settings.syncIntervalMinutes ?? 10;
		if (minutes <= 0) return;
		const baseMs = minutes * 60 * 1000;
		// Idle vaults back off (×2 → ×4 → ×8); resets on the first edit or non-empty reconcile.
		const effectiveMs = this.engine?.getReconcileBackoffMs(baseMs) ?? baseMs;
		const id = window.setInterval(
			() => {
				if (!this.shouldAutoSync()) return;
				void this.engine.maybeReconcile();
			},
			effectiveMs,
		);
		this.reconcileTimerId = id;
		// registerInterval ensures Obsidian clears it on plugin unload.
		this.registerInterval(id);
	}
	/** Gate for background network activity; per-edit flush + periodic reconcile both consult this. */
	shouldAutoSync(): boolean {
		if (!this.settings.wifiOnly) return true;
		if (!Platform.isMobileApp) return true;
		// navigator.connection is supported on Android Capacitor; undefined on iOS.
		// When unavailable, behave permissively so iOS users aren't blocked outright.
		const conn = (navigator as unknown as { connection?: NetworkConnection }).connection;
		if (!conn || typeof conn.type !== "string") return true;
		return conn.type === "wifi" || conn.type === "ethernet";
	}
	/** Subscribe to status changes (used by the status modal while open). */
	onStatusChange(listener: () => void): void {
		this.statusListeners.add(listener);
	}
	/** Unsubscribe a previously registered status listener. */
	offStatusChange(listener: () => void): void {
		this.statusListeners.delete(listener);
	}
	private notifyStatusChange(): void {
		for (const fn of this.statusListeners) fn();
	}
	/**
	 * Load settings from data.json, layering defaults underneath and
	 * deep-copying the array/map fields so later mutation can't alias
	 * the raw loaded object.
	 */
	async loadSettings() {
		const data = (await this.loadData()) as Partial<SurfsensePluginSettings> | null;
		this.settings = {
			...DEFAULT_SETTINGS,
			...(data ?? {}),
			queue: (data?.queue ?? []).map((i: QueueItem) => ({ ...i })),
			tombstones: { ...(data?.tombstones ?? {}) },
			includeFolders: [...(data?.includeFolders ?? [])],
			excludeFolders: [...(data?.excludeFolders ?? [])],
			excludePatterns: data?.excludePatterns?.length
				? [...data.excludePatterns]
				: [...DEFAULT_SETTINGS.excludePatterns],
		};
	}
	/** Persist the in-memory settings to data.json. */
	async saveSettings() {
		await this.saveData(this.settings);
	}
	/**
	 * Mint a tentative vault_id locally on first run. The server's
	 * fingerprint dedup (see /obsidian/connect) may overwrite it on the
	 * first /connect when another device of the same vault has already
	 * registered; we always trust the server's response.
	 */
	private seedIdentity(): void {
		if (!this.settings.vaultId) {
			this.settings.vaultId = generateVaultUuid();
		}
	}
}
/** Subset of the Network Information API used to detect WiFi vs cellular on Android. */
interface NetworkConnection {
	// Connection category reported by the platform (e.g. "wifi", "cellular").
	type?: string;
	// Subscribe to connectivity-type changes; optional because not every platform exposes it.
	addEventListener?: (event: string, handler: () => void) => void;
	// Unsubscribe a previously registered change handler.
	removeEventListener?: (event: string, handler: () => void) => void;
}

View file

@ -0,0 +1,163 @@
import {
type App,
type CachedMetadata,
type FrontMatterCache,
type HeadingCache,
type ReferenceCache,
type TFile,
} from "obsidian";
import type { HeadingRef, NotePayload } from "./types";
/**
* Build a NotePayload from an Obsidian TFile.
*
* Mobile-safety contract:
* - No top-level `node:fs` / `node:path` / `node:crypto` imports.
* File IO uses `vault.cachedRead` (works on the mobile WASM adapter).
* Hashing uses Web Crypto `subtle.digest`.
* - Caller MUST first wait for `metadataCache.changed` before calling
* this for a `.md` file, otherwise `frontmatter`/`tags`/`headings`
* can lag the actual file contents.
*/
export async function buildNotePayload(
	app: App,
	file: TFile,
	vaultId: string,
): Promise<NotePayload> {
	// Read through the vault cache (mobile-safe) and hash up front; every
	// other field derives from the metadata cache snapshot for this file.
	const body = await app.vault.cachedRead(file);
	const hash = await computeContentHash(body);
	const meta: CachedMetadata | null = app.metadataCache.getFileCache(file);
	const fm = normalizeFrontmatter(meta?.frontmatter);
	const { embeds, internalLinks } = collectLinks(meta);
	const linkTargets = resolveLinkTargets(app, file.path, internalLinks);
	return {
		vault_id: vaultId,
		path: file.path,
		name: file.basename,
		extension: file.extension,
		content: body,
		frontmatter: fm,
		tags: collectTags(meta),
		headings: collectHeadings(meta?.headings ?? []),
		resolved_links: linkTargets.resolved,
		unresolved_links: linkTargets.unresolved,
		embeds,
		aliases: collectAliases(fm),
		content_hash: hash,
		size: file.stat.size,
		mtime: file.stat.mtime,
		ctime: file.stat.ctime,
	};
}
export async function computeContentHash(content: string): Promise<string> {
	// UTF-8 encode, SHA-256 via Web Crypto (mobile-safe: no node:crypto),
	// then render the digest as lowercase hex, two digits per byte.
	const digest = await crypto.subtle.digest(
		"SHA-256",
		new TextEncoder().encode(content),
	);
	return Array.from(new Uint8Array(digest))
		.map((byte) => byte.toString(16).padStart(2, "0"))
		.join("");
}
function bufferToHex(buf: ArrayBuffer): string {
	// Two lowercase hex digits per byte, concatenated in byte order.
	const bytes = Array.from(new Uint8Array(buf));
	return bytes.map((b) => b.toString(16).padStart(2, "0")).join("");
}
function normalizeFrontmatter(
	fm: FrontMatterCache | undefined,
): Record<string, unknown> {
	// Absent frontmatter normalizes to an empty object. Otherwise copy every
	// entry except the `position` bookkeeping key the metadata cache injects,
	// so the wire payload stays clean.
	if (!fm) return {};
	const entries = Object.entries(fm as Record<string, unknown>);
	return Object.fromEntries(entries.filter(([key]) => key !== "position"));
}
function collectTags(cache: CachedMetadata | null): string[] {
	// Union of inline #tags and frontmatter-declared tags, "#" prefix removed,
	// deduplicated in first-seen order.
	const found = new Set<string>();
	for (const entry of cache?.tags ?? []) {
		const bare = entry.tag.startsWith("#") ? entry.tag.slice(1) : entry.tag;
		if (bare) found.add(bare);
	}
	// Frontmatter `tags` (or the legacy singular `tag`) may be a list or a
	// comma/whitespace-delimited string.
	const declared: unknown =
		cache?.frontmatter?.tags ?? cache?.frontmatter?.tag;
	if (Array.isArray(declared)) {
		for (const candidate of declared) {
			if (typeof candidate === "string" && candidate) {
				found.add(candidate.replace(/^#/, ""));
			}
		}
	} else if (typeof declared === "string" && declared) {
		for (const piece of declared.split(/[\s,]+/)) {
			if (piece) found.add(piece.replace(/^#/, ""));
		}
	}
	return Array.from(found);
}
function collectHeadings(items: HeadingCache[]): HeadingRef[] {
	// Keep only the text and depth; cache positions are irrelevant to the payload.
	const refs: HeadingRef[] = [];
	for (const item of items) {
		refs.push({ heading: item.heading, level: item.level });
	}
	return refs;
}
function collectAliases(frontmatter: Record<string, unknown>): string[] {
	// `aliases` (preferred) or singular `alias`; accept a list or one string.
	const declared = frontmatter.aliases ?? frontmatter.alias;
	if (typeof declared === "string") {
		return declared ? [declared] : [];
	}
	if (Array.isArray(declared)) {
		return declared.filter(
			(entry): entry is string => typeof entry === "string" && entry.length > 0,
		);
	}
	return [];
}
function collectLinks(cache: CachedMetadata | null): {
	embeds: string[];
	internalLinks: ReferenceCache[];
} {
	// Embeds count as internal links too — they also point at vault targets.
	const embedRefs = (cache?.embeds as ReferenceCache[] | undefined) ?? [];
	const internalLinks: ReferenceCache[] = [...(cache?.links ?? []), ...embedRefs];
	return {
		embeds: embedRefs.map((ref) => ref.link),
		internalLinks,
	};
}
function resolveLinkTargets(
	app: App,
	sourcePath: string,
	links: ReferenceCache[],
): { resolved: string[]; unresolved: string[] } {
	// Classify each link while deduplicating: a link either maps to a real
	// file (record its path) or stays as the raw link text.
	const hits = new Set<string>();
	const misses = new Set<string>();
	for (const ref of links) {
		const dest = app.metadataCache.getFirstLinkpathDest(
			stripSubpath(ref.link),
			sourcePath,
		);
		if (dest) {
			hits.add(dest.path);
		} else {
			misses.add(ref.link);
		}
	}
	return { resolved: Array.from(hits), unresolved: Array.from(misses) };
}
function stripSubpath(link: string): string {
	// Truncate at the first "#" (heading/block ref) or "|" (display alias),
	// whichever comes first; otherwise return the link untouched.
	for (let i = 0; i < link.length; i++) {
		const ch = link[i];
		if (ch === "#" || ch === "|") return link.slice(0, i);
	}
	return link;
}

View file

@ -0,0 +1,228 @@
import { type Debouncer, debounce } from "obsidian";
import type { QueueItem } from "./types";
/**
* Persistent upload queue.
*
* Mobile-safety contract:
* - Persistence is delegated to a save callback (which the plugin wires
* to `plugin.saveData()`); never `node:fs`. Items also live in the
* plugin's settings JSON so a crash mid-flight loses nothing.
* - No top-level `node:*` imports.
*
* Behavioural contract:
* - Per-file debounce: enqueueing the same path coalesces, the latest
* `enqueuedAt` wins so we don't ship a stale snapshot.
* - `delete` for a path drops any pending `upsert` for that path
* (otherwise we'd resurrect a note the user just deleted).
* - `rename` is a first-class op so the backend can update
* `unique_identifier_hash` instead of "delete + create" (which would
* blow away document versions, citations, and the document_id used
* in chat history).
* - Drain takes a worker, returns once the worker either succeeds for
* every batch or hits a stop signal (transient error, mid-drain
* stop request).
*/
export interface QueueWorker {
	/** Process one batch of queue items, classifying each into the BatchResult buckets. */
	processBatch(batch: QueueItem[]): Promise<BatchResult>;
}
/** Worker's classification of a batch; ideally every input item lands in exactly one bucket (unclassified items are retried by drain()). */
export interface BatchResult {
	/** Items that succeeded; they will be ack'd off the queue. */
	acked: QueueItem[];
	/** Items that should be retried; their `attempt` is bumped. */
	retry: QueueItem[];
	/** Items that failed permanently (4xx). They get dropped. */
	dropped: QueueItem[];
	/** If true, the drain loop stops (e.g. transient/network error). */
	stop: boolean;
	/** Optional retry-after for transient errors (ms). */
	backoffMs?: number;
}
export interface PersistentQueueOptions {
	/** Debounce window before the flush handler fires (ms); default 2000. */
	debounceMs?: number;
	/** Maximum items handed to the worker per drain iteration; default 15. */
	batchSize?: number;
	/** Retry ceiling; items whose attempt count exceeds this are dropped; default 8. */
	maxAttempts?: number;
	/** Durable persistence hook (main.ts wires this to plugin.saveData). */
	persist: (items: QueueItem[]) => Promise<void> | void;
	/** Clock override for tests; defaults to Date.now. */
	now?: () => number;
}
/** Fallback tuning values used when PersistentQueueOptions omits a knob. */
const DEFAULTS = {
	debounceMs: 2000,
	batchSize: 15,
	maxAttempts: 8,
};
export class PersistentQueue {
	// In-memory queue state; every mutation is followed by persist().
	private items: QueueItem[];
	// Resolved options: all tuning knobs defaulted, persist/now kept as-is.
	private readonly opts: Required<
		Omit<PersistentQueueOptions, "persist" | "now">
	> & {
		persist: PersistentQueueOptions["persist"];
		now: () => number;
	};
	// Re-entrancy guard: drain() is a no-op while another drain runs.
	private draining = false;
	// Set by requestStop(); checked between batches to end a drain early.
	private stopRequested = false;
	// Debounced callback installed via setFlushHandler().
	private debouncedFlush: Debouncer<[], void> | null = null;
	constructor(initial: QueueItem[], opts: PersistentQueueOptions) {
		this.items = [...initial];
		this.opts = {
			debounceMs: opts.debounceMs ?? DEFAULTS.debounceMs,
			batchSize: opts.batchSize ?? DEFAULTS.batchSize,
			maxAttempts: opts.maxAttempts ?? DEFAULTS.maxAttempts,
			persist: opts.persist,
			now: opts.now ?? (() => Date.now()),
		};
	}
	/** Number of items currently queued. */
	get size(): number {
		return this.items.length;
	}
	/** Deep-enough copy of the queue for persistence or inspection. */
	snapshot(): QueueItem[] {
		return this.items.map((i) => ({ ...i }));
	}
	/** Install the debounced flush callback invoked after enqueues settle. */
	setFlushHandler(handler: () => void): void {
		// resetTimer: true → each enqueue postpones the flush.
		this.debouncedFlush = debounce(handler, this.opts.debounceMs, true);
	}
	/** Queue (or re-queue, coalescing) an upload for `path`. */
	enqueueUpsert(path: string): void {
		const now = this.opts.now();
		// Coalesce: drop any older pending upsert for the same path so the
		// newest enqueuedAt wins.
		this.items = this.items.filter(
			(i) => !(i.op === "upsert" && i.path === path),
		);
		this.items.push({ op: "upsert", path, enqueuedAt: now, attempt: 0 });
		void this.persist();
		this.scheduleFlush();
	}
	/** Queue a deletion for `path`, superseding any pending ops on it. */
	enqueueDelete(path: string): void {
		const now = this.opts.now();
		// A delete supersedes any pending upsert for the same path.
		this.items = this.items.filter(
			(i) =>
				!(
					(i.op === "upsert" && i.path === path) ||
					(i.op === "delete" && i.path === path)
				),
		);
		this.items.push({ op: "delete", path, enqueuedAt: now, attempt: 0 });
		void this.persist();
		this.scheduleFlush();
	}
	/** Queue a rename plus a follow-up upsert for the new path. */
	enqueueRename(oldPath: string, newPath: string): void {
		const now = this.opts.now();
		// Drop upserts for either endpoint and any duplicate of this rename.
		this.items = this.items.filter(
			(i) =>
				!(
					(i.op === "upsert" && (i.path === oldPath || i.path === newPath)) ||
					(i.op === "rename" && i.oldPath === oldPath && i.newPath === newPath)
				),
		);
		this.items.push({
			op: "rename",
			oldPath,
			newPath,
			enqueuedAt: now,
			attempt: 0,
		});
		// Pair with an upsert — content may have changed alongside the rename.
		this.items.push({ op: "upsert", path: newPath, enqueuedAt: now, attempt: 0 });
		void this.persist();
		this.scheduleFlush();
	}
	/** Ask an in-flight drain to stop after the current batch. */
	requestStop(): void {
		this.stopRequested = true;
	}
	/** Cancel a pending (debounced) flush without clearing the queue. */
	cancelFlush(): void {
		this.debouncedFlush?.cancel();
	}
	private scheduleFlush(): void {
		this.debouncedFlush?.();
	}
	/**
	 * Drain the queue through `worker`, batch by batch, until empty or
	 * stopped. Unclassified and retryable items are re-queued at the FRONT
	 * with a bumped attempt counter; items past maxAttempts are dropped.
	 * State is persisted after every batch so a crash loses nothing.
	 */
	async drain(worker: QueueWorker): Promise<DrainSummary> {
		if (this.draining) return { batches: 0, acked: 0, dropped: 0, stopped: false };
		this.draining = true;
		this.stopRequested = false;
		const summary: DrainSummary = {
			batches: 0,
			acked: 0,
			dropped: 0,
			stopped: false,
		};
		try {
			while (this.items.length > 0 && !this.stopRequested) {
				const batch = this.takeBatch();
				summary.batches += 1;
				const result = await worker.processBatch(batch);
				summary.acked += result.acked.length;
				summary.dropped += result.dropped.length;
				const ackKeys = new Set(result.acked.map(itemKey));
				const dropKeys = new Set(result.dropped.map(itemKey));
				const retryKeys = new Set(result.retry.map(itemKey));
				// Items the worker didn't classify get retried — never silently dropped.
				const unhandled = batch.filter(
					(b) =>
						!ackKeys.has(itemKey(b)) &&
						!dropKeys.has(itemKey(b)) &&
						!retryKeys.has(itemKey(b)),
				);
				const retry = [...result.retry, ...unhandled].map((i) => ({
					...i,
					attempt: i.attempt + 1,
				}));
				// Enforce the retry ceiling; evicted items count as dropped.
				const survivors = retry.filter((i) => i.attempt <= this.opts.maxAttempts);
				summary.dropped += retry.length - survivors.length;
				this.items = [...survivors, ...this.items];
				await this.persist();
				if (result.stop) {
					summary.stopped = true;
					if (result.backoffMs) summary.backoffMs = result.backoffMs;
					break;
				}
			}
			if (this.stopRequested) summary.stopped = true;
			return summary;
		} finally {
			this.draining = false;
		}
	}
	// Remove and return up to batchSize items from the head of the queue.
	private takeBatch(): QueueItem[] {
		const head = this.items.slice(0, this.opts.batchSize);
		this.items = this.items.slice(this.opts.batchSize);
		return head;
	}
	// Hand a defensive snapshot to the persistence callback.
	private async persist(): Promise<void> {
		await this.opts.persist(this.snapshot());
	}
}
export interface DrainSummary {
	/** Number of batches handed to the worker. */
	batches: number;
	/** Items acknowledged (successfully processed). */
	acked: number;
	/** Items discarded: permanent failures plus retry-ceiling evictions. */
	dropped: number;
	/** True when the drain ended early (stop request or worker stop signal). */
	stopped: boolean;
	/** Suggested wait before the next drain, when the worker provided one (ms). */
	backoffMs?: number;
}
export function itemKey(i: QueueItem): string {
	// Renames are keyed by both endpoints; other ops by op + path.
	return i.op === "rename"
		? `rename:${i.oldPath}=>${i.newPath}`
		: `${i.op}:${i.path}`;
}

View file

@ -0,0 +1,380 @@
import {
type App,
Notice,
Platform,
PluginSettingTab,
Setting,
setIcon,
} from "obsidian";
import { AuthError } from "./api-client";
import { normalizeFolder, parseExcludePatterns } from "./excludes";
import { FolderSuggestModal } from "./folder-suggest-modal";
import type SurfSensePlugin from "./main";
import type { SearchSpace } from "./types";
/** Plugin settings tab. */
export class SurfSenseSettingTab extends PluginSettingTab {
	private readonly plugin: SurfSensePlugin;
	// Cached /search-spaces response; refreshed on demand via the reload button.
	private searchSpaces: SearchSpace[] = [];
	// True while a listSearchSpaces request is in flight (drives the dropdown label).
	private loadingSpaces = false;
	constructor(app: App, plugin: SurfSensePlugin) {
		super(app, plugin);
		this.plugin = plugin;
	}
	/**
	 * Render the whole tab from scratch. Called by Obsidian when the tab
	 * opens and re-invoked by our own handlers after state changes; the
	 * order of Setting constructions below is the visual order.
	 */
	display(): void {
		const { containerEl } = this;
		containerEl.empty();
		const settings = this.plugin.settings;
		this.renderConnectionHeading(containerEl);
		new Setting(containerEl)
			.setName("Server URL")
			.setDesc(
				"https://surfsense.com for SurfSense Cloud, or your self-hosted URL.",
			)
			.addText((text) =>
				text
					.setPlaceholder("https://surfsense.com")
					.setValue(settings.serverUrl)
					.onChange(async (value) => {
						const next = value.trim();
						const previous = this.plugin.settings.serverUrl;
						// Changing servers invalidates the old space/connector pairing.
						if (previous !== "" && next !== previous) {
							this.plugin.settings.searchSpaceId = null;
							this.plugin.settings.connectorId = null;
						}
						this.plugin.settings.serverUrl = next;
						await this.plugin.saveSettings();
					}),
			);
		new Setting(containerEl)
			.setName("API token")
			.setDesc(
				"Paste your Surfsense API token (expires after 24 hours; re-paste when you see an auth error).",
			)
			.addText((text) => {
				// Mask the token and keep browsers from autofilling/spellchecking it.
				text.inputEl.type = "password";
				text.inputEl.autocomplete = "off";
				text.inputEl.spellcheck = false;
				text
					.setPlaceholder("Paste token")
					.setValue(settings.apiToken)
					.onChange(async (value) => {
						const next = value.trim();
						const previous = this.plugin.settings.apiToken;
						// A new token may belong to a different account; reset pairing.
						if (previous !== "" && next !== previous) {
							this.plugin.settings.searchSpaceId = null;
							this.plugin.settings.connectorId = null;
						}
						this.plugin.settings.apiToken = next;
						await this.plugin.saveSettings();
					});
			})
			.addButton((btn) =>
				btn
					.setButtonText("Verify")
					.setCta()
					.onClick(async () => {
						btn.setDisabled(true);
						try {
							await this.plugin.api.verifyToken();
							new Notice("Surfsense: token verified.");
							await this.refreshSearchSpaces();
							this.display();
						} catch (err) {
							this.handleApiError(err);
						} finally {
							btn.setDisabled(false);
						}
					}),
			);
		new Setting(containerEl)
			.setName("Search space")
			.setDesc(
				"Which Surfsense search space this vault syncs into. Reload after changing your token.",
			)
			.addDropdown((drop) => {
				drop.addOption("", this.loadingSpaces ? "Loading…" : "Select a search space");
				for (const space of this.searchSpaces) {
					drop.addOption(String(space.id), space.name);
				}
				if (settings.searchSpaceId !== null) {
					drop.setValue(String(settings.searchSpaceId));
				}
				drop.onChange(async (value) => {
					this.plugin.settings.searchSpaceId = value ? Number(value) : null;
					// Connector is per-space; force a fresh handshake below.
					this.plugin.settings.connectorId = null;
					await this.plugin.saveSettings();
					if (this.plugin.settings.searchSpaceId !== null) {
						try {
							await this.plugin.engine.ensureConnected();
							await this.plugin.engine.maybeReconcile(true);
							new Notice("Surfsense: vault connected.");
							this.display();
						} catch (err) {
							this.handleApiError(err);
						}
					}
				});
			})
			.addExtraButton((btn) =>
				btn
					.setIcon("refresh-ccw")
					.setTooltip("Reload search spaces")
					.onClick(async () => {
						await this.refreshSearchSpaces();
						this.display();
					}),
			);
		new Setting(containerEl).setName("Vault").setHeading();
		new Setting(containerEl)
			.setName("Sync interval")
			.setDesc(
				"How often to check for changes made outside Obsidian. Set to off to only sync manually.",
			)
			.addDropdown((drop) => {
				// 0 means "Off" — restartReconcileTimer treats <= 0 as disabled.
				const options: Array<[number, string]> = [
					[0, "Off"],
					[5, "5 minutes"],
					[10, "10 minutes"],
					[15, "15 minutes"],
					[30, "30 minutes"],
					[60, "60 minutes"],
					[120, "2 hours"],
					[360, "6 hours"],
					[720, "12 hours"],
					[1440, "24 hours"],
				];
				for (const [value, label] of options) {
					drop.addOption(String(value), label);
				}
				drop.setValue(String(settings.syncIntervalMinutes));
				drop.onChange(async (value) => {
					this.plugin.settings.syncIntervalMinutes = Number(value);
					await this.plugin.saveSettings();
					this.plugin.restartReconcileTimer();
				});
			});
		this.renderFolderList(
			containerEl,
			"Include folders",
			"Folders to sync (leave empty to sync entire vault).",
			settings.includeFolders,
			(next) => {
				this.plugin.settings.includeFolders = next;
			},
		);
		this.renderFolderList(
			containerEl,
			"Exclude folders",
			"Folders to exclude from sync (takes precedence over includes).",
			settings.excludeFolders,
			(next) => {
				this.plugin.settings.excludeFolders = next;
			},
		);
		new Setting(containerEl)
			.setName("Advanced exclude patterns")
			.setDesc(
				"Glob fallback for power users. One pattern per line, supports * and **. Lines starting with # are comments. Applied on top of the folder lists above.",
			)
			.addTextArea((area) => {
				area.inputEl.rows = 4;
				area
					.setPlaceholder(".trash\n_attachments\ntemplates/**")
					.setValue(settings.excludePatterns.join("\n"))
					.onChange(async (value) => {
						this.plugin.settings.excludePatterns = parseExcludePatterns(value);
						await this.plugin.saveSettings();
					});
			});
		new Setting(containerEl)
			.setName("Include attachments")
			.setDesc(
				"Also sync non-Markdown files such as images and PDFs.",
			)
			.addToggle((toggle) =>
				toggle
					.setValue(settings.includeAttachments)
					.onChange(async (value) => {
						this.plugin.settings.includeAttachments = value;
						await this.plugin.saveSettings();
					}),
			);
		// WiFi-only is meaningless on desktop; only render it on mobile.
		if (Platform.isMobileApp) {
			new Setting(containerEl)
				.setName("Sync only on WiFi")
				.setDesc(
					"Pause automatic syncing on cellular. Note: only Android can detect network type — on iOS this toggle has no effect.",
				)
				.addToggle((toggle) =>
					toggle
						.setValue(settings.wifiOnly)
						.onChange(async (value) => {
							this.plugin.settings.wifiOnly = value;
							await this.plugin.saveSettings();
						}),
				);
		}
		new Setting(containerEl)
			.setName("Force sync")
			.setDesc("Manually re-index the entire vault now.")
			.addButton((btn) =>
				btn.setButtonText("Update").onClick(async () => {
					btn.setDisabled(true);
					try {
						await this.plugin.engine.maybeReconcile(true);
						new Notice("Surfsense: re-sync requested.");
					} catch (err) {
						this.handleApiError(err);
					} finally {
						btn.setDisabled(false);
					}
				}),
			);
		new Setting(containerEl)
			.addButton((btn) =>
				btn
					.setButtonText("View sync status")
					.setCta()
					.onClick(() => this.plugin.openStatusModal()),
			)
			.addButton((btn) =>
				btn.setButtonText("Open releases").onClick(() => {
					window.open(
						"https://github.com/MODSetter/SurfSense/releases?q=obsidian",
						"_blank",
					);
				}),
			);
	}
	/** Render the "Connection" heading with a coloured state indicator. */
	private renderConnectionHeading(containerEl: HTMLElement): void {
		const heading = new Setting(containerEl).setName("Connection").setHeading();
		heading.nameEl.addClass("surfsense-connection-heading");
		const indicator = heading.nameEl.createSpan({
			cls: "surfsense-connection-indicator",
		});
		const visual = this.getConnectionVisual();
		indicator.addClass(`surfsense-connection-indicator--${visual.tone}`);
		setIcon(indicator, visual.icon);
		indicator.setAttr("aria-label", visual.label);
		indicator.setAttr("title", visual.label);
	}
	/**
	 * Map plugin status + settings completeness to an indicator visual.
	 * Error states win over incomplete-setup states, which win over
	 * syncing/connected.
	 */
	private getConnectionVisual(): {
		icon: string;
		label: string;
		tone: "ok" | "syncing" | "warn" | "err" | "muted";
	} {
		const settings = this.plugin.settings;
		const kind = this.plugin.lastStatus.kind;
		if (kind === "auth-error") {
			return { icon: "lock", label: "Token invalid or expired", tone: "err" };
		}
		if (kind === "error") {
			return { icon: "alert-circle", label: "Connection error", tone: "err" };
		}
		if (kind === "offline") {
			return { icon: "wifi-off", label: "Server unreachable", tone: "warn" };
		}
		if (!settings.apiToken) {
			return { icon: "circle", label: "Missing API token", tone: "muted" };
		}
		if (!settings.searchSpaceId) {
			return { icon: "circle", label: "Pick a search space", tone: "muted" };
		}
		if (!settings.connectorId) {
			return { icon: "circle", label: "Not connected yet", tone: "muted" };
		}
		if (kind === "syncing" || kind === "queued") {
			return { icon: "refresh-ccw", label: "Connected and syncing", tone: "syncing" };
		}
		return { icon: "check-circle", label: "Connected", tone: "ok" };
	}
	/** Re-fetch the search-space list; failures surface a notice and clear the cache. */
	private async refreshSearchSpaces(): Promise<void> {
		this.loadingSpaces = true;
		try {
			this.searchSpaces = await this.plugin.api.listSearchSpaces();
		} catch (err) {
			this.handleApiError(err);
			this.searchSpaces = [];
		} finally {
			this.loadingSpaces = false;
		}
	}
	/**
	 * Render an editable folder list: an "Add folder" button backed by the
	 * folder-suggest modal plus one removable row per current entry.
	 * `write` stores the new list; persistence + re-render happen here.
	 */
	private renderFolderList(
		containerEl: HTMLElement,
		title: string,
		desc: string,
		current: string[],
		write: (next: string[]) => void,
	): void {
		const setting = new Setting(containerEl).setName(title).setDesc(desc);
		const persist = async (next: string[]): Promise<void> => {
			// Normalize and dedupe before writing so "foo/" and "foo" collapse.
			const dedup = Array.from(new Set(next.map(normalizeFolder)));
			write(dedup);
			await this.plugin.saveSettings();
			this.display();
		};
		setting.addButton((btn) =>
			btn
				.setButtonText("Add folder")
				.setCta()
				.onClick(() => {
					new FolderSuggestModal(
						this.app,
						(picked) => {
							void persist([...current, picked]);
						},
						current,
					).open();
				}),
		);
		for (const folder of current) {
			new Setting(containerEl).setName(folder || "/").addExtraButton((btn) =>
				btn
					.setIcon("cross")
					.setTooltip("Remove")
					.onClick(() => {
						void persist(current.filter((f) => f !== folder));
					}),
			);
		}
	}
	/** Surface API failures as notices; AuthError gets its own message verbatim. */
	private handleApiError(err: unknown): void {
		if (err instanceof AuthError) {
			new Notice(`SurfSense: ${err.message}`);
			return;
		}
		new Notice(
			`SurfSense: request failed — ${(err as Error).message ?? "unknown error"}`,
		);
	}
}

View file

@ -0,0 +1,65 @@
import { setIcon } from "obsidian";
import type { StatusKind, StatusState } from "./types";
/**
* Tiny status-bar adornment.
*
* Plain DOM (no HTML strings, no CSS-in-JS) so it stays cheap on mobile
* and Obsidian's lint doesn't complain about innerHTML.
*/
interface StatusVisual {
	// Icon name passed to Obsidian's setIcon().
	icon: string;
	// Human-readable state label shown next to the icon.
	label: string;
	// CSS modifier class applied to the status-bar element.
	cls: string;
}
/** One visual per StatusKind; StatusBar.update() swaps between these wholesale. */
const VISUALS: Record<StatusKind, StatusVisual> = {
	idle: { icon: "check-circle", label: "Synced", cls: "surfsense-status--ok" },
	syncing: { icon: "refresh-ccw", label: "Syncing", cls: "surfsense-status--syncing" },
	queued: { icon: "upload", label: "Queued", cls: "surfsense-status--syncing" },
	offline: { icon: "wifi-off", label: "Offline", cls: "surfsense-status--warn" },
	"auth-error": { icon: "lock", label: "Auth error", cls: "surfsense-status--err" },
	error: { icon: "alert-circle", label: "Error", cls: "surfsense-status--err" },
};
export class StatusBar {
	private readonly el: HTMLElement;
	private readonly icon: HTMLElement;
	private readonly text: HTMLElement;
	constructor(host: HTMLElement, onClick?: () => void) {
		this.el = host;
		this.el.addClass("surfsense-status");
		// Icon span first, then the label span — DOM order is visual order.
		this.icon = this.el.createSpan({ cls: "surfsense-status__icon" });
		this.text = this.el.createSpan({ cls: "surfsense-status__text" });
		if (onClick) {
			this.el.addClass("surfsense-status--clickable");
			this.el.addEventListener("click", onClick);
		}
		this.update({ kind: "idle", queueDepth: 0 });
	}
	/** Repaint icon, label, tone class, and hover/accessibility text for `state`. */
	update(state: StatusState): void {
		const visual = VISUALS[state.kind];
		// Strip every tone modifier before applying the current one.
		const toneClasses = [
			"surfsense-status--ok",
			"surfsense-status--syncing",
			"surfsense-status--warn",
			"surfsense-status--err",
		];
		for (const cls of toneClasses) this.el.removeClass(cls);
		this.el.addClass(visual.cls);
		setIcon(this.icon, visual.icon);
		const showDepth = state.queueDepth > 0 && state.kind !== "idle";
		const label = showDepth
			? `SurfSense: ${visual.label} (${state.queueDepth})`
			: `SurfSense: ${visual.label}`;
		this.text.setText(label);
		this.el.setAttr(
			"aria-label",
			state.detail ? `${label}${state.detail}` : label,
		);
		this.el.setAttr("title", state.detail ?? label);
	}
}

View file

@ -0,0 +1,76 @@
import { type App, Modal, Notice, Setting } from "obsidian";
import type SurfSensePlugin from "./main";
/** Live status panel reachable from the status bar / command palette. */
export class StatusModal extends Modal {
	private readonly plugin: SurfSensePlugin;
	// Bound once so the same reference can be registered and unregistered.
	private readonly onChange = (): void => this.render();
	constructor(app: App, plugin: SurfSensePlugin) {
		super(app);
		this.plugin = plugin;
	}
	/** Subscribe to live status updates and paint the first frame. */
	onOpen(): void {
		this.setTitle("Surfsense status");
		this.plugin.onStatusChange(this.onChange);
		this.render();
	}
	/** Unsubscribe and clear the content so the next open starts clean. */
	onClose(): void {
		this.plugin.offStatusChange(this.onChange);
		this.contentEl.empty();
	}
	/**
	 * Full repaint: label/value rows followed by the action buttons.
	 * Re-invoked on every status change while the modal is open.
	 */
	private render(): void {
		const { contentEl, plugin } = this;
		contentEl.empty();
		const s = plugin.settings;
		const rows: Array<[string, string]> = [
			["Status", plugin.lastStatus.kind],
			[
				"Last sync",
				s.lastSyncAt ? new Date(s.lastSyncAt).toLocaleString() : "—",
			],
			[
				"Last reconcile",
				s.lastReconcileAt
					? new Date(s.lastReconcileAt).toLocaleString()
					: "—",
			],
			["Files synced", String(s.filesSynced ?? 0)],
			["Queue depth", String(plugin.queueDepth)],
			[
				"Capabilities",
				plugin.serverCapabilities.length
					? plugin.serverCapabilities.join(", ")
					: "(not yet handshaken)",
			],
		];
		for (const [label, value] of rows) {
			new Setting(contentEl).setName(label).setDesc(value);
		}
		new Setting(contentEl)
			.addButton((btn) =>
				btn
					.setButtonText("Re-sync entire vault")
					.setCta()
					.onClick(async () => {
						btn.setDisabled(true);
						try {
							await plugin.engine.maybeReconcile(true);
							new Notice("Surfsense: re-sync requested.");
						} catch (err) {
							new Notice(
								`Surfsense: re-sync failed — ${(err as Error).message}`,
							);
						} finally {
							btn.setDisabled(false);
						}
					}),
			)
			.addButton((btn) => btn.setButtonText("Close").onClick(() => this.close()));
	}
}

View file

@ -0,0 +1,610 @@
import {
type App,
type CachedMetadata,
type Debouncer,
Notice,
type TAbstractFile,
TFile,
debounce,
} from "obsidian";
import {
AuthError,
PermanentError,
type SurfSenseApiClient,
TransientError,
VaultNotRegisteredError,
} from "./api-client";
import { isExcluded, isFolderFiltered } from "./excludes";
import { buildNotePayload } from "./payload";
import { type BatchResult, PersistentQueue } from "./queue";
import type {
HealthResponse,
ManifestEntry,
NotePayload,
QueueItem,
StatusKind,
StatusState,
} from "./types";
import { computeVaultFingerprint } from "./vault-identity";
/**
* Reconciles vault state with the server.
 * Start order: connect (or /health) → drain queue → reconcile → subscribe to events.
*/
export interface SyncEngineDeps {
	/** Obsidian app handle (vault + metadata cache access). */
	app: App;
	/** HTTP client for the SurfSense backend. */
	apiClient: SurfSenseApiClient;
	/** Durable upload queue shared with main.ts. */
	queue: PersistentQueue;
	/** Live settings accessor — read fresh each time, never cached. */
	getSettings: () => SyncEngineSettings;
	/** Mutate-and-persist settings helper provided by the plugin. */
	saveSettings: (mut: (s: SyncEngineSettings) => void) => Promise<void>;
	/** Push a status update toward the UI (status bar / modal). */
	setStatus: (s: StatusState) => void;
	/** Receives the server's capability list after a handshake. */
	onCapabilities: (caps: string[]) => void;
	/** Fired when the adaptive backoff multiplier may have changed; main.ts uses it to reschedule. */
	onReconcileBackoffChanged?: () => void;
}
export interface SyncEngineSettings {
	/** Stable identifier for this vault; may be replaced by the server on connect. */
	vaultId: string;
	/** Server-assigned connector row id, or null before the first handshake. */
	connectorId: number | null;
	/** Target search space, or null until the user picks one. */
	searchSpaceId: number | null;
	/** Folder allow-list; empty means the whole vault. */
	includeFolders: string[];
	/** Folder deny-list; takes precedence over includes. */
	excludeFolders: string[];
	/** Glob-style exclude patterns layered on top of the folder lists. */
	excludePatterns: string[];
	/** Whether non-Markdown files are synced too. */
	includeAttachments: boolean;
	/** Epoch ms of the last reconcile, or null if never run. */
	lastReconcileAt: number | null;
	/** Epoch ms of the last successful sync, or null if never run. */
	lastSyncAt: number | null;
	/** Running count of files synced (shown in the status modal). */
	filesSynced: number;
	/** Delete markers keyed by path — presumably path → deletion epoch ms; confirm against reconcile logic. */
	tombstones: Record<string, number>;
}
/** Floor between automatic reconciles, regardless of the user's configured interval. */
export const RECONCILE_MIN_INTERVAL_MS = 5 * 60 * 1000;
/** Lifetime of tombstone entries — presumably pruned after this TTL; confirm in reconcile code. */
const TOMBSTONE_TTL_MS = 24 * 60 * 60 * 1000; // 1 day
/** Debounce window — presumably for md edits awaiting a metadataCache refresh; confirm in onModify. */
const PENDING_DEBOUNCE_MS = 1500;
export class SyncEngine {
	private readonly deps: SyncEngineDeps;
	// Capability strings from the most recent /connect or /health response.
	private capabilities: string[] = [];
	// Per-path debounced upserts for markdown files awaiting fresh metadata.
	private pendingMdEdits = new Map<string, Debouncer<[], void>>();
	/** Consecutive reconciles that found no work; powers the adaptive interval. */
	private idleReconcileStreak = 0;
	/** 2^streak is capped at this value (e.g. 8 → max ×8 backoff). */
	private readonly maxBackoffMultiplier = 8;

	constructor(deps: SyncEngineDeps) {
		this.deps = deps;
	}

	/** Returns the next-tick interval given the user's base, scaled by the idle streak. */
	getReconcileBackoffMs(baseMs: number): number {
		const multiplier = Math.min(2 ** this.idleReconcileStreak, this.maxBackoffMultiplier);
		return baseMs * multiplier;
	}

	getCapabilities(): readonly string[] {
		return this.capabilities;
	}

	/** True when the server advertised the given capability on the last handshake. */
	supports(capability: string): boolean {
		return this.capabilities.includes(capability);
	}

	/** Run the onload sequence described in this file's docstring. */
	async start(): Promise<void> {
		this.setStatus("syncing", "Connecting to SurfSense…");
		const settings = this.deps.getSettings();
		if (!settings.searchSpaceId) {
			// No target yet — /health still surfaces auth/network errors.
			try {
				const health = await this.deps.apiClient.health();
				this.applyHealth(health);
			} catch (err) {
				this.handleStartupError(err);
				return;
			}
			this.setStatus("idle", "Pick a search space in settings to start syncing.");
			return;
		}
		// Re-announce so the backend sees the latest vault_name + last_connect_at.
		// flushQueue gates on connectorId, so a failed connect leaves the queue intact.
		await this.ensureConnected();
		await this.flushQueue();
		await this.maybeReconcile();
		this.setStatus(this.queueStatusKind(), undefined);
	}

	/**
	 * (Re)register the vault. Adopts server's `vault_id` in case fingerprint
	 * dedup routed us to an existing row from another device.
	 */
	async ensureConnected(): Promise<void> {
		const settings = this.deps.getSettings();
		if (!settings.searchSpaceId) {
			this.setStatus("idle", "Pick a search space in settings.");
			return;
		}
		this.setStatus("syncing", "Connecting to SurfSense");
		try {
			const fingerprint = await computeVaultFingerprint(this.deps.app);
			const resp = await this.deps.apiClient.connect({
				searchSpaceId: settings.searchSpaceId,
				vaultId: settings.vaultId,
				vaultName: this.deps.app.vault.getName(),
				vaultFingerprint: fingerprint,
			});
			this.applyHealth(resp);
			// Persist the (possibly merged) server identifiers before any /sync call uses them.
			await this.deps.saveSettings((s) => {
				s.vaultId = resp.vault_id;
				s.connectorId = resp.connector_id;
			});
			this.setStatus(this.queueStatusKind(), this.statusDetail());
		} catch (err) {
			this.handleStartupError(err);
		}
	}

	/** Cache server capabilities from a /connect or /health response and notify the host. */
	applyHealth(h: HealthResponse): void {
		this.capabilities = Array.isArray(h.capabilities) ? [...h.capabilities] : [];
		this.deps.onCapabilities(this.capabilities);
	}

	// ---- vault event handlers --------------------------------------------

	/** File created: markdown waits for metadata via debounce; anything else enqueues immediately. */
	onCreate(file: TAbstractFile): void {
		if (!this.shouldTrack(file)) return;
		const settings = this.deps.getSettings();
		if (this.isExcluded(file.path, settings)) return;
		this.resetIdleStreak();
		if (this.isMarkdown(file)) {
			this.scheduleMdUpsert(file.path);
			return;
		}
		this.deps.queue.enqueueUpsert(file.path);
	}

	/** File content changed; same routing as onCreate. */
	onModify(file: TAbstractFile): void {
		if (!this.shouldTrack(file)) return;
		const settings = this.deps.getSettings();
		if (this.isExcluded(file.path, settings)) return;
		this.resetIdleStreak();
		if (this.isMarkdown(file)) {
			// Wait for metadataCache.changed so the payload sees fresh metadata.
			this.scheduleMdUpsert(file.path);
			return;
		}
		this.deps.queue.enqueueUpsert(file.path);
	}

	/**
	 * File removed: enqueue a remote delete and tombstone the path so reconcile
	 * doesn't immediately re-delete it.
	 * NOTE(review): unlike create/modify there is no isExcluded check here —
	 * presumably so files synced before an exclusion was added still get cleaned
	 * up remotely; confirm intentional.
	 */
	onDelete(file: TAbstractFile): void {
		if (!this.shouldTrack(file)) return;
		this.resetIdleStreak();
		this.deps.queue.enqueueDelete(file.path);
		void this.deps.saveSettings((s) => {
			s.tombstones[file.path] = Date.now();
		});
	}

	/** Rename: if the new path is excluded, treat it as a delete of the old path. */
	onRename(file: TAbstractFile, oldPath: string): void {
		if (!this.shouldTrack(file)) return;
		this.resetIdleStreak();
		const settings = this.deps.getSettings();
		if (this.isExcluded(file.path, settings)) {
			this.deps.queue.enqueueDelete(oldPath);
			void this.deps.saveSettings((s) => {
				s.tombstones[oldPath] = Date.now();
			});
			return;
		}
		this.deps.queue.enqueueRename(oldPath, file.path);
	}

	/** metadataCache.changed fired — the deferred markdown upsert can run right away. */
	onMetadataChanged(file: TFile, _data: string, _cache: CachedMetadata): void {
		if (!this.shouldTrack(file)) return;
		const settings = this.deps.getSettings();
		if (this.isExcluded(file.path, settings)) return;
		if (!this.isMarkdown(file)) return;
		// Metadata is fresh now — cancel the deferred upsert and enqueue immediately.
		const pending = this.pendingMdEdits.get(file.path);
		if (pending) {
			pending.cancel();
			this.pendingMdEdits.delete(file.path);
		}
		this.deps.queue.enqueueUpsert(file.path);
	}

	/** Lazily create (and fire) the per-path debouncer for a markdown upsert. */
	private scheduleMdUpsert(path: string): void {
		let pending = this.pendingMdEdits.get(path);
		if (!pending) {
			// resetTimer: true → each edit pushes the upsert out by another PENDING_DEBOUNCE_MS.
			pending = debounce(
				() => {
					this.pendingMdEdits.delete(path);
					this.deps.queue.enqueueUpsert(path);
				},
				PENDING_DEBOUNCE_MS,
				true,
			);
			this.pendingMdEdits.set(path, pending);
		}
		pending();
	}

	// ---- queue draining ---------------------------------------------------

	/** Drain the persistent queue via processBatch, then update sync stats and status. */
	async flushQueue(): Promise<void> {
		if (this.deps.queue.size === 0) return;
		// Shared gate for every flush trigger so the first /sync can't race /connect.
		if (!this.deps.getSettings().connectorId) {
			await this.ensureConnected();
			if (!this.deps.getSettings().connectorId) return;
		}
		this.setStatus("syncing", `Syncing ${this.deps.queue.size} item(s)…`);
		const summary = await this.deps.queue.drain({
			processBatch: (batch) => this.processBatch(batch),
		});
		if (summary.acked > 0) {
			await this.deps.saveSettings((s) => {
				s.lastSyncAt = Date.now();
				s.filesSynced = (s.filesSynced ?? 0) + summary.acked;
			});
		}
		this.setStatus(this.queueStatusKind(), this.statusDetail());
	}

	/**
	 * Process one queue batch: renames, then deletes, then upserts. Each item
	 * lands in exactly one of acked/retry/dropped; `stop: true` aborts the
	 * drain (auth failure or offline) leaving unprocessed items queued.
	 */
	private async processBatch(batch: QueueItem[]): Promise<BatchResult> {
		const settings = this.deps.getSettings();
		const upserts = batch.filter((b): b is QueueItem & { op: "upsert" } => b.op === "upsert");
		const renames = batch.filter((b): b is QueueItem & { op: "rename" } => b.op === "rename");
		const deletes = batch.filter((b): b is QueueItem & { op: "delete" } => b.op === "delete");
		const acked: QueueItem[] = [];
		const retry: QueueItem[] = [];
		const dropped: QueueItem[] = [];
		// Renames first so paths line up before content upserts.
		if (renames.length > 0) {
			try {
				const resp = await this.deps.apiClient.renameBatch({
					vaultId: settings.vaultId,
					renames: renames.map((r) => ({ oldPath: r.oldPath, newPath: r.newPath })),
				});
				// NUL separator can't appear in vault paths, so the composite key is unambiguous.
				const failed = new Set(
					resp.failed.map((f) => `${f.oldPath}\u0000${f.newPath}`),
				);
				for (const r of renames) {
					if (failed.has(`${r.oldPath}\u0000${r.newPath}`)) retry.push(r);
					else acked.push(r);
				}
			} catch (err) {
				if (await this.handleVaultNotRegistered(err)) {
					retry.push(...renames);
				} else {
					const verdict = this.classify(err);
					if (verdict === "stop") return { acked, retry: [...retry, ...renames], dropped, stop: true };
					if (verdict === "retry") retry.push(...renames);
					else dropped.push(...renames);
				}
			}
		}
		if (deletes.length > 0) {
			try {
				const resp = await this.deps.apiClient.deleteBatch({
					vaultId: settings.vaultId,
					paths: deletes.map((d) => d.path),
				});
				const failed = new Set(resp.failed);
				for (const d of deletes) {
					if (failed.has(d.path)) retry.push(d);
					else acked.push(d);
				}
			} catch (err) {
				if (await this.handleVaultNotRegistered(err)) {
					retry.push(...deletes);
				} else {
					const verdict = this.classify(err);
					if (verdict === "stop") return { acked, retry: [...retry, ...deletes], dropped, stop: true };
					if (verdict === "retry") retry.push(...deletes);
					else dropped.push(...deletes);
				}
			}
		}
		if (upserts.length > 0) {
			const payloads: NotePayload[] = [];
			for (const item of upserts) {
				const file = this.deps.app.vault.getFileByPath(item.path);
				if (!file) {
					// Vanished — ack now; the delete event will follow if needed.
					acked.push(item);
					continue;
				}
				try {
					const payload = this.isMarkdown(file)
						? await buildNotePayload(this.deps.app, file, settings.vaultId)
						: await this.buildBinaryPayload(file, settings.vaultId);
					payloads.push(payload);
				} catch (err) {
					console.error("SurfSense: failed to build payload", item.path, err);
					retry.push(item);
				}
			}
			if (payloads.length > 0) {
				try {
					const resp = await this.deps.apiClient.syncBatch({
						vaultId: settings.vaultId,
						notes: payloads,
					});
					// Per-note failures retry; queue maxAttempts drops poison pills.
					const failed = new Set(resp.failed);
					for (const item of upserts) {
						// Items that already failed payload-building stay in retry only once.
						if (retry.find((r) => r === item)) continue;
						if (failed.has(item.path)) retry.push(item);
						else acked.push(item);
					}
				} catch (err) {
					if (await this.handleVaultNotRegistered(err)) {
						for (const item of upserts) {
							if (retry.find((r) => r === item)) continue;
							retry.push(item);
						}
					} else {
						const verdict = this.classify(err);
						if (verdict === "stop")
							return { acked, retry: [...retry, ...upserts], dropped, stop: true };
						if (verdict === "retry") retry.push(...upserts);
						else dropped.push(...upserts);
					}
				}
			}
		}
		return { acked, retry, dropped, stop: false };
	}

	/** Build a content-less payload for a non-markdown attachment (hash + stat only). */
	private async buildBinaryPayload(file: TFile, vaultId: string): Promise<NotePayload> {
		// Attachments skip buildNotePayload (no markdown metadata) but still
		// need hash + stat so the server can de-dupe and manifest diff works.
		const buf = await this.deps.app.vault.readBinary(file);
		const digest = await crypto.subtle.digest("SHA-256", buf);
		const hash = bufferToHex(digest);
		return {
			vault_id: vaultId,
			path: file.path,
			name: file.basename,
			extension: file.extension,
			content: "",
			frontmatter: {},
			tags: [],
			headings: [],
			resolved_links: [],
			unresolved_links: [],
			embeds: [],
			aliases: [],
			content_hash: hash,
			size: file.stat.size,
			mtime: file.stat.mtime,
			ctime: file.stat.ctime,
			is_binary: true,
		};
	}

	// ---- reconcile --------------------------------------------------------

	/**
	 * Reconcile local vault vs server manifest, rate-limited to
	 * RECONCILE_MIN_INTERVAL_MS unless `force` is set.
	 */
	async maybeReconcile(force = false): Promise<void> {
		const settings = this.deps.getSettings();
		if (!settings.connectorId) return;
		if (!force && settings.lastReconcileAt) {
			if (Date.now() - settings.lastReconcileAt < RECONCILE_MIN_INTERVAL_MS) return;
		}
		// Re-handshake first: if the vault grew enough to match another
		// device's fingerprint, the server merges and routes us to the
		// survivor row, which the /manifest call below then uses.
		await this.ensureConnected();
		const refreshed = this.deps.getSettings();
		if (!refreshed.connectorId) return;
		this.setStatus("syncing", "Reconciling vault with server…");
		try {
			const manifest = await this.deps.apiClient.getManifest(refreshed.vaultId);
			const remote = manifest.items ?? {};
			const enqueued = this.diffAndQueue(refreshed, remote);
			await this.deps.saveSettings((s) => {
				s.lastReconcileAt = Date.now();
				s.tombstones = pruneTombstones(s.tombstones);
			});
			this.updateIdleStreak(enqueued);
			await this.flushQueue();
		} catch (err) {
			this.classifyAndStatus(err, "Reconcile failed");
		}
	}

	/**
	 * Diff local vault vs server manifest and enqueue work. Skips disk reads
	 * on idle reconciles by short-circuiting on `mtime + size`; false positives
	 * collapse to a no-op upsert via the server's `content_hash` check.
	 * Returns the enqueued count to drive adaptive backoff.
	 */
	private diffAndQueue(
		settings: SyncEngineSettings,
		remote: Record<string, ManifestEntry>,
	): number {
		const localFiles = this.deps.app.vault.getFiles().filter((f) => {
			if (!this.shouldTrack(f)) return false;
			if (this.isExcluded(f.path, settings)) return false;
			return true;
		});
		const localPaths = new Set(localFiles.map((f) => f.path));
		let enqueued = 0;
		for (const file of localFiles) {
			const remoteEntry = remote[file.path];
			if (!remoteEntry) {
				this.deps.queue.enqueueUpsert(file.path);
				enqueued++;
				continue;
			}
			const remoteMtimeMs = toMillis(remoteEntry.mtime);
			// 1s slack absorbs filesystem/server timestamp rounding.
			const mtimeMatches = file.stat.mtime <= remoteMtimeMs + 1000;
			// Older server rows lack `size` — treat as unknown and re-upsert.
			const sizeMatches =
				typeof remoteEntry.size === "number" && file.stat.size === remoteEntry.size;
			if (mtimeMatches && sizeMatches) continue;
			this.deps.queue.enqueueUpsert(file.path);
			enqueued++;
		}
		// Remote-only → delete, unless a fresh tombstone is already in the queue.
		for (const path of Object.keys(remote)) {
			if (localPaths.has(path)) continue;
			const tombstone = settings.tombstones[path];
			if (tombstone && Date.now() - tombstone < TOMBSTONE_TTL_MS) continue;
			this.deps.queue.enqueueDelete(path);
			enqueued++;
		}
		return enqueued;
	}

	/** Bump (idle) or reset (active) the streak; notify only when the capped multiplier changes. */
	private updateIdleStreak(enqueued: number): void {
		const previousStreak = this.idleReconcileStreak;
		if (enqueued === 0) this.idleReconcileStreak++;
		else this.idleReconcileStreak = 0;
		const cap = Math.log2(this.maxBackoffMultiplier);
		const cappedPrev = Math.min(previousStreak, cap);
		const cappedNow = Math.min(this.idleReconcileStreak, cap);
		if (cappedPrev !== cappedNow) this.deps.onReconcileBackoffChanged?.();
	}

	/** Vault edit — drop back to base interval immediately. */
	private resetIdleStreak(): void {
		if (this.idleReconcileStreak === 0) return;
		this.idleReconcileStreak = 0;
		this.deps.onReconcileBackoffChanged?.();
	}

	// ---- status helpers ---------------------------------------------------

	/** Push a status snapshot, always including the current queue depth. */
	private setStatus(kind: StatusKind, detail?: string): void {
		this.deps.setStatus({ kind, detail, queueDepth: this.deps.queue.size });
	}

	/** "queued" while work is pending, otherwise "idle". */
	private queueStatusKind(): StatusKind {
		if (this.deps.queue.size > 0) return "queued";
		return "idle";
	}

	/** Human-readable "Last sync …" detail, or undefined before the first sync. */
	private statusDetail(): string | undefined {
		const settings = this.deps.getSettings();
		if (settings.lastSyncAt) {
			return `Last sync ${formatRelative(settings.lastSyncAt)}`;
		}
		return undefined;
	}

	/** Map startup/connect failures onto status kinds (auth vs offline vs generic). */
	private handleStartupError(err: unknown): void {
		if (err instanceof AuthError) {
			this.setStatus("auth-error", err.message);
			return;
		}
		if (err instanceof TransientError) {
			this.setStatus("offline", err.message);
			return;
		}
		this.setStatus("error", (err as Error).message ?? "Unknown error");
	}

	/** Re-connect on VAULT_NOT_REGISTERED so the next drain sees the new row. */
	private async handleVaultNotRegistered(err: unknown): Promise<boolean> {
		if (!(err instanceof VaultNotRegisteredError)) return false;
		console.warn("SurfSense: vault not registered, re-connecting before retry", err);
		await this.ensureConnected();
		return true;
	}

	/**
	 * Decide what a batch-level error means for its items:
	 * auth/transient → stop the drain, permanent → drop, unknown → retry.
	 */
	private classify(err: unknown): "ack" | "retry" | "drop" | "stop" {
		if (err instanceof AuthError) {
			this.setStatus("auth-error", err.message);
			return "stop";
		}
		if (err instanceof TransientError) {
			this.setStatus("offline", err.message);
			return "stop";
		}
		if (err instanceof PermanentError) {
			console.warn("SurfSense: permanent error, dropping batch", err);
			new Notice(`Surfsense: ${err.message}`);
			return "drop";
		}
		console.error("SurfSense: unknown error", err);
		return "retry";
	}

	/** classify() for side effects, then surface a prefixed detail in the status bar. */
	private classifyAndStatus(err: unknown, prefix: string): void {
		this.classify(err);
		this.setStatus(this.queueStatusKind(), `${prefix}: ${(err as Error).message}`);
	}

	// ---- predicates -------------------------------------------------------

	/** True for real files (not folders), honoring the attachments toggle. */
	private shouldTrack(file: TAbstractFile): boolean {
		if (!isTFile(file)) return false;
		const settings = this.deps.getSettings();
		if (!settings.includeAttachments && !this.isMarkdown(file)) return false;
		return true;
	}

	/** Combined folder-filter + pattern exclusion check. */
	private isExcluded(path: string, settings: SyncEngineSettings): boolean {
		if (isFolderFiltered(path, settings.includeFolders, settings.excludeFolders)) {
			return true;
		}
		return isExcluded(path, settings.excludePatterns);
	}

	/** Case-insensitive `.md` extension check on files only. */
	private isMarkdown(file: TAbstractFile): boolean {
		return isTFile(file) && file.extension.toLowerCase() === "md";
	}
}
/** Type guard: narrows a vault entry to a concrete file (folders excluded). */
function isTFile(f: TAbstractFile): f is TFile {
	return f instanceof TFile;
}
/** Render an ArrayBuffer as a lowercase hex string (two digits per byte). */
function bufferToHex(buf: ArrayBuffer): string {
	const bytes = new Uint8Array(buf);
	return Array.from(bytes, (byte) => byte.toString(16).padStart(2, "0")).join("");
}
/** Coarse "time ago" label for a past epoch-ms timestamp (m/h/d granularity). */
function formatRelative(ts: number): string {
	const elapsed = Date.now() - ts;
	if (elapsed >= 86_400_000) return `${Math.round(elapsed / 86_400_000)}d ago`;
	if (elapsed >= 3600_000) return `${Math.round(elapsed / 3600_000)}h ago`;
	if (elapsed >= 60_000) return `${Math.round(elapsed / 60_000)}m ago`;
	return "just now";
}
/**
 * Normalise a manifest mtime to epoch milliseconds.
 * Manifest values arrive as ISO strings, vault stats as epoch ms; Dates are
 * accepted defensively. Unparseable strings collapse to 0 (always stale).
 */
function toMillis(value: number | string | Date): number {
	if (value instanceof Date) return value.getTime();
	if (typeof value === "number") return value;
	const ms = Date.parse(value);
	return Number.isFinite(ms) ? ms : 0;
}
/** Drop tombstones older than TOMBSTONE_TTL_MS; returns a fresh map, input untouched. */
function pruneTombstones(tombstones: Record<string, number>): Record<string, number> {
	const oldestKept = Date.now() - TOMBSTONE_TTL_MS;
	const kept: Record<string, number> = {};
	for (const path of Object.keys(tombstones)) {
		const deletedAt = tombstones[path];
		if (deletedAt !== undefined && deletedAt >= oldestKept) kept[path] = deletedAt;
	}
	return kept;
}

View file

@ -0,0 +1,187 @@
/** Shared types for the SurfSense Obsidian plugin. Leaf module — no src/ imports. */
export interface SurfsensePluginSettings {
	serverUrl: string;
	apiToken: string;
	searchSpaceId: number | null;
	connectorId: number | null;
	/** UUID for the vault — lives here so Obsidian Sync replicates it across devices. */
	vaultId: string;
	/** 0 disables periodic reconcile (Force sync still works). */
	syncIntervalMinutes: number;
	/** Mobile-only: pause auto-sync when on cellular. iOS can't detect network type, so the toggle is a no-op there. */
	wifiOnly: boolean;
	includeFolders: string[];
	excludeFolders: string[];
	excludePatterns: string[];
	includeAttachments: boolean;
	lastSyncAt: number | null;
	lastReconcileAt: number | null;
	filesSynced: number;
	// Persisted offline queue; survives plugin reloads.
	queue: QueueItem[];
	// path → deletion timestamp (ms); shields recent deletes from reconcile.
	tombstones: Record<string, number>;
}

/** Defaults merged into loaded data on plugin startup. */
export const DEFAULT_SETTINGS: SurfsensePluginSettings = {
	serverUrl: "https://surfsense.com",
	apiToken: "",
	searchSpaceId: null,
	connectorId: null,
	// Empty until first run; generated client-side, then adopted from the server on /connect.
	vaultId: "",
	syncIntervalMinutes: 10,
	wifiOnly: false,
	includeFolders: [],
	excludeFolders: [],
	excludePatterns: [".trash", "_attachments", "templates"],
	includeAttachments: false,
	lastSyncAt: null,
	lastReconcileAt: null,
	filesSynced: 0,
	queue: [],
	tombstones: {},
};
/** Queue operation discriminator. */
export type QueueOp = "upsert" | "delete" | "rename";

/** Create-or-update of a file's content on the server. */
export interface UpsertItem {
	op: "upsert";
	path: string;
	enqueuedAt: number;
	// Retry counter; the queue drops items past its maxAttempts.
	attempt: number;
}

/** Remote deletion of a path. */
export interface DeleteItem {
	op: "delete";
	path: string;
	enqueuedAt: number;
	attempt: number;
}

/** Server-side path move (content untouched). */
export interface RenameItem {
	op: "rename";
	oldPath: string;
	newPath: string;
	enqueuedAt: number;
	attempt: number;
}

/** Discriminated union of all queueable operations (tag: `op`). */
export type QueueItem = UpsertItem | DeleteItem | RenameItem;
/** One note as sent to POST /sync. snake_case keys mirror the backend schema. */
export interface NotePayload {
	vault_id: string;
	path: string;
	name: string;
	extension: string;
	// Empty string for binary attachments (see is_binary usage in sync-engine).
	content: string;
	frontmatter: Record<string, unknown>;
	tags: string[];
	headings: HeadingRef[];
	resolved_links: string[];
	unresolved_links: string[];
	embeds: string[];
	aliases: string[];
	content_hash: string;
	/** Byte size of the local file; pairs with mtime for the reconcile short-circuit. */
	size: number;
	mtime: number;
	ctime: number;
	[key: string]: unknown;
}

/** Heading text + level (1–6) extracted from markdown. */
export interface HeadingRef {
	heading: string;
	level: number;
}

/** Search-space row as returned by the backend list endpoint. */
export interface SearchSpace {
	id: number;
	name: string;
	description?: string;
	[key: string]: unknown;
}

/** Response of POST /connect — server-assigned identifiers plus capabilities. */
export interface ConnectResponse {
	connector_id: number;
	vault_id: string;
	search_space_id: number;
	capabilities: string[];
	server_time_utc: string;
	[key: string]: unknown;
}

/** Response of GET /health — capabilities without registering a vault. */
export interface HealthResponse {
	capabilities: string[];
	server_time_utc: string;
	[key: string]: unknown;
}

/** One server-side file record used for reconcile diffing. */
export interface ManifestEntry {
	hash: string;
	mtime: number;
	/** Optional: byte size of stored content. Enables mtime+size short-circuit; falls back to upsert when missing. */
	size?: number;
	[key: string]: unknown;
}

/** Response of GET /manifest — path → entry for the whole vault. */
export interface ManifestResponse {
	vault_id: string;
	items: Record<string, ManifestEntry>;
	[key: string]: unknown;
}
/** Per-item ack shapes — mirror `app/schemas/obsidian_plugin.py` 1:1. */
export interface SyncAckItem {
	path: string;
	status: "ok" | "error";
	document_id?: number;
	error?: string;
}

/** Batch ack for POST /sync. */
export interface SyncAck {
	vault_id: string;
	indexed: number;
	failed: number;
	items: SyncAckItem[];
}

export interface RenameAckItem {
	old_path: string;
	new_path: string;
	// "missing" → server had no row for old_path.
	status: "ok" | "error" | "missing";
	document_id?: number;
	error?: string;
}

/** Batch ack for POST /rename. */
export interface RenameAck {
	vault_id: string;
	renamed: number;
	missing: number;
	items: RenameAckItem[];
}

export interface DeleteAckItem {
	path: string;
	status: "ok" | "error" | "missing";
	error?: string;
}

/** Batch ack for POST /delete. */
export interface DeleteAck {
	vault_id: string;
	deleted: number;
	missing: number;
	items: DeleteAckItem[];
}

/** Status-bar state machine kinds. */
export type StatusKind =
	| "idle"
	| "syncing"
	| "queued"
	| "offline"
	| "auth-error"
	| "error";

/** Snapshot pushed to the UI on every status change. */
export interface StatusState {
	kind: StatusKind;
	detail?: string;
	queueDepth: number;
}

View file

@ -0,0 +1,43 @@
import type { App } from "obsidian";
/**
 * Deterministic SHA-256 over the vault name + sorted markdown paths.
 *
 * Two devices observing the same vault content compute the same value,
 * regardless of how it was synced (iCloud, Syncthing, Obsidian Sync, etc.).
 * The server uses this as the cross-device dedup key on /connect.
 */
/** Hash the vault name plus its sorted markdown paths into a stable hex digest. */
export async function computeVaultFingerprint(app: App): Promise<string> {
	const sortedPaths = app.vault
		.getMarkdownFiles()
		.map((file) => file.path)
		.sort();
	// Newline-joined so path boundaries are unambiguous in the hashed material.
	const material = `${app.vault.getName()}\n${sortedPaths.join("\n")}`;
	const digest = await crypto.subtle.digest("SHA-256", new TextEncoder().encode(material));
	return bufferToHex(digest);
}
/** Render an ArrayBuffer as a lowercase hex string (two digits per byte). */
function bufferToHex(buf: ArrayBuffer): string {
	return Array.from(new Uint8Array(buf), (byte) => byte.toString(16).padStart(2, "0")).join("");
}
/**
 * Generate an RFC 4122 v4 UUID for the vault id.
 *
 * Prefers `crypto.randomUUID()`, then `crypto.getRandomValues()`.
 * Fix: the previous code optional-chained only the `randomUUID` check and then
 * dereferenced `c.getRandomValues` unconditionally, throwing a TypeError when
 * Web Crypto was entirely absent. A Math.random fallback now covers that case —
 * not cryptographically strong, but a vault id only needs uniqueness.
 */
export function generateVaultUuid(): string {
	const c = globalThis.crypto;
	if (c?.randomUUID) return c.randomUUID();
	const buf = new Uint8Array(16);
	if (c?.getRandomValues) {
		c.getRandomValues(buf);
	} else {
		for (let i = 0; i < buf.length; i++) buf[i] = Math.floor(Math.random() * 256);
	}
	// Stamp version (4) and variant (10xx) bits per RFC 4122.
	buf[6] = ((buf[6] ?? 0) & 0x0f) | 0x40;
	buf[8] = ((buf[8] ?? 0) & 0x3f) | 0x80;
	const hex = Array.from(buf, (b) => b.toString(16).padStart(2, "0")).join("");
	return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(
		16,
		20,
	)}-${hex.slice(20)}`;
}

View file

@ -0,0 +1,76 @@
/*
 * SurfSense Obsidian plugin styles. Status-bar widget only — the settings
 * tab uses Obsidian's stock Setting rows, no custom CSS needed.
 */

/* Status-bar widget container. */
.surfsense-status {
	gap: 6px;
}
.surfsense-status--clickable {
	cursor: pointer;
}
.surfsense-status__icon {
	display: inline-flex;
	width: 14px;
	height: 14px;
}
.surfsense-status__icon svg {
	width: 14px;
	height: 14px;
}

/* Status-kind → Obsidian theme palette colors. */
.surfsense-status--ok .surfsense-status__icon {
	color: var(--color-green);
}
.surfsense-status--syncing .surfsense-status__icon {
	color: var(--color-blue);
}
.surfsense-status--warn .surfsense-status__icon {
	color: var(--color-yellow);
}
.surfsense-status--err .surfsense-status__icon {
	color: var(--color-red);
}

/* Connection indicator used in the settings tab heading. */
.surfsense-connection-indicator {
	display: inline-flex;
	width: 14px;
	height: 14px;
}
.surfsense-connection-heading {
	display: inline-flex;
	align-items: center;
	gap: 8px;
}
.surfsense-connection-indicator svg {
	width: 14px;
	height: 14px;
}
.surfsense-connection-indicator--ok {
	color: var(--color-green);
}
.surfsense-connection-indicator--syncing {
	color: var(--color-blue);
}
.surfsense-connection-indicator--warn {
	color: var(--color-yellow);
}
.surfsense-connection-indicator--err {
	color: var(--color-red);
}
.surfsense-connection-indicator--muted {
	color: var(--text-muted);
}

View file

@ -0,0 +1,30 @@
{
"compilerOptions": {
"baseUrl": "src",
"inlineSourceMap": true,
"inlineSources": true,
"module": "ESNext",
"target": "ES6",
"allowJs": true,
"noImplicitAny": true,
"noImplicitThis": true,
"noImplicitReturns": true,
"moduleResolution": "node",
"importHelpers": true,
"noUncheckedIndexedAccess": true,
"isolatedModules": true,
"strictNullChecks": true,
"strictBindCallApply": true,
"allowSyntheticDefaultImports": true,
"useUnknownInCatchVariables": true,
"lib": [
"DOM",
"ES5",
"ES6",
"ES7"
]
},
"include": [
"src/**/*.ts"
]
}

View file

@ -0,0 +1,17 @@
import { readFileSync, writeFileSync } from "fs";

// Version being released; npm sets npm_package_version when this script is
// run through `npm version` / package.json scripts.
const targetVersion = process.env.npm_package_version;

// read minAppVersion from manifest.json and bump version to target version
const manifest = JSON.parse(readFileSync("manifest.json", "utf8"));
const { minAppVersion } = manifest;
manifest.version = targetVersion;
writeFileSync("manifest.json", JSON.stringify(manifest, null, "\t"));

// update versions.json with target version and minAppVersion from manifest.json
// but only if the target version is not already in versions.json.
// Fix: the old guard tested Object.values() for minAppVersion, which skipped
// the update whenever ANY earlier release shared the same minAppVersion (the
// common case), so new releases never landed in versions.json.
const versions = JSON.parse(readFileSync("versions.json", "utf8"));
if (!(targetVersion in versions)) {
	versions[targetVersion] = minAppVersion;
	writeFileSync("versions.json", JSON.stringify(versions, null, "\t"));
}

View file

@ -0,0 +1,3 @@
{
"0.1.1": "1.5.4"
}

View file

@ -1,4 +1,8 @@
NEXT_PUBLIC_FASTAPI_BACKEND_URL=http://localhost:8000
# Server-only. Internal backend URL used by Next.js server code.
FASTAPI_BACKEND_INTERNAL_URL=https://your-internal-backend.example.com
NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE=LOCAL or GOOGLE
NEXT_PUBLIC_ETL_SERVICE=UNSTRUCTURED or LLAMACLOUD or DOCLING
NEXT_PUBLIC_ZERO_CACHE_URL=http://localhost:4848

View file

@ -0,0 +1,70 @@
import type { NextRequest } from "next/server";
// Opt this route out of Next.js static rendering/caching — every request must
// reach the backend live.
export const dynamic = "force-dynamic";

// Hop-by-hop headers (RFC 7230 §6.1) are connection-scoped and must not be
// forwarded to the client from the upstream response.
const HOP_BY_HOP_HEADERS = new Set([
	"connection",
	"keep-alive",
	"proxy-authenticate",
	"proxy-authorization",
	"te",
	"trailer",
	"transfer-encoding",
	"upgrade",
]);
/** Internal backend origin from env (default localhost:8000), without a trailing slash. */
function getBackendBaseUrl() {
	const configured = process.env.FASTAPI_BACKEND_INTERNAL_URL || "http://localhost:8000";
	return configured.replace(/\/$/, "");
}
/** Copy request headers for the upstream fetch, dropping ones fetch must recompute. */
function toUpstreamHeaders(headers: Headers) {
	const forwarded = new Headers(headers);
	// host must match the upstream origin; content-length is recalculated for the new body stream.
	for (const name of ["host", "content-length"]) {
		forwarded.delete(name);
	}
	return forwarded;
}
/** Copy upstream response headers for the client, stripping hop-by-hop headers. */
function toClientHeaders(headers: Headers) {
	const cleaned = new Headers(headers);
	HOP_BY_HOP_HEADERS.forEach((header) => cleaned.delete(header));
	return cleaned;
}
/**
 * Stream any /api/v1/* request through to the internal FastAPI backend,
 * preserving method, query string, headers, and body, and return the
 * upstream response with hop-by-hop headers stripped.
 */
async function proxy(request: NextRequest, context: { params: Promise<{ path?: string[] }> }) {
	const { path } = await context.params;
	const upstreamUrl = new URL(`${getBackendBaseUrl()}/api/v1/${path?.join("/") || ""}`);
	upstreamUrl.search = request.nextUrl.search;
	const bodyAllowed = request.method !== "GET" && request.method !== "HEAD";
	const upstream = await fetch(upstreamUrl, {
		method: request.method,
		headers: toUpstreamHeaders(request.headers),
		body: bodyAllowed ? request.body : undefined,
		// `duplex: "half"` is required by the Fetch spec when streaming a
		// ReadableStream as the request body. Avoids buffering uploads in heap.
		// @ts-expect-error - `duplex` is not yet in lib.dom RequestInit types.
		duplex: bodyAllowed ? "half" : undefined,
		// Pass redirects through to the browser untouched.
		redirect: "manual",
	});
	return new Response(upstream.body, {
		status: upstream.status,
		statusText: upstream.statusText,
		headers: toClientHeaders(upstream.headers),
	});
}
export {
proxy as GET,
proxy as POST,
proxy as PUT,
proxy as PATCH,
proxy as DELETE,
proxy as OPTIONS,
proxy as HEAD,
};

View file

@ -5,7 +5,10 @@ import type { Context } from "@/types/zero";
import { queries } from "@/zero/queries";
import { schema } from "@/zero/schema";
const backendURL = process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL || "http://localhost:8000";
const backendURL =
process.env.FASTAPI_BACKEND_INTERNAL_URL ||
process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL ||
"http://localhost:8000";
async function authenticateRequest(
request: Request

View file

@ -3,7 +3,7 @@
import { Check, Copy, Info } from "lucide-react";
import { useTranslations } from "next-intl";
import { useCallback, useRef, useState } from "react";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import { Alert, AlertDescription } from "@/components/ui/alert";
import { Button } from "@/components/ui/button";
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip";
import { useApiKey } from "@/hooks/use-api-key";

View file

@ -200,8 +200,8 @@ export function DesktopContent() {
Launch on Startup
</CardTitle>
<CardDescription className="text-xs md:text-sm">
Automatically start SurfSense when you sign in to your computer so global
shortcuts and folder sync are always available.
Automatically start SurfSense when you sign in to your computer so global shortcuts and
folder sync are always available.
</CardDescription>
</CardHeader>
<CardContent className="px-3 md:px-6 pb-3 md:pb-6 space-y-3">
@ -232,8 +232,7 @@ export function DesktopContent() {
Start minimized to tray
</Label>
<p className="text-xs text-muted-foreground">
Skip the main window on boot SurfSense lives in the system tray until you need
it.
Skip the main window on boot SurfSense lives in the system tray until you need it.
</p>
</div>
<Switch

View file

@ -126,9 +126,7 @@ export function PurchaseHistoryContent() {
return [
...pagePurchases.map(normalizePagePurchase),
...tokenPurchases.map(normalizeTokenPurchase),
].sort(
(a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime()
);
].sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime());
}, [pagesQuery.data, tokensQuery.data]);
if (isLoading) {

View file

@ -1,311 +1,187 @@
"use client";
import { zodResolver } from "@hookform/resolvers/zod";
import { Info } from "lucide-react";
import type { FC } from "react";
import { useRef, useState } from "react";
import { useForm } from "react-hook-form";
import * as z from "zod";
import { Check, Copy, Info } from "lucide-react";
import { type FC, useCallback, useRef, useState } from "react";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import {
Form,
FormControl,
FormDescription,
FormField,
FormItem,
FormLabel,
FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { Switch } from "@/components/ui/switch";
import { Button } from "@/components/ui/button";
import { EnumConnectorName } from "@/contracts/enums/connector";
import { useApiKey } from "@/hooks/use-api-key";
import { copyToClipboard as copyToClipboardUtil } from "@/lib/utils";
import { getConnectorBenefits } from "../connector-benefits";
import type { ConnectFormProps } from "../index";
const obsidianConnectorFormSchema = z.object({
name: z.string().min(3, {
message: "Connector name must be at least 3 characters.",
}),
vault_path: z.string().min(1, {
message: "Vault path is required.",
}),
vault_name: z.string().min(1, {
message: "Vault name is required.",
}),
exclude_folders: z.string().optional(),
include_attachments: z.boolean(),
});
const PLUGIN_RELEASES_URL =
"https://github.com/MODSetter/SurfSense/releases?q=obsidian&expanded=true";
type ObsidianConnectorFormValues = z.infer<typeof obsidianConnectorFormSchema>;
const BACKEND_URL = process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL ?? "https://surfsense.com";
export const ObsidianConnectForm: FC<ConnectFormProps> = ({ onSubmit, isSubmitting }) => {
const isSubmittingRef = useRef(false);
const [periodicEnabled, setPeriodicEnabled] = useState(true);
const [frequencyMinutes, setFrequencyMinutes] = useState("60");
const form = useForm<ObsidianConnectorFormValues>({
resolver: zodResolver(obsidianConnectorFormSchema),
defaultValues: {
name: "Obsidian Vault",
vault_path: "",
vault_name: "",
exclude_folders: ".obsidian,.trash",
include_attachments: false,
},
});
/**
* Obsidian connect form for the plugin-only architecture.
*
* The legacy `vault_path` form was removed because it only worked on
* self-hosted with a server-side bind mount and broke for everyone else.
* The plugin pushes data over HTTPS so this UI is purely instructional
* there is no backend create call here. The connector row is created
* server-side the first time the plugin calls `POST /obsidian/connect`.
*
* The footer "Connect" button in `ConnectorConnectView` triggers this
* form's submit; we just close the dialog (`onBack()`) since there's
* nothing to validate or persist from this side.
*/
export const ObsidianConnectForm: FC<ConnectFormProps> = ({ onBack }) => {
const { apiKey, isLoading, copied, copyToClipboard } = useApiKey();
const [copiedUrl, setCopiedUrl] = useState(false);
const urlCopyTimerRef = useRef<ReturnType<typeof setTimeout> | undefined>(undefined);
const handleSubmit = async (values: ObsidianConnectorFormValues) => {
// Prevent multiple submissions
if (isSubmittingRef.current || isSubmitting) {
return;
}
const copyServerUrl = useCallback(async () => {
const ok = await copyToClipboardUtil(BACKEND_URL);
if (!ok) return;
setCopiedUrl(true);
if (urlCopyTimerRef.current) clearTimeout(urlCopyTimerRef.current);
urlCopyTimerRef.current = setTimeout(() => setCopiedUrl(false), 2000);
}, []);
isSubmittingRef.current = true;
try {
// Parse exclude_folders into an array
const excludeFolders = values.exclude_folders
? values.exclude_folders
.split(",")
.map((f) => f.trim())
.filter(Boolean)
: [".obsidian", ".trash"];
await onSubmit({
name: values.name,
connector_type: EnumConnectorName.OBSIDIAN_CONNECTOR,
config: {
vault_path: values.vault_path,
vault_name: values.vault_name,
exclude_folders: excludeFolders,
include_attachments: values.include_attachments,
},
is_indexable: true,
is_active: true,
last_indexed_at: null,
periodic_indexing_enabled: periodicEnabled,
indexing_frequency_minutes: periodicEnabled ? Number.parseInt(frequencyMinutes, 10) : null,
next_scheduled_at: null,
periodicEnabled,
frequencyMinutes,
});
} finally {
isSubmittingRef.current = false;
}
const handleSubmit = (event: React.FormEvent<HTMLFormElement>) => {
event.preventDefault();
onBack();
};
return (
<div className="space-y-6 pb-6">
<Alert className="bg-purple-500/10 dark:bg-purple-500/10 border-purple-500/30 p-2 sm:p-3">
{/* Form is intentionally empty so the footer Connect button is a no-op
that just closes the dialog (see component-level docstring). */}
<form id="obsidian-connect-form" onSubmit={handleSubmit} />
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 p-2 sm:p-3">
<Info className="size-4 shrink-0 text-purple-500" />
<AlertTitle className="text-xs sm:text-sm">Self-Hosted Only</AlertTitle>
<AlertTitle className="text-xs sm:text-sm">Plugin-based sync</AlertTitle>
<AlertDescription className="text-[10px] sm:text-xs">
This connector requires direct file system access and only works with self-hosted
SurfSense installations.
SurfSense now syncs Obsidian via an official plugin that runs inside Obsidian itself.
Works on desktop and mobile, in cloud and self-hosted deployments.
</AlertDescription>
</Alert>
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
<Form {...form}>
<form
id="obsidian-connect-form"
onSubmit={form.handleSubmit(handleSubmit)}
className="space-y-4 sm:space-y-6"
>
<FormField
control={form.control}
name="name"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">Connector Name</FormLabel>
<FormControl>
<Input
placeholder="My Obsidian Vault"
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
A friendly name to identify this connector.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="vault_path"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">Vault Path</FormLabel>
<FormControl>
<Input
placeholder="/path/to/your/obsidian/vault"
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40 font-mono"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
The absolute path to your Obsidian vault on the server. This must be accessible
from the SurfSense backend.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="vault_name"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">Vault Name</FormLabel>
<FormControl>
<Input
placeholder="My Knowledge Base"
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
A display name for your vault. This will be used in search results.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="exclude_folders"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">Exclude Folders</FormLabel>
<FormControl>
<Input
placeholder=".obsidian,.trash,templates"
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40 font-mono"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
Comma-separated list of folder names to exclude from indexing.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="include_attachments"
render={({ field }) => (
<FormItem className="flex flex-row items-center justify-between rounded-lg border border-slate-400/20 p-3">
<div className="space-y-0.5">
<FormLabel className="text-xs sm:text-sm">Include Attachments</FormLabel>
<FormDescription className="text-[10px] sm:text-xs">
Index attachment folders and embedded files (images, PDFs, etc.)
</FormDescription>
</div>
<FormControl>
<Switch
checked={field.value}
onCheckedChange={field.onChange}
disabled={isSubmitting}
/>
</FormControl>
</FormItem>
)}
/>
{/* Indexing Configuration */}
<div className="space-y-4 pt-4 border-t border-slate-400/20">
<h3 className="text-sm sm:text-base font-medium">Indexing Configuration</h3>
{/* Periodic Sync Config */}
<div className="rounded-xl bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6">
<div className="flex items-center justify-between">
<div className="space-y-1">
<h3 className="font-medium text-sm sm:text-base">Enable Periodic Sync</h3>
<p className="text-xs sm:text-sm text-muted-foreground">
Automatically re-index at regular intervals
</p>
</div>
<Switch
checked={periodicEnabled}
onCheckedChange={setPeriodicEnabled}
disabled={isSubmitting}
/>
</div>
{periodicEnabled && (
<div className="mt-4 pt-4 border-t border-slate-400/20 space-y-3">
<div className="space-y-2">
<Label htmlFor="frequency" className="text-xs sm:text-sm">
Sync Frequency
</Label>
<Select
value={frequencyMinutes}
onValueChange={setFrequencyMinutes}
disabled={isSubmitting}
>
<SelectTrigger
id="frequency"
className="w-full bg-slate-400/5 dark:bg-slate-400/5 border-slate-400/20 text-xs sm:text-sm"
>
<SelectValue placeholder="Select frequency" />
</SelectTrigger>
<SelectContent className="z-100">
<SelectItem value="5" className="text-xs sm:text-sm">
Every 5 minutes
</SelectItem>
<SelectItem value="15" className="text-xs sm:text-sm">
Every 15 minutes
</SelectItem>
<SelectItem value="60" className="text-xs sm:text-sm">
Every hour
</SelectItem>
<SelectItem value="360" className="text-xs sm:text-sm">
Every 6 hours
</SelectItem>
<SelectItem value="720" className="text-xs sm:text-sm">
Every 12 hours
</SelectItem>
<SelectItem value="1440" className="text-xs sm:text-sm">
Daily
</SelectItem>
<SelectItem value="10080" className="text-xs sm:text-sm">
Weekly
</SelectItem>
</SelectContent>
</Select>
</div>
</div>
)}
<section className="rounded-xl border border-border bg-slate-400/5 p-3 sm:p-6 dark:bg-white/5">
<div className="space-y-5 sm:space-y-6">
{/* Step 1 — Install plugin */}
<article>
<header className="mb-3 flex items-center gap-2">
<div className="flex size-7 items-center justify-center rounded-md border border-slate-400/30 text-xs font-medium">
1
</div>
</div>
</form>
</Form>
</div>
<h3 className="text-sm font-medium sm:text-base">Install the plugin</h3>
</header>
<p className="mb-3 text-[11px] text-muted-foreground sm:text-xs">
Grab the latest SurfSense plugin release. Once it's in the community store, you'll
also be able to install it from{" "}
<span className="font-medium">Settings Community plugins</span> inside Obsidian.
</p>
<a
href={PLUGIN_RELEASES_URL}
target="_blank"
rel="noopener noreferrer"
className="inline-flex"
>
<Button
type="button"
variant="secondary"
size="sm"
className="gap-2 text-xs sm:text-sm"
>
Open plugin releases
</Button>
</a>
</article>
<div className="h-px bg-border/60" />
{/* Step 2 — Copy API key */}
<article>
<header className="mb-3 flex items-center gap-2">
<div className="flex size-7 items-center justify-center rounded-md border border-slate-400/30 text-xs font-medium">
2
</div>
<h3 className="text-sm font-medium sm:text-base">Copy your API key</h3>
</header>
<p className="mb-3 text-[11px] text-muted-foreground sm:text-xs">
Paste this into the plugin's <span className="font-medium">API token</span> setting.
The token expires after 24 hours. Long-lived personal access tokens are coming in a
future release.
</p>
{isLoading ? (
<div className="h-10 w-full animate-pulse rounded-md border border-border/60 bg-muted/30" />
) : apiKey ? (
<div className="flex items-center gap-2 rounded-md border border-border/60 bg-muted/30 px-2.5 py-1.5">
<div className="min-w-0 flex-1 overflow-x-auto scrollbar-hide">
<p className="cursor-text select-all whitespace-nowrap font-mono text-[10px] text-muted-foreground">
{apiKey}
</p>
</div>
<Button
type="button"
variant="ghost"
size="icon"
onClick={copyToClipboard}
className="size-7 shrink-0 text-muted-foreground hover:text-foreground"
aria-label={copied ? "Copied" : "Copy API key"}
>
{copied ? (
<Check className="size-3.5 text-green-500" />
) : (
<Copy className="size-3.5" />
)}
</Button>
</div>
) : (
<p className="text-center text-xs text-muted-foreground/60">
No API key available try refreshing the page.
</p>
)}
</article>
<div className="h-px bg-border/60" />
{/* Step 3 — Server URL */}
<article>
<header className="mb-3 flex items-center gap-2">
<div className="flex size-7 items-center justify-center rounded-md border border-slate-400/30 text-xs font-medium">
3
</div>
<h3 className="text-sm font-medium sm:text-base">Point the plugin at this server</h3>
</header>
<p className="text-[11px] text-muted-foreground sm:text-xs">
For SurfSense Cloud, use the default{" "}
<span className="font-medium">surfsense.com</span>. If you are self-hosting, set the
plugin's <span className="font-medium">Server URL</span> to your frontend domain.
</p>
</article>
<div className="h-px bg-border/60" />
{/* Step 4 — Pick search space */}
<article>
<header className="mb-3 flex items-center gap-2">
<div className="flex size-7 items-center justify-center rounded-md border border-slate-400/30 text-xs font-medium">
4
</div>
<h3 className="text-sm font-medium sm:text-base">Pick this search space</h3>
</header>
<p className="text-[11px] text-muted-foreground sm:text-xs">
In the plugin's <span className="font-medium">Search space</span> setting, choose the
search space you want this vault to sync into. The connector will appear here
automatically once the plugin makes its first sync.
</p>
</article>
</div>
</section>
{/* What you get section */}
{getConnectorBenefits(EnumConnectorName.OBSIDIAN_CONNECTOR) && (
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 px-3 sm:px-6 py-4 space-y-2">
<h4 className="text-xs sm:text-sm font-medium">
<div className="space-y-2 rounded-xl border border-border bg-slate-400/5 px-3 py-4 sm:px-6 dark:bg-white/5">
<h4 className="text-xs font-medium sm:text-sm">
What you get with Obsidian integration:
</h4>
<ul className="list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
<ul className="list-disc space-y-1 pl-5 text-[10px] text-muted-foreground sm:text-xs">
{getConnectorBenefits(EnumConnectorName.OBSIDIAN_CONNECTOR)?.map((benefit) => (
<li key={benefit}>{benefit}</li>
))}

View file

@ -104,11 +104,11 @@ export function getConnectorBenefits(connectorType: string): string[] | null {
"No manual indexing required - meetings are added automatically",
],
OBSIDIAN_CONNECTOR: [
"Search through all your Obsidian notes and knowledge base",
"Access note content with YAML frontmatter metadata preserved",
"Wiki-style links ([[note]]) and #tags are indexed",
"Connect your personal knowledge base directly to your search space",
"Incremental sync - only changed files are re-indexed",
"Search through all of your Obsidian notes",
"Realtime sync as you create, edit, rename, or delete notes",
"YAML frontmatter, [[wiki links]], and #tags are preserved and indexed",
"Open any chat citation straight back in Obsidian via deep links",
"Each device is identifiable, so you can revoke a vault from one machine",
"Full support for your vault's folder structure",
],
};

View file

@ -1,167 +1,162 @@
"use client";
import type { FC } from "react";
import { useState } from "react";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import { Switch } from "@/components/ui/switch";
import { AlertTriangle, Info } from "lucide-react";
import { type FC, useEffect, useMemo, useState } from "react";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import { connectorsApiService, type ObsidianStats } from "@/lib/apis/connectors-api.service";
import type { ConnectorConfigProps } from "../index";
export interface ObsidianConfigProps extends ConnectorConfigProps {
onNameChange?: (name: string) => void;
const OBSIDIAN_SETUP_DOCS_URL = "/docs/connectors/obsidian";
/**
 * Format a timestamp-ish value for display.
 *
 * Returns the em-dash placeholder when the value is not a non-empty string,
 * echoes the raw string back when `Date` cannot parse it (so the original
 * value stays visible), and otherwise renders it in the user's locale.
 */
function formatTimestamp(value: unknown): string {
	if (typeof value !== "string" || value.length === 0) {
		return "—";
	}
	const parsed = new Date(value);
	if (Number.isNaN(parsed.getTime())) {
		return value;
	}
	return parsed.toLocaleString();
}
export const ObsidianConfig: FC<ObsidianConfigProps> = ({
connector,
onConfigChange,
onNameChange,
}) => {
const [vaultPath, setVaultPath] = useState<string>(
(connector.config?.vault_path as string) || ""
);
const [vaultName, setVaultName] = useState<string>(
(connector.config?.vault_name as string) || ""
);
const [excludeFolders, setExcludeFolders] = useState<string>(() => {
const folders = connector.config?.exclude_folders;
if (Array.isArray(folders)) {
return folders.join(", ");
}
return (folders as string) || ".obsidian, .trash";
});
const [includeAttachments, setIncludeAttachments] = useState<boolean>(
(connector.config?.include_attachments as boolean) || false
);
const [name, setName] = useState<string>(connector.name || "");
/**
* Obsidian connector config view.
*
* Read-only on purpose: the plugin owns vault identity, so the connector's
* display name is auto-derived from `payload.vault_name` server-side on
* every `/connect` (see `obsidian_plugin_routes.obsidian_connect`). The
* web UI doesn't expose a Name input or a Save button for Obsidian (the
* latter is suppressed in `connector-edit-view.tsx`).
*
* Renders one of three modes depending on the connector's `config`:
*
* 1. **Plugin connector** (`config.source === "plugin"`) read-only stats
* panel showing what the plugin most recently reported.
* 2. **Legacy server-path connector** (`config.legacy === true`, set by the
* migration) migration warning + docs link + explicit disconnect data-loss
* warning so users move to the plugin flow safely.
* 3. **Unknown** fallback for rows that escaped migration; suggests a
* clean re-install.
*/
export const ObsidianConfig: FC<ConnectorConfigProps> = ({ connector }) => {
const config = (connector.config ?? {}) as Record<string, unknown>;
const isLegacy = config.legacy === true;
const isPlugin = config.source === "plugin";
const handleVaultPathChange = (value: string) => {
setVaultPath(value);
if (onConfigChange) {
onConfigChange({
...connector.config,
vault_path: value,
});
}
};
const handleVaultNameChange = (value: string) => {
setVaultName(value);
if (onConfigChange) {
onConfigChange({
...connector.config,
vault_name: value,
});
}
};
const handleExcludeFoldersChange = (value: string) => {
setExcludeFolders(value);
const foldersArray = value
.split(",")
.map((f) => f.trim())
.filter(Boolean);
if (onConfigChange) {
onConfigChange({
...connector.config,
exclude_folders: foldersArray,
});
}
};
const handleIncludeAttachmentsChange = (value: boolean) => {
setIncludeAttachments(value);
if (onConfigChange) {
onConfigChange({
...connector.config,
include_attachments: value,
});
}
};
const handleNameChange = (value: string) => {
setName(value);
if (onNameChange) {
onNameChange(value);
}
};
if (isLegacy) return <LegacyBanner />;
if (isPlugin) return <PluginStats config={config} />;
return <UnknownConnectorState />;
};
const LegacyBanner: FC = () => {
return (
<div className="space-y-6">
{/* Connector Name */}
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
<div className="space-y-2">
<Label className="text-xs sm:text-sm">Connector Name</Label>
<Input
value={name}
onChange={(e) => handleNameChange(e.target.value)}
placeholder="My Obsidian Vault"
className="border-slate-400/20 focus-visible:border-slate-400/40"
/>
<p className="text-[10px] sm:text-xs text-muted-foreground">
A friendly name to identify this connector.
</p>
</div>
</div>
<Alert className="border-amber-500/40 bg-amber-500/10">
<AlertTriangle className="size-4 shrink-0 text-amber-500" />
<AlertTitle className="text-xs sm:text-sm">
Sync stopped, install the plugin to migrate
</AlertTitle>
<AlertDescription className="text-[11px] sm:text-xs leading-relaxed">
This Obsidian connector used the legacy server-path scanner, which has been removed. The
notes already indexed remain searchable, but they no longer reflect changes made in your
vault.
</AlertDescription>
</Alert>
{/* Configuration */}
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
<div className="space-y-1 sm:space-y-2">
<h3 className="font-medium text-sm sm:text-base flex items-center gap-2">
Vault Configuration
</h3>
</div>
<div className="space-y-4">
<div className="space-y-2">
<Label className="text-xs sm:text-sm">Vault Path</Label>
<Input
value={vaultPath}
onChange={(e) => handleVaultPathChange(e.target.value)}
placeholder="/path/to/your/obsidian/vault"
className="border-slate-400/20 focus-visible:border-slate-400/40 font-mono"
/>
<p className="text-[10px] sm:text-xs text-muted-foreground">
The absolute path to your Obsidian vault on the server.
</p>
</div>
<div className="space-y-2">
<Label className="text-xs sm:text-sm">Vault Name</Label>
<Input
value={vaultName}
onChange={(e) => handleVaultNameChange(e.target.value)}
placeholder="My Knowledge Base"
className="border-slate-400/20 focus-visible:border-slate-400/40"
/>
<p className="text-[10px] sm:text-xs text-muted-foreground">
A display name for your vault in search results.
</p>
</div>
<div className="space-y-2">
<Label className="text-xs sm:text-sm">Exclude Folders</Label>
<Input
value={excludeFolders}
onChange={(e) => handleExcludeFoldersChange(e.target.value)}
placeholder=".obsidian, .trash, templates"
className="border-slate-400/20 focus-visible:border-slate-400/40 font-mono"
/>
<p className="text-[10px] sm:text-xs text-muted-foreground">
Comma-separated list of folder names to exclude from indexing.
</p>
</div>
<div className="flex items-center justify-between rounded-lg border border-slate-400/20 p-3">
<div className="space-y-0.5">
<Label className="text-xs sm:text-sm">Include Attachments</Label>
<p className="text-[10px] sm:text-xs text-muted-foreground">
Index attachment folders and embedded files
</p>
</div>
<Switch checked={includeAttachments} onCheckedChange={handleIncludeAttachmentsChange} />
</div>
</div>
<div className="rounded-xl border border-border bg-slate-400/5 p-3 sm:p-6 dark:bg-white/5">
<h3 className="mb-3 text-sm font-medium sm:text-base">Migration required</h3>
<p className="mb-3 text-[11px] leading-relaxed text-muted-foreground sm:text-xs">
Follow the{" "}
<a
href={OBSIDIAN_SETUP_DOCS_URL}
className="font-medium text-primary underline underline-offset-4 hover:text-primary/80"
>
Obsidian setup guide
</a>{" "}
to reconnect this vault through the plugin.
</p>
<p className="text-[11px] leading-relaxed text-amber-600 dark:text-amber-400 sm:text-xs">
Heads up: Disconnect also deletes every document this connector previously indexed.
</p>
</div>
</div>
);
};
/**
 * Read-only status panel for a plugin-managed Obsidian connector.
 *
 * Shows the vault name from the stored config plus last-sync time and
 * synced-file count fetched from the backend's Obsidian stats endpoint.
 */
const PluginStats: FC<{ config: Record<string, unknown> }> = ({ config }) => {
	// vault_id is written into config by the plugin; without it there is no
	// stats record to look up.
	const vaultId = typeof config.vault_id === "string" ? config.vault_id : null;
	// Latest stats payload, or null while loading / after an error.
	const [stats, setStats] = useState<ObsidianStats | null>(null);
	// True once a fetch has failed; switches the tiles to the "—" placeholder.
	const [statsError, setStatsError] = useState(false);

	// Re-fetch whenever the vault changes. `cancelled` guards against a stale
	// response resolving after cleanup and clobbering newer state.
	// NOTE(review): when vaultId is null the effect returns before resetting
	// state, so the tiles stay on the "…" placeholder indefinitely — confirm
	// this is intended for rows that are missing vault_id.
	useEffect(() => {
		if (!vaultId) return;
		let cancelled = false;
		setStats(null);
		setStatsError(false);
		connectorsApiService
			.getObsidianStats(vaultId)
			.then((result) => {
				if (!cancelled) setStats(result);
			})
			.catch((err) => {
				if (!cancelled) {
					console.error("Failed to fetch Obsidian stats", err);
					setStatsError(true);
				}
			});
		return () => {
			cancelled = true;
		};
	}, [vaultId]);

	// Rows for the status grid: "…" while loading, "—" on error/missing data,
	// otherwise the formatted value from the stats payload.
	const tileRows = useMemo(() => {
		const placeholder = statsError ? "—" : stats ? null : "…";
		return [
			{ label: "Vault", value: (config.vault_name as string) || "—" },
			{
				label: "Last sync",
				value: placeholder ?? formatTimestamp(stats?.last_sync_at ?? null),
			},
			{
				label: "Files synced",
				value:
					placeholder ??
					(typeof stats?.files_synced === "number" ? stats.files_synced.toLocaleString() : "—"),
			},
		];
	}, [config.vault_name, stats, statsError]);

	return (
		<div className="space-y-4">
			<Alert className="border-emerald-500/30 bg-emerald-500/10">
				<Info className="size-4 shrink-0 text-emerald-500" />
				<AlertTitle className="text-xs sm:text-sm">Plugin connected</AlertTitle>
				<AlertDescription className="text-[11px] sm:text-xs">
					Your notes stay synced automatically. To stop syncing, disable or uninstall the plugin in
					Obsidian, or delete this connector.
				</AlertDescription>
			</Alert>

			<div className="rounded-xl bg-slate-400/5 p-3 sm:p-6 dark:bg-white/5">
				<h3 className="mb-3 text-sm font-medium sm:text-base">Vault status</h3>
				<dl className="grid grid-cols-1 gap-3 sm:grid-cols-2">
					{tileRows.map((stat) => (
						<div key={stat.label} className="rounded-lg bg-background/50 p-3">
							<dt className="text-xs tracking-wide text-muted-foreground sm:text-sm">
								{stat.label}
							</dt>
							<dd className="mt-1 truncate text-xs font-medium sm:text-sm">{stat.value}</dd>
						</div>
					))}
				</dl>
			</div>
		</div>
	);
};
/**
 * Fallback state for an Obsidian connector whose config carries neither the
 * plugin's `source: "plugin"` marker nor the migration's `legacy` flag;
 * directs the user to delete the row and re-install the plugin.
 */
const UnknownConnectorState: FC = () => (
	<Alert>
		<Info className="size-4 shrink-0" />
		<AlertTitle className="text-xs sm:text-sm">Unrecognized config</AlertTitle>
		<AlertDescription className="text-[11px] sm:text-xs">
			This connector has neither plugin metadata nor a legacy marker. It may predate migration you
			can safely delete it and re-install the SurfSense Obsidian plugin to resume syncing.
		</AlertDescription>
	</Alert>
);

View file

@ -111,7 +111,9 @@ export const ConnectorConnectView: FC<ConnectorConnectViewProps> = ({
: getConnectorTypeDisplay(connectorType)}
</h2>
<p className="text-xs sm:text-base text-muted-foreground mt-1">
Enter your connection details
{connectorType === "OBSIDIAN_CONNECTOR"
? "Follow the plugin setup steps below"
: "Enter your connection details"}
</p>
</div>
</div>
@ -149,7 +151,9 @@ export const ConnectorConnectView: FC<ConnectorConnectViewProps> = ({
<span className={isSubmitting ? "opacity-0" : ""}>
{connectorType === "MCP_CONNECTOR"
? "Connect"
: `Connect ${getConnectorTypeDisplay(connectorType)}`}
: connectorType === "OBSIDIAN_CONNECTOR"
? "Done"
: `Connect ${getConnectorTypeDisplay(connectorType)}`}
</span>
{isSubmitting && <Spinner size="sm" className="absolute" />}
</Button>

View file

@ -87,6 +87,10 @@ export const ConnectorEditView: FC<ConnectorEditViewProps> = ({
const isAuthExpired = connector.config?.auth_expired === true;
const reauthEndpoint = REAUTH_ENDPOINTS[connector.connector_type];
const [reauthing, setReauthing] = useState(false);
// Obsidian is plugin-driven: name + config are owned by the plugin, so
// the web edit view has nothing the user can persist back. Hide Save
// (and re-auth, which Obsidian never uses) entirely for that type.
const isPluginManagedReadOnly = connector.connector_type === EnumConnectorName.OBSIDIAN_CONNECTOR;
const handleReauth = useCallback(async () => {
const spaceId = searchSpaceId ?? searchSpaceIdAtom;
@ -412,7 +416,7 @@ export const ConnectorEditView: FC<ConnectorEditViewProps> = ({
Disconnect
</Button>
)}
{isAuthExpired && reauthEndpoint ? (
{isPluginManagedReadOnly ? null : isAuthExpired && reauthEndpoint ? (
<Button
onClick={handleReauth}
disabled={reauthing || isDisconnecting}

View file

@ -180,7 +180,7 @@ export const OTHER_CONNECTORS = [
{
id: "obsidian-connector",
title: "Obsidian",
description: "Index your Obsidian vault (Local folder scan on Desktop)",
description: "Sync your Obsidian vault on desktop or mobile",
connectorType: EnumConnectorName.OBSIDIAN_CONNECTOR,
},
] as const;
@ -349,12 +349,7 @@ export const AUTO_INDEX_CONNECTOR_TYPES = new Set<string>(Object.keys(AUTO_INDEX
// `lib/posthog/events.ts` or per-connector tracking code.
// ============================================================================
export type ConnectorTelemetryGroup =
| "oauth"
| "composio"
| "crawler"
| "other"
| "unknown";
export type ConnectorTelemetryGroup = "oauth" | "composio" | "crawler" | "other" | "unknown";
export interface ConnectorTelemetryMeta {
connector_type: string;
@ -363,45 +358,44 @@ export interface ConnectorTelemetryMeta {
is_oauth: boolean;
}
const CONNECTOR_TELEMETRY_REGISTRY: ReadonlyMap<string, ConnectorTelemetryMeta> =
(() => {
const map = new Map<string, ConnectorTelemetryMeta>();
const CONNECTOR_TELEMETRY_REGISTRY: ReadonlyMap<string, ConnectorTelemetryMeta> = (() => {
const map = new Map<string, ConnectorTelemetryMeta>();
for (const c of OAUTH_CONNECTORS) {
map.set(c.connectorType, {
connector_type: c.connectorType,
connector_title: c.title,
connector_group: "oauth",
is_oauth: true,
});
}
for (const c of COMPOSIO_CONNECTORS) {
map.set(c.connectorType, {
connector_type: c.connectorType,
connector_title: c.title,
connector_group: "composio",
is_oauth: true,
});
}
for (const c of CRAWLERS) {
map.set(c.connectorType, {
connector_type: c.connectorType,
connector_title: c.title,
connector_group: "crawler",
is_oauth: false,
});
}
for (const c of OTHER_CONNECTORS) {
map.set(c.connectorType, {
connector_type: c.connectorType,
connector_title: c.title,
connector_group: "other",
is_oauth: false,
});
}
for (const c of OAUTH_CONNECTORS) {
map.set(c.connectorType, {
connector_type: c.connectorType,
connector_title: c.title,
connector_group: "oauth",
is_oauth: true,
});
}
for (const c of COMPOSIO_CONNECTORS) {
map.set(c.connectorType, {
connector_type: c.connectorType,
connector_title: c.title,
connector_group: "composio",
is_oauth: true,
});
}
for (const c of CRAWLERS) {
map.set(c.connectorType, {
connector_type: c.connectorType,
connector_title: c.title,
connector_group: "crawler",
is_oauth: false,
});
}
for (const c of OTHER_CONNECTORS) {
map.set(c.connectorType, {
connector_type: c.connectorType,
connector_title: c.title,
connector_group: "other",
is_oauth: false,
});
}
return map;
})();
return map;
})();
/**
* Returns telemetry metadata for a connector_type, or a minimal "unknown"

View file

@ -1,5 +1,5 @@
import { format } from "date-fns";
import { useAtom, useAtomValue, useSetAtom } from "jotai";
import { useAtom, useAtomValue } from "jotai";
import { useCallback, useEffect, useRef, useState } from "react";
import { toast } from "sonner";
import { connectorDialogOpenAtom } from "@/atoms/connector-dialog/connector-dialog.atoms";
@ -10,17 +10,11 @@ import {
updateConnectorMutationAtom,
} from "@/atoms/connectors/connector-mutation.atoms";
import { connectorsAtom } from "@/atoms/connectors/connector-query.atoms";
import {
folderWatchDialogOpenAtom,
folderWatchInitialFolderAtom,
} from "@/atoms/folder-sync/folder-sync.atoms";
import { activeSearchSpaceIdAtom } from "@/atoms/search-spaces/search-space-query.atoms";
import { EnumConnectorName } from "@/contracts/enums/connector";
import type { SearchSourceConnector } from "@/contracts/types/connector.types";
import { searchSourceConnector } from "@/contracts/types/connector.types";
import { usePlatform } from "@/hooks/use-platform";
import { authenticatedFetch } from "@/lib/auth-utils";
import { isSelfHosted } from "@/lib/env-config";
import {
trackConnectorConnected,
trackConnectorDeleted,
@ -70,10 +64,6 @@ export const useConnectorDialog = () => {
const { mutateAsync: updateConnector } = useAtomValue(updateConnectorMutationAtom);
const { mutateAsync: deleteConnector } = useAtomValue(deleteConnectorMutationAtom);
const { mutateAsync: createConnector } = useAtomValue(createConnectorMutationAtom);
const setFolderWatchOpen = useSetAtom(folderWatchDialogOpenAtom);
const setFolderWatchInitialFolder = useSetAtom(folderWatchInitialFolderAtom);
const { isDesktop } = usePlatform();
const selfHosted = isSelfHosted();
// Use global atom for dialog open state so it can be controlled from anywhere
const [isOpen, setIsOpen] = useAtom(connectorDialogOpenAtom);
@ -360,11 +350,7 @@ export const useConnectorDialog = () => {
// Set connecting state immediately to disable button and show spinner
setConnectingId(connector.id);
trackConnectorSetupStarted(
Number(searchSpaceId),
connector.connectorType,
"oauth_click"
);
trackConnectorSetupStarted(Number(searchSpaceId), connector.connectorType, "oauth_click");
try {
// Check if authEndpoint already has query parameters
@ -434,6 +420,7 @@ export const useConnectorDialog = () => {
indexing_frequency_minutes: null,
next_scheduled_at: null,
enable_summary: false,
enable_vision_llm: false,
},
queryParams: {
search_space_id: searchSpaceId,
@ -482,35 +469,16 @@ export const useConnectorDialog = () => {
}
}, [searchSpaceId, createConnector, refetchAllConnectors, setIsOpen]);
// Handle connecting non-OAuth connectors (like Tavily API)
// Handle connecting non-OAuth connectors (like Tavily API, Obsidian plugin, etc.)
const handleConnectNonOAuth = useCallback(
(connectorType: string) => {
if (!searchSpaceId) return;
trackConnectorSetupStarted(
Number(searchSpaceId),
connectorType,
"non_oauth_click"
);
// Handle Obsidian specifically on Desktop & Cloud
if (connectorType === EnumConnectorName.OBSIDIAN_CONNECTOR && !selfHosted && isDesktop) {
setIsOpen(false);
setFolderWatchInitialFolder(null);
setFolderWatchOpen(true);
return;
}
trackConnectorSetupStarted(Number(searchSpaceId), connectorType, "non_oauth_click");
setConnectingConnectorType(connectorType);
},
[
searchSpaceId,
selfHosted,
isDesktop,
setIsOpen,
setFolderWatchOpen,
setFolderWatchInitialFolder,
]
[searchSpaceId]
);
// Handle submitting connect form
@ -554,6 +522,7 @@ export const useConnectorDialog = () => {
is_active: true,
next_scheduled_at: connectorData.next_scheduled_at as string | null,
enable_summary: false,
enable_vision_llm: false,
},
queryParams: {
search_space_id: searchSpaceId,

View file

@ -210,8 +210,7 @@ export function FreeChatPage() {
trackAnonymousChatMessageSent({
modelSlug,
messageLength: userQuery.trim().length,
hasUploadedDoc:
anonMode.isAnonymous && anonMode.uploadedDoc !== null ? true : false,
hasUploadedDoc: anonMode.isAnonymous && anonMode.uploadedDoc !== null ? true : false,
surface: "free_chat_page",
});

View file

@ -426,15 +426,50 @@ const AiSortIllustration = () => (
<title>AI File Sorting illustration showing automatic folder organization</title>
{/* Scattered documents on the left */}
<g opacity="0.5">
<rect x="20" y="40" width="35" height="45" rx="4" className="fill-neutral-200 dark:fill-neutral-700" transform="rotate(-8 37 62)" />
<rect x="50" y="80" width="35" height="45" rx="4" className="fill-neutral-200 dark:fill-neutral-700" transform="rotate(5 67 102)" />
<rect x="15" y="110" width="35" height="45" rx="4" className="fill-neutral-200 dark:fill-neutral-700" transform="rotate(-3 32 132)" />
<rect
x="20"
y="40"
width="35"
height="45"
rx="4"
className="fill-neutral-200 dark:fill-neutral-700"
transform="rotate(-8 37 62)"
/>
<rect
x="50"
y="80"
width="35"
height="45"
rx="4"
className="fill-neutral-200 dark:fill-neutral-700"
transform="rotate(5 67 102)"
/>
<rect
x="15"
y="110"
width="35"
height="45"
rx="4"
className="fill-neutral-200 dark:fill-neutral-700"
transform="rotate(-3 32 132)"
/>
</g>
{/* AI sparkle / magic in the center */}
<g transform="translate(140, 90)">
<path d="M 0,-18 L 4,-6 L 16,-4 L 6,4 L 8,16 L 0,10 L -8,16 L -6,4 L -16,-4 L -4,-6 Z" className="fill-emerald-500 dark:fill-emerald-400" opacity="0.85">
<animateTransform attributeName="transform" type="rotate" from="0" to="360" dur="10s" repeatCount="indefinite" />
<path
d="M 0,-18 L 4,-6 L 16,-4 L 6,4 L 8,16 L 0,10 L -8,16 L -6,4 L -16,-4 L -4,-6 Z"
className="fill-emerald-500 dark:fill-emerald-400"
opacity="0.85"
>
<animateTransform
attributeName="transform"
type="rotate"
from="0"
to="360"
dur="10s"
repeatCount="indefinite"
/>
</path>
<circle cx="0" cy="0" r="3" className="fill-white dark:fill-emerald-200">
<animate attributeName="opacity" values="0.5;1;0.5" dur="2s" repeatCount="indefinite" />
@ -442,51 +477,208 @@ const AiSortIllustration = () => (
</g>
{/* Animated sorting arrows */}
<g className="stroke-emerald-500 dark:stroke-emerald-400" strokeWidth="2" fill="none" opacity="0.6">
<g
className="stroke-emerald-500 dark:stroke-emerald-400"
strokeWidth="2"
fill="none"
opacity="0.6"
>
<path d="M 100 70 Q 140 60, 180 50" strokeDasharray="4,4">
<animate attributeName="stroke-dashoffset" from="8" to="0" dur="1s" repeatCount="indefinite" />
<animate
attributeName="stroke-dashoffset"
from="8"
to="0"
dur="1s"
repeatCount="indefinite"
/>
</path>
<path d="M 100 100 Q 140 100, 180 100" strokeDasharray="4,4">
<animate attributeName="stroke-dashoffset" from="8" to="0" dur="1s" repeatCount="indefinite" />
<animate
attributeName="stroke-dashoffset"
from="8"
to="0"
dur="1s"
repeatCount="indefinite"
/>
</path>
<path d="M 100 130 Q 140 140, 180 150" strokeDasharray="4,4">
<animate attributeName="stroke-dashoffset" from="8" to="0" dur="1s" repeatCount="indefinite" />
<animate
attributeName="stroke-dashoffset"
from="8"
to="0"
dur="1s"
repeatCount="indefinite"
/>
</path>
</g>
{/* Organized folder tree on the right */}
{/* Root folder */}
<g>
<rect x="220" y="30" width="160" height="28" rx="6" className="fill-white dark:fill-neutral-800" opacity="0.9" />
<rect x="228" y="36" width="16" height="14" rx="3" className="fill-emerald-500 dark:fill-emerald-400" />
<line x1="252" y1="43" x2="330" y2="43" className="stroke-neutral-400 dark:stroke-neutral-500" strokeWidth="2.5" strokeLinecap="round" />
<rect
x="220"
y="30"
width="160"
height="28"
rx="6"
className="fill-white dark:fill-neutral-800"
opacity="0.9"
/>
<rect
x="228"
y="36"
width="16"
height="14"
rx="3"
className="fill-emerald-500 dark:fill-emerald-400"
/>
<line
x1="252"
y1="43"
x2="330"
y2="43"
className="stroke-neutral-400 dark:stroke-neutral-500"
strokeWidth="2.5"
strokeLinecap="round"
/>
</g>
{/* Subfolder 1 */}
<g>
<line x1="240" y1="58" x2="240" y2="76" className="stroke-neutral-300 dark:stroke-neutral-600" strokeWidth="1.5" />
<line x1="240" y1="76" x2="250" y2="76" className="stroke-neutral-300 dark:stroke-neutral-600" strokeWidth="1.5" />
<rect x="250" y="64" width="130" height="24" rx="5" className="fill-white dark:fill-neutral-800" opacity="0.85" />
<rect x="257" y="70" width="12" height="11" rx="2" className="fill-teal-400 dark:fill-teal-500" />
<line x1="276" y1="76" x2="340" y2="76" className="stroke-neutral-400 dark:stroke-neutral-500" strokeWidth="2" strokeLinecap="round" />
<line
x1="240"
y1="58"
x2="240"
y2="76"
className="stroke-neutral-300 dark:stroke-neutral-600"
strokeWidth="1.5"
/>
<line
x1="240"
y1="76"
x2="250"
y2="76"
className="stroke-neutral-300 dark:stroke-neutral-600"
strokeWidth="1.5"
/>
<rect
x="250"
y="64"
width="130"
height="24"
rx="5"
className="fill-white dark:fill-neutral-800"
opacity="0.85"
/>
<rect
x="257"
y="70"
width="12"
height="11"
rx="2"
className="fill-teal-400 dark:fill-teal-500"
/>
<line
x1="276"
y1="76"
x2="340"
y2="76"
className="stroke-neutral-400 dark:stroke-neutral-500"
strokeWidth="2"
strokeLinecap="round"
/>
</g>
{/* Subfolder 2 */}
<g>
<line x1="240" y1="76" x2="240" y2="108" className="stroke-neutral-300 dark:stroke-neutral-600" strokeWidth="1.5" />
<line x1="240" y1="108" x2="250" y2="108" className="stroke-neutral-300 dark:stroke-neutral-600" strokeWidth="1.5" />
<rect x="250" y="96" width="130" height="24" rx="5" className="fill-white dark:fill-neutral-800" opacity="0.85" />
<rect x="257" y="102" width="12" height="11" rx="2" className="fill-cyan-400 dark:fill-cyan-500" />
<line x1="276" y1="108" x2="350" y2="108" className="stroke-neutral-400 dark:stroke-neutral-500" strokeWidth="2" strokeLinecap="round" />
<line
x1="240"
y1="76"
x2="240"
y2="108"
className="stroke-neutral-300 dark:stroke-neutral-600"
strokeWidth="1.5"
/>
<line
x1="240"
y1="108"
x2="250"
y2="108"
className="stroke-neutral-300 dark:stroke-neutral-600"
strokeWidth="1.5"
/>
<rect
x="250"
y="96"
width="130"
height="24"
rx="5"
className="fill-white dark:fill-neutral-800"
opacity="0.85"
/>
<rect
x="257"
y="102"
width="12"
height="11"
rx="2"
className="fill-cyan-400 dark:fill-cyan-500"
/>
<line
x1="276"
y1="108"
x2="350"
y2="108"
className="stroke-neutral-400 dark:stroke-neutral-500"
strokeWidth="2"
strokeLinecap="round"
/>
</g>
{/* Subfolder 3 */}
<g>
<line x1="240" y1="108" x2="240" y2="140" className="stroke-neutral-300 dark:stroke-neutral-600" strokeWidth="1.5" />
<line x1="240" y1="140" x2="250" y2="140" className="stroke-neutral-300 dark:stroke-neutral-600" strokeWidth="1.5" />
<rect x="250" y="128" width="130" height="24" rx="5" className="fill-white dark:fill-neutral-800" opacity="0.85" />
<rect x="257" y="134" width="12" height="11" rx="2" className="fill-emerald-400 dark:fill-emerald-500" />
<line x1="276" y1="140" x2="325" y2="140" className="stroke-neutral-400 dark:stroke-neutral-500" strokeWidth="2" strokeLinecap="round" />
<line
x1="240"
y1="108"
x2="240"
y2="140"
className="stroke-neutral-300 dark:stroke-neutral-600"
strokeWidth="1.5"
/>
<line
x1="240"
y1="140"
x2="250"
y2="140"
className="stroke-neutral-300 dark:stroke-neutral-600"
strokeWidth="1.5"
/>
<rect
x="250"
y="128"
width="130"
height="24"
rx="5"
className="fill-white dark:fill-neutral-800"
opacity="0.85"
/>
<rect
x="257"
y="134"
width="12"
height="11"
rx="2"
className="fill-emerald-400 dark:fill-emerald-500"
/>
<line
x1="276"
y1="140"
x2="325"
y2="140"
className="stroke-neutral-400 dark:stroke-neutral-500"
strokeWidth="2"
strokeLinecap="round"
/>
</g>
{/* Sparkle accents */}
@ -495,10 +687,22 @@ const AiSortIllustration = () => (
<animate attributeName="opacity" values="0;1;0" dur="2s" repeatCount="indefinite" />
</circle>
<circle cx="190" cy="155" r="1.5" className="fill-teal-400">
<animate attributeName="opacity" values="0;1;0" dur="2.5s" begin="0.8s" repeatCount="indefinite" />
<animate
attributeName="opacity"
values="0;1;0"
dur="2.5s"
begin="0.8s"
repeatCount="indefinite"
/>
</circle>
<circle cx="155" cy="120" r="1.5" className="fill-cyan-400">
<animate attributeName="opacity" values="0;1;0" dur="3s" begin="0.4s" repeatCount="indefinite" />
<animate
attributeName="opacity"
values="0;1;0"
dur="3s"
begin="0.4s"
repeatCount="indefinite"
/>
</circle>
</g>
</svg>

View file

@ -546,35 +546,35 @@ export function DocumentUploadTab({
</button>
)
) : (
<div
role="button"
tabIndex={0}
className="flex flex-col items-center gap-4 py-12 px-4 cursor-pointer w-full bg-transparent outline-none select-none"
onClick={() => {
if (!isElectron) fileInputRef.current?.click();
}}
onKeyDown={(e) => {
if (e.key === "Enter" || e.key === " ") {
e.preventDefault();
<div
role="button"
tabIndex={0}
className="flex flex-col items-center gap-4 py-12 px-4 cursor-pointer w-full bg-transparent outline-none select-none"
onClick={() => {
if (!isElectron) fileInputRef.current?.click();
}
}}
>
<Upload className="h-10 w-10 text-muted-foreground" />
<div className="text-center space-y-1.5">
<p className="text-base font-medium">
{isElectron ? t("select_files_or_folder") : t("tap_select_files_or_folder")}
</p>
<p className="text-sm text-muted-foreground">{t("file_size_limit")}</p>
</div>
<fieldset
className="w-full mt-1 border-none p-0 m-0"
onClick={(e) => e.stopPropagation()}
onKeyDown={(e) => e.stopPropagation()}
}}
onKeyDown={(e) => {
if (e.key === "Enter" || e.key === " ") {
e.preventDefault();
if (!isElectron) fileInputRef.current?.click();
}
}}
>
{renderBrowseButton({ fullWidth: true })}
</fieldset>
</div>
<Upload className="h-10 w-10 text-muted-foreground" />
<div className="text-center space-y-1.5">
<p className="text-base font-medium">
{isElectron ? t("select_files_or_folder") : t("tap_select_files_or_folder")}
</p>
<p className="text-sm text-muted-foreground">{t("file_size_limit")}</p>
</div>
<fieldset
className="w-full mt-1 border-none p-0 m-0"
onClick={(e) => e.stopPropagation()}
onKeyDown={(e) => e.stopPropagation()}
>
{renderBrowseButton({ fullWidth: true })}
</fieldset>
</div>
)}
</div>

View file

@ -105,7 +105,7 @@ Connect SurfSense to your favorite tools and services. Browse the available inte
/>
<Card
title="Obsidian"
description="Connect your Obsidian vault to SurfSense"
description="Sync your Obsidian vault using the SurfSense plugin"
href="/docs/connectors/obsidian"
/>
<Card

View file

@ -1,143 +1,65 @@
---
title: Obsidian
description: Connect your Obsidian vault to SurfSense
description: Sync your Obsidian vault with the SurfSense plugin
---
# Obsidian Integration Setup Guide
# Obsidian Plugin Setup Guide
This guide walks you through connecting your Obsidian vault to SurfSense for note search and AI-powered insights.
<Callout type="warn">
This connector requires direct file system access and only works with self-hosted SurfSense installations.
</Callout>
SurfSense integrates with Obsidian through the SurfSense Obsidian plugin.
## How it works
The Obsidian connector scans your local Obsidian vault directory and indexes all Markdown files. It preserves your note structure and extracts metadata from YAML frontmatter.
The plugin runs inside your Obsidian app and pushes note updates to SurfSense over HTTPS.
This works for cloud and self-hosted deployments, including desktop and mobile clients.
- For follow-up indexing runs, the connector uses content hashing to skip unchanged files for faster sync.
- Configure indexing to run periodically so that updates appear in your search results within minutes.
---
## What Gets Indexed
## What gets indexed
| Content Type | Description |
|--------------|-------------|
| Markdown Files | All `.md` files in your vault |
| Frontmatter | YAML metadata (title, tags, aliases, dates) |
| Wiki Links | Links between notes (`[[note]]`) |
| Inline Tags | Tags throughout your notes (`#tag`) |
| Note Content | Full content with intelligent chunking |
| Markdown files | Note content (`.md`) |
| Frontmatter | YAML metadata like title, tags, aliases, dates |
| Wiki links | Linked notes (`[[note]]`) |
| Tags | Inline and frontmatter tags |
| Vault metadata | Vault and path metadata used for deep links and sync state |
## Quick start
1. Open **Connectors** in SurfSense and choose **Obsidian**.
2. Click **Open plugin releases** and install the latest SurfSense Obsidian plugin.
3. In Obsidian, open **Settings → SurfSense**.
4. Paste your SurfSense API token from the user settings section.
5. Paste your server URL into the plugin settings: either your SurfSense main domain (if `/api/v1` rewrites are enabled) or your direct backend URL.
6. Choose the Search Space in the plugin; the first sync should then run automatically.
7. Confirm the connector appears as **Obsidian - &lt;vault&gt;** in SurfSense.
## Migrating from the legacy connector
If you previously used the legacy Obsidian connector architecture, migrate to the plugin flow:
1. Delete the old legacy Obsidian connector from SurfSense.
2. Install and configure the SurfSense Obsidian plugin using the quick start above.
3. Run the first plugin sync and verify the new **Obsidian - &lt;vault&gt;** connector is active.
<Callout type="warn">
Binary files and attachments are not indexed by default. Enable "Include Attachments" to index embedded files.
Deleting the legacy connector also deletes all documents that were indexed by that connector. Always finish and verify plugin sync before deleting the old connector.
</Callout>
---
## Quick Start (Local Installation)
1. Navigate to **Connectors** → **Add Connector** → **Obsidian**
2. Enter your vault path: `/Users/yourname/Documents/MyVault`
3. Enter a vault name (e.g., `Personal Notes`)
4. Click **Connect Obsidian**
<Callout type="info">
Find your vault path: In Obsidian, right-click any note → "Reveal in Finder" (macOS) or "Show in Explorer" (Windows).
</Callout>
<Callout type="info" title="Periodic Sync">
Enable periodic sync to automatically re-index notes when content changes. Available frequencies: every 5 minutes, every 15 minutes, hourly, every 6 hours, daily, or weekly.
</Callout>
---
## Docker Setup
For Docker deployments, you need to mount your Obsidian vault as a volume.
### Step 1: Update docker-compose.yml
Add your vault as a volume mount to the SurfSense backend service:
```yaml
services:
surfsense:
# ... other config
volumes:
- /path/to/your/obsidian/vault:/app/obsidian_vaults/my-vault:ro
```
<Callout type="info">
The `:ro` flag mounts the vault as read-only, which is recommended for security.
</Callout>
### Step 2: Configure the Connector
Use the **container path** (not your local path) when setting up the connector:
| Your Local Path | Container Path (use this) |
|-----------------|---------------------------|
| `/Users/john/Documents/MyVault` | `/app/obsidian_vaults/my-vault` |
| `C:\Users\john\Documents\MyVault` | `/app/obsidian_vaults/my-vault` |
### Example: Multiple Vaults
```yaml
volumes:
- /Users/john/Documents/PersonalNotes:/app/obsidian_vaults/personal:ro
- /Users/john/Documents/WorkNotes:/app/obsidian_vaults/work:ro
```
Then create separate connectors for each vault using `/app/obsidian_vaults/personal` and `/app/obsidian_vaults/work`.
---
## Connector Configuration
| Field | Description | Required |
|-------|-------------|----------|
| **Connector Name** | A friendly name to identify this connector | Yes |
| **Vault Path** | Absolute path to your vault (container path for Docker) | Yes |
| **Vault Name** | Display name for your vault in search results | Yes |
| **Exclude Folders** | Comma-separated folder names to skip | No |
| **Include Attachments** | Index embedded files (images, PDFs) | No |
---
## Recommended Exclusions
Common folders to exclude from indexing:
| Folder | Reason |
|--------|--------|
| `.obsidian` | Obsidian config files (always exclude) |
| `.trash` | Obsidian's trash folder |
| `templates` | Template files you don't want searchable |
| `daily-notes` | If you want to exclude daily notes |
| `attachments` | If not using "Include Attachments" |
Default exclusions: `.obsidian,.trash`
---
## Troubleshooting
**Vault not found / Permission denied**
- Verify the path exists and is accessible
- For Docker: ensure the volume is mounted correctly in `docker-compose.yml`
- Check file permissions: SurfSense needs read access to the vault directory
**Plugin connects but no files appear**
- Verify the plugin is pointed to the correct Search Space.
- Trigger a manual sync from the plugin settings.
- Confirm your API token is valid and not expired.
**No notes indexed**
- Ensure your vault contains `.md` files
- Check that notes aren't in excluded folders
- Verify the path points to the vault root (contains `.obsidian` folder)
**Self-hosted URL issues**
- Use a public or LAN backend URL that your Obsidian device can reach.
- If your instance is behind TLS, ensure the URL/certificate is valid for the device running Obsidian.
**Changes not appearing**
- Wait for the next sync cycle, or manually trigger re-indexing
- For Docker: restart the container if you modified volume mounts
**Unauthorized / 401 errors**
- Regenerate and paste a fresh API token from SurfSense.
- Ensure the token belongs to the same account and workspace you are syncing into.
**Docker: "path not found" error**
- Use the container path (`/app/obsidian_vaults/...`), not your local path
- Verify the volume mount in `docker-compose.yml` matches
**Cannot reach server URL**
- Check that the backend URL is reachable from the Obsidian device.
- For self-hosted setups, verify firewall and reverse proxy rules.
- Avoid using `localhost` unless SurfSense and Obsidian run on the same machine.

View file

@ -443,6 +443,19 @@ class ConnectorsApiService {
body: JSON.stringify({ tool_name: toolName }),
});
};
/**
 * Live stats for the Obsidian connector tile.
 *
 * Fetches per-vault sync statistics from `/api/v1/obsidian/stats`.
 *
 * @param vaultId - Identifier of the Obsidian vault; URL-encoded into the
 *   `vault_id` query parameter.
 * @returns A promise resolving to the vault's {@link ObsidianStats}.
 */
getObsidianStats = async (vaultId: string): Promise<ObsidianStats> => {
return baseApiService.get<ObsidianStats>(
`/api/v1/obsidian/stats?vault_id=${encodeURIComponent(vaultId)}`
);
};
}
/**
 * Response shape of the Obsidian stats endpoint
 * (`/api/v1/obsidian/stats`), consumed by `getObsidianStats`.
 */
export interface ObsidianStats {
/** Identifier of the vault these stats describe. */
vault_id: string;
/** Number of files synced for this vault. */
files_synced: number;
/** Time of the last sync, or `null` if the vault has never synced. NOTE(review): presumably an ISO-8601 timestamp string — confirm against the backend serializer. */
last_sync_at: string | null;
}
export type { SlackChannel, DiscordChannel };

3
versions.json Normal file
View file

@ -0,0 +1,3 @@
{
"0.1.1": "1.5.4"
}