mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-05-05 13:52:40 +02:00
Merge pull request #1286 from AnishSarkar22/feat/obsidian-plugin
feat: introduce Obsidian vault sync plugin
This commit is contained in:
commit
f607636ba6
83 changed files with 12540 additions and 1837 deletions
39
.github/workflows/obsidian-plugin-lint.yml
vendored
Normal file
39
.github/workflows/obsidian-plugin-lint.yml
vendored
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
name: Obsidian Plugin Lint
|
||||
|
||||
# Lints + type-checks + builds the Obsidian plugin on every push/PR that
|
||||
# touches its sources. The official obsidian-sample-plugin template ships
|
||||
# its own ESLint+esbuild setup; we run that here instead of folding the
|
||||
# plugin into the monorepo's Biome-based code-quality.yml so the tooling
|
||||
# stays aligned with what `obsidianmd/eslint-plugin-obsidianmd` checks
|
||||
# against.
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["**"]
|
||||
paths:
|
||||
- "surfsense_obsidian/**"
|
||||
- ".github/workflows/obsidian-plugin-lint.yml"
|
||||
pull_request:
|
||||
branches: ["**"]
|
||||
paths:
|
||||
- "surfsense_obsidian/**"
|
||||
- ".github/workflows/obsidian-plugin-lint.yml"
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: surfsense_obsidian
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
|
||||
- uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 22.x
|
||||
cache: npm
|
||||
cache-dependency-path: surfsense_obsidian/package-lock.json
|
||||
|
||||
- run: npm ci
|
||||
- run: npm run lint
|
||||
- run: npm run build
|
||||
119
.github/workflows/release-obsidian-plugin.yml
vendored
Normal file
119
.github/workflows/release-obsidian-plugin.yml
vendored
Normal file
|
|
@ -0,0 +1,119 @@
|
|||
name: Release Obsidian Plugin
|
||||
|
||||
# Tag format: `obsidian-v<version>` and `<version>` must match `surfsense_obsidian/manifest.json` exactly.
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "obsidian-v*"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
publish:
|
||||
description: "Publish to GitHub Releases"
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
- never
|
||||
- always
|
||||
default: "never"
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
build-and-release:
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: surfsense_obsidian
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
# Need write access for the manifest/versions.json mirror commit
|
||||
# back to main further down.
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 22.x
|
||||
cache: npm
|
||||
cache-dependency-path: surfsense_obsidian/package-lock.json
|
||||
|
||||
- name: Resolve plugin version
|
||||
id: version
|
||||
run: |
|
||||
manifest_version=$(node -p "require('./manifest.json').version")
|
||||
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
|
||||
# Manual runs derive the release version from manifest.json.
|
||||
version="$manifest_version"
|
||||
tag="obsidian-v$version"
|
||||
else
|
||||
tag="${GITHUB_REF_NAME}"
|
||||
if [ -z "$tag" ] || [[ "$tag" != obsidian-v* ]]; then
|
||||
echo "::error::Invalid tag '$tag'. Expected format: obsidian-v<version>"
|
||||
exit 1
|
||||
fi
|
||||
version="${tag#obsidian-v}"
|
||||
if [ "$version" != "$manifest_version" ]; then
|
||||
echo "::error::Tag version '$version' does not match manifest version '$manifest_version'"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
echo "tag=$tag" >> "$GITHUB_OUTPUT"
|
||||
echo "version=$version" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Resolve publish mode
|
||||
id: release_mode
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" = "push" ] || [ "${{ inputs.publish }}" = "always" ]; then
|
||||
echo "should_publish=true" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "should_publish=false" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- run: npm ci
|
||||
|
||||
- run: npm run lint
|
||||
|
||||
- run: npm run build
|
||||
|
||||
- name: Verify build artifacts
|
||||
run: |
|
||||
for f in main.js manifest.json styles.css; do
|
||||
test -f "$f" || (echo "::error::Missing release artifact: $f" && exit 1)
|
||||
done
|
||||
|
||||
- name: Mirror manifest.json + versions.json to repo root
|
||||
if: steps.release_mode.outputs.should_publish == 'true'
|
||||
working-directory: ${{ github.workspace }}
|
||||
run: |
|
||||
cp surfsense_obsidian/manifest.json manifest.json
|
||||
cp surfsense_obsidian/versions.json versions.json
|
||||
if git diff --quiet manifest.json versions.json; then
|
||||
echo "Root manifest/versions already up to date."
|
||||
exit 0
|
||||
fi
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git add manifest.json versions.json
|
||||
git commit -m "chore(obsidian-plugin): mirror manifest+versions for ${{ steps.version.outputs.tag }}"
|
||||
# Push to the default branch so Obsidian can fetch raw files from HEAD.
|
||||
if ! git push origin HEAD:${{ github.event.repository.default_branch }}; then
|
||||
echo "::warning::Failed to push mirrored manifest/versions to default branch (likely branch protection). Continuing release."
|
||||
fi
|
||||
|
||||
# Publish release under bare `manifest.json` version (no `obsidian-v` prefix) for BRAT/store compatibility.
|
||||
# `make_latest: "false"` keeps the desktop app's `v*` release headlined since Obsidian and BRAT resolve plugins via getReleaseByTag, not the latest flag.
|
||||
- name: Create GitHub release
|
||||
if: steps.release_mode.outputs.should_publish == 'true'
|
||||
uses: softprops/action-gh-release@v3
|
||||
with:
|
||||
tag_name: ${{ steps.version.outputs.version }}
|
||||
name: SurfSense Obsidian Plugin ${{ steps.version.outputs.version }}
|
||||
generate_release_notes: true
|
||||
make_latest: "false"
|
||||
files: |
|
||||
surfsense_obsidian/main.js
|
||||
surfsense_obsidian/manifest.json
|
||||
surfsense_obsidian/styles.css
|
||||
10
manifest.json
Normal file
10
manifest.json
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
{
|
||||
"id": "surfsense-obsidian",
|
||||
"name": "SurfSense",
|
||||
"version": "0.1.0",
|
||||
"minAppVersion": "1.5.4",
|
||||
"description": "Turn your vault into a searchable second brain with SurfSense.",
|
||||
"author": "SurfSense",
|
||||
"authorUrl": "https://www.surfsense.com",
|
||||
"isDesktopOnly": false
|
||||
}
|
||||
|
|
@ -0,0 +1,106 @@
|
|||
"""129_obsidian_plugin_vault_identity
|
||||
|
||||
Revision ID: 129
|
||||
Revises: 128
|
||||
Create Date: 2026-04-21
|
||||
|
||||
Locks down vault identity for the Obsidian plugin connector:
|
||||
|
||||
- Deactivates pre-plugin OBSIDIAN_CONNECTOR rows.
|
||||
- Partial unique index on ``(user_id, (config->>'vault_id'))`` for the
|
||||
``/obsidian/connect`` upsert fast path.
|
||||
- Partial unique index on ``(user_id, (config->>'vault_fingerprint'))``
|
||||
so two devices observing the same vault content can never produce
|
||||
two connector rows. Collisions are caught by the route handler and
|
||||
routed through the merge path.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
from alembic import op
|
||||
|
||||
revision: str = "129"
|
||||
down_revision: str | None = "128"
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
depends_on: str | Sequence[str] | None = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
|
||||
conn.execute(
|
||||
sa.text(
|
||||
"""
|
||||
UPDATE search_source_connectors
|
||||
SET
|
||||
is_indexable = false,
|
||||
periodic_indexing_enabled = false,
|
||||
next_scheduled_at = NULL,
|
||||
config = COALESCE(config, '{}'::json)::jsonb
|
||||
|| jsonb_build_object(
|
||||
'legacy', true,
|
||||
'deactivated_at', to_char(
|
||||
now() AT TIME ZONE 'UTC',
|
||||
'YYYY-MM-DD"T"HH24:MI:SS"Z"'
|
||||
)
|
||||
)
|
||||
WHERE connector_type = 'OBSIDIAN_CONNECTOR'
|
||||
AND COALESCE((config::jsonb)->>'source', '') <> 'plugin'
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
conn.execute(
|
||||
sa.text(
|
||||
"""
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS
|
||||
search_source_connectors_obsidian_plugin_vault_uniq
|
||||
ON search_source_connectors (user_id, ((config->>'vault_id')))
|
||||
WHERE connector_type = 'OBSIDIAN_CONNECTOR'
|
||||
AND config->>'source' = 'plugin'
|
||||
AND config->>'vault_id' IS NOT NULL
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
conn.execute(
|
||||
sa.text(
|
||||
"""
|
||||
CREATE UNIQUE INDEX IF NOT EXISTS
|
||||
search_source_connectors_obsidian_plugin_fingerprint_uniq
|
||||
ON search_source_connectors (user_id, ((config->>'vault_fingerprint')))
|
||||
WHERE connector_type = 'OBSIDIAN_CONNECTOR'
|
||||
AND config->>'source' = 'plugin'
|
||||
AND config->>'vault_fingerprint' IS NOT NULL
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
conn.execute(
|
||||
sa.text(
|
||||
"DROP INDEX IF EXISTS "
|
||||
"search_source_connectors_obsidian_plugin_fingerprint_uniq"
|
||||
)
|
||||
)
|
||||
conn.execute(
|
||||
sa.text(
|
||||
"DROP INDEX IF EXISTS search_source_connectors_obsidian_plugin_vault_uniq"
|
||||
)
|
||||
)
|
||||
conn.execute(
|
||||
sa.text(
|
||||
"""
|
||||
UPDATE search_source_connectors
|
||||
SET config = (config::jsonb - 'legacy' - 'deactivated_at')::json
|
||||
WHERE connector_type = 'OBSIDIAN_CONNECTOR'
|
||||
AND (config::jsonb) ? 'legacy'
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
|
@ -90,6 +90,7 @@ celery_app = Celery(
|
|||
"app.tasks.celery_tasks.podcast_tasks",
|
||||
"app.tasks.celery_tasks.video_presentation_tasks",
|
||||
"app.tasks.celery_tasks.connector_tasks",
|
||||
"app.tasks.celery_tasks.obsidian_tasks",
|
||||
"app.tasks.celery_tasks.schedule_checker_task",
|
||||
"app.tasks.celery_tasks.document_reindex_tasks",
|
||||
"app.tasks.celery_tasks.stale_notification_cleanup_task",
|
||||
|
|
@ -144,8 +145,8 @@ celery_app.conf.update(
|
|||
"index_elasticsearch_documents": {"queue": CONNECTORS_QUEUE},
|
||||
"index_crawled_urls": {"queue": CONNECTORS_QUEUE},
|
||||
"index_bookstack_pages": {"queue": CONNECTORS_QUEUE},
|
||||
"index_obsidian_vault": {"queue": CONNECTORS_QUEUE},
|
||||
"index_composio_connector": {"queue": CONNECTORS_QUEUE},
|
||||
"index_obsidian_attachment": {"queue": CONNECTORS_QUEUE},
|
||||
# Everything else (document processing, podcasts, reindexing,
|
||||
# schedule checker, cleanup) stays on the default fast queue.
|
||||
},
|
||||
|
|
|
|||
|
|
@ -1510,6 +1510,31 @@ class SearchSourceConnector(BaseModel, TimestampMixin):
|
|||
"name",
|
||||
name="uq_searchspace_user_connector_type_name",
|
||||
),
|
||||
# Mirrors migration 129; backs the ``/obsidian/connect`` upsert.
|
||||
Index(
|
||||
"search_source_connectors_obsidian_plugin_vault_uniq",
|
||||
"user_id",
|
||||
text("(config->>'vault_id')"),
|
||||
unique=True,
|
||||
postgresql_where=text(
|
||||
"connector_type = 'OBSIDIAN_CONNECTOR' "
|
||||
"AND config->>'source' = 'plugin' "
|
||||
"AND config->>'vault_id' IS NOT NULL"
|
||||
),
|
||||
),
|
||||
# Cross-device dedup: same vault content from different devices
|
||||
# cannot produce two connector rows.
|
||||
Index(
|
||||
"search_source_connectors_obsidian_plugin_fingerprint_uniq",
|
||||
"user_id",
|
||||
text("(config->>'vault_fingerprint')"),
|
||||
unique=True,
|
||||
postgresql_where=text(
|
||||
"connector_type = 'OBSIDIAN_CONNECTOR' "
|
||||
"AND config->>'source' = 'plugin' "
|
||||
"AND config->>'vault_fingerprint' IS NOT NULL"
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
name = Column(String(100), nullable=False, index=True)
|
||||
|
|
|
|||
|
|
@ -38,6 +38,7 @@ from .new_llm_config_routes import router as new_llm_config_router
|
|||
from .notes_routes import router as notes_router
|
||||
from .notifications_routes import router as notifications_router
|
||||
from .notion_add_connector_route import router as notion_add_connector_router
|
||||
from .obsidian_plugin_routes import router as obsidian_plugin_router
|
||||
from .onedrive_add_connector_route import router as onedrive_add_connector_router
|
||||
from .podcasts_routes import router as podcasts_router
|
||||
from .prompts_routes import router as prompts_router
|
||||
|
|
@ -85,6 +86,7 @@ router.include_router(notion_add_connector_router)
|
|||
router.include_router(slack_add_connector_router)
|
||||
router.include_router(teams_add_connector_router)
|
||||
router.include_router(onedrive_add_connector_router)
|
||||
router.include_router(obsidian_plugin_router) # Obsidian plugin push API
|
||||
router.include_router(discord_add_connector_router)
|
||||
router.include_router(jira_add_connector_router)
|
||||
router.include_router(confluence_add_connector_router)
|
||||
|
|
|
|||
706
surfsense_backend/app/routes/obsidian_plugin_routes.py
Normal file
706
surfsense_backend/app/routes/obsidian_plugin_routes.py
Normal file
|
|
@ -0,0 +1,706 @@
|
|||
"""Obsidian plugin ingestion routes (``/api/v1/obsidian/*``).
|
||||
|
||||
Wire surface for the ``surfsense_obsidian/`` plugin. Versioning anchor is
|
||||
the ``/api/v1/`` URL prefix; additive feature detection rides the
|
||||
``capabilities`` array on /health and /connect.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from datetime import UTC, datetime
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from sqlalchemy import and_, case, func
|
||||
from sqlalchemy.dialects.postgresql import insert as pg_insert
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.future import select
|
||||
|
||||
from app.db import (
|
||||
Document,
|
||||
DocumentType,
|
||||
SearchSourceConnector,
|
||||
SearchSourceConnectorType,
|
||||
SearchSpace,
|
||||
User,
|
||||
get_async_session,
|
||||
)
|
||||
from app.schemas.obsidian_plugin import (
|
||||
ALLOWED_ATTACHMENT_EXTENSIONS,
|
||||
ATTACHMENT_MIME_TYPES,
|
||||
ConnectRequest,
|
||||
ConnectResponse,
|
||||
DeleteAck,
|
||||
DeleteAckItem,
|
||||
DeleteBatchRequest,
|
||||
HealthResponse,
|
||||
ManifestResponse,
|
||||
RenameAck,
|
||||
RenameAckItem,
|
||||
RenameBatchRequest,
|
||||
StatsResponse,
|
||||
SyncAck,
|
||||
SyncAckItem,
|
||||
SyncBatchRequest,
|
||||
)
|
||||
from app.services.notification_service import NotificationService
|
||||
from app.services.obsidian_plugin_indexer import (
|
||||
delete_note,
|
||||
get_manifest,
|
||||
merge_obsidian_connectors,
|
||||
rename_note,
|
||||
upsert_note,
|
||||
)
|
||||
from app.tasks.celery_tasks.obsidian_tasks import index_obsidian_attachment_task
|
||||
from app.users import current_active_user
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/obsidian", tags=["obsidian-plugin"])
|
||||
|
||||
|
||||
# Plugins feature-gate on these. Add entries, never rename or remove.
|
||||
OBSIDIAN_CAPABILITIES: list[str] = ["sync", "rename", "delete", "manifest", "stats"]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _build_handshake() -> dict[str, object]:
|
||||
return {"capabilities": list(OBSIDIAN_CAPABILITIES)}
|
||||
|
||||
|
||||
def _connector_type_value(connector: SearchSourceConnector) -> str:
|
||||
connector_type = connector.connector_type
|
||||
if hasattr(connector_type, "value"):
|
||||
return str(connector_type.value)
|
||||
return str(connector_type)
|
||||
|
||||
|
||||
async def _start_obsidian_sync_notification(
|
||||
session: AsyncSession,
|
||||
*,
|
||||
user: User,
|
||||
connector: SearchSourceConnector,
|
||||
total_count: int,
|
||||
):
|
||||
"""Create/update the rolling inbox item for Obsidian plugin sync.
|
||||
|
||||
Obsidian sync is continuous and batched, so we keep one stable
|
||||
operation_id per connector instead of creating a new notification per batch.
|
||||
"""
|
||||
handler = NotificationService.connector_indexing
|
||||
operation_id = f"obsidian_sync_connector_{connector.id}"
|
||||
connector_name = connector.name or "Obsidian"
|
||||
notification = await handler.find_or_create_notification(
|
||||
session=session,
|
||||
user_id=user.id,
|
||||
operation_id=operation_id,
|
||||
title=f"Syncing: {connector_name}",
|
||||
message="Syncing from Obsidian plugin",
|
||||
search_space_id=connector.search_space_id,
|
||||
initial_metadata={
|
||||
"connector_id": connector.id,
|
||||
"connector_name": connector_name,
|
||||
"connector_type": _connector_type_value(connector),
|
||||
"sync_stage": "processing",
|
||||
"indexed_count": 0,
|
||||
"failed_count": 0,
|
||||
"total_count": total_count,
|
||||
"source": "obsidian_plugin",
|
||||
},
|
||||
)
|
||||
return await handler.update_notification(
|
||||
session=session,
|
||||
notification=notification,
|
||||
status="in_progress",
|
||||
metadata_updates={
|
||||
"sync_stage": "processing",
|
||||
"total_count": total_count,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def _finish_obsidian_sync_notification(
|
||||
session: AsyncSession,
|
||||
*,
|
||||
notification,
|
||||
indexed: int,
|
||||
failed: int,
|
||||
):
|
||||
"""Mark the rolling Obsidian sync inbox item complete or failed."""
|
||||
handler = NotificationService.connector_indexing
|
||||
connector_name = notification.notification_metadata.get(
|
||||
"connector_name", "Obsidian"
|
||||
)
|
||||
if failed > 0 and indexed == 0:
|
||||
title = f"Failed: {connector_name}"
|
||||
message = (
|
||||
f"Sync failed: {failed} file(s) failed"
|
||||
if failed > 1
|
||||
else "Sync failed: 1 file failed"
|
||||
)
|
||||
status_value = "failed"
|
||||
stage = "failed"
|
||||
else:
|
||||
title = f"Ready: {connector_name}"
|
||||
if failed > 0:
|
||||
message = f"Partially synced: {indexed} file(s) synced, {failed} failed."
|
||||
elif indexed == 0:
|
||||
message = "Already up to date!"
|
||||
elif indexed == 1:
|
||||
message = "Now searchable! 1 file synced."
|
||||
else:
|
||||
message = f"Now searchable! {indexed} files synced."
|
||||
status_value = "completed"
|
||||
stage = "completed"
|
||||
|
||||
await handler.update_notification(
|
||||
session=session,
|
||||
notification=notification,
|
||||
title=title,
|
||||
message=message,
|
||||
status=status_value,
|
||||
metadata_updates={
|
||||
"indexed_count": indexed,
|
||||
"failed_count": failed,
|
||||
"sync_stage": stage,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def _resolve_vault_connector(
|
||||
session: AsyncSession,
|
||||
*,
|
||||
user: User,
|
||||
vault_id: str,
|
||||
) -> SearchSourceConnector:
|
||||
"""Find the OBSIDIAN_CONNECTOR row that owns ``vault_id`` for this user."""
|
||||
# ``config`` is core ``JSON`` (not ``JSONB``); ``as_string()`` is the
|
||||
# cross-dialect equivalent of ``.astext`` and compiles to ``->>``.
|
||||
stmt = select(SearchSourceConnector).where(
|
||||
and_(
|
||||
SearchSourceConnector.user_id == user.id,
|
||||
SearchSourceConnector.connector_type
|
||||
== SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
|
||||
SearchSourceConnector.config["vault_id"].as_string() == vault_id,
|
||||
SearchSourceConnector.config["source"].as_string() == "plugin",
|
||||
)
|
||||
)
|
||||
|
||||
connector = (await session.execute(stmt)).scalars().first()
|
||||
if connector is not None:
|
||||
return connector
|
||||
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail={
|
||||
"code": "VAULT_NOT_REGISTERED",
|
||||
"message": (
|
||||
"No Obsidian plugin connector found for this vault. "
|
||||
"Call POST /obsidian/connect first."
|
||||
),
|
||||
"vault_id": vault_id,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def _queue_obsidian_attachment(
|
||||
*, connector_id: int, note_payload: dict, user_id: str
|
||||
) -> None:
|
||||
"""Enqueue one non-markdown Obsidian note for background ETL/indexing."""
|
||||
index_obsidian_attachment_task.delay(
|
||||
connector_id=connector_id,
|
||||
payload_data=note_payload,
|
||||
user_id=user_id,
|
||||
)
|
||||
|
||||
|
||||
async def _ensure_search_space_access(
|
||||
session: AsyncSession,
|
||||
*,
|
||||
user: User,
|
||||
search_space_id: int,
|
||||
) -> SearchSpace:
|
||||
"""Owner-only access to the search space (shared spaces are a follow-up)."""
|
||||
result = await session.execute(
|
||||
select(SearchSpace).where(
|
||||
and_(SearchSpace.id == search_space_id, SearchSpace.user_id == user.id)
|
||||
)
|
||||
)
|
||||
space = result.scalars().first()
|
||||
if space is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail={
|
||||
"code": "SEARCH_SPACE_FORBIDDEN",
|
||||
"message": "You don't own that search space.",
|
||||
},
|
||||
)
|
||||
return space
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Endpoints
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@router.get("/health", response_model=HealthResponse)
|
||||
async def obsidian_health(
|
||||
user: User = Depends(current_active_user),
|
||||
) -> HealthResponse:
|
||||
"""Return the API contract handshake; plugin caches it per onload."""
|
||||
return HealthResponse(
|
||||
**_build_handshake(),
|
||||
server_time_utc=datetime.now(UTC),
|
||||
)
|
||||
|
||||
|
||||
async def _find_by_vault_id(
|
||||
session: AsyncSession, *, user_id, vault_id: str
|
||||
) -> SearchSourceConnector | None:
|
||||
stmt = select(SearchSourceConnector).where(
|
||||
and_(
|
||||
SearchSourceConnector.user_id == user_id,
|
||||
SearchSourceConnector.connector_type
|
||||
== SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
|
||||
SearchSourceConnector.config["source"].as_string() == "plugin",
|
||||
SearchSourceConnector.config["vault_id"].as_string() == vault_id,
|
||||
)
|
||||
)
|
||||
return (await session.execute(stmt)).scalars().first()
|
||||
|
||||
|
||||
async def _find_by_fingerprint(
|
||||
session: AsyncSession, *, user_id, vault_fingerprint: str
|
||||
) -> SearchSourceConnector | None:
|
||||
stmt = select(SearchSourceConnector).where(
|
||||
and_(
|
||||
SearchSourceConnector.user_id == user_id,
|
||||
SearchSourceConnector.connector_type
|
||||
== SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
|
||||
SearchSourceConnector.config["source"].as_string() == "plugin",
|
||||
SearchSourceConnector.config["vault_fingerprint"].as_string()
|
||||
== vault_fingerprint,
|
||||
)
|
||||
)
|
||||
return (await session.execute(stmt)).scalars().first()
|
||||
|
||||
|
||||
def _build_config(payload: ConnectRequest, *, now_iso: str) -> dict[str, object]:
|
||||
return {
|
||||
"vault_id": payload.vault_id,
|
||||
"vault_name": payload.vault_name,
|
||||
"vault_fingerprint": payload.vault_fingerprint,
|
||||
"source": "plugin",
|
||||
"last_connect_at": now_iso,
|
||||
}
|
||||
|
||||
|
||||
def _display_name(vault_name: str) -> str:
|
||||
return f"Obsidian - {vault_name}"
|
||||
|
||||
|
||||
@router.post("/connect", response_model=ConnectResponse)
|
||||
async def obsidian_connect(
|
||||
payload: ConnectRequest,
|
||||
user: User = Depends(current_active_user),
|
||||
session: AsyncSession = Depends(get_async_session),
|
||||
) -> ConnectResponse:
|
||||
"""Register a vault, refresh an existing one, or adopt another device's row.
|
||||
|
||||
Resolution order:
|
||||
1. ``(user_id, vault_id)`` → known device, refresh metadata.
|
||||
2. ``(user_id, vault_fingerprint)`` → another device of the same vault,
|
||||
caller adopts the surviving ``vault_id``.
|
||||
3. Insert a new row.
|
||||
|
||||
Fingerprint collisions on (1) trigger ``merge_obsidian_connectors`` so
|
||||
the partial unique index can never produce two live rows for one vault.
|
||||
"""
|
||||
await _ensure_search_space_access(
|
||||
session, user=user, search_space_id=payload.search_space_id
|
||||
)
|
||||
|
||||
now_iso = datetime.now(UTC).isoformat()
|
||||
cfg = _build_config(payload, now_iso=now_iso)
|
||||
display_name = _display_name(payload.vault_name)
|
||||
|
||||
existing_by_vid = await _find_by_vault_id(
|
||||
session, user_id=user.id, vault_id=payload.vault_id
|
||||
)
|
||||
if existing_by_vid is not None:
|
||||
collision = await _find_by_fingerprint(
|
||||
session, user_id=user.id, vault_fingerprint=payload.vault_fingerprint
|
||||
)
|
||||
if collision is not None and collision.id != existing_by_vid.id:
|
||||
await merge_obsidian_connectors(
|
||||
session, source=existing_by_vid, target=collision
|
||||
)
|
||||
collision_cfg = dict(collision.config or {})
|
||||
collision_cfg["vault_name"] = payload.vault_name
|
||||
collision_cfg["last_connect_at"] = now_iso
|
||||
collision.config = collision_cfg
|
||||
collision.name = _display_name(payload.vault_name)
|
||||
response = ConnectResponse(
|
||||
connector_id=collision.id,
|
||||
vault_id=collision_cfg["vault_id"],
|
||||
search_space_id=collision.search_space_id,
|
||||
server_time_utc=datetime.now(UTC),
|
||||
**_build_handshake(),
|
||||
)
|
||||
await session.commit()
|
||||
return response
|
||||
|
||||
existing_by_vid.name = display_name
|
||||
existing_by_vid.config = cfg
|
||||
existing_by_vid.search_space_id = payload.search_space_id
|
||||
existing_by_vid.is_indexable = False
|
||||
response = ConnectResponse(
|
||||
connector_id=existing_by_vid.id,
|
||||
vault_id=payload.vault_id,
|
||||
search_space_id=existing_by_vid.search_space_id,
|
||||
server_time_utc=datetime.now(UTC),
|
||||
**_build_handshake(),
|
||||
)
|
||||
await session.commit()
|
||||
return response
|
||||
|
||||
existing_by_fp = await _find_by_fingerprint(
|
||||
session, user_id=user.id, vault_fingerprint=payload.vault_fingerprint
|
||||
)
|
||||
if existing_by_fp is not None:
|
||||
survivor_cfg = dict(existing_by_fp.config or {})
|
||||
survivor_cfg["vault_name"] = payload.vault_name
|
||||
survivor_cfg["last_connect_at"] = now_iso
|
||||
existing_by_fp.config = survivor_cfg
|
||||
existing_by_fp.name = display_name
|
||||
response = ConnectResponse(
|
||||
connector_id=existing_by_fp.id,
|
||||
vault_id=survivor_cfg["vault_id"],
|
||||
search_space_id=existing_by_fp.search_space_id,
|
||||
server_time_utc=datetime.now(UTC),
|
||||
**_build_handshake(),
|
||||
)
|
||||
await session.commit()
|
||||
return response
|
||||
|
||||
# ON CONFLICT DO NOTHING matches any unique index (vault_id OR
|
||||
# fingerprint), so concurrent first-time connects from two devices
|
||||
# of the same vault never raise IntegrityError — the loser just
|
||||
# gets an empty RETURNING and falls through to re-fetch the winner.
|
||||
insert_stmt = (
|
||||
pg_insert(SearchSourceConnector)
|
||||
.values(
|
||||
name=display_name,
|
||||
connector_type=SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
|
||||
is_indexable=False,
|
||||
config=cfg,
|
||||
user_id=user.id,
|
||||
search_space_id=payload.search_space_id,
|
||||
)
|
||||
.on_conflict_do_nothing()
|
||||
.returning(
|
||||
SearchSourceConnector.id,
|
||||
SearchSourceConnector.search_space_id,
|
||||
)
|
||||
)
|
||||
inserted = (await session.execute(insert_stmt)).first()
|
||||
if inserted is not None:
|
||||
response = ConnectResponse(
|
||||
connector_id=inserted.id,
|
||||
vault_id=payload.vault_id,
|
||||
search_space_id=inserted.search_space_id,
|
||||
server_time_utc=datetime.now(UTC),
|
||||
**_build_handshake(),
|
||||
)
|
||||
await session.commit()
|
||||
return response
|
||||
|
||||
winner = await _find_by_fingerprint(
|
||||
session, user_id=user.id, vault_fingerprint=payload.vault_fingerprint
|
||||
)
|
||||
if winner is None:
|
||||
winner = await _find_by_vault_id(
|
||||
session, user_id=user.id, vault_id=payload.vault_id
|
||||
)
|
||||
if winner is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_409_CONFLICT,
|
||||
detail="vault registration conflicted but winning row could not be located",
|
||||
)
|
||||
response = ConnectResponse(
|
||||
connector_id=winner.id,
|
||||
vault_id=(winner.config or {})["vault_id"],
|
||||
search_space_id=winner.search_space_id,
|
||||
server_time_utc=datetime.now(UTC),
|
||||
**_build_handshake(),
|
||||
)
|
||||
await session.commit()
|
||||
return response
|
||||
|
||||
|
||||
@router.post("/sync", response_model=SyncAck)
|
||||
async def obsidian_sync(
|
||||
payload: SyncBatchRequest,
|
||||
user: User = Depends(current_active_user),
|
||||
session: AsyncSession = Depends(get_async_session),
|
||||
) -> SyncAck:
|
||||
"""Batch-upsert notes; returns per-note ack so the plugin can dequeue/retry."""
|
||||
connector = await _resolve_vault_connector(
|
||||
session, user=user, vault_id=payload.vault_id
|
||||
)
|
||||
notification = None
|
||||
try:
|
||||
notification = await _start_obsidian_sync_notification(
|
||||
session, user=user, connector=connector, total_count=len(payload.notes)
|
||||
)
|
||||
except Exception:
|
||||
logger.warning(
|
||||
"obsidian sync notification start failed connector=%s user=%s",
|
||||
connector.id,
|
||||
user.id,
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
items: list[SyncAckItem] = []
|
||||
indexed = 0
|
||||
failed = 0
|
||||
|
||||
for note in payload.notes:
|
||||
try:
|
||||
if note.is_binary:
|
||||
ext = note.extension.lstrip(".").lower()
|
||||
if ext not in ALLOWED_ATTACHMENT_EXTENSIONS:
|
||||
failed += 1
|
||||
items.append(
|
||||
SyncAckItem(
|
||||
path=note.path,
|
||||
status="error",
|
||||
error=f"unsupported attachment extension: .{ext}",
|
||||
)
|
||||
)
|
||||
continue
|
||||
expected_mime = ATTACHMENT_MIME_TYPES[ext]
|
||||
if note.mime_type != expected_mime:
|
||||
failed += 1
|
||||
items.append(
|
||||
SyncAckItem(
|
||||
path=note.path,
|
||||
status="error",
|
||||
error=(
|
||||
f"mime_type '{note.mime_type}' does not match "
|
||||
f"extension .{ext}"
|
||||
),
|
||||
)
|
||||
)
|
||||
continue
|
||||
_queue_obsidian_attachment(
|
||||
connector_id=connector.id,
|
||||
note_payload=note.model_dump(mode="json"),
|
||||
user_id=str(user.id),
|
||||
)
|
||||
indexed += 1
|
||||
items.append(SyncAckItem(path=note.path, status="queued"))
|
||||
continue
|
||||
|
||||
doc = await upsert_note(
|
||||
session, connector=connector, payload=note, user_id=str(user.id)
|
||||
)
|
||||
indexed += 1
|
||||
items.append(SyncAckItem(path=note.path, status="ok", document_id=doc.id))
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as exc:
|
||||
failed += 1
|
||||
logger.exception(
|
||||
"obsidian /sync failed for path=%s vault=%s",
|
||||
note.path,
|
||||
payload.vault_id,
|
||||
)
|
||||
items.append(
|
||||
SyncAckItem(path=note.path, status="error", error=str(exc)[:300])
|
||||
)
|
||||
|
||||
if notification is not None:
|
||||
try:
|
||||
await _finish_obsidian_sync_notification(
|
||||
session,
|
||||
notification=notification,
|
||||
indexed=indexed,
|
||||
failed=failed,
|
||||
)
|
||||
except Exception:
|
||||
logger.warning(
|
||||
"obsidian sync notification finish failed connector=%s user=%s",
|
||||
connector.id,
|
||||
user.id,
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
return SyncAck(
|
||||
vault_id=payload.vault_id,
|
||||
indexed=indexed,
|
||||
failed=failed,
|
||||
items=items,
|
||||
)
|
||||
|
||||
|
||||
@router.post("/rename", response_model=RenameAck)
|
||||
async def obsidian_rename(
|
||||
payload: RenameBatchRequest,
|
||||
user: User = Depends(current_active_user),
|
||||
session: AsyncSession = Depends(get_async_session),
|
||||
) -> RenameAck:
|
||||
"""Apply a batch of vault rename events."""
|
||||
connector = await _resolve_vault_connector(
|
||||
session, user=user, vault_id=payload.vault_id
|
||||
)
|
||||
|
||||
items: list[RenameAckItem] = []
|
||||
renamed = 0
|
||||
missing = 0
|
||||
|
||||
for item in payload.renames:
|
||||
try:
|
||||
doc = await rename_note(
|
||||
session,
|
||||
connector=connector,
|
||||
old_path=item.old_path,
|
||||
new_path=item.new_path,
|
||||
vault_id=payload.vault_id,
|
||||
)
|
||||
if doc is None:
|
||||
missing += 1
|
||||
items.append(
|
||||
RenameAckItem(
|
||||
old_path=item.old_path,
|
||||
new_path=item.new_path,
|
||||
status="missing",
|
||||
)
|
||||
)
|
||||
else:
|
||||
renamed += 1
|
||||
items.append(
|
||||
RenameAckItem(
|
||||
old_path=item.old_path,
|
||||
new_path=item.new_path,
|
||||
status="ok",
|
||||
document_id=doc.id,
|
||||
)
|
||||
)
|
||||
except Exception as exc:
|
||||
logger.exception(
|
||||
"obsidian /rename failed for old=%s new=%s vault=%s",
|
||||
item.old_path,
|
||||
item.new_path,
|
||||
payload.vault_id,
|
||||
)
|
||||
items.append(
|
||||
RenameAckItem(
|
||||
old_path=item.old_path,
|
||||
new_path=item.new_path,
|
||||
status="error",
|
||||
error=str(exc)[:300],
|
||||
)
|
||||
)
|
||||
|
||||
return RenameAck(
|
||||
vault_id=payload.vault_id,
|
||||
renamed=renamed,
|
||||
missing=missing,
|
||||
items=items,
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/notes", response_model=DeleteAck)
|
||||
async def obsidian_delete_notes(
|
||||
payload: DeleteBatchRequest,
|
||||
user: User = Depends(current_active_user),
|
||||
session: AsyncSession = Depends(get_async_session),
|
||||
) -> DeleteAck:
|
||||
"""Soft-delete a batch of notes by vault-relative path."""
|
||||
connector = await _resolve_vault_connector(
|
||||
session, user=user, vault_id=payload.vault_id
|
||||
)
|
||||
|
||||
deleted = 0
|
||||
missing = 0
|
||||
items: list[DeleteAckItem] = []
|
||||
for path in payload.paths:
|
||||
try:
|
||||
ok = await delete_note(
|
||||
session,
|
||||
connector=connector,
|
||||
vault_id=payload.vault_id,
|
||||
path=path,
|
||||
)
|
||||
if ok:
|
||||
deleted += 1
|
||||
items.append(DeleteAckItem(path=path, status="ok"))
|
||||
else:
|
||||
missing += 1
|
||||
items.append(DeleteAckItem(path=path, status="missing"))
|
||||
except Exception as exc:
|
||||
logger.exception(
|
||||
"obsidian DELETE /notes failed for path=%s vault=%s",
|
||||
path,
|
||||
payload.vault_id,
|
||||
)
|
||||
items.append(DeleteAckItem(path=path, status="error", error=str(exc)[:300]))
|
||||
|
||||
return DeleteAck(
|
||||
vault_id=payload.vault_id,
|
||||
deleted=deleted,
|
||||
missing=missing,
|
||||
items=items,
|
||||
)
|
||||
|
||||
|
||||
@router.get("/manifest", response_model=ManifestResponse)
|
||||
async def obsidian_manifest(
|
||||
vault_id: str = Query(..., description="Plugin-side stable vault UUID"),
|
||||
user: User = Depends(current_active_user),
|
||||
session: AsyncSession = Depends(get_async_session),
|
||||
) -> ManifestResponse:
|
||||
"""Return ``{path: {hash, mtime}}`` for the plugin's onload reconcile diff."""
|
||||
connector = await _resolve_vault_connector(session, user=user, vault_id=vault_id)
|
||||
return await get_manifest(session, connector=connector, vault_id=vault_id)
|
||||
|
||||
|
||||
@router.get("/stats", response_model=StatsResponse)
|
||||
async def obsidian_stats(
|
||||
vault_id: str = Query(..., description="Plugin-side stable vault UUID"),
|
||||
user: User = Depends(current_active_user),
|
||||
session: AsyncSession = Depends(get_async_session),
|
||||
) -> StatsResponse:
|
||||
"""Active-note count + last sync time for the web tile.
|
||||
|
||||
``files_synced`` excludes tombstones so it matches ``/manifest``;
|
||||
``last_sync_at`` includes them so deletes advance the freshness signal.
|
||||
"""
|
||||
connector = await _resolve_vault_connector(session, user=user, vault_id=vault_id)
|
||||
|
||||
is_active = Document.document_metadata["deleted_at"].as_string().is_(None)
|
||||
|
||||
row = (
|
||||
await session.execute(
|
||||
select(
|
||||
func.count(case((is_active, 1))).label("files_synced"),
|
||||
func.max(Document.updated_at).label("last_sync_at"),
|
||||
).where(
|
||||
and_(
|
||||
Document.connector_id == connector.id,
|
||||
Document.document_type == DocumentType.OBSIDIAN_CONNECTOR,
|
||||
)
|
||||
)
|
||||
)
|
||||
).first()
|
||||
|
||||
return StatsResponse(
|
||||
vault_id=vault_id,
|
||||
files_synced=int(row[0] or 0),
|
||||
last_sync_at=row[1],
|
||||
)
|
||||
|
|
@ -1058,25 +1058,6 @@ async def index_connector_content(
|
|||
)
|
||||
response_message = "Web page indexing started in the background."
|
||||
|
||||
elif connector.connector_type == SearchSourceConnectorType.OBSIDIAN_CONNECTOR:
|
||||
from app.config import config as app_config
|
||||
from app.tasks.celery_tasks.connector_tasks import index_obsidian_vault_task
|
||||
|
||||
# Obsidian connector only available in self-hosted mode
|
||||
if not app_config.is_self_hosted():
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Obsidian connector is only available in self-hosted mode",
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Triggering Obsidian vault indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}"
|
||||
)
|
||||
index_obsidian_vault_task.delay(
|
||||
connector_id, search_space_id, str(user.id), indexing_from, indexing_to
|
||||
)
|
||||
response_message = "Obsidian vault indexing started in the background."
|
||||
|
||||
elif (
|
||||
connector.connector_type
|
||||
== SearchSourceConnectorType.COMPOSIO_GOOGLE_DRIVE_CONNECTOR
|
||||
|
|
@ -2549,59 +2530,6 @@ async def run_bookstack_indexing(
|
|||
)
|
||||
|
||||
|
||||
# Add new helper functions for Obsidian indexing
async def run_obsidian_indexing_with_new_session(
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str,
    end_date: str,
):
    """Wrapper to run Obsidian indexing with its own database session.

    Background entry point: creates and owns the session for the whole
    run instead of sharing a request-scoped one.

    Args:
        connector_id: ID of the Obsidian connector to index.
        search_space_id: Target search space.
        user_id: Owning user's ID (string form).
        start_date: Inclusive indexing window start.
        end_date: Inclusive indexing window end.
    """
    logger.info(
        f"Background task started: Indexing Obsidian connector {connector_id} into space {search_space_id} from {start_date} to {end_date}"
    )
    async with async_session_maker() as session:
        await run_obsidian_indexing(
            session, connector_id, search_space_id, user_id, start_date, end_date
        )
    logger.info(f"Background task finished: Indexing Obsidian connector {connector_id}")
|
||||
|
||||
|
||||
async def run_obsidian_indexing(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str,
    end_date: str,
):
    """
    Background task to run Obsidian vault indexing.

    Thin adapter: binds ``index_obsidian_vault`` into the shared
    notification-aware indexing runner.

    Args:
        session: Database session
        connector_id: ID of the Obsidian connector
        search_space_id: ID of the search space
        user_id: ID of the user
        start_date: Start date for indexing
        end_date: End date for indexing
    """
    # Imported here rather than at module top, matching the sibling
    # indexer wrappers in this file.
    from app.tasks.connector_indexers import index_obsidian_vault

    await _run_indexing_with_notifications(
        session=session,
        connector_id=connector_id,
        search_space_id=search_space_id,
        user_id=user_id,
        start_date=start_date,
        end_date=end_date,
        indexing_function=index_obsidian_vault,
        update_timestamp_func=_update_connector_timestamp_by_id,
        supports_heartbeat_callback=True,
    )
|
||||
|
||||
|
||||
async def run_composio_indexing_with_new_session(
|
||||
connector_id: int,
|
||||
search_space_id: int,
|
||||
|
|
|
|||
|
|
@ -1,59 +0,0 @@
|
|||
"""
|
||||
Obsidian Connector Credentials Schema.
|
||||
|
||||
Obsidian is a local-first note-taking app that stores notes as markdown files.
|
||||
This connector supports indexing from local file system (self-hosted only).
|
||||
"""
|
||||
|
||||
from pydantic import BaseModel, field_validator
|
||||
|
||||
|
||||
class ObsidianAuthCredentialsBase(BaseModel):
    """
    Credentials/configuration for the Obsidian connector.

    Since Obsidian vaults are local directories, this schema primarily
    holds the vault path and configuration options rather than API tokens.
    """

    vault_path: str
    vault_name: str | None = None
    exclude_folders: list[str] | None = None
    include_attachments: bool = False

    @field_validator("vault_path")
    @classmethod
    def validate_vault_path(cls, v: str) -> str:
        """Reject blank/whitespace-only paths and return the trimmed value."""
        stripped = (v or "").strip()
        if not stripped:
            raise ValueError("Vault path is required")
        return stripped

    @field_validator("exclude_folders", mode="before")
    @classmethod
    def parse_exclude_folders(cls, v):
        """Accept a comma-separated string, a list, or None (-> defaults)."""
        if isinstance(v, str):
            segments = (part.strip() for part in v.split(","))
            return [part for part in segments if part]
        if v is None:
            return [".trash", ".obsidian", "templates"]
        return v

    def to_dict(self) -> dict:
        """Convert credentials to dictionary for storage."""
        field_names = (
            "vault_path",
            "vault_name",
            "exclude_folders",
            "include_attachments",
        )
        return {name: getattr(self, name) for name in field_names}

    @classmethod
    def from_dict(cls, data: dict) -> "ObsidianAuthCredentialsBase":
        """Create credentials from dictionary."""
        return cls(
            vault_path=data.get("vault_path", ""),
            vault_name=data.get("vault_name"),
            exclude_folders=data.get("exclude_folders"),
            include_attachments=data.get("include_attachments", False),
        )
|
||||
234
surfsense_backend/app/schemas/obsidian_plugin.py
Normal file
234
surfsense_backend/app/schemas/obsidian_plugin.py
Normal file
|
|
@ -0,0 +1,234 @@
|
|||
"""Wire schemas spoken between the SurfSense Obsidian plugin and the backend.
|
||||
|
||||
All schemas inherit ``extra='ignore'`` from :class:`_PluginBase` so additive
|
||||
field changes never break either side; hard breaks live behind a new URL
|
||||
prefix (``/api/v2/...``).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any, Literal
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field, model_validator
|
||||
|
||||
_PLUGIN_MODEL_CONFIG = ConfigDict(extra="ignore")
|
||||
|
||||
|
||||
# Source of truth for the attachment whitelist. Mirrors MIME_BY_EXTENSION in
# surfsense_obsidian/src/sync-engine.ts — keep in sync.
ATTACHMENT_MIME_TYPES: dict[str, str] = {
    "pdf": "application/pdf",
    "png": "image/png",
    "jpg": "image/jpeg",
    "jpeg": "image/jpeg",
    "gif": "image/gif",
    "webp": "image/webp",
    "svg": "image/svg+xml",
    "txt": "text/plain",
}

# Frozen view over the whitelist keys for constant-time membership checks.
ALLOWED_ATTACHMENT_EXTENSIONS: frozenset[str] = frozenset(ATTACHMENT_MIME_TYPES.keys())
|
||||
|
||||
|
||||
class _PluginBase(BaseModel):
    """Base schema carrying the shared forward-compatibility config.

    ``extra='ignore'`` lets either side add fields without breaking the
    other; every wire schema in this module inherits from this class.
    """

    model_config = _PLUGIN_MODEL_CONFIG
|
||||
|
||||
|
||||
class HeadingRef(_PluginBase):
    """One markdown heading extracted from Obsidian metadata cache."""

    # Heading text as reported by the plugin.
    heading: str
    # Markdown heading depth: 1 (#) through 6 (######).
    level: int = Field(ge=1, le=6)
|
||||
|
||||
|
||||
class NotePayload(_PluginBase):
    """One Obsidian note as pushed by the plugin (the source of truth)."""

    vault_id: str = Field(
        ..., description="Stable plugin-generated UUID for this vault"
    )
    path: str = Field(..., description="Vault-relative path, e.g. 'notes/foo.md'")
    name: str = Field(..., description="File stem (no extension)")
    extension: str = Field(
        default="md", description="File extension without leading dot"
    )
    content: str = Field(default="", description="Raw markdown body (post-frontmatter)")

    # Structured metadata extracted by the plugin; all default to empty.
    frontmatter: dict[str, Any] = Field(default_factory=dict)
    tags: list[str] = Field(default_factory=list)
    headings: list[HeadingRef] = Field(default_factory=list)
    resolved_links: list[str] = Field(default_factory=list)
    unresolved_links: list[str] = Field(default_factory=list)
    embeds: list[str] = Field(default_factory=list)
    aliases: list[str] = Field(default_factory=list)

    content_hash: str = Field(
        ..., description="Plugin-computed SHA-256 of the raw content"
    )
    is_binary: bool = Field(
        default=False,
        description=(
            "True when payload represents a non-markdown attachment. "
            "If set, the plugin may include binary_base64 for ETL extraction."
        ),
    )
    binary_base64: str | None = Field(
        default=None,
        description=(
            "Base64-encoded raw file bytes for binary attachments. "
            "Used by the backend ETL pipeline."
        ),
    )
    mime_type: str | None = Field(
        default=None,
        description="Optional MIME type hint for binary attachments.",
    )
    size: int | None = Field(
        default=None,
        ge=0,
        description="Byte size of the local file (mtime+size short-circuit signal). Optional for forward compatibility.",
    )
    # File timestamps as reported by the plugin.
    mtime: datetime
    ctime: datetime

    @model_validator(mode="after")
    def _enforce_binary_invariants(self) -> NotePayload:
        # Binary payloads must carry both the bytes and a MIME hint;
        # non-binary payloads must carry neither (catches mislabeled input).
        if self.is_binary:
            if not self.binary_base64:
                raise ValueError("binary_base64 is required when is_binary is True")
            if not self.mime_type:
                raise ValueError("mime_type is required when is_binary is True")
        elif self.binary_base64 is not None or self.mime_type is not None:
            raise ValueError(
                "binary_base64 and mime_type must be omitted when is_binary is False",
            )
        return self
|
||||
|
||||
|
||||
class SyncBatchRequest(_PluginBase):
    """Batch upsert; plugin sends 10-20 notes per request."""

    vault_id: str
    # Hard cap guards against oversized request bodies.
    notes: list[NotePayload] = Field(default_factory=list, max_length=100)


class RenameItem(_PluginBase):
    """One rename event: vault-relative old path -> new path."""

    old_path: str
    new_path: str


class RenameBatchRequest(_PluginBase):
    """Batch of rename events for one vault."""

    vault_id: str
    renames: list[RenameItem] = Field(default_factory=list, max_length=200)


class DeleteBatchRequest(_PluginBase):
    """Batch of note deletions, identified by vault-relative path."""

    vault_id: str
    paths: list[str] = Field(default_factory=list, max_length=500)
|
||||
|
||||
|
||||
class ManifestEntry(_PluginBase):
    """Server-side state for one note; values of ``ManifestResponse.items``."""

    # Plugin-computed content hash last accepted by the server.
    hash: str
    mtime: datetime
    size: int | None = Field(
        default=None,
        description="Byte size last seen by the server. Enables mtime+size short-circuit; absent when not yet recorded.",
    )


class ManifestResponse(_PluginBase):
    """Path-keyed manifest of every non-deleted note for a vault."""

    vault_id: str
    items: dict[str, ManifestEntry] = Field(default_factory=dict)
|
||||
|
||||
|
||||
class ConnectRequest(_PluginBase):
    """Vault registration / heartbeat. Replayed on every plugin onload."""

    vault_id: str
    vault_name: str
    search_space_id: int
    vault_fingerprint: str = Field(
        ...,
        description=(
            "Deterministic SHA-256 over the sorted markdown paths in the vault "
            "(plus vault_name). Same vault content on any device produces the "
            "same value; the server uses it to dedup connectors across devices."
        ),
    )


class ConnectResponse(_PluginBase):
    """Carries the same handshake fields as ``HealthResponse`` so the plugin
    learns the contract without a separate ``GET /health`` round-trip."""

    connector_id: int
    vault_id: str
    search_space_id: int
    capabilities: list[str]
    server_time_utc: datetime


class HealthResponse(_PluginBase):
    """API contract handshake. ``capabilities`` is additive-only string list."""

    capabilities: list[str]
    server_time_utc: datetime
|
||||
|
||||
|
||||
# Per-item batch ack schemas — wire shape is load-bearing for the plugin
# queue (see api-client.ts / sync-engine.ts:processBatch).


class SyncAckItem(_PluginBase):
    """Outcome for one note in a /sync batch."""

    path: str
    # 'queued' means the attachment was handed to a background worker
    # rather than indexed inline.
    status: Literal["ok", "queued", "error"]
    document_id: int | None = None
    error: str | None = None


class SyncAck(_PluginBase):
    """Batch-level /sync response: counters plus per-note outcomes."""

    vault_id: str
    indexed: int
    failed: int
    items: list[SyncAckItem] = Field(default_factory=list)


class RenameAckItem(_PluginBase):
    """Outcome for one rename event."""

    old_path: str
    new_path: str
    # ``missing`` is treated as success client-side (end state reached).
    status: Literal["ok", "error", "missing"]
    document_id: int | None = None
    error: str | None = None


class RenameAck(_PluginBase):
    """Batch-level /rename response."""

    vault_id: str
    renamed: int
    missing: int
    items: list[RenameAckItem] = Field(default_factory=list)


class DeleteAckItem(_PluginBase):
    """Outcome for one delete-by-path event."""

    path: str
    status: Literal["ok", "error", "missing"]
    error: str | None = None


class DeleteAck(_PluginBase):
    """Batch-level DELETE /notes response."""

    vault_id: str
    deleted: int
    missing: int
    items: list[DeleteAckItem] = Field(default_factory=list)


class StatsResponse(_PluginBase):
    """Backs the Obsidian connector tile in the web UI."""

    vault_id: str
    files_synced: int
    last_sync_at: datetime | None = None
|
||||
616
surfsense_backend/app/services/obsidian_plugin_indexer.py
Normal file
616
surfsense_backend/app/services/obsidian_plugin_indexer.py
Normal file
|
|
@ -0,0 +1,616 @@
|
|||
"""
|
||||
Obsidian plugin indexer service.
|
||||
|
||||
Bridges the SurfSense Obsidian plugin's HTTP payloads
|
||||
(see ``app/schemas/obsidian_plugin.py``) into the shared
|
||||
``IndexingPipelineService``.
|
||||
|
||||
Responsibilities:
|
||||
|
||||
- ``upsert_note`` — push one note through the indexing pipeline; respects
|
||||
unchanged content (skip) and version-snapshots existing rows before
|
||||
rewrite.
|
||||
- ``rename_note`` — rewrite path-derived fields (path metadata,
|
||||
``unique_identifier_hash``, ``source_url``) without re-indexing content.
|
||||
- ``delete_note`` — soft delete with a tombstone in ``document_metadata``
|
||||
so reconciliation can distinguish "user explicitly killed this in the UI"
|
||||
from "plugin hasn't synced yet".
|
||||
- ``get_manifest`` — return ``{path: {hash, mtime, size}}`` for every
|
||||
non-deleted note belonging to a vault, used by the plugin's reconcile
|
||||
pass on ``onload``.
|
||||
|
||||
Design notes
|
||||
------------
|
||||
|
||||
The plugin's content hash and the backend's ``content_hash`` are computed
|
||||
differently (plugin uses raw SHA-256 of the markdown body; backend salts
|
||||
with ``search_space_id``). We persist the plugin's hash in
|
||||
``document_metadata['plugin_content_hash']`` so the manifest endpoint can
|
||||
return what the plugin sent — that's the only number the plugin can
|
||||
compare without re-downloading content.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import contextlib
|
||||
import logging
|
||||
import os
|
||||
import tempfile
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
from urllib.parse import quote
|
||||
|
||||
from sqlalchemy import and_, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.db import (
|
||||
Document,
|
||||
DocumentStatus,
|
||||
DocumentType,
|
||||
SearchSourceConnector,
|
||||
)
|
||||
from app.indexing_pipeline.connector_document import ConnectorDocument
|
||||
from app.indexing_pipeline.indexing_pipeline_service import IndexingPipelineService
|
||||
from app.schemas.obsidian_plugin import (
|
||||
ManifestEntry,
|
||||
ManifestResponse,
|
||||
NotePayload,
|
||||
)
|
||||
from app.utils.document_converters import generate_unique_identifier_hash
|
||||
from app.utils.document_versioning import create_version_snapshot
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _vault_path_unique_id(vault_id: str, path: str) -> str:
|
||||
"""Stable identifier for a note. Vault-scoped so the same path under two
|
||||
different vaults doesn't collide."""
|
||||
return f"{vault_id}:{path}"
|
||||
|
||||
|
||||
def _build_source_url(vault_name: str, path: str) -> str:
|
||||
"""Build the ``obsidian://`` deep link for the web UI's "Open in Obsidian"
|
||||
button. Both segments are URL-encoded because vault names and paths can
|
||||
contain spaces, ``#``, ``?``, etc.
|
||||
"""
|
||||
return (
|
||||
"obsidian://open"
|
||||
f"?vault={quote(vault_name, safe='')}"
|
||||
f"&file={quote(path, safe='')}"
|
||||
)
|
||||
|
||||
|
||||
def _build_metadata(
    payload: NotePayload,
    *,
    vault_name: str,
    connector_id: int,
    extra: dict[str, Any] | None = None,
) -> dict[str, Any]:
    """Flatten the rich plugin payload into the JSONB ``document_metadata``
    column. These keys are what the chat UI / search UI surface to users.
    """
    meta: dict[str, Any] = {
        "source": "plugin",
        "vault_id": payload.vault_id,
        "vault_name": vault_name,
        "file_path": payload.path,
        "file_name": payload.name,
        "extension": payload.extension,
        "frontmatter": payload.frontmatter,
        "tags": payload.tags,
        "headings": [heading.model_dump() for heading in payload.headings],
        "outgoing_links": payload.resolved_links,
        "unresolved_links": payload.unresolved_links,
        "embeds": payload.embeds,
        "aliases": payload.aliases,
        "plugin_content_hash": payload.content_hash,
        "plugin_file_size": payload.size,
        "mtime": payload.mtime.isoformat(),
        "ctime": payload.ctime.isoformat(),
        "connector_id": connector_id,
        "url": _build_source_url(vault_name, payload.path),
    }
    # Attachment-only keys; omitted entirely for markdown notes.
    if payload.is_binary:
        meta.update({"is_binary": True, "mime_type": payload.mime_type})
    if extra:
        meta.update(extra)
    return meta
|
||||
|
||||
|
||||
def _build_document_string(
|
||||
payload: NotePayload, vault_name: str, *, content_override: str | None = None
|
||||
) -> str:
|
||||
"""Compose the indexable string the pipeline embeds and chunks.
|
||||
|
||||
Mirrors the legacy obsidian indexer's METADATA + CONTENT framing so
|
||||
existing search relevance heuristics keep working unchanged.
|
||||
"""
|
||||
tags_line = ", ".join(payload.tags) if payload.tags else "None"
|
||||
links_line = ", ".join(payload.resolved_links) if payload.resolved_links else "None"
|
||||
body = payload.content if content_override is None else content_override
|
||||
return (
|
||||
"<METADATA>\n"
|
||||
f"Title: {payload.name}\n"
|
||||
f"Vault: {vault_name}\n"
|
||||
f"Path: {payload.path}\n"
|
||||
f"Tags: {tags_line}\n"
|
||||
f"Links to: {links_line}\n"
|
||||
"</METADATA>\n\n"
|
||||
"<CONTENT>\n"
|
||||
f"{body}\n"
|
||||
"</CONTENT>\n"
|
||||
)
|
||||
|
||||
|
||||
async def _extract_binary_attachment_markdown(
    payload: NotePayload, *, vision_llm
) -> tuple[str, dict[str, Any]]:
    # Decode the attachment bytes, spill them to a temp file, and run the
    # shared ETL pipeline over it. Returns (markdown, metadata); on any
    # failure the markdown is "" and metadata records why — the caller
    # decides whether an empty result is fatal.
    try:
        raw_bytes = base64.b64decode(payload.binary_base64, validate=True)
    except Exception:
        logger.warning("obsidian attachment payload had invalid base64: %s", payload.path)
        return "", {"attachment_extraction_status": "invalid_binary_payload"}

    suffix = f".{payload.extension.lstrip('.')}"
    temp_path: str | None = None
    # Use the real filename (last path segment) so ETL heuristics that key
    # off names/extensions see what the user sees; fall back to the stem.
    filename = payload.path.rsplit("/", 1)[-1] or payload.name
    try:
        with tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as tmp:
            tmp.write(raw_bytes)
            temp_path = tmp.name

        result = await _run_etl_extract(
            file_path=temp_path,
            filename=filename,
            vision_llm=vision_llm,
        )
        metadata: dict[str, Any] = {
            "attachment_extraction_status": "ok",
            "attachment_etl_service": result.etl_service,
            "attachment_content_type": result.content_type,
        }
        return result.markdown_content, metadata
    except Exception as exc:
        # Error text is truncated to keep document_metadata bounded.
        logger.warning(
            "obsidian attachment ETL failed for %s: %s", payload.path, exc, exc_info=True
        )
        return "", {
            "attachment_extraction_status": "etl_failed",
            "attachment_extraction_error": str(exc)[:300],
        }
    finally:
        # Best-effort temp-file cleanup; never masks the primary result
        # or exception.
        if temp_path and os.path.exists(temp_path):
            with contextlib.suppress(Exception):
                os.unlink(temp_path)
|
||||
|
||||
|
||||
async def _run_etl_extract(*, file_path: str, filename: str, vision_llm):
    """Run the shared ETL extraction pipeline over one on-disk file.

    ETL dependencies are imported lazily to avoid module-import cycles.
    """
    from app.etl_pipeline.etl_document import EtlRequest
    from app.etl_pipeline.etl_pipeline_service import EtlPipelineService

    service = EtlPipelineService(vision_llm=vision_llm)
    request = EtlRequest(file_path=file_path, filename=filename)
    return await service.extract(request)
|
||||
|
||||
|
||||
def _is_image_attachment(payload: NotePayload) -> bool:
|
||||
ext = payload.extension.lower().lstrip(".")
|
||||
return ext in {"png", "jpg", "jpeg", "gif", "webp", "svg"}
|
||||
|
||||
|
||||
async def _resolve_attachment_vision_llm(
    session: AsyncSession,
    *,
    connector: SearchSourceConnector,
    search_space_id: int,
    payload: NotePayload,
):
    """Match connector indexers: a vision LLM is fetched only for image
    attachments on connectors with vision indexing enabled; otherwise None.
    """
    wants_vision = (
        payload.is_binary
        and _is_image_attachment(payload)
        and getattr(connector, "enable_vision_llm", False)
    )
    if not wants_vision:
        return None

    from app.services.llm_service import get_vision_llm

    return await get_vision_llm(session, search_space_id)
|
||||
|
||||
|
||||
async def _resolve_summary_llm(
    session: AsyncSession, *, user_id: str, search_space_id: int, should_summarize: bool
):
    """Fetch the user's long-context LLM, but only when summarization is on."""
    if should_summarize:
        from app.services.llm_service import get_user_long_context_llm

        return await get_user_long_context_llm(session, user_id, search_space_id)
    return None
|
||||
|
||||
|
||||
def _require_extracted_attachment_content(
|
||||
*, content: str, etl_meta: dict[str, Any], path: str
|
||||
) -> str:
|
||||
extracted = content.strip()
|
||||
if extracted:
|
||||
return extracted
|
||||
|
||||
status = etl_meta.get("attachment_extraction_status", "unknown")
|
||||
reason = etl_meta.get("attachment_extraction_error")
|
||||
if reason:
|
||||
raise RuntimeError(
|
||||
f"Attachment extraction failed for {path} ({status}): {reason}"
|
||||
)
|
||||
raise RuntimeError(f"Attachment extraction failed for {path} ({status})")
|
||||
|
||||
|
||||
async def _find_existing_document(
    session: AsyncSession,
    *,
    search_space_id: int,
    vault_id: str,
    path: str,
) -> Document | None:
    """Look up the Document row for one vault note, or None when unindexed."""
    uid_hash = generate_unique_identifier_hash(
        DocumentType.OBSIDIAN_CONNECTOR,
        _vault_path_unique_id(vault_id, path),
        search_space_id,
    )
    stmt = select(Document).where(Document.unique_identifier_hash == uid_hash)
    result = await session.execute(stmt)
    return result.scalars().first()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public API
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
async def upsert_note(
    session: AsyncSession,
    *,
    connector: SearchSourceConnector,
    payload: NotePayload,
    user_id: str,
) -> Document:
    """Index or refresh a single note pushed by the plugin.

    Returns the resulting ``Document`` (whether newly created, updated, or
    a skip-because-unchanged hit).
    """
    vault_name: str = (connector.config or {}).get("vault_name") or "Vault"
    search_space_id = connector.search_space_id

    existing = await _find_existing_document(
        session,
        search_space_id=search_space_id,
        vault_id=payload.vault_id,
        path=payload.path,
    )

    plugin_hash = payload.content_hash
    if existing is not None:
        existing_meta = existing.document_metadata or {}
        was_tombstoned = bool(existing_meta.get("deleted_at"))

        # Short-circuit: content unchanged, not tombstoned, and the prior
        # index run completed (READY). Anything else falls through to a
        # full re-index.
        if (
            not was_tombstoned
            and existing_meta.get("plugin_content_hash") == plugin_hash
            and DocumentStatus.is_state(existing.status, DocumentStatus.READY)
        ):
            return existing

        # Snapshot the current row before rewriting so version history
        # survives; snapshot failure is best-effort and non-fatal.
        try:
            await create_version_snapshot(session, existing)
        except Exception:
            logger.debug(
                "version snapshot failed for obsidian doc %s",
                existing.id,
                exc_info=True,
            )

    content_for_index = payload.content
    extra_meta: dict[str, Any] = {}
    vision_llm = None
    if payload.is_binary:
        # Attachments: run ETL extraction (optionally vision-assisted for
        # images) instead of indexing the raw bytes.
        vision_llm = await _resolve_attachment_vision_llm(
            session,
            connector=connector,
            search_space_id=search_space_id,
            payload=payload,
        )
        content_for_index, etl_meta = await _extract_binary_attachment_markdown(
            payload, vision_llm=vision_llm
        )
        extra_meta.update(etl_meta)
        # Strict KB behavior: do not index metadata-only attachments.
        content_for_index = _require_extracted_attachment_content(
            content=content_for_index,
            etl_meta=etl_meta,
            path=payload.path,
        )

    llm = await _resolve_summary_llm(
        session,
        user_id=str(user_id),
        search_space_id=search_space_id,
        should_summarize=connector.enable_summary,
    )

    document_string = _build_document_string(
        payload, vault_name, content_override=content_for_index
    )
    metadata = _build_metadata(
        payload,
        vault_name=vault_name,
        connector_id=connector.id,
        extra=extra_meta,
    )

    connector_doc = ConnectorDocument(
        title=payload.name,
        source_markdown=document_string,
        unique_id=_vault_path_unique_id(payload.vault_id, payload.path),
        document_type=DocumentType.OBSIDIAN_CONNECTOR,
        search_space_id=search_space_id,
        connector_id=connector.id,
        created_by_id=str(user_id),
        should_summarize=connector.enable_summary,
        fallback_summary=f"Obsidian Note: {payload.name}\n\n{content_for_index}",
        metadata=metadata,
    )

    pipeline = IndexingPipelineService(session)
    prepared = await pipeline.prepare_for_indexing([connector_doc])
    if not prepared:
        # Pipeline rejected the doc; fall back to the existing row when
        # there is one instead of failing the whole batch.
        if existing is not None:
            return existing
        raise RuntimeError(f"Indexing pipeline rejected obsidian note {payload.path}")

    document = prepared[0]

    return await pipeline.index(document, connector_doc, llm)
|
||||
|
||||
|
||||
async def rename_note(
|
||||
session: AsyncSession,
|
||||
*,
|
||||
connector: SearchSourceConnector,
|
||||
old_path: str,
|
||||
new_path: str,
|
||||
vault_id: str,
|
||||
) -> Document | None:
|
||||
"""Rewrite path-derived columns without re-indexing content.
|
||||
|
||||
Returns the updated document, or ``None`` if no row matched the
|
||||
``old_path`` (this happens when the plugin is renaming a file that was
|
||||
never synced — safe to ignore, the next ``sync`` will create it under
|
||||
the new path).
|
||||
"""
|
||||
vault_name: str = (connector.config or {}).get("vault_name") or "Vault"
|
||||
search_space_id = connector.search_space_id
|
||||
|
||||
existing = await _find_existing_document(
|
||||
session,
|
||||
search_space_id=search_space_id,
|
||||
vault_id=vault_id,
|
||||
path=old_path,
|
||||
)
|
||||
if existing is None:
|
||||
return None
|
||||
|
||||
new_unique_id = _vault_path_unique_id(vault_id, new_path)
|
||||
new_uid_hash = generate_unique_identifier_hash(
|
||||
DocumentType.OBSIDIAN_CONNECTOR,
|
||||
new_unique_id,
|
||||
search_space_id,
|
||||
)
|
||||
|
||||
collision = await session.execute(
|
||||
select(Document).where(
|
||||
and_(
|
||||
Document.unique_identifier_hash == new_uid_hash,
|
||||
Document.id != existing.id,
|
||||
)
|
||||
)
|
||||
)
|
||||
collision_row = collision.scalars().first()
|
||||
if collision_row is not None:
|
||||
logger.warning(
|
||||
"obsidian rename target already exists "
|
||||
"(vault=%s old=%s new=%s); skipping rename so the next /sync "
|
||||
"can resolve the conflict via content_hash",
|
||||
vault_id,
|
||||
old_path,
|
||||
new_path,
|
||||
)
|
||||
return existing
|
||||
|
||||
new_filename = new_path.rsplit("/", 1)[-1]
|
||||
new_stem = new_filename.rsplit(".", 1)[0] if "." in new_filename else new_filename
|
||||
|
||||
existing.unique_identifier_hash = new_uid_hash
|
||||
existing.title = new_stem
|
||||
|
||||
meta = dict(existing.document_metadata or {})
|
||||
meta["file_path"] = new_path
|
||||
meta["file_name"] = new_stem
|
||||
meta["url"] = _build_source_url(vault_name, new_path)
|
||||
existing.document_metadata = meta
|
||||
existing.updated_at = datetime.now(UTC)
|
||||
|
||||
await session.commit()
|
||||
return existing
|
||||
|
||||
|
||||
async def delete_note(
|
||||
session: AsyncSession,
|
||||
*,
|
||||
connector: SearchSourceConnector,
|
||||
vault_id: str,
|
||||
path: str,
|
||||
) -> bool:
|
||||
"""Soft-delete via tombstone in ``document_metadata``.
|
||||
|
||||
The row is *not* removed and chunks are *not* dropped, so existing
|
||||
citations in chat threads remain resolvable. The manifest endpoint
|
||||
filters tombstoned rows out, so the plugin's reconcile pass will not
|
||||
see this path and won't try to "resurrect" a note the user deleted in
|
||||
the SurfSense UI.
|
||||
|
||||
Returns True if a row was tombstoned, False if no matching row existed.
|
||||
"""
|
||||
existing = await _find_existing_document(
|
||||
session,
|
||||
search_space_id=connector.search_space_id,
|
||||
vault_id=vault_id,
|
||||
path=path,
|
||||
)
|
||||
if existing is None:
|
||||
return False
|
||||
|
||||
meta = dict(existing.document_metadata or {})
|
||||
if meta.get("deleted_at"):
|
||||
return True
|
||||
|
||||
meta["deleted_at"] = datetime.now(UTC).isoformat()
|
||||
meta["deleted_by_source"] = "plugin"
|
||||
existing.document_metadata = meta
|
||||
existing.updated_at = datetime.now(UTC)
|
||||
|
||||
await session.commit()
|
||||
return True
|
||||
|
||||
|
||||
async def merge_obsidian_connectors(
|
||||
session: AsyncSession,
|
||||
*,
|
||||
source: SearchSourceConnector,
|
||||
target: SearchSourceConnector,
|
||||
) -> None:
|
||||
"""Fold ``source``'s documents into ``target`` and delete ``source``.
|
||||
|
||||
Triggered when the fingerprint dedup detects two plugin connectors
|
||||
pointing at the same vault (e.g. a mobile install raced with iCloud
|
||||
hydration and got a partial fingerprint, then caught up). Path
|
||||
collisions resolve in favour of ``target`` (the surviving row);
|
||||
``source``'s duplicate documents are hard-deleted along with their
|
||||
chunks via the ``cascade='all, delete-orphan'`` on ``Document.chunks``.
|
||||
"""
|
||||
if source.id == target.id:
|
||||
return
|
||||
|
||||
target_vault_id = (target.config or {}).get("vault_id")
|
||||
target_search_space_id = target.search_space_id
|
||||
if not target_vault_id:
|
||||
raise RuntimeError("merge target is missing vault_id")
|
||||
|
||||
target_paths_result = await session.execute(
|
||||
select(Document).where(
|
||||
and_(
|
||||
Document.connector_id == target.id,
|
||||
Document.document_type == DocumentType.OBSIDIAN_CONNECTOR,
|
||||
)
|
||||
)
|
||||
)
|
||||
target_paths: set[str] = set()
|
||||
for doc in target_paths_result.scalars().all():
|
||||
meta = doc.document_metadata or {}
|
||||
path = meta.get("file_path")
|
||||
if path:
|
||||
target_paths.add(path)
|
||||
|
||||
source_docs_result = await session.execute(
|
||||
select(Document).where(
|
||||
and_(
|
||||
Document.connector_id == source.id,
|
||||
Document.document_type == DocumentType.OBSIDIAN_CONNECTOR,
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
for doc in source_docs_result.scalars().all():
|
||||
meta = dict(doc.document_metadata or {})
|
||||
path = meta.get("file_path")
|
||||
if not path or path in target_paths:
|
||||
await session.delete(doc)
|
||||
continue
|
||||
|
||||
new_unique_id = _vault_path_unique_id(target_vault_id, path)
|
||||
new_uid_hash = generate_unique_identifier_hash(
|
||||
DocumentType.OBSIDIAN_CONNECTOR,
|
||||
new_unique_id,
|
||||
target_search_space_id,
|
||||
)
|
||||
meta["vault_id"] = target_vault_id
|
||||
meta["connector_id"] = target.id
|
||||
doc.document_metadata = meta
|
||||
doc.connector_id = target.id
|
||||
doc.search_space_id = target_search_space_id
|
||||
doc.unique_identifier_hash = new_uid_hash
|
||||
target_paths.add(path)
|
||||
|
||||
await session.flush()
|
||||
await session.delete(source)
|
||||
|
||||
|
||||
async def get_manifest(
|
||||
session: AsyncSession,
|
||||
*,
|
||||
connector: SearchSourceConnector,
|
||||
vault_id: str,
|
||||
) -> ManifestResponse:
|
||||
"""Return ``{path: {hash, mtime, size}}`` for every non-deleted note in
|
||||
this vault.
|
||||
|
||||
The plugin compares this against its local vault on every ``onload`` to
|
||||
catch up edits made while offline. Rows missing ``plugin_content_hash``
|
||||
(e.g. tombstoned, or somehow indexed without going through this
|
||||
service) are excluded so the plugin doesn't get confused by partial
|
||||
data.
|
||||
"""
|
||||
result = await session.execute(
|
||||
select(Document).where(
|
||||
and_(
|
||||
Document.search_space_id == connector.search_space_id,
|
||||
Document.connector_id == connector.id,
|
||||
Document.document_type == DocumentType.OBSIDIAN_CONNECTOR,
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
items: dict[str, ManifestEntry] = {}
|
||||
for doc in result.scalars().all():
|
||||
meta = doc.document_metadata or {}
|
||||
if meta.get("deleted_at"):
|
||||
continue
|
||||
if meta.get("vault_id") != vault_id:
|
||||
continue
|
||||
path = meta.get("file_path")
|
||||
plugin_hash = meta.get("plugin_content_hash")
|
||||
mtime_raw = meta.get("mtime")
|
||||
if not path or not plugin_hash or not mtime_raw:
|
||||
continue
|
||||
try:
|
||||
mtime = datetime.fromisoformat(mtime_raw)
|
||||
except ValueError:
|
||||
continue
|
||||
size_raw = meta.get("plugin_file_size")
|
||||
size = int(size_raw) if isinstance(size_raw, int) else None
|
||||
items[path] = ManifestEntry(hash=plugin_hash, mtime=mtime, size=size)
|
||||
|
||||
return ManifestResponse(vault_id=vault_id, items=items)
|
||||
|
|
@ -536,49 +536,6 @@ async def _index_bookstack_pages(
|
|||
)
|
||||
|
||||
|
||||
@celery_app.task(name="index_obsidian_vault", bind=True)
|
||||
def index_obsidian_vault_task(
|
||||
self,
|
||||
connector_id: int,
|
||||
search_space_id: int,
|
||||
user_id: str,
|
||||
start_date: str,
|
||||
end_date: str,
|
||||
):
|
||||
"""Celery task to index Obsidian vault notes."""
|
||||
import asyncio
|
||||
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
try:
|
||||
loop.run_until_complete(
|
||||
_index_obsidian_vault(
|
||||
connector_id, search_space_id, user_id, start_date, end_date
|
||||
)
|
||||
)
|
||||
finally:
|
||||
loop.close()
|
||||
|
||||
|
||||
async def _index_obsidian_vault(
|
||||
connector_id: int,
|
||||
search_space_id: int,
|
||||
user_id: str,
|
||||
start_date: str,
|
||||
end_date: str,
|
||||
):
|
||||
"""Index Obsidian vault with new session."""
|
||||
from app.routes.search_source_connectors_routes import (
|
||||
run_obsidian_indexing,
|
||||
)
|
||||
|
||||
async with get_celery_session_maker()() as session:
|
||||
await run_obsidian_indexing(
|
||||
session, connector_id, search_space_id, user_id, start_date, end_date
|
||||
)
|
||||
|
||||
|
||||
@celery_app.task(name="index_composio_connector", bind=True)
|
||||
def index_composio_connector_task(
|
||||
self,
|
||||
|
|
|
|||
59
surfsense_backend/app/tasks/celery_tasks/obsidian_tasks.py
Normal file
59
surfsense_backend/app/tasks/celery_tasks/obsidian_tasks.py
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
"""Celery tasks for Obsidian plugin background processing."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from app.celery_app import celery_app
|
||||
from app.db import SearchSourceConnector
|
||||
from app.schemas.obsidian_plugin import NotePayload
|
||||
from app.services.obsidian_plugin_indexer import upsert_note
|
||||
from app.tasks.celery_tasks import get_celery_session_maker
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@celery_app.task(name="index_obsidian_attachment", bind=True)
|
||||
def index_obsidian_attachment_task(
|
||||
self,
|
||||
connector_id: int,
|
||||
payload_data: dict,
|
||||
user_id: str,
|
||||
) -> None:
|
||||
"""Process one Obsidian non-markdown attachment asynchronously."""
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
try:
|
||||
loop.run_until_complete(
|
||||
_index_obsidian_attachment(
|
||||
connector_id=connector_id,
|
||||
payload_data=payload_data,
|
||||
user_id=user_id,
|
||||
)
|
||||
)
|
||||
finally:
|
||||
loop.close()
|
||||
|
||||
|
||||
async def _index_obsidian_attachment(
|
||||
*,
|
||||
connector_id: int,
|
||||
payload_data: dict,
|
||||
user_id: str,
|
||||
) -> None:
|
||||
async with get_celery_session_maker()() as session:
|
||||
connector = await session.get(SearchSourceConnector, connector_id)
|
||||
if connector is None:
|
||||
logger.warning(
|
||||
"obsidian attachment task skipped: connector %s not found", connector_id
|
||||
)
|
||||
return
|
||||
|
||||
payload = NotePayload.model_validate(payload_data)
|
||||
await upsert_note(
|
||||
session,
|
||||
connector=connector,
|
||||
payload=payload,
|
||||
user_id=user_id,
|
||||
)
|
||||
|
|
@ -14,18 +14,16 @@ from .google_calendar_indexer import index_google_calendar_events
|
|||
from .google_drive_indexer import index_google_drive_files
|
||||
from .google_gmail_indexer import index_google_gmail_messages
|
||||
from .notion_indexer import index_notion_pages
|
||||
from .obsidian_indexer import index_obsidian_vault
|
||||
from .webcrawler_indexer import index_crawled_urls
|
||||
|
||||
__all__ = [
|
||||
"index_bookstack_pages",
|
||||
"index_confluence_pages",
|
||||
"index_crawled_urls",
|
||||
"index_elasticsearch_documents",
|
||||
"index_github_repos",
|
||||
"index_google_calendar_events",
|
||||
"index_google_drive_files",
|
||||
"index_google_gmail_messages",
|
||||
"index_notion_pages",
|
||||
"index_obsidian_vault",
|
||||
"index_crawled_urls",
|
||||
]
|
||||
|
|
|
|||
|
|
@ -1,676 +0,0 @@
|
|||
"""
|
||||
Obsidian connector indexer.
|
||||
|
||||
Indexes markdown notes from a local Obsidian vault.
|
||||
This connector is only available in self-hosted mode.
|
||||
|
||||
Implements 2-phase document status updates for real-time UI feedback:
|
||||
- Phase 1: Create all documents with 'pending' status (visible in UI immediately)
|
||||
- Phase 2: Process each document: pending → processing → ready/failed
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
from collections.abc import Awaitable, Callable
|
||||
from datetime import UTC, datetime
|
||||
from pathlib import Path
|
||||
|
||||
import yaml
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.config import config
|
||||
from app.db import Document, DocumentStatus, DocumentType, SearchSourceConnectorType
|
||||
from app.services.llm_service import get_user_long_context_llm
|
||||
from app.services.task_logging_service import TaskLoggingService
|
||||
from app.utils.document_converters import (
|
||||
create_document_chunks,
|
||||
embed_text,
|
||||
generate_content_hash,
|
||||
generate_document_summary,
|
||||
generate_unique_identifier_hash,
|
||||
)
|
||||
|
||||
from .base import (
|
||||
build_document_metadata_string,
|
||||
check_document_by_unique_identifier,
|
||||
check_duplicate_document_by_hash,
|
||||
get_connector_by_id,
|
||||
get_current_timestamp,
|
||||
logger,
|
||||
safe_set_chunks,
|
||||
update_connector_last_indexed,
|
||||
)
|
||||
|
||||
# Type hint for heartbeat callback
|
||||
HeartbeatCallbackType = Callable[[int], Awaitable[None]]
|
||||
|
||||
# Heartbeat interval in seconds
|
||||
HEARTBEAT_INTERVAL_SECONDS = 30
|
||||
|
||||
|
||||
def parse_frontmatter(content: str) -> tuple[dict | None, str]:
|
||||
"""
|
||||
Parse YAML frontmatter from markdown content.
|
||||
|
||||
Args:
|
||||
content: The full markdown content
|
||||
|
||||
Returns:
|
||||
Tuple of (frontmatter dict or None, content without frontmatter)
|
||||
"""
|
||||
if not content.startswith("---"):
|
||||
return None, content
|
||||
|
||||
# Find the closing ---
|
||||
end_match = re.search(r"\n---\n", content[3:])
|
||||
if not end_match:
|
||||
return None, content
|
||||
|
||||
frontmatter_str = content[3 : end_match.start() + 3]
|
||||
remaining_content = content[end_match.end() + 3 :]
|
||||
|
||||
try:
|
||||
frontmatter = yaml.safe_load(frontmatter_str)
|
||||
return frontmatter, remaining_content.strip()
|
||||
except yaml.YAMLError:
|
||||
return None, content
|
||||
|
||||
|
||||
def extract_wiki_links(content: str) -> list[str]:
|
||||
"""
|
||||
Extract [[wiki-style links]] from content.
|
||||
|
||||
Args:
|
||||
content: Markdown content
|
||||
|
||||
Returns:
|
||||
List of linked note names
|
||||
"""
|
||||
# Match [[link]] or [[link|alias]]
|
||||
pattern = r"\[\[([^\]|]+)(?:\|[^\]]+)?\]\]"
|
||||
matches = re.findall(pattern, content)
|
||||
return list(set(matches))
|
||||
|
||||
|
||||
def extract_tags(content: str) -> list[str]:
|
||||
"""
|
||||
Extract #tags from content (both inline and frontmatter).
|
||||
|
||||
Args:
|
||||
content: Markdown content
|
||||
|
||||
Returns:
|
||||
List of tags (without # prefix)
|
||||
"""
|
||||
# Match #tag but not ## headers
|
||||
pattern = r"(?<!\S)#([a-zA-Z][a-zA-Z0-9_/-]*)"
|
||||
matches = re.findall(pattern, content)
|
||||
return list(set(matches))
|
||||
|
||||
|
||||
def scan_vault(
|
||||
vault_path: str,
|
||||
exclude_folders: list[str] | None = None,
|
||||
) -> list[dict]:
|
||||
"""
|
||||
Scan an Obsidian vault for markdown files.
|
||||
|
||||
Args:
|
||||
vault_path: Path to the Obsidian vault
|
||||
exclude_folders: List of folder names to exclude
|
||||
|
||||
Returns:
|
||||
List of file info dicts with path, name, modified time
|
||||
"""
|
||||
if exclude_folders is None:
|
||||
exclude_folders = [".trash", ".obsidian", "templates"]
|
||||
|
||||
vault = Path(vault_path)
|
||||
if not vault.exists():
|
||||
raise ValueError(f"Vault path does not exist: {vault_path}")
|
||||
|
||||
files = []
|
||||
for md_file in vault.rglob("*.md"):
|
||||
# Check if file is in an excluded folder
|
||||
relative_path = md_file.relative_to(vault)
|
||||
parts = relative_path.parts
|
||||
|
||||
if any(excluded in parts for excluded in exclude_folders):
|
||||
continue
|
||||
|
||||
try:
|
||||
stat = md_file.stat()
|
||||
files.append(
|
||||
{
|
||||
"path": str(md_file),
|
||||
"relative_path": str(relative_path),
|
||||
"name": md_file.stem,
|
||||
"modified_at": datetime.fromtimestamp(stat.st_mtime, tz=UTC),
|
||||
"created_at": datetime.fromtimestamp(stat.st_ctime, tz=UTC),
|
||||
"size": stat.st_size,
|
||||
}
|
||||
)
|
||||
except OSError as e:
|
||||
logger.warning(f"Could not stat file {md_file}: {e}")
|
||||
|
||||
return files
|
||||
|
||||
|
||||
async def index_obsidian_vault(
|
||||
session: AsyncSession,
|
||||
connector_id: int,
|
||||
search_space_id: int,
|
||||
user_id: str,
|
||||
start_date: str | None = None,
|
||||
end_date: str | None = None,
|
||||
update_last_indexed: bool = True,
|
||||
on_heartbeat_callback: HeartbeatCallbackType | None = None,
|
||||
) -> tuple[int, str | None]:
|
||||
"""
|
||||
Index notes from a local Obsidian vault.
|
||||
|
||||
This indexer is only available in self-hosted mode as it requires
|
||||
direct file system access to the user's Obsidian vault.
|
||||
|
||||
Args:
|
||||
session: Database session
|
||||
connector_id: ID of the Obsidian connector
|
||||
search_space_id: ID of the search space to store documents in
|
||||
user_id: ID of the user
|
||||
start_date: Start date for filtering (YYYY-MM-DD format) - optional
|
||||
end_date: End date for filtering (YYYY-MM-DD format) - optional
|
||||
update_last_indexed: Whether to update the last_indexed_at timestamp
|
||||
on_heartbeat_callback: Optional callback to update notification during long-running indexing.
|
||||
|
||||
Returns:
|
||||
Tuple containing (number of documents indexed, error message or None)
|
||||
"""
|
||||
task_logger = TaskLoggingService(session, search_space_id)
|
||||
|
||||
# Check if self-hosted mode
|
||||
if not config.is_self_hosted():
|
||||
return 0, "Obsidian connector is only available in self-hosted mode"
|
||||
|
||||
# Log task start
|
||||
log_entry = await task_logger.log_task_start(
|
||||
task_name="obsidian_vault_indexing",
|
||||
source="connector_indexing_task",
|
||||
message=f"Starting Obsidian vault indexing for connector {connector_id}",
|
||||
metadata={
|
||||
"connector_id": connector_id,
|
||||
"user_id": str(user_id),
|
||||
"start_date": start_date,
|
||||
"end_date": end_date,
|
||||
},
|
||||
)
|
||||
|
||||
try:
|
||||
# Get the connector
|
||||
await task_logger.log_task_progress(
|
||||
log_entry,
|
||||
f"Retrieving Obsidian connector {connector_id} from database",
|
||||
{"stage": "connector_retrieval"},
|
||||
)
|
||||
|
||||
connector = await get_connector_by_id(
|
||||
session, connector_id, SearchSourceConnectorType.OBSIDIAN_CONNECTOR
|
||||
)
|
||||
|
||||
if not connector:
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
f"Connector with ID {connector_id} not found or is not an Obsidian connector",
|
||||
"Connector not found",
|
||||
{"error_type": "ConnectorNotFound"},
|
||||
)
|
||||
return (
|
||||
0,
|
||||
f"Connector with ID {connector_id} not found or is not an Obsidian connector",
|
||||
)
|
||||
|
||||
# Get vault path from connector config
|
||||
vault_path = connector.config.get("vault_path")
|
||||
if not vault_path:
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
"Vault path not configured for this connector",
|
||||
"Missing vault path",
|
||||
{"error_type": "MissingVaultPath"},
|
||||
)
|
||||
return 0, "Vault path not configured for this connector"
|
||||
|
||||
# Validate vault path exists
|
||||
if not os.path.exists(vault_path):
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
f"Vault path does not exist: {vault_path}",
|
||||
"Vault path not found",
|
||||
{"error_type": "VaultNotFound", "vault_path": vault_path},
|
||||
)
|
||||
return 0, f"Vault path does not exist: {vault_path}"
|
||||
|
||||
# Get configuration options
|
||||
exclude_folders = connector.config.get(
|
||||
"exclude_folders", [".trash", ".obsidian", "templates"]
|
||||
)
|
||||
vault_name = connector.config.get("vault_name") or os.path.basename(vault_path)
|
||||
|
||||
await task_logger.log_task_progress(
|
||||
log_entry,
|
||||
f"Scanning Obsidian vault: {vault_name}",
|
||||
{"stage": "vault_scan", "vault_path": vault_path},
|
||||
)
|
||||
|
||||
# Scan vault for markdown files
|
||||
try:
|
||||
files = scan_vault(vault_path, exclude_folders)
|
||||
except Exception as e:
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
f"Failed to scan vault: {e}",
|
||||
"Vault scan error",
|
||||
{"error_type": "VaultScanError"},
|
||||
)
|
||||
return 0, f"Failed to scan vault: {e}"
|
||||
|
||||
logger.info(f"Found {len(files)} markdown files in vault")
|
||||
|
||||
await task_logger.log_task_progress(
|
||||
log_entry,
|
||||
f"Found {len(files)} markdown files to process",
|
||||
{"stage": "files_discovered", "file_count": len(files)},
|
||||
)
|
||||
|
||||
# Filter by date if provided (handle "undefined" string from frontend)
|
||||
# Also handle inverted dates (start > end) by skipping filtering
|
||||
start_dt = None
|
||||
end_dt = None
|
||||
|
||||
if start_date and start_date != "undefined":
|
||||
start_dt = datetime.strptime(start_date, "%Y-%m-%d").replace(tzinfo=UTC)
|
||||
|
||||
if end_date and end_date != "undefined":
|
||||
# Make end_date inclusive (end of day)
|
||||
end_dt = datetime.strptime(end_date, "%Y-%m-%d").replace(tzinfo=UTC)
|
||||
end_dt = end_dt.replace(hour=23, minute=59, second=59)
|
||||
|
||||
# Only apply date filtering if dates are valid and in correct order
|
||||
if start_dt and end_dt and start_dt > end_dt:
|
||||
logger.warning(
|
||||
f"start_date ({start_date}) is after end_date ({end_date}), skipping date filter"
|
||||
)
|
||||
else:
|
||||
if start_dt:
|
||||
files = [f for f in files if f["modified_at"] >= start_dt]
|
||||
logger.info(
|
||||
f"After start_date filter ({start_date}): {len(files)} files"
|
||||
)
|
||||
if end_dt:
|
||||
files = [f for f in files if f["modified_at"] <= end_dt]
|
||||
logger.info(f"After end_date filter ({end_date}): {len(files)} files")
|
||||
|
||||
logger.info(f"Processing {len(files)} files after date filtering")
|
||||
|
||||
indexed_count = 0
|
||||
skipped_count = 0
|
||||
failed_count = 0
|
||||
duplicate_content_count = 0
|
||||
|
||||
# Heartbeat tracking - update notification periodically to prevent appearing stuck
|
||||
last_heartbeat_time = time.time()
|
||||
|
||||
# =======================================================================
|
||||
# PHASE 1: Analyze all files, create pending documents
|
||||
# This makes ALL documents visible in the UI immediately with pending status
|
||||
# =======================================================================
|
||||
files_to_process = [] # List of dicts with document and file data
|
||||
new_documents_created = False
|
||||
|
||||
for file_info in files:
|
||||
try:
|
||||
file_path = file_info["path"]
|
||||
relative_path = file_info["relative_path"]
|
||||
|
||||
# Read file content
|
||||
try:
|
||||
with open(file_path, encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
except UnicodeDecodeError:
|
||||
logger.warning(f"Could not decode file {file_path}, skipping")
|
||||
skipped_count += 1
|
||||
continue
|
||||
|
||||
if not content.strip():
|
||||
logger.debug(f"Empty file {file_path}, skipping")
|
||||
skipped_count += 1
|
||||
continue
|
||||
|
||||
# Parse frontmatter and extract metadata
|
||||
frontmatter, body_content = parse_frontmatter(content)
|
||||
wiki_links = extract_wiki_links(content)
|
||||
tags = extract_tags(content)
|
||||
|
||||
# Get title from frontmatter or filename
|
||||
title = file_info["name"]
|
||||
if frontmatter:
|
||||
title = frontmatter.get("title", title)
|
||||
# Also extract tags from frontmatter
|
||||
fm_tags = frontmatter.get("tags", [])
|
||||
if isinstance(fm_tags, list):
|
||||
tags = list({*tags, *fm_tags})
|
||||
elif isinstance(fm_tags, str):
|
||||
tags = list({*tags, fm_tags})
|
||||
|
||||
# Generate unique identifier using vault name and relative path
|
||||
unique_identifier = f"{vault_name}:{relative_path}"
|
||||
unique_identifier_hash = generate_unique_identifier_hash(
|
||||
DocumentType.OBSIDIAN_CONNECTOR,
|
||||
unique_identifier,
|
||||
search_space_id,
|
||||
)
|
||||
|
||||
# Generate content hash
|
||||
content_hash = generate_content_hash(content, search_space_id)
|
||||
|
||||
# Check for existing document
|
||||
existing_document = await check_document_by_unique_identifier(
|
||||
session, unique_identifier_hash
|
||||
)
|
||||
|
||||
if existing_document:
|
||||
# Document exists - check if content has changed
|
||||
if existing_document.content_hash == content_hash:
|
||||
# Ensure status is ready (might have been stuck in processing/pending)
|
||||
if not DocumentStatus.is_state(
|
||||
existing_document.status, DocumentStatus.READY
|
||||
):
|
||||
existing_document.status = DocumentStatus.ready()
|
||||
logger.debug(f"Note {title} unchanged, skipping")
|
||||
skipped_count += 1
|
||||
continue
|
||||
|
||||
# Queue existing document for update (will be set to processing in Phase 2)
|
||||
files_to_process.append(
|
||||
{
|
||||
"document": existing_document,
|
||||
"is_new": False,
|
||||
"file_info": file_info,
|
||||
"content": content,
|
||||
"body_content": body_content,
|
||||
"frontmatter": frontmatter,
|
||||
"wiki_links": wiki_links,
|
||||
"tags": tags,
|
||||
"title": title,
|
||||
"relative_path": relative_path,
|
||||
"content_hash": content_hash,
|
||||
"unique_identifier_hash": unique_identifier_hash,
|
||||
}
|
||||
)
|
||||
continue
|
||||
|
||||
# Document doesn't exist by unique_identifier_hash
|
||||
# Check if a document with the same content_hash exists (from another connector)
|
||||
with session.no_autoflush:
|
||||
duplicate_by_content = await check_duplicate_document_by_hash(
|
||||
session, content_hash
|
||||
)
|
||||
|
||||
if duplicate_by_content:
|
||||
logger.info(
|
||||
f"Obsidian note {title} already indexed by another connector "
|
||||
f"(existing document ID: {duplicate_by_content.id}, "
|
||||
f"type: {duplicate_by_content.document_type}). Skipping."
|
||||
)
|
||||
duplicate_content_count += 1
|
||||
skipped_count += 1
|
||||
continue
|
||||
|
||||
# Create new document with PENDING status (visible in UI immediately)
|
||||
document = Document(
|
||||
search_space_id=search_space_id,
|
||||
title=title,
|
||||
document_type=DocumentType.OBSIDIAN_CONNECTOR,
|
||||
document_metadata={
|
||||
"vault_name": vault_name,
|
||||
"file_path": relative_path,
|
||||
"connector_id": connector_id,
|
||||
},
|
||||
content="Pending...", # Placeholder until processed
|
||||
content_hash=unique_identifier_hash, # Temporary unique value - updated when ready
|
||||
unique_identifier_hash=unique_identifier_hash,
|
||||
embedding=None,
|
||||
chunks=[], # Empty at creation - safe for async
|
||||
status=DocumentStatus.pending(), # Pending until processing starts
|
||||
updated_at=get_current_timestamp(),
|
||||
created_by_id=user_id,
|
||||
connector_id=connector_id,
|
||||
)
|
||||
session.add(document)
|
||||
new_documents_created = True
|
||||
|
||||
files_to_process.append(
|
||||
{
|
||||
"document": document,
|
||||
"is_new": True,
|
||||
"file_info": file_info,
|
||||
"content": content,
|
||||
"body_content": body_content,
|
||||
"frontmatter": frontmatter,
|
||||
"wiki_links": wiki_links,
|
||||
"tags": tags,
|
||||
"title": title,
|
||||
"relative_path": relative_path,
|
||||
"content_hash": content_hash,
|
||||
"unique_identifier_hash": unique_identifier_hash,
|
||||
}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
f"Error in Phase 1 for file {file_info.get('path', 'unknown')}: {e}"
|
||||
)
|
||||
failed_count += 1
|
||||
continue
|
||||
|
||||
# Commit all pending documents - they all appear in UI now
|
||||
if new_documents_created:
|
||||
logger.info(
|
||||
f"Phase 1: Committing {len([f for f in files_to_process if f['is_new']])} pending documents"
|
||||
)
|
||||
await session.commit()
|
||||
|
||||
# =======================================================================
|
||||
# PHASE 2: Process each document one by one
|
||||
# Each document transitions: pending → processing → ready/failed
|
||||
# =======================================================================
|
||||
logger.info(f"Phase 2: Processing {len(files_to_process)} documents")
|
||||
|
||||
# Get LLM for summarization
|
||||
long_context_llm = await get_user_long_context_llm(
|
||||
session, user_id, search_space_id
|
||||
)
|
||||
|
||||
for item in files_to_process:
|
||||
# Send heartbeat periodically
|
||||
if on_heartbeat_callback:
|
||||
current_time = time.time()
|
||||
if current_time - last_heartbeat_time >= HEARTBEAT_INTERVAL_SECONDS:
|
||||
await on_heartbeat_callback(indexed_count)
|
||||
last_heartbeat_time = current_time
|
||||
|
||||
document = item["document"]
|
||||
try:
|
||||
# Set to PROCESSING and commit - shows "processing" in UI for THIS document only
|
||||
document.status = DocumentStatus.processing()
|
||||
await session.commit()
|
||||
|
||||
# Extract data from item
|
||||
title = item["title"]
|
||||
relative_path = item["relative_path"]
|
||||
content = item["content"]
|
||||
body_content = item["body_content"]
|
||||
frontmatter = item["frontmatter"]
|
||||
wiki_links = item["wiki_links"]
|
||||
tags = item["tags"]
|
||||
content_hash = item["content_hash"]
|
||||
file_info = item["file_info"]
|
||||
|
||||
# Build metadata
|
||||
document_metadata = {
|
||||
"vault_name": vault_name,
|
||||
"file_path": relative_path,
|
||||
"tags": tags,
|
||||
"outgoing_links": wiki_links,
|
||||
"frontmatter": frontmatter,
|
||||
"modified_at": file_info["modified_at"].isoformat(),
|
||||
"created_at": file_info["created_at"].isoformat(),
|
||||
"word_count": len(body_content.split()),
|
||||
}
|
||||
|
||||
# Build document content with metadata
|
||||
metadata_sections = [
|
||||
(
|
||||
"METADATA",
|
||||
[
|
||||
f"Title: {title}",
|
||||
f"Vault: {vault_name}",
|
||||
f"Path: {relative_path}",
|
||||
f"Tags: {', '.join(tags) if tags else 'None'}",
|
||||
f"Links to: {', '.join(wiki_links) if wiki_links else 'None'}",
|
||||
],
|
||||
),
|
||||
("CONTENT", [body_content]),
|
||||
]
|
||||
document_string = build_document_metadata_string(metadata_sections)
|
||||
|
||||
# Generate summary
|
||||
summary_content = ""
|
||||
if long_context_llm and connector.enable_summary:
|
||||
summary_content, _ = await generate_document_summary(
|
||||
document_string,
|
||||
long_context_llm,
|
||||
document_metadata,
|
||||
)
|
||||
|
||||
# Generate embedding
|
||||
embedding = embed_text(document_string)
|
||||
|
||||
# Add URL and summary to metadata
|
||||
document_metadata["url"] = f"obsidian://{vault_name}/{relative_path}"
|
||||
document_metadata["summary"] = summary_content
|
||||
document_metadata["connector_id"] = connector_id
|
||||
|
||||
# Create chunks
|
||||
chunks = await create_document_chunks(document_string)
|
||||
|
||||
# Update document to READY with actual content
|
||||
document.title = title
|
||||
document.content = document_string
|
||||
document.content_hash = content_hash
|
||||
document.embedding = embedding
|
||||
document.document_metadata = document_metadata
|
||||
await safe_set_chunks(session, document, chunks)
|
||||
document.updated_at = get_current_timestamp()
|
||||
document.status = DocumentStatus.ready()
|
||||
|
||||
indexed_count += 1
|
||||
|
||||
# Batch commit every 10 documents (for ready status updates)
|
||||
if indexed_count % 10 == 0:
|
||||
logger.info(
|
||||
f"Committing batch: {indexed_count} Obsidian notes processed so far"
|
||||
)
|
||||
await session.commit()
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
f"Error processing file {item.get('file_info', {}).get('path', 'unknown')}: {e}"
|
||||
)
|
||||
# Mark document as failed with reason (visible in UI)
|
||||
try:
|
||||
document.status = DocumentStatus.failed(str(e))
|
||||
document.updated_at = get_current_timestamp()
|
||||
except Exception as status_error:
|
||||
logger.error(
|
||||
f"Failed to update document status to failed: {status_error}"
|
||||
)
|
||||
failed_count += 1
|
||||
continue
|
||||
|
||||
# CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs
|
||||
await update_connector_last_indexed(session, connector, update_last_indexed)
|
||||
|
||||
# Final commit for any remaining documents not yet committed in batches
|
||||
logger.info(f"Final commit: Total {indexed_count} Obsidian notes processed")
|
||||
try:
|
||||
await session.commit()
|
||||
logger.info(
|
||||
"Successfully committed all Obsidian document changes to database"
|
||||
)
|
||||
except Exception as e:
|
||||
# Handle any remaining integrity errors gracefully (race conditions, etc.)
|
||||
if (
|
||||
"duplicate key value violates unique constraint" in str(e).lower()
|
||||
or "uniqueviolationerror" in str(e).lower()
|
||||
):
|
||||
logger.warning(
|
||||
f"Duplicate content_hash detected during final commit. "
|
||||
f"This may occur if the same note was indexed by multiple connectors. "
|
||||
f"Rolling back and continuing. Error: {e!s}"
|
||||
)
|
||||
await session.rollback()
|
||||
# Don't fail the entire task - some documents may have been successfully indexed
|
||||
else:
|
||||
raise
|
||||
|
||||
# Build warning message if there were issues
|
||||
warning_parts = []
|
||||
if duplicate_content_count > 0:
|
||||
warning_parts.append(f"{duplicate_content_count} duplicate")
|
||||
if failed_count > 0:
|
||||
warning_parts.append(f"{failed_count} failed")
|
||||
warning_message = ", ".join(warning_parts) if warning_parts else None
|
||||
|
||||
total_processed = indexed_count
|
||||
|
||||
await task_logger.log_task_success(
|
||||
log_entry,
|
||||
f"Successfully completed Obsidian vault indexing for connector {connector_id}",
|
||||
{
|
||||
"notes_processed": total_processed,
|
||||
"documents_indexed": indexed_count,
|
||||
"documents_skipped": skipped_count,
|
||||
"documents_failed": failed_count,
|
||||
"duplicate_content_count": duplicate_content_count,
|
||||
},
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Obsidian vault indexing completed: {indexed_count} ready, "
|
||||
f"{skipped_count} skipped, {failed_count} failed "
|
||||
f"({duplicate_content_count} duplicate content)"
|
||||
)
|
||||
return total_processed, warning_message
|
||||
|
||||
except SQLAlchemyError as e:
|
||||
logger.exception(f"Database error during Obsidian indexing: {e}")
|
||||
await session.rollback()
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
f"Database error during Obsidian indexing: {e}",
|
||||
"Database error",
|
||||
{"error_type": "SQLAlchemyError"},
|
||||
)
|
||||
return 0, f"Database error: {e}"
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(f"Error during Obsidian indexing: {e}")
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
f"Error during Obsidian indexing: {e}",
|
||||
"Unexpected error",
|
||||
{"error_type": type(e).__name__},
|
||||
)
|
||||
return 0, str(e)
|
||||
|
|
@ -24,7 +24,6 @@ CONNECTOR_TASK_MAP = {
|
|||
SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR: "index_elasticsearch_documents",
|
||||
SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: "index_crawled_urls",
|
||||
SearchSourceConnectorType.BOOKSTACK_CONNECTOR: "index_bookstack_pages",
|
||||
SearchSourceConnectorType.OBSIDIAN_CONNECTOR: "index_obsidian_vault",
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -81,7 +80,6 @@ def create_periodic_schedule(
|
|||
index_elasticsearch_documents_task,
|
||||
index_github_repos_task,
|
||||
index_notion_pages_task,
|
||||
index_obsidian_vault_task,
|
||||
)
|
||||
|
||||
task_map = {
|
||||
|
|
@ -91,7 +89,6 @@ def create_periodic_schedule(
|
|||
SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR: index_elasticsearch_documents_task,
|
||||
SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: index_crawled_urls_task,
|
||||
SearchSourceConnectorType.BOOKSTACK_CONNECTOR: index_bookstack_pages_task,
|
||||
SearchSourceConnectorType.OBSIDIAN_CONNECTOR: index_obsidian_vault_task,
|
||||
}
|
||||
|
||||
# Trigger the first run immediately
|
||||
|
|
|
|||
|
|
@ -0,0 +1,625 @@
|
|||
"""Integration tests for the Obsidian plugin HTTP wire contract.
|
||||
|
||||
Three concerns:
|
||||
|
||||
1. The /connect upsert really collapses concurrent first-time connects to
|
||||
exactly one row. This locks the partial unique index from migration 129
|
||||
to its purpose.
|
||||
2. The fingerprint dedup path: a second device connecting with a fresh
|
||||
``vault_id`` but the same ``vault_fingerprint`` adopts the existing
|
||||
connector instead of creating a duplicate.
|
||||
3. The end-to-end response shapes returned by /connect /sync /rename
|
||||
/notes /manifest /stats match the schemas the plugin's TypeScript
|
||||
decoders expect. Each renamed field is a contract change, and a smoke
|
||||
pass like this is the cheapest way to catch a future drift before it
|
||||
ships.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import uuid
|
||||
from datetime import UTC, datetime
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
from sqlalchemy import func, select, text
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.db import (
|
||||
SearchSourceConnector,
|
||||
SearchSourceConnectorType,
|
||||
SearchSpace,
|
||||
User,
|
||||
)
|
||||
from app.routes.obsidian_plugin_routes import (
|
||||
obsidian_connect,
|
||||
obsidian_delete_notes,
|
||||
obsidian_manifest,
|
||||
obsidian_rename,
|
||||
obsidian_stats,
|
||||
obsidian_sync,
|
||||
)
|
||||
from app.schemas.obsidian_plugin import (
|
||||
ConnectRequest,
|
||||
DeleteAck,
|
||||
DeleteBatchRequest,
|
||||
HeadingRef,
|
||||
ManifestResponse,
|
||||
NotePayload,
|
||||
RenameAck,
|
||||
RenameBatchRequest,
|
||||
RenameItem,
|
||||
StatsResponse,
|
||||
SyncAck,
|
||||
SyncBatchRequest,
|
||||
)
|
||||
|
||||
pytestmark = pytest.mark.integration
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _make_note_payload(vault_id: str, path: str, content_hash: str) -> NotePayload:
|
||||
"""Minimal NotePayload that the schema accepts; the indexer is mocked
|
||||
out so the values don't have to round-trip through the real pipeline."""
|
||||
now = datetime.now(UTC)
|
||||
return NotePayload(
|
||||
vault_id=vault_id,
|
||||
path=path,
|
||||
name=path.rsplit("/", 1)[-1].rsplit(".", 1)[0],
|
||||
extension="md",
|
||||
content="# Test\n\nbody",
|
||||
headings=[HeadingRef(heading="Test", level=1)],
|
||||
content_hash=content_hash,
|
||||
mtime=now,
|
||||
ctime=now,
|
||||
)
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
|
||||
async def race_user_and_space(async_engine):
|
||||
"""User + SearchSpace committed via the live engine so the two
|
||||
concurrent /connect sessions in the race test can both see them.
|
||||
|
||||
We can't use the savepoint-trapped ``db_session`` fixture here
|
||||
because the concurrent sessions need to see committed rows.
|
||||
"""
|
||||
user_id = uuid.uuid4()
|
||||
async with AsyncSession(async_engine) as setup:
|
||||
user = User(
|
||||
id=user_id,
|
||||
email=f"obsidian-race-{uuid.uuid4()}@surfsense.test",
|
||||
hashed_password="x",
|
||||
is_active=True,
|
||||
is_superuser=False,
|
||||
is_verified=True,
|
||||
)
|
||||
space = SearchSpace(name="Race Space", user_id=user_id)
|
||||
setup.add_all([user, space])
|
||||
await setup.commit()
|
||||
await setup.refresh(space)
|
||||
space_id = space.id
|
||||
|
||||
yield user_id, space_id
|
||||
|
||||
async with AsyncSession(async_engine) as cleanup:
|
||||
# Order matters: connectors -> documents -> space -> user. The
|
||||
# connectors test creates documents, so we wipe them too. The
|
||||
# CASCADE on user_id catches anything we missed.
|
||||
await cleanup.execute(
|
||||
text("DELETE FROM search_source_connectors WHERE user_id = :uid"),
|
||||
{"uid": user_id},
|
||||
)
|
||||
await cleanup.execute(
|
||||
text("DELETE FROM searchspaces WHERE id = :id"),
|
||||
{"id": space_id},
|
||||
)
|
||||
await cleanup.execute(
|
||||
text('DELETE FROM "user" WHERE id = :uid'),
|
||||
{"uid": user_id},
|
||||
)
|
||||
await cleanup.commit()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# /connect race + index enforcement
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestConnectRace:
|
||||
async def test_concurrent_first_connects_collapse_to_one_row(
|
||||
self, async_engine, race_user_and_space
|
||||
):
|
||||
"""Two simultaneous /connect calls for the same vault should
|
||||
produce exactly one row, not two. Same vault_id + same
|
||||
fingerprint funnels through both partial unique indexes; the
|
||||
loser falls back to the survivor row via the IntegrityError
|
||||
branch in obsidian_connect."""
|
||||
user_id, space_id = race_user_and_space
|
||||
vault_id = str(uuid.uuid4())
|
||||
fingerprint = "fp-" + uuid.uuid4().hex
|
||||
|
||||
async def _call(name_suffix: str) -> None:
|
||||
async with AsyncSession(async_engine) as s:
|
||||
fresh_user = await s.get(User, user_id)
|
||||
payload = ConnectRequest(
|
||||
vault_id=vault_id,
|
||||
vault_name=f"My Vault {name_suffix}",
|
||||
search_space_id=space_id,
|
||||
vault_fingerprint=fingerprint,
|
||||
)
|
||||
await obsidian_connect(payload, user=fresh_user, session=s)
|
||||
|
||||
results = await asyncio.gather(_call("a"), _call("b"), return_exceptions=True)
|
||||
for r in results:
|
||||
assert not isinstance(r, Exception), f"Connect raised: {r!r}"
|
||||
|
||||
async with AsyncSession(async_engine) as verify:
|
||||
count = (
|
||||
await verify.execute(
|
||||
select(func.count(SearchSourceConnector.id)).where(
|
||||
SearchSourceConnector.user_id == user_id,
|
||||
)
|
||||
)
|
||||
).scalar_one()
|
||||
assert count == 1
|
||||
|
||||
async def test_partial_unique_index_blocks_raw_duplicate(
|
||||
self, async_engine, race_user_and_space
|
||||
):
|
||||
"""Raw INSERTs that bypass the route must still be blocked by
|
||||
the partial unique indexes from migration 129."""
|
||||
user_id, space_id = race_user_and_space
|
||||
vault_id = str(uuid.uuid4())
|
||||
|
||||
async with AsyncSession(async_engine) as s:
|
||||
s.add(
|
||||
SearchSourceConnector(
|
||||
name="Obsidian - First",
|
||||
connector_type=SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
|
||||
is_indexable=False,
|
||||
config={
|
||||
"vault_id": vault_id,
|
||||
"vault_name": "First",
|
||||
"source": "plugin",
|
||||
"vault_fingerprint": "fp-1",
|
||||
},
|
||||
user_id=user_id,
|
||||
search_space_id=space_id,
|
||||
)
|
||||
)
|
||||
await s.commit()
|
||||
|
||||
with pytest.raises(IntegrityError):
|
||||
async with AsyncSession(async_engine) as s:
|
||||
s.add(
|
||||
SearchSourceConnector(
|
||||
name="Obsidian - Second",
|
||||
connector_type=SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
|
||||
is_indexable=False,
|
||||
config={
|
||||
"vault_id": vault_id,
|
||||
"vault_name": "Second",
|
||||
"source": "plugin",
|
||||
"vault_fingerprint": "fp-2",
|
||||
},
|
||||
user_id=user_id,
|
||||
search_space_id=space_id,
|
||||
)
|
||||
)
|
||||
await s.commit()
|
||||
|
||||
async def test_fingerprint_blocks_raw_cross_device_duplicate(
|
||||
self, async_engine, race_user_and_space
|
||||
):
|
||||
"""Two connectors for the same user with different vault_ids but
|
||||
the same fingerprint cannot coexist."""
|
||||
user_id, space_id = race_user_and_space
|
||||
fingerprint = "fp-" + uuid.uuid4().hex
|
||||
|
||||
async with AsyncSession(async_engine) as s:
|
||||
s.add(
|
||||
SearchSourceConnector(
|
||||
name="Obsidian - Desktop",
|
||||
connector_type=SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
|
||||
is_indexable=False,
|
||||
config={
|
||||
"vault_id": str(uuid.uuid4()),
|
||||
"vault_name": "Vault",
|
||||
"source": "plugin",
|
||||
"vault_fingerprint": fingerprint,
|
||||
},
|
||||
user_id=user_id,
|
||||
search_space_id=space_id,
|
||||
)
|
||||
)
|
||||
await s.commit()
|
||||
|
||||
with pytest.raises(IntegrityError):
|
||||
async with AsyncSession(async_engine) as s:
|
||||
s.add(
|
||||
SearchSourceConnector(
|
||||
name="Obsidian - Mobile",
|
||||
connector_type=SearchSourceConnectorType.OBSIDIAN_CONNECTOR,
|
||||
is_indexable=False,
|
||||
config={
|
||||
"vault_id": str(uuid.uuid4()),
|
||||
"vault_name": "Vault",
|
||||
"source": "plugin",
|
||||
"vault_fingerprint": fingerprint,
|
||||
},
|
||||
user_id=user_id,
|
||||
search_space_id=space_id,
|
||||
)
|
||||
)
|
||||
await s.commit()
|
||||
|
||||
async def test_second_device_adopts_existing_connector_via_fingerprint(
|
||||
self, async_engine, race_user_and_space
|
||||
):
|
||||
"""Device A connects with vault_id=A. Device B then connects with
|
||||
a fresh vault_id=B but the same fingerprint. The route must
|
||||
return A's identity (not create a B row), proving cross-device
|
||||
dedup happens transparently to the plugin."""
|
||||
user_id, space_id = race_user_and_space
|
||||
vault_id_a = str(uuid.uuid4())
|
||||
vault_id_b = str(uuid.uuid4())
|
||||
fingerprint = "fp-" + uuid.uuid4().hex
|
||||
|
||||
async with AsyncSession(async_engine) as s:
|
||||
fresh_user = await s.get(User, user_id)
|
||||
resp_a = await obsidian_connect(
|
||||
ConnectRequest(
|
||||
vault_id=vault_id_a,
|
||||
vault_name="Shared Vault",
|
||||
search_space_id=space_id,
|
||||
vault_fingerprint=fingerprint,
|
||||
),
|
||||
user=fresh_user,
|
||||
session=s,
|
||||
)
|
||||
|
||||
async with AsyncSession(async_engine) as s:
|
||||
fresh_user = await s.get(User, user_id)
|
||||
resp_b = await obsidian_connect(
|
||||
ConnectRequest(
|
||||
vault_id=vault_id_b,
|
||||
vault_name="Shared Vault",
|
||||
search_space_id=space_id,
|
||||
vault_fingerprint=fingerprint,
|
||||
),
|
||||
user=fresh_user,
|
||||
session=s,
|
||||
)
|
||||
|
||||
assert resp_b.vault_id == vault_id_a
|
||||
assert resp_b.connector_id == resp_a.connector_id
|
||||
|
||||
async with AsyncSession(async_engine) as verify:
|
||||
count = (
|
||||
await verify.execute(
|
||||
select(func.count(SearchSourceConnector.id)).where(
|
||||
SearchSourceConnector.user_id == user_id,
|
||||
)
|
||||
)
|
||||
).scalar_one()
|
||||
assert count == 1
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Combined wire-shape smoke test
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestWireContractSmoke:
|
||||
"""Walks /connect -> /sync -> /rename -> /notes -> /manifest -> /stats
|
||||
sequentially and asserts each response matches the new schema. With
|
||||
`response_model=` on every route, FastAPI is already validating the
|
||||
shape on real traffic; this test mainly guards against accidental
|
||||
field renames the way the TypeScript decoder would catch them."""
|
||||
|
||||
async def test_full_flow_returns_typed_payloads(
|
||||
self, db_session: AsyncSession, db_user: User, db_search_space: SearchSpace
|
||||
):
|
||||
vault_id = str(uuid.uuid4())
|
||||
|
||||
# 1. /connect
|
||||
connect_resp = await obsidian_connect(
|
||||
ConnectRequest(
|
||||
vault_id=vault_id,
|
||||
vault_name="Smoke Vault",
|
||||
search_space_id=db_search_space.id,
|
||||
vault_fingerprint="fp-" + uuid.uuid4().hex,
|
||||
),
|
||||
user=db_user,
|
||||
session=db_session,
|
||||
)
|
||||
assert connect_resp.connector_id > 0
|
||||
assert connect_resp.vault_id == vault_id
|
||||
assert "sync" in connect_resp.capabilities
|
||||
assert connect_resp.server_time_utc is not None
|
||||
|
||||
# 2. /sync — stub the indexer so the call doesn't drag the LLM /
|
||||
# embedding pipeline in. We're testing the wire contract, not the
|
||||
# indexer itself.
|
||||
fake_doc = type("FakeDoc", (), {"id": 12345})()
|
||||
with patch(
|
||||
"app.routes.obsidian_plugin_routes.upsert_note",
|
||||
new=AsyncMock(return_value=fake_doc),
|
||||
):
|
||||
sync_resp = await obsidian_sync(
|
||||
SyncBatchRequest(
|
||||
vault_id=vault_id,
|
||||
notes=[
|
||||
_make_note_payload(vault_id, "ok.md", "hash-ok"),
|
||||
_make_note_payload(vault_id, "fail.md", "hash-fail"),
|
||||
],
|
||||
),
|
||||
user=db_user,
|
||||
session=db_session,
|
||||
)
|
||||
|
||||
assert isinstance(sync_resp, SyncAck)
|
||||
assert sync_resp.vault_id == vault_id
|
||||
assert sync_resp.indexed == 2
|
||||
assert sync_resp.failed == 0
|
||||
assert len(sync_resp.items) == 2
|
||||
assert all(it.status == "ok" for it in sync_resp.items)
|
||||
# The TypeScript decoder filters on items[].status === "error" and
|
||||
# extracts .path, so confirm both fields are present and named.
|
||||
assert {it.path for it in sync_resp.items} == {"ok.md", "fail.md"}
|
||||
|
||||
# 2b. Re-run /sync but force the indexer to raise on one note so
|
||||
# the per-item failure decoder gets exercised end-to-end.
|
||||
async def _selective_upsert(session, *, connector, payload, user_id):
|
||||
if payload.path == "fail.md":
|
||||
raise RuntimeError("simulated indexing failure")
|
||||
return fake_doc
|
||||
|
||||
with patch(
|
||||
"app.routes.obsidian_plugin_routes.upsert_note",
|
||||
new=AsyncMock(side_effect=_selective_upsert),
|
||||
):
|
||||
sync_resp = await obsidian_sync(
|
||||
SyncBatchRequest(
|
||||
vault_id=vault_id,
|
||||
notes=[
|
||||
_make_note_payload(vault_id, "ok.md", "h1"),
|
||||
_make_note_payload(vault_id, "fail.md", "h2"),
|
||||
],
|
||||
),
|
||||
user=db_user,
|
||||
session=db_session,
|
||||
)
|
||||
assert sync_resp.indexed == 1
|
||||
assert sync_resp.failed == 1
|
||||
statuses = {it.path: it.status for it in sync_resp.items}
|
||||
assert statuses == {"ok.md": "ok", "fail.md": "error"}
|
||||
|
||||
# 3. /rename — patch rename_note so we don't need a real Document.
|
||||
async def _rename(*args, **kwargs) -> object:
|
||||
if kwargs.get("old_path") == "missing.md":
|
||||
return None
|
||||
return fake_doc
|
||||
|
||||
with patch(
|
||||
"app.routes.obsidian_plugin_routes.rename_note",
|
||||
new=AsyncMock(side_effect=_rename),
|
||||
):
|
||||
rename_resp = await obsidian_rename(
|
||||
RenameBatchRequest(
|
||||
vault_id=vault_id,
|
||||
renames=[
|
||||
RenameItem(old_path="a.md", new_path="b.md"),
|
||||
RenameItem(old_path="missing.md", new_path="x.md"),
|
||||
],
|
||||
),
|
||||
user=db_user,
|
||||
session=db_session,
|
||||
)
|
||||
assert isinstance(rename_resp, RenameAck)
|
||||
assert rename_resp.renamed == 1
|
||||
assert rename_resp.missing == 1
|
||||
assert {it.status for it in rename_resp.items} == {"ok", "missing"}
|
||||
# snake_case fields are deliberate — the plugin decoder maps them
|
||||
# to camelCase explicitly.
|
||||
assert all(it.old_path and it.new_path for it in rename_resp.items)
|
||||
|
||||
# 4. /notes DELETE
|
||||
async def _delete(*args, **kwargs) -> bool:
|
||||
return kwargs.get("path") != "ghost.md"
|
||||
|
||||
with patch(
|
||||
"app.routes.obsidian_plugin_routes.delete_note",
|
||||
new=AsyncMock(side_effect=_delete),
|
||||
):
|
||||
delete_resp = await obsidian_delete_notes(
|
||||
DeleteBatchRequest(vault_id=vault_id, paths=["b.md", "ghost.md"]),
|
||||
user=db_user,
|
||||
session=db_session,
|
||||
)
|
||||
assert isinstance(delete_resp, DeleteAck)
|
||||
assert delete_resp.deleted == 1
|
||||
assert delete_resp.missing == 1
|
||||
assert {it.path: it.status for it in delete_resp.items} == {
|
||||
"b.md": "ok",
|
||||
"ghost.md": "missing",
|
||||
}
|
||||
|
||||
# 5. /manifest — empty (no real Documents were created because
|
||||
# upsert_note was mocked) but the response shape is what we care
|
||||
# about.
|
||||
manifest_resp = await obsidian_manifest(
|
||||
vault_id=vault_id, user=db_user, session=db_session
|
||||
)
|
||||
assert isinstance(manifest_resp, ManifestResponse)
|
||||
assert manifest_resp.vault_id == vault_id
|
||||
assert manifest_resp.items == {}
|
||||
|
||||
# 6. /stats — same; row count is 0 because upsert_note was mocked.
|
||||
stats_resp = await obsidian_stats(
|
||||
vault_id=vault_id, user=db_user, session=db_session
|
||||
)
|
||||
assert isinstance(stats_resp, StatsResponse)
|
||||
assert stats_resp.vault_id == vault_id
|
||||
assert stats_resp.files_synced == 0
|
||||
assert stats_resp.last_sync_at is None
|
||||
|
||||
async def test_sync_queues_binary_attachments(
|
||||
self, db_session: AsyncSession, db_user: User, db_search_space: SearchSpace
|
||||
):
|
||||
vault_id = str(uuid.uuid4())
|
||||
await obsidian_connect(
|
||||
ConnectRequest(
|
||||
vault_id=vault_id,
|
||||
vault_name="Queue Vault",
|
||||
search_space_id=db_search_space.id,
|
||||
vault_fingerprint="fp-" + uuid.uuid4().hex,
|
||||
),
|
||||
user=db_user,
|
||||
session=db_session,
|
||||
)
|
||||
|
||||
fake_doc = type("FakeDoc", (), {"id": 12345})()
|
||||
binary_note = _make_note_payload(vault_id, "image.png", "hash-bin")
|
||||
binary_note.extension = "png"
|
||||
binary_note.is_binary = True
|
||||
binary_note.binary_base64 = "aGVsbG8="
|
||||
binary_note.mime_type = "image/png"
|
||||
binary_note.content = ""
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.routes.obsidian_plugin_routes.upsert_note",
|
||||
new=AsyncMock(return_value=fake_doc),
|
||||
) as upsert_mock,
|
||||
patch("app.routes.obsidian_plugin_routes._queue_obsidian_attachment") as queue_mock,
|
||||
):
|
||||
sync_resp = await obsidian_sync(
|
||||
SyncBatchRequest(
|
||||
vault_id=vault_id,
|
||||
notes=[
|
||||
_make_note_payload(vault_id, "ok.md", "hash-ok"),
|
||||
binary_note,
|
||||
],
|
||||
),
|
||||
user=db_user,
|
||||
session=db_session,
|
||||
)
|
||||
|
||||
assert sync_resp.indexed == 2
|
||||
assert sync_resp.failed == 0
|
||||
statuses = {it.path: it.status for it in sync_resp.items}
|
||||
assert statuses == {"ok.md": "ok", "image.png": "queued"}
|
||||
assert upsert_mock.await_count == 1
|
||||
queue_mock.assert_called_once()
|
||||
|
||||
async def test_sync_rejects_unsupported_attachment_extension(
|
||||
self, db_session: AsyncSession, db_user: User, db_search_space: SearchSpace
|
||||
):
|
||||
vault_id = str(uuid.uuid4())
|
||||
await obsidian_connect(
|
||||
ConnectRequest(
|
||||
vault_id=vault_id,
|
||||
vault_name="Reject Vault",
|
||||
search_space_id=db_search_space.id,
|
||||
vault_fingerprint="fp-" + uuid.uuid4().hex,
|
||||
),
|
||||
user=db_user,
|
||||
session=db_session,
|
||||
)
|
||||
|
||||
fake_doc = type("FakeDoc", (), {"id": 12345})()
|
||||
bad_note = _make_note_payload(vault_id, "photo.heic", "hash-heic")
|
||||
bad_note.extension = "heic"
|
||||
bad_note.is_binary = True
|
||||
bad_note.binary_base64 = "aGVsbG8="
|
||||
bad_note.mime_type = "image/heic"
|
||||
bad_note.content = ""
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.routes.obsidian_plugin_routes.upsert_note",
|
||||
new=AsyncMock(return_value=fake_doc),
|
||||
),
|
||||
patch("app.routes.obsidian_plugin_routes._queue_obsidian_attachment") as queue_mock,
|
||||
):
|
||||
sync_resp = await obsidian_sync(
|
||||
SyncBatchRequest(
|
||||
vault_id=vault_id,
|
||||
notes=[
|
||||
_make_note_payload(vault_id, "ok.md", "hash-ok"),
|
||||
bad_note,
|
||||
],
|
||||
),
|
||||
user=db_user,
|
||||
session=db_session,
|
||||
)
|
||||
|
||||
assert sync_resp.indexed == 1
|
||||
assert sync_resp.failed == 1
|
||||
items_by_path = {it.path: it for it in sync_resp.items}
|
||||
assert items_by_path["ok.md"].status == "ok"
|
||||
assert items_by_path["photo.heic"].status == "error"
|
||||
assert "unsupported attachment extension" in (
|
||||
items_by_path["photo.heic"].error or ""
|
||||
)
|
||||
queue_mock.assert_not_called()
|
||||
|
||||
async def test_sync_rejects_mime_extension_mismatch(
|
||||
self, db_session: AsyncSession, db_user: User, db_search_space: SearchSpace
|
||||
):
|
||||
vault_id = str(uuid.uuid4())
|
||||
await obsidian_connect(
|
||||
ConnectRequest(
|
||||
vault_id=vault_id,
|
||||
vault_name="Mismatch Vault",
|
||||
search_space_id=db_search_space.id,
|
||||
vault_fingerprint="fp-" + uuid.uuid4().hex,
|
||||
),
|
||||
user=db_user,
|
||||
session=db_session,
|
||||
)
|
||||
|
||||
fake_doc = type("FakeDoc", (), {"id": 12345})()
|
||||
mismatched = _make_note_payload(vault_id, "image.png", "hash-png")
|
||||
mismatched.extension = "png"
|
||||
mismatched.is_binary = True
|
||||
mismatched.binary_base64 = "aGVsbG8="
|
||||
mismatched.mime_type = "application/pdf"
|
||||
mismatched.content = ""
|
||||
|
||||
with (
|
||||
patch(
|
||||
"app.routes.obsidian_plugin_routes.upsert_note",
|
||||
new=AsyncMock(return_value=fake_doc),
|
||||
),
|
||||
patch("app.routes.obsidian_plugin_routes._queue_obsidian_attachment") as queue_mock,
|
||||
):
|
||||
sync_resp = await obsidian_sync(
|
||||
SyncBatchRequest(
|
||||
vault_id=vault_id,
|
||||
notes=[
|
||||
_make_note_payload(vault_id, "ok.md", "hash-ok"),
|
||||
mismatched,
|
||||
],
|
||||
),
|
||||
user=db_user,
|
||||
session=db_session,
|
||||
)
|
||||
|
||||
assert sync_resp.indexed == 1
|
||||
assert sync_resp.failed == 1
|
||||
items_by_path = {it.path: it for it in sync_resp.items}
|
||||
assert items_by_path["ok.md"].status == "ok"
|
||||
assert items_by_path["image.png"].status == "error"
|
||||
assert "does not match extension" in (
|
||||
items_by_path["image.png"].error or ""
|
||||
)
|
||||
queue_mock.assert_not_called()
|
||||
225
surfsense_backend/tests/unit/test_obsidian_plugin_indexer.py
Normal file
225
surfsense_backend/tests/unit/test_obsidian_plugin_indexer.py
Normal file
|
|
@ -0,0 +1,225 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
from datetime import UTC, datetime
|
||||
|
||||
import pytest
|
||||
from pydantic import ValidationError
|
||||
|
||||
from app.etl_pipeline.etl_document import EtlResult
|
||||
from app.schemas.obsidian_plugin import HeadingRef, NotePayload
|
||||
from app.services.obsidian_plugin_indexer import (
|
||||
_build_metadata,
|
||||
_extract_binary_attachment_markdown,
|
||||
_is_image_attachment,
|
||||
_require_extracted_attachment_content,
|
||||
)
|
||||
|
||||
|
||||
_FAKE_PNG_B64 = base64.b64encode(b"\x89PNG\r\n\x1a\n").decode("ascii")
|
||||
|
||||
|
||||
def test_build_metadata_serializes_headings_to_plain_json() -> None:
|
||||
now = datetime.now(UTC)
|
||||
payload = NotePayload(
|
||||
vault_id="vault-1",
|
||||
path="notes.md",
|
||||
name="notes",
|
||||
extension="md",
|
||||
content="# Notes",
|
||||
headings=[HeadingRef(heading="Notes", level=1)],
|
||||
content_hash="abc123",
|
||||
mtime=now,
|
||||
ctime=now,
|
||||
)
|
||||
|
||||
metadata = _build_metadata(payload, vault_name="My Vault", connector_id=42)
|
||||
|
||||
assert metadata["headings"] == [{"heading": "Notes", "level": 1}]
|
||||
|
||||
|
||||
def test_build_metadata_marks_binary_attachment_fields() -> None:
|
||||
now = datetime.now(UTC)
|
||||
payload = NotePayload(
|
||||
vault_id="vault-1",
|
||||
path="assets/diagram.png",
|
||||
name="diagram",
|
||||
extension="png",
|
||||
content="",
|
||||
content_hash="abc123",
|
||||
mtime=now,
|
||||
ctime=now,
|
||||
is_binary=True,
|
||||
binary_base64=_FAKE_PNG_B64,
|
||||
mime_type="image/png",
|
||||
)
|
||||
|
||||
metadata = _build_metadata(payload, vault_name="My Vault", connector_id=42)
|
||||
|
||||
assert metadata["is_binary"] is True
|
||||
assert metadata["mime_type"] == "image/png"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_extract_binary_attachment_markdown_handles_invalid_base64() -> None:
|
||||
now = datetime.now(UTC)
|
||||
payload = NotePayload(
|
||||
vault_id="vault-1",
|
||||
path="assets/diagram.png",
|
||||
name="diagram",
|
||||
extension="png",
|
||||
content="",
|
||||
content_hash="abc123",
|
||||
mtime=now,
|
||||
ctime=now,
|
||||
is_binary=True,
|
||||
binary_base64="not-valid-base64!!",
|
||||
mime_type="image/png",
|
||||
)
|
||||
|
||||
content, metadata = await _extract_binary_attachment_markdown(
|
||||
payload, vision_llm=None
|
||||
)
|
||||
|
||||
assert content == ""
|
||||
assert metadata["attachment_extraction_status"] == "invalid_binary_payload"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_extract_binary_attachment_markdown_uses_etl(monkeypatch) -> None:
|
||||
now = datetime.now(UTC)
|
||||
payload = NotePayload(
|
||||
vault_id="vault-1",
|
||||
path="assets/spec.pdf",
|
||||
name="spec",
|
||||
extension="pdf",
|
||||
content="",
|
||||
content_hash="abc123",
|
||||
mtime=now,
|
||||
ctime=now,
|
||||
is_binary=True,
|
||||
binary_base64=base64.b64encode(b"%PDF-1.7 fake bytes").decode("ascii"),
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
|
||||
async def _fake_run_etl_extract( # noqa: ANN001
|
||||
*, file_path, filename, vision_llm
|
||||
):
|
||||
assert filename == "spec.pdf"
|
||||
assert file_path
|
||||
assert vision_llm is None
|
||||
return EtlResult(
|
||||
markdown_content="Extracted content",
|
||||
etl_service="TEST_ETL",
|
||||
content_type="document",
|
||||
)
|
||||
|
||||
monkeypatch.setattr(
|
||||
"app.services.obsidian_plugin_indexer._run_etl_extract",
|
||||
_fake_run_etl_extract,
|
||||
)
|
||||
|
||||
content, metadata = await _extract_binary_attachment_markdown(
|
||||
payload, vision_llm=None
|
||||
)
|
||||
|
||||
assert content == "Extracted content"
|
||||
assert metadata["attachment_extraction_status"] == "ok"
|
||||
assert metadata["attachment_etl_service"] == "TEST_ETL"
|
||||
|
||||
|
||||
def test_is_image_attachment_detects_image_extensions() -> None:
|
||||
now = datetime.now(UTC)
|
||||
image_payload = NotePayload(
|
||||
vault_id="vault-1",
|
||||
path="assets/screenshot.PNG",
|
||||
name="screenshot",
|
||||
extension="PNG",
|
||||
content="",
|
||||
content_hash="abc123",
|
||||
mtime=now,
|
||||
ctime=now,
|
||||
is_binary=True,
|
||||
binary_base64=_FAKE_PNG_B64,
|
||||
mime_type="image/png",
|
||||
)
|
||||
pdf_payload = NotePayload(
|
||||
vault_id="vault-1",
|
||||
path="assets/spec.pdf",
|
||||
name="spec",
|
||||
extension="pdf",
|
||||
content="",
|
||||
content_hash="abc123",
|
||||
mtime=now,
|
||||
ctime=now,
|
||||
is_binary=True,
|
||||
binary_base64=_FAKE_PNG_B64,
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
|
||||
assert _is_image_attachment(image_payload) is True
|
||||
assert _is_image_attachment(pdf_payload) is False
|
||||
|
||||
|
||||
def test_note_payload_rejects_binary_without_base64() -> None:
|
||||
now = datetime.now(UTC)
|
||||
with pytest.raises(ValidationError, match="binary_base64 is required"):
|
||||
NotePayload(
|
||||
vault_id="vault-1",
|
||||
path="assets/diagram.png",
|
||||
name="diagram",
|
||||
extension="png",
|
||||
content="",
|
||||
content_hash="abc123",
|
||||
mtime=now,
|
||||
ctime=now,
|
||||
is_binary=True,
|
||||
mime_type="image/png",
|
||||
)
|
||||
|
||||
|
||||
def test_note_payload_rejects_binary_without_mime_type() -> None:
|
||||
now = datetime.now(UTC)
|
||||
with pytest.raises(ValidationError, match="mime_type is required"):
|
||||
NotePayload(
|
||||
vault_id="vault-1",
|
||||
path="assets/diagram.png",
|
||||
name="diagram",
|
||||
extension="png",
|
||||
content="",
|
||||
content_hash="abc123",
|
||||
mtime=now,
|
||||
ctime=now,
|
||||
is_binary=True,
|
||||
binary_base64=_FAKE_PNG_B64,
|
||||
)
|
||||
|
||||
|
||||
def test_note_payload_rejects_markdown_with_binary_fields() -> None:
|
||||
now = datetime.now(UTC)
|
||||
with pytest.raises(
|
||||
ValidationError,
|
||||
match="binary_base64 and mime_type must be omitted when is_binary is False",
|
||||
):
|
||||
NotePayload(
|
||||
vault_id="vault-1",
|
||||
path="notes.md",
|
||||
name="notes",
|
||||
extension="md",
|
||||
content="# Notes",
|
||||
content_hash="abc123",
|
||||
mtime=now,
|
||||
ctime=now,
|
||||
binary_base64=_FAKE_PNG_B64,
|
||||
)
|
||||
|
||||
|
||||
def test_require_extracted_attachment_content_rejects_empty_content() -> None:
|
||||
with pytest.raises(
|
||||
RuntimeError, match="Attachment extraction failed for assets/img.png"
|
||||
):
|
||||
_require_extracted_attachment_content(
|
||||
content=" ",
|
||||
etl_meta={"attachment_extraction_status": "etl_failed"},
|
||||
path="assets/img.png",
|
||||
)
|
||||
10
surfsense_obsidian/.editorconfig
Normal file
10
surfsense_obsidian/.editorconfig
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
# top-most EditorConfig file
|
||||
root = true
|
||||
|
||||
[*]
|
||||
charset = utf-8
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
indent_style = tab
|
||||
indent_size = 4
|
||||
tab_width = 4
|
||||
22
surfsense_obsidian/.gitignore
vendored
Normal file
22
surfsense_obsidian/.gitignore
vendored
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# vscode
|
||||
.vscode
|
||||
|
||||
# Intellij
|
||||
*.iml
|
||||
.idea
|
||||
|
||||
# npm
|
||||
node_modules
|
||||
|
||||
# Don't include the compiled main.js file in the repo.
|
||||
# They should be uploaded to GitHub releases instead.
|
||||
main.js
|
||||
|
||||
# Exclude sourcemaps
|
||||
*.map
|
||||
|
||||
# obsidian
|
||||
data.json
|
||||
|
||||
# Exclude macOS Finder (System Explorer) View States
|
||||
.DS_Store
|
||||
1
surfsense_obsidian/.npmrc
Normal file
1
surfsense_obsidian/.npmrc
Normal file
|
|
@ -0,0 +1 @@
|
|||
tag-version-prefix=""
|
||||
251
surfsense_obsidian/AGENTS.md
Normal file
251
surfsense_obsidian/AGENTS.md
Normal file
|
|
@ -0,0 +1,251 @@
|
|||
# Obsidian community plugin
|
||||
|
||||
## Project overview
|
||||
|
||||
- Target: Obsidian Community Plugin (TypeScript → bundled JavaScript).
|
||||
- Entry point: `main.ts` compiled to `main.js` and loaded by Obsidian.
|
||||
- Required release artifacts: `main.js`, `manifest.json`, and optional `styles.css`.
|
||||
|
||||
## Environment & tooling
|
||||
|
||||
- Node.js: use current LTS (Node 18+ recommended).
|
||||
- **Package manager: npm** (required for this sample - `package.json` defines npm scripts and dependencies).
|
||||
- **Bundler: esbuild** (required for this sample - `esbuild.config.mjs` and build scripts depend on it). Alternative bundlers like Rollup or webpack are acceptable for other projects if they bundle all external dependencies into `main.js`.
|
||||
- Types: `obsidian` type definitions.
|
||||
|
||||
**Note**: This sample project has specific technical dependencies on npm and esbuild. If you're creating a plugin from scratch, you can choose different tools, but you'll need to replace the build configuration accordingly.
|
||||
|
||||
### Install
|
||||
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
|
||||
### Dev (watch)
|
||||
|
||||
```bash
|
||||
npm run dev
|
||||
```
|
||||
|
||||
### Production build
|
||||
|
||||
```bash
|
||||
npm run build
|
||||
```
|
||||
|
||||
## Linting
|
||||
|
||||
- To use eslint install eslint from terminal: `npm install -g eslint`
|
||||
- To use eslint to analyze this project use this command: `eslint main.ts`
|
||||
- eslint will then create a report with suggestions for code improvement by file and line number.
|
||||
- If your source code is in a folder, such as `src`, you can use eslint with this command to analyze all files in that folder: `eslint ./src/`
|
||||
|
||||
## File & folder conventions
|
||||
|
||||
- **Organize code into multiple files**: Split functionality across separate modules rather than putting everything in `main.ts`.
|
||||
- Source lives in `src/`. Keep `main.ts` small and focused on plugin lifecycle (loading, unloading, registering commands).
|
||||
- **Example file structure**:
|
||||
```
|
||||
src/
|
||||
main.ts # Plugin entry point, lifecycle management
|
||||
settings.ts # Settings interface and defaults
|
||||
commands/ # Command implementations
|
||||
command1.ts
|
||||
command2.ts
|
||||
ui/ # UI components, modals, views
|
||||
modal.ts
|
||||
view.ts
|
||||
utils/ # Utility functions, helpers
|
||||
helpers.ts
|
||||
constants.ts
|
||||
types.ts # TypeScript interfaces and types
|
||||
```
|
||||
- **Do not commit build artifacts**: Never commit `node_modules/`, `main.js`, or other generated files to version control.
|
||||
- Keep the plugin small. Avoid large dependencies. Prefer browser-compatible packages.
|
||||
- Generated output should be placed at the plugin root or `dist/` depending on your build setup. Release artifacts must end up at the top level of the plugin folder in the vault (`main.js`, `manifest.json`, `styles.css`).
|
||||
|
||||
## Manifest rules (`manifest.json`)
|
||||
|
||||
- Must include (non-exhaustive):
|
||||
- `id` (plugin ID; for local dev it should match the folder name)
|
||||
- `name`
|
||||
- `version` (Semantic Versioning `x.y.z`)
|
||||
- `minAppVersion`
|
||||
- `description`
|
||||
- `isDesktopOnly` (boolean)
|
||||
- Optional: `author`, `authorUrl`, `fundingUrl` (string or map)
|
||||
- Never change `id` after release. Treat it as stable API.
|
||||
- Keep `minAppVersion` accurate when using newer APIs.
|
||||
- Canonical requirements are coded here: https://github.com/obsidianmd/obsidian-releases/blob/master/.github/workflows/validate-plugin-entry.yml
|
||||
|
||||
## Testing
|
||||
|
||||
- Manual install for testing: copy `main.js`, `manifest.json`, `styles.css` (if any) to:
|
||||
```
|
||||
<Vault>/.obsidian/plugins/<plugin-id>/
|
||||
```
|
||||
- Reload Obsidian and enable the plugin in **Settings → Community plugins**.
|
||||
|
||||
## Commands & settings
|
||||
|
||||
- Any user-facing commands should be added via `this.addCommand(...)`.
|
||||
- If the plugin has configuration, provide a settings tab and sensible defaults.
|
||||
- Persist settings using `this.loadData()` / `this.saveData()`.
|
||||
- Use stable command IDs; avoid renaming once released.
|
||||
|
||||
## Versioning & releases
|
||||
|
||||
- Bump `version` in `manifest.json` (SemVer) and update `versions.json` to map plugin version → minimum app version.
|
||||
- Create a GitHub release whose tag exactly matches `manifest.json`'s `version`. Do not use a leading `v`.
|
||||
- Attach `manifest.json`, `main.js`, and `styles.css` (if present) to the release as individual assets.
|
||||
- After the initial release, follow the process to add/update your plugin in the community catalog as required.
|
||||
|
||||
## Security, privacy, and compliance
|
||||
|
||||
Follow Obsidian's **Developer Policies** and **Plugin Guidelines**. In particular:
|
||||
|
||||
- Default to local/offline operation. Only make network requests when essential to the feature.
|
||||
- No hidden telemetry. If you collect optional analytics or call third-party services, require explicit opt-in and document clearly in `README.md` and in settings.
|
||||
- Never execute remote code, fetch and eval scripts, or auto-update plugin code outside of normal releases.
|
||||
- Minimize scope: read/write only what's necessary inside the vault. Do not access files outside the vault.
|
||||
- Clearly disclose any external services used, data sent, and risks.
|
||||
- Respect user privacy. Do not collect vault contents, filenames, or personal information unless absolutely necessary and explicitly consented.
|
||||
- Avoid deceptive patterns, ads, or spammy notifications.
|
||||
- Register and clean up all DOM, app, and interval listeners using the provided `register*` helpers so the plugin unloads safely.
|
||||
|
||||
## UX & copy guidelines (for UI text, commands, settings)
|
||||
|
||||
- Prefer sentence case for headings, buttons, and titles.
|
||||
- Use clear, action-oriented imperatives in step-by-step copy.
|
||||
- Use **bold** to indicate literal UI labels. Prefer "select" for interactions.
|
||||
- Use arrow notation for navigation: **Settings → Community plugins**.
|
||||
- Keep in-app strings short, consistent, and free of jargon.
|
||||
|
||||
## Performance
|
||||
|
||||
- Keep startup light. Defer heavy work until needed.
|
||||
- Avoid long-running tasks during `onload`; use lazy initialization.
|
||||
- Batch disk access and avoid excessive vault scans.
|
||||
- Debounce/throttle expensive operations in response to file system events.
|
||||
|
||||
## Coding conventions
|
||||
|
||||
- TypeScript with `"strict": true` preferred.
|
||||
- **Keep `main.ts` minimal**: Focus only on plugin lifecycle (onload, onunload, addCommand calls). Delegate all feature logic to separate modules.
|
||||
- **Split large files**: If any file exceeds ~200-300 lines, consider breaking it into smaller, focused modules.
|
||||
- **Use clear module boundaries**: Each file should have a single, well-defined responsibility.
|
||||
- Bundle everything into `main.js` (no unbundled runtime deps).
|
||||
- Avoid Node/Electron APIs if you want mobile compatibility; set `isDesktopOnly` accordingly.
|
||||
- Prefer `async/await` over promise chains; handle errors gracefully.
|
||||
|
||||
## Mobile
|
||||
|
||||
- Where feasible, test on iOS and Android.
|
||||
- Don't assume desktop-only behavior unless `isDesktopOnly` is `true`.
|
||||
- Avoid large in-memory structures; be mindful of memory and storage constraints.
|
||||
|
||||
## Agent do/don't
|
||||
|
||||
**Do**
|
||||
- Add commands with stable IDs (don't rename once released).
|
||||
- Provide defaults and validation in settings.
|
||||
- Write idempotent code paths so reload/unload doesn't leak listeners or intervals.
|
||||
- Use `this.register*` helpers for everything that needs cleanup.
|
||||
|
||||
**Don't**
|
||||
- Introduce network calls without an obvious user-facing reason and documentation.
|
||||
- Ship features that require cloud services without clear disclosure and explicit opt-in.
|
||||
- Store or transmit vault contents unless essential and consented.
|
||||
|
||||
## Common tasks
|
||||
|
||||
### Organize code across multiple files
|
||||
|
||||
**main.ts** (minimal, lifecycle only):
|
||||
```ts
|
||||
import { Plugin } from "obsidian";
|
||||
import { MySettings, DEFAULT_SETTINGS } from "./settings";
|
||||
import { registerCommands } from "./commands";
|
||||
|
||||
export default class MyPlugin extends Plugin {
|
||||
settings: MySettings;
|
||||
|
||||
async onload() {
|
||||
this.settings = Object.assign({}, DEFAULT_SETTINGS, await this.loadData());
|
||||
registerCommands(this);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**settings.ts**:
|
||||
```ts
|
||||
export interface MySettings {
|
||||
enabled: boolean;
|
||||
apiKey: string;
|
||||
}
|
||||
|
||||
export const DEFAULT_SETTINGS: MySettings = {
|
||||
enabled: true,
|
||||
apiKey: "",
|
||||
};
|
||||
```
|
||||
|
||||
**commands/index.ts**:
|
||||
```ts
|
||||
import { Plugin } from "obsidian";
|
||||
import { doSomething } from "./my-command";
|
||||
|
||||
export function registerCommands(plugin: Plugin) {
|
||||
plugin.addCommand({
|
||||
id: "do-something",
|
||||
name: "Do something",
|
||||
callback: () => doSomething(plugin),
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### Add a command
|
||||
|
||||
```ts
|
||||
this.addCommand({
|
||||
id: "your-command-id",
|
||||
name: "Do the thing",
|
||||
callback: () => this.doTheThing(),
|
||||
});
|
||||
```
|
||||
|
||||
### Persist settings
|
||||
|
||||
```ts
|
||||
interface MySettings { enabled: boolean }
|
||||
const DEFAULT_SETTINGS: MySettings = { enabled: true };
|
||||
|
||||
async onload() {
|
||||
this.settings = Object.assign({}, DEFAULT_SETTINGS, await this.loadData());
|
||||
await this.saveData(this.settings);
|
||||
}
|
||||
```
|
||||
|
||||
### Register listeners safely
|
||||
|
||||
```ts
|
||||
this.registerEvent(this.app.workspace.on("file-open", f => { /* ... */ }));
|
||||
this.registerDomEvent(window, "resize", () => { /* ... */ });
|
||||
this.registerInterval(window.setInterval(() => { /* ... */ }, 1000));
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
- Plugin doesn't load after build: ensure `main.js` and `manifest.json` are at the top level of the plugin folder under `<Vault>/.obsidian/plugins/<plugin-id>/`.
|
||||
- Build issues: if `main.js` is missing, run `npm run build` or `npm run dev` to compile your TypeScript source code.
|
||||
- Commands not appearing: verify `addCommand` runs after `onload` and IDs are unique.
|
||||
- Settings not persisting: ensure `loadData`/`saveData` are awaited and you re-render the UI after changes.
|
||||
- Mobile-only issues: confirm you're not using desktop-only APIs; check `isDesktopOnly` and adjust.
|
||||
|
||||
## References
|
||||
|
||||
- Obsidian sample plugin: https://github.com/obsidianmd/obsidian-sample-plugin
|
||||
- API documentation: https://docs.obsidian.md
|
||||
- Developer policies: https://docs.obsidian.md/Developer+policies
|
||||
- Plugin guidelines: https://docs.obsidian.md/Plugins/Releasing/Plugin+guidelines
|
||||
- Style guide: https://help.obsidian.md/style-guide
|
||||
201
surfsense_obsidian/LICENSE
Normal file
201
surfsense_obsidian/LICENSE
Normal file
|
|
@ -0,0 +1,201 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
150
surfsense_obsidian/README.md
Normal file
150
surfsense_obsidian/README.md
Normal file
|
|
@ -0,0 +1,150 @@
|
|||
# SurfSense for Obsidian
|
||||
|
||||
Sync your Obsidian vault to [SurfSense](https://github.com/MODSetter/SurfSense)
|
||||
so your notes become searchable alongside the rest of your knowledge sources
|
||||
(GitHub, Slack, Linear, Drive, web pages, etc.) from any SurfSense chat.
|
||||
|
||||
The plugin runs inside Obsidian itself, on desktop and mobile, so it works
|
||||
the same way for SurfSense Cloud and self-hosted deployments. There is no
|
||||
server-side vault mount and no Electron-only path; everything goes over HTTPS.
|
||||
|
||||
## What it does
|
||||
|
||||
- Realtime sync as you create, edit, rename, or delete notes
|
||||
- Initial scan + reconciliation against the server manifest on startup,
|
||||
so vault edits made while the plugin was offline still show up
|
||||
- Persistent upload queue, so a crash or offline window never loses changes
|
||||
- Frontmatter, `[[wiki links]]`, `#tags`, headings, and resolved/unresolved
|
||||
links are extracted and indexed
|
||||
- Each chat citation links straight back into Obsidian via the
|
||||
`obsidian://open?vault=…&file=…` deep link
|
||||
- Multi-vault aware: each vault you enable the plugin in becomes its own
|
||||
connector row in SurfSense, named after the vault
|
||||
|
||||
## Install
|
||||
|
||||
### Via [BRAT](https://github.com/TfTHacker/obsidian42-brat) (current)
|
||||
|
||||
1. Install the BRAT community plugin.
|
||||
2. Run **BRAT: Add a beta plugin for testing**.
|
||||
3. Paste `MODSetter/SurfSense` and pick the latest release.
|
||||
4. Enable **SurfSense** in *Settings → Community plugins*.
|
||||
|
||||
### Manual sideload
|
||||
|
||||
1. Download `main.js`, `manifest.json`, and `styles.css` from the latest
|
||||
GitHub release tagged with the plugin version (e.g. `0.1.0`, with no `v`
|
||||
prefix, matching the `version` field in `manifest.json`).
|
||||
2. Copy them into `<vault>/.obsidian/plugins/surfsense/`.
|
||||
3. Restart Obsidian and enable the plugin.
|
||||
|
||||
### Community plugin store
|
||||
|
||||
Submission to the official Obsidian community plugin store is in progress.
|
||||
Once approved you will be able to install from *Settings → Community plugins*
|
||||
inside Obsidian.
|
||||
|
||||
## Configure
|
||||
|
||||
Open **Settings → SurfSense** in Obsidian and fill in:
|
||||
|
||||
| Setting | Value |
|
||||
| --- | --- |
|
||||
| Server URL | `https://surfsense.com` for SurfSense Cloud, or your self-hosted URL |
|
||||
| API token | Copy from the *Connectors → Obsidian* dialog in the SurfSense web app |
|
||||
| Search space | Pick the search space this vault should sync into |
|
||||
| Vault name | Defaults to your Obsidian vault name; rename if you have multiple vaults |
|
||||
| Sync mode | *Auto* (recommended) or *Manual* |
|
||||
| Exclude patterns | Glob patterns of folders/files to skip (e.g. `.trash`, `_attachments`, `templates/**`) |
|
||||
| Include attachments | Off by default; enable to sync non-`.md` files |
|
||||
|
||||
The connector row appears automatically inside SurfSense the first time the
|
||||
plugin successfully calls `/obsidian/connect`. You can manage or delete it
|
||||
from *Connectors → Obsidian* in the web app.
|
||||
|
||||
> **Token lifetime.** The web app currently issues 24-hour JWTs. If you see
|
||||
> *"token expired"* in the plugin status bar, paste a fresh token from the
|
||||
> SurfSense web app. Long-lived personal access tokens are coming in a future
|
||||
> release.
|
||||
|
||||
## Mobile
|
||||
|
||||
The plugin works on Obsidian for iOS and Android. Sync runs whenever the
|
||||
app is in the foreground and once more on app close. Mobile OSes
|
||||
aggressively suspend background apps, so mobile sync is near-realtime rather
|
||||
than instant. Desktop is the source of truth for live editing.
|
||||
|
||||
## Privacy & safety
|
||||
|
||||
The SurfSense backend qualifies as server-side telemetry under Obsidian's
|
||||
[Developer policies](https://github.com/obsidianmd/obsidian-developer-docs/blob/main/en/Developer%20policies.md),
|
||||
so here is the full list of what the plugin sends and stores. The
|
||||
canonical SurfSense privacy policy lives at
|
||||
<https://surfsense.com/privacy>; this section is the plugin-specific
|
||||
addendum.
|
||||
|
||||
**Sent on `/connect` (once per onload):**
|
||||
|
||||
- `vault_id`: a random UUID minted in the plugin's `data.json` on first run
|
||||
- `vault_name`: the Obsidian vault folder name
|
||||
- `search_space_id`: the SurfSense search space you picked
|
||||
|
||||
**Sent per note on `/sync`, `/rename`, `/delete`:**
|
||||
|
||||
- `path`, `name`, `extension`
|
||||
- `content` (plain text of the note)
|
||||
- `frontmatter`, `tags`, `headings`, resolved and unresolved links,
|
||||
`embeds`, `aliases`
|
||||
- `content_hash` (SHA-256 of the note body), `mtime`, `ctime`
|
||||
|
||||
**Stored server-side per vault:**
|
||||
|
||||
- One connector row keyed by `vault_id` with `{vault_name, source: "plugin",
|
||||
last_connect_at}`. Nothing per-device, no plugin version, no analytics.
|
||||
- One `documents` row per note (soft-deleted rather than hard-deleted so
|
||||
existing chat citations remain valid).
|
||||
|
||||
**What never leaves the plugin:**
|
||||
|
||||
- No remote code loading, no `eval`, no analytics.
|
||||
- All network traffic goes to your configured **Server URL** only.
|
||||
- The `Authorization: Bearer …` header is set per-request with the token
|
||||
you paste; the plugin never reads cookies or other Obsidian state.
|
||||
- The plugin uses Obsidian's `requestUrl` (no `fetch`, no `node:http`,
|
||||
no `node:https`) and Web Crypto for hashing, per Obsidian's mobile guidance.
|
||||
|
||||
For retention, deletion, and contact details see
|
||||
<https://surfsense.com/privacy>.
|
||||
|
||||
## Development
|
||||
|
||||
This plugin lives in [`surfsense_obsidian/`](.) inside the SurfSense
|
||||
monorepo. To work on it locally:
|
||||
|
||||
```sh
|
||||
cd surfsense_obsidian
|
||||
npm install
|
||||
npm run dev # esbuild in watch mode → main.js
|
||||
```
|
||||
|
||||
Symlink the folder into a test vault's `.obsidian/plugins/surfsense/`,
|
||||
enable the plugin, then **Cmd+R** in Obsidian whenever `main.js` rebuilds.
|
||||
|
||||
Lint:
|
||||
|
||||
```sh
|
||||
npm run lint
|
||||
```
|
||||
|
||||
The release pipeline lives at
|
||||
[`.github/workflows/release-obsidian-plugin.yml`](../.github/workflows/release-obsidian-plugin.yml)
|
||||
in the repo root and is triggered by tags of the form `obsidian-v0.1.0`.
|
||||
It verifies the tag matches `manifest.json`, builds the plugin, attaches
|
||||
`main.js` + `manifest.json` + `styles.css` to a GitHub release tagged with
|
||||
the bare version (e.g. `0.1.0`, the form BRAT and the Obsidian community
|
||||
store look for), and mirrors `manifest.json` + `versions.json` to the repo
|
||||
root so Obsidian's community plugin browser can discover them.
|
||||
|
||||
## License
|
||||
|
||||
[Apache-2.0](LICENSE), same as the rest of SurfSense.
|
||||
49
surfsense_obsidian/esbuild.config.mjs
Normal file
49
surfsense_obsidian/esbuild.config.mjs
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
import esbuild from "esbuild";
|
||||
import process from "process";
|
||||
import { builtinModules } from 'node:module';
|
||||
|
||||
const banner =
|
||||
`/*
|
||||
THIS IS A GENERATED/BUNDLED FILE BY ESBUILD
|
||||
if you want to view the source, please visit the github repository of this plugin
|
||||
*/
|
||||
`;
|
||||
|
||||
const prod = (process.argv[2] === "production");
|
||||
|
||||
const context = await esbuild.context({
|
||||
banner: {
|
||||
js: banner,
|
||||
},
|
||||
entryPoints: ["src/main.ts"],
|
||||
bundle: true,
|
||||
external: [
|
||||
"obsidian",
|
||||
"electron",
|
||||
"@codemirror/autocomplete",
|
||||
"@codemirror/collab",
|
||||
"@codemirror/commands",
|
||||
"@codemirror/language",
|
||||
"@codemirror/lint",
|
||||
"@codemirror/search",
|
||||
"@codemirror/state",
|
||||
"@codemirror/view",
|
||||
"@lezer/common",
|
||||
"@lezer/highlight",
|
||||
"@lezer/lr",
|
||||
...builtinModules],
|
||||
format: "cjs",
|
||||
target: "es2018",
|
||||
logLevel: "info",
|
||||
sourcemap: prod ? false : "inline",
|
||||
treeShaking: true,
|
||||
outfile: "main.js",
|
||||
minify: prod,
|
||||
});
|
||||
|
||||
if (prod) {
|
||||
await context.rebuild();
|
||||
process.exit(0);
|
||||
} else {
|
||||
await context.watch();
|
||||
}
|
||||
55
surfsense_obsidian/eslint.config.mts
Normal file
55
surfsense_obsidian/eslint.config.mts
Normal file
|
|
@ -0,0 +1,55 @@
|
|||
import tseslint from 'typescript-eslint';
|
||||
import obsidianmd from "eslint-plugin-obsidianmd";
|
||||
import globals from "globals";
|
||||
import { globalIgnores } from "eslint/config";
|
||||
|
||||
export default tseslint.config(
|
||||
{
|
||||
languageOptions: {
|
||||
globals: {
|
||||
...globals.browser,
|
||||
},
|
||||
parserOptions: {
|
||||
projectService: {
|
||||
allowDefaultProject: [
|
||||
'eslint.config.js',
|
||||
'manifest.json'
|
||||
]
|
||||
},
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
extraFileExtensions: ['.json']
|
||||
},
|
||||
},
|
||||
},
|
||||
...obsidianmd.configs.recommended,
|
||||
{
|
||||
plugins: { obsidianmd },
|
||||
rules: {
|
||||
"obsidianmd/ui/sentence-case": [
|
||||
"error",
|
||||
{
|
||||
brands: [
|
||||
"Surfsense",
|
||||
"iOS",
|
||||
"iPadOS",
|
||||
"macOS",
|
||||
"Windows",
|
||||
"Android",
|
||||
"Linux",
|
||||
"Obsidian",
|
||||
"Markdown",
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
globalIgnores([
|
||||
"node_modules",
|
||||
"dist",
|
||||
"esbuild.config.mjs",
|
||||
"eslint.config.js",
|
||||
"version-bump.mjs",
|
||||
"versions.json",
|
||||
"main.js",
|
||||
]),
|
||||
);
|
||||
10
surfsense_obsidian/manifest.json
Normal file
10
surfsense_obsidian/manifest.json
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
{
|
||||
"id": "surfsense-obsidian",
|
||||
"name": "SurfSense",
|
||||
"version": "0.1.0",
|
||||
"minAppVersion": "1.5.4",
|
||||
"description": "Turn your vault into a searchable second brain with SurfSense.",
|
||||
"author": "SurfSense",
|
||||
"authorUrl": "https://www.surfsense.com",
|
||||
"isDesktopOnly": false
|
||||
}
|
||||
5170
surfsense_obsidian/package-lock.json
generated
Normal file
5170
surfsense_obsidian/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load diff
34
surfsense_obsidian/package.json
Normal file
34
surfsense_obsidian/package.json
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
{
|
||||
"name": "surfsense-obsidian",
|
||||
"version": "0.1.0",
|
||||
"description": "SurfSense plugin for Obsidian: sync your vault to SurfSense for AI-powered search.",
|
||||
"main": "main.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "node esbuild.config.mjs",
|
||||
"build": "tsc -noEmit -skipLibCheck && node esbuild.config.mjs production",
|
||||
"version": "node version-bump.mjs && git add manifest.json versions.json",
|
||||
"lint": "eslint ."
|
||||
},
|
||||
"keywords": [
|
||||
"obsidian",
|
||||
"surfsense",
|
||||
"sync",
|
||||
"search"
|
||||
],
|
||||
"license": "Apache-2.0",
|
||||
"devDependencies": {
|
||||
"@eslint/js": "9.30.1",
|
||||
"@types/node": "^20.19.39",
|
||||
"esbuild": "0.25.5",
|
||||
"eslint-plugin-obsidianmd": "0.1.9",
|
||||
"globals": "14.0.0",
|
||||
"jiti": "2.6.1",
|
||||
"tslib": "2.4.0",
|
||||
"typescript": "^5.8.3",
|
||||
"typescript-eslint": "8.35.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"obsidian": "latest"
|
||||
}
|
||||
}
|
||||
296
surfsense_obsidian/src/api-client.ts
Normal file
296
surfsense_obsidian/src/api-client.ts
Normal file
|
|
@ -0,0 +1,296 @@
|
|||
import { requestUrl, type RequestUrlParam, type RequestUrlResponse } from "obsidian";
|
||||
import type {
|
||||
ConnectResponse,
|
||||
DeleteAck,
|
||||
HealthResponse,
|
||||
ManifestResponse,
|
||||
NotePayload,
|
||||
RenameAck,
|
||||
RenameItem,
|
||||
SearchSpace,
|
||||
SyncAck,
|
||||
} from "./types";
|
||||
|
||||
/**
|
||||
* SurfSense backend client used by the Obsidian plugin.
|
||||
*
|
||||
* Mobile-safety contract (must hold for every transitive import):
|
||||
* - Use Obsidian `requestUrl` only — no `fetch`, no `axios`, no
|
||||
* `node:http`, no `node:https`. CORS is bypassed and mobile works.
|
||||
* - No top-level `node:*` imports anywhere reachable from this file.
|
||||
* - Hashing happens elsewhere via Web Crypto, not `node:crypto`.
|
||||
*
|
||||
* Auth + wire contract:
|
||||
* - Every request carries `Authorization: Bearer <token>` only. No
|
||||
* custom headers — the backend identifies the caller from the JWT
|
||||
* and feature-detects the API via the `capabilities` array on
|
||||
* `/health` and `/connect`.
|
||||
* - 401 surfaces as `AuthError` so the orchestrator can show the
|
||||
* "token expired, paste a fresh one" UX.
|
||||
* - HealthResponse / ConnectResponse use index signatures so any
|
||||
* additive backend field (e.g. new capabilities) parses without
|
||||
* breaking the decoder. This mirrors `ConfigDict(extra='ignore')`
|
||||
* on the server side.
|
||||
*/
|
||||
|
||||
/**
 * Token missing or rejected (HTTP 401). Not retried automatically —
 * the user must supply a fresh token in settings.
 */
export class AuthError extends Error {
	constructor(message: string) {
		super(message);
		this.name = "AuthError";
	}
}
|
||||
|
||||
/**
 * Retryable failure: network error (status 0), HTTP 5xx, or 429.
 * The orchestrator may back off and try again.
 */
export class TransientError extends Error {
	// HTTP status that triggered this (0 for pure network failures).
	readonly status: number;
	constructor(status: number, message: string) {
		super(message);
		this.name = "TransientError";
		this.status = status;
	}
}

/**
 * Non-retryable HTTP failure — any non-2xx not classified as auth,
 * transient, or vault-not-registered by the request() taxonomy.
 */
export class PermanentError extends Error {
	readonly status: number;
	constructor(status: number, message: string) {
		super(message);
		this.name = "PermanentError";
		this.status = status;
	}
}

/** 404 `VAULT_NOT_REGISTERED` — `/connect` hasn't committed yet; retry after reconnect. */
export class VaultNotRegisteredError extends TransientError {
	constructor(message: string) {
		// Always a 404 by definition of the error code.
		super(404, message);
		this.name = "VaultNotRegisteredError";
	}
}
|
||||
|
||||
/** Callbacks the client uses to read live settings and report auth failures. */
export interface ApiClientOptions {
	// Read on every request so settings edits take effect immediately.
	getServerUrl: () => string;
	getToken: () => string;
	// Invoked on HTTP 401 so the owner can surface "paste a fresh token" UX.
	onAuthError?: () => void;
}

// After a 401, fail fast on all requests for this long so a bad token
// doesn't hammer the server; cleared via resetAuthBlock().
const AUTH_BLOCK_MS = 60_000;
|
||||
|
||||
export class SurfSenseApiClient {
	private readonly opts: ApiClientOptions;
	// Epoch ms before which every call fails fast with AuthError (set on 401).
	private authBlockedUntil = 0;

	constructor(opts: ApiClientOptions) {
		this.opts = opts;
	}

	/** Merge replacement callbacks in place (e.g. after a settings change). */
	updateOptions(partial: Partial<ApiClientOptions>): void {
		Object.assign(this.opts, partial);
	}

	/** Lift the post-401 cooldown, e.g. when the user pastes a fresh token. */
	resetAuthBlock(): void {
		this.authBlockedUntil = 0;
	}

	/** GET /obsidian/health — auth-gated liveness + capability probe. */
	async health(): Promise<HealthResponse> {
		return await this.request<HealthResponse>("GET", "/api/v1/obsidian/health");
	}

	/** GET /searchspaces — tolerates both bare-array and `{ items: [...] }` shapes. */
	async listSearchSpaces(): Promise<SearchSpace[]> {
		const resp = await this.request<SearchSpace[] | { items: SearchSpace[] }>(
			"GET",
			"/api/v1/searchspaces/"
		);
		if (Array.isArray(resp)) return resp;
		if (resp && Array.isArray((resp as { items?: SearchSpace[] }).items)) {
			return (resp as { items: SearchSpace[] }).items;
		}
		return [];
	}

	/** Cheap token check: throws AuthError on 401, resolves on success. */
	async verifyToken(): Promise<{ ok: true; health: HealthResponse }> {
		// /health is gated by current_active_user, so a successful response
		// transitively proves the token works. Cheaper than fetching a list.
		const health = await this.health();
		return { ok: true, health };
	}

	/** POST /obsidian/connect — register (or re-adopt) this vault in a search space. */
	async connect(input: {
		searchSpaceId: number;
		vaultId: string;
		vaultName: string;
		vaultFingerprint: string;
	}): Promise<ConnectResponse> {
		return await this.request<ConnectResponse>(
			"POST",
			"/api/v1/obsidian/connect",
			{
				vault_id: input.vaultId,
				vault_name: input.vaultName,
				search_space_id: input.searchSpaceId,
				vault_fingerprint: input.vaultFingerprint,
			}
		);
	}

	/** POST /sync — `failed[]` are paths whose `status === "error"` for retry. */
	async syncBatch(input: {
		vaultId: string;
		notes: NotePayload[];
	}): Promise<{ indexed: number; failed: string[] }> {
		const resp = await this.request<SyncAck>(
			"POST",
			"/api/v1/obsidian/sync",
			{ vault_id: input.vaultId, notes: input.notes }
		);
		const failed = resp.items
			.filter((it) => it.status === "error")
			.map((it) => it.path);
		return { indexed: resp.indexed, failed };
	}

	/** POST /rename — `"missing"` counts as success; only `"error"` is retried. */
	async renameBatch(input: {
		vaultId: string;
		renames: Pick<RenameItem, "oldPath" | "newPath">[];
	}): Promise<{
		renamed: number;
		failed: Array<{ oldPath: string; newPath: string }>;
	}> {
		const resp = await this.request<RenameAck>(
			"POST",
			"/api/v1/obsidian/rename",
			{
				vault_id: input.vaultId,
				// camelCase → snake_case for the wire format.
				renames: input.renames.map((r) => ({
					old_path: r.oldPath,
					new_path: r.newPath,
				})),
			}
		);
		const failed = resp.items
			.filter((it) => it.status === "error")
			.map((it) => ({ oldPath: it.old_path, newPath: it.new_path }));
		return { renamed: resp.renamed, failed };
	}

	/** DELETE /notes — `"missing"` counts as success; only `"error"` is retried. */
	async deleteBatch(input: {
		vaultId: string;
		paths: string[];
	}): Promise<{ deleted: number; failed: string[] }> {
		const resp = await this.request<DeleteAck>(
			"DELETE",
			"/api/v1/obsidian/notes",
			{ vault_id: input.vaultId, paths: input.paths }
		);
		const failed = resp.items
			.filter((it) => it.status === "error")
			.map((it) => it.path);
		return { deleted: resp.deleted, failed };
	}

	/** GET /manifest — the server-side view of what's indexed for this vault. */
	async getManifest(vaultId: string): Promise<ManifestResponse> {
		return await this.request<ManifestResponse>(
			"GET",
			`/api/v1/obsidian/manifest?vault_id=${encodeURIComponent(vaultId)}`
		);
	}

	/**
	 * Shared transport: bearer-auth JSON request plus error taxonomy.
	 * 401 → AuthError (and a 60s request cooldown); 5xx/429 → TransientError;
	 * 404 with code VAULT_NOT_REGISTERED → VaultNotRegisteredError; any other
	 * non-2xx → PermanentError; a thrown network failure → TransientError(0).
	 */
	private async request<T>(
		method: RequestUrlParam["method"],
		path: string,
		body?: unknown
	): Promise<T> {
		// Strip trailing slashes so `${baseUrl}${path}` never doubles "/".
		const baseUrl = this.opts.getServerUrl().replace(/\/+$/, "");
		const token = this.opts.getToken();
		if (!token) {
			throw new AuthError("Missing API token. Open plugin settings to paste one.");
		}
		if (Date.now() < this.authBlockedUntil) {
			throw new AuthError("Token rejected. Paste a fresh one in settings.");
		}
		const headers: Record<string, string> = {
			Authorization: `Bearer ${token}`,
			Accept: "application/json",
		};
		if (body !== undefined) headers["Content-Type"] = "application/json";

		let resp: RequestUrlResponse;
		try {
			// `throw: false` so non-2xx statuses reach our own mapping below.
			resp = await requestUrl({
				url: `${baseUrl}${path}`,
				method,
				headers,
				body: body === undefined ? undefined : JSON.stringify(body),
				throw: false,
			});
		} catch (err) {
			throw new TransientError(0, `Network error: ${(err as Error).message}`);
		}

		if (resp.status >= 200 && resp.status < 300) {
			return parseJson<T>(resp);
		}

		const detail = extractDetail(resp);

		if (resp.status === 401) {
			this.authBlockedUntil = Date.now() + AUTH_BLOCK_MS;
			this.opts.onAuthError?.();
			throw new AuthError(detail || "Unauthorized");
		}

		if (resp.status >= 500 || resp.status === 429) {
			throw new TransientError(resp.status, detail || `HTTP ${resp.status}`);
		}

		if (resp.status === 404 && extractCode(resp) === "VAULT_NOT_REGISTERED") {
			throw new VaultNotRegisteredError(detail || "Vault not registered yet");
		}

		throw new PermanentError(resp.status, detail || `HTTP ${resp.status}`);
	}
}
|
||||
|
||||
function parseJson<T>(resp: RequestUrlResponse): T {
|
||||
// Plugin endpoints always return JSON; non-JSON 2xx is usually a
|
||||
// captive portal or CDN page — surface as transient so we back off.
|
||||
const text = resp.text ?? "";
|
||||
try {
|
||||
return JSON.parse(text) as T;
|
||||
} catch {
|
||||
throw new TransientError(
|
||||
resp.status,
|
||||
`Invalid JSON from server (got: ${text.slice(0, 80)})`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
function safeJson(resp: RequestUrlResponse): Record<string, unknown> {
|
||||
try {
|
||||
return resp.text ? (JSON.parse(resp.text) as Record<string, unknown>) : {};
|
||||
} catch {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
function extractDetail(resp: RequestUrlResponse): string {
|
||||
const json = safeJson(resp);
|
||||
if (typeof json.detail === "string") return json.detail;
|
||||
if (typeof json.message === "string") return json.message;
|
||||
const detailObj = json.detail;
|
||||
if (detailObj && typeof detailObj === "object") {
|
||||
const obj = detailObj as Record<string, unknown>;
|
||||
if (typeof obj.message === "string") return obj.message;
|
||||
}
|
||||
return resp.text?.slice(0, 200) ?? "";
|
||||
}
|
||||
|
||||
function extractCode(resp: RequestUrlResponse): string | undefined {
|
||||
const json = safeJson(resp);
|
||||
const detailObj = json.detail;
|
||||
if (detailObj && typeof detailObj === "object") {
|
||||
const code = (detailObj as Record<string, unknown>).code;
|
||||
if (typeof code === "string") return code;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
61
surfsense_obsidian/src/attachments-confirm-modal.ts
Normal file
61
surfsense_obsidian/src/attachments-confirm-modal.ts
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
import { type App, Modal, Setting } from "obsidian";
|
||||
|
||||
/**
|
||||
* Confirmation modal shown before enabling attachment sync.
|
||||
* Attachment files can be large and increase sync latency/cost.
|
||||
*/
|
||||
export class AttachmentsConfirmModal extends Modal {
|
||||
private resolver: ((confirmed: boolean) => void) | null = null;
|
||||
|
||||
constructor(app: App) {
|
||||
super(app);
|
||||
}
|
||||
|
||||
onOpen(): void {
|
||||
this.setTitle("Enable attachment sync?");
|
||||
this.contentEl.empty();
|
||||
|
||||
new Setting(this.contentEl).setDesc(
|
||||
"Syncing attachments (images & PDFs) can make indexing slower, especially on large vaults."
|
||||
);
|
||||
new Setting(this.contentEl).setDesc(
|
||||
"Syncing attachments can make indexing slower on large vaults. You can disable this anytime.",
|
||||
);
|
||||
|
||||
new Setting(this.contentEl)
|
||||
.addButton((btn) =>
|
||||
btn
|
||||
.setButtonText("Cancel")
|
||||
.onClick(() => this.resolveAndClose(false)),
|
||||
)
|
||||
.addButton((btn) =>
|
||||
btn
|
||||
.setButtonText("Enable")
|
||||
.setCta()
|
||||
.onClick(() => this.resolveAndClose(true)),
|
||||
);
|
||||
}
|
||||
|
||||
onClose(): void {
|
||||
this.contentEl.empty();
|
||||
if (this.resolver) {
|
||||
this.resolver(false);
|
||||
this.resolver = null;
|
||||
}
|
||||
}
|
||||
|
||||
waitForConfirmation(): Promise<boolean> {
|
||||
this.open();
|
||||
return new Promise<boolean>((resolve) => {
|
||||
this.resolver = resolve;
|
||||
});
|
||||
}
|
||||
|
||||
private resolveAndClose(confirmed: boolean): void {
|
||||
if (this.resolver) {
|
||||
this.resolver(confirmed);
|
||||
this.resolver = null;
|
||||
}
|
||||
this.close();
|
||||
}
|
||||
}
|
||||
94
surfsense_obsidian/src/excludes.ts
Normal file
94
surfsense_obsidian/src/excludes.ts
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
/**
|
||||
* Tiny glob matcher for exclude patterns.
|
||||
*
|
||||
* Supports `*` (any chars except `/`), `**` (any chars including `/`), and
|
||||
* literal segments. Patterns without a slash are matched against any path
|
||||
* segment (so `templates` excludes `templates/foo.md` and `notes/templates/x.md`).
|
||||
*
|
||||
* Intentionally not a full minimatch — Obsidian users overwhelmingly type
|
||||
* folder names ("templates", ".trash") and the obvious wildcards. Avoiding
|
||||
* the dependency keeps the bundle small and the mobile attack surface tiny.
|
||||
*/
|
||||
|
||||
const cache = new Map<string, RegExp>();
|
||||
|
||||
function compile(pattern: string): RegExp {
|
||||
const cached = cache.get(pattern);
|
||||
if (cached) return cached;
|
||||
|
||||
let body = "";
|
||||
let i = 0;
|
||||
while (i < pattern.length) {
|
||||
const ch = pattern[i] ?? "";
|
||||
if (ch === "*") {
|
||||
if (pattern[i + 1] === "*") {
|
||||
body += ".*";
|
||||
i += 2;
|
||||
if (pattern[i] === "/") i += 1;
|
||||
continue;
|
||||
}
|
||||
body += "[^/]*";
|
||||
i += 1;
|
||||
continue;
|
||||
}
|
||||
if (".+^${}()|[]\\".includes(ch)) {
|
||||
body += "\\" + ch;
|
||||
i += 1;
|
||||
continue;
|
||||
}
|
||||
body += ch;
|
||||
i += 1;
|
||||
}
|
||||
|
||||
const anchored = pattern.includes("/")
|
||||
? `^${body}(/.*)?$`
|
||||
: `(^|/)${body}(/.*)?$`;
|
||||
const re = new RegExp(anchored);
|
||||
cache.set(pattern, re);
|
||||
return re;
|
||||
}
|
||||
|
||||
export function isExcluded(path: string, patterns: string[]): boolean {
|
||||
if (!patterns.length) return false;
|
||||
for (const raw of patterns) {
|
||||
const trimmed = raw.trim();
|
||||
if (!trimmed || trimmed.startsWith("#")) continue;
|
||||
if (compile(trimmed).test(path)) return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export function parseExcludePatterns(raw: string): string[] {
|
||||
return raw
|
||||
.split(/\r?\n/)
|
||||
.map((line) => line.trim())
|
||||
.filter((line) => line.length > 0 && !line.startsWith("#"));
|
||||
}
|
||||
|
||||
/** Normalize a folder path: strip leading/trailing slashes; "" or "/" means vault root. */
|
||||
export function normalizeFolder(folder: string): string {
|
||||
return folder.replace(/^\/+|\/+$/g, "");
|
||||
}
|
||||
|
||||
/** True if `path` lives inside `folder` (or `folder` is the vault root). */
|
||||
export function isInFolder(path: string, folder: string): boolean {
|
||||
const f = normalizeFolder(folder);
|
||||
if (f === "") return true;
|
||||
return path === f || path.startsWith(`${f}/`);
|
||||
}
|
||||
|
||||
/** Exclude wins over include. Empty includeFolders means "include everything". */
|
||||
export function isFolderFiltered(
|
||||
path: string,
|
||||
includeFolders: string[],
|
||||
excludeFolders: string[],
|
||||
): boolean {
|
||||
for (const f of excludeFolders) {
|
||||
if (isInFolder(path, f)) return true;
|
||||
}
|
||||
if (includeFolders.length === 0) return false;
|
||||
for (const f of includeFolders) {
|
||||
if (isInFolder(path, f)) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
32
surfsense_obsidian/src/folder-suggest-modal.ts
Normal file
32
surfsense_obsidian/src/folder-suggest-modal.ts
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
import { type App, FuzzySuggestModal, type TFolder } from "obsidian";
|
||||
|
||||
/** Folder picker built on Obsidian's stock {@link FuzzySuggestModal}. */
|
||||
export class FolderSuggestModal extends FuzzySuggestModal<TFolder> {
|
||||
private readonly onPick: (path: string) => void;
|
||||
private readonly excluded: Set<string>;
|
||||
|
||||
constructor(app: App, onPick: (path: string) => void, excluded: string[] = []) {
|
||||
super(app);
|
||||
this.onPick = onPick;
|
||||
this.excluded = new Set(excluded.map((p) => p.replace(/^\/+|\/+$/g, "")));
|
||||
this.setPlaceholder("Type to filter folders…");
|
||||
}
|
||||
|
||||
getItems(): TFolder[] {
|
||||
return this.app.vault
|
||||
.getAllFolders(true)
|
||||
.filter((f) => !this.excluded.has(this.toPath(f)));
|
||||
}
|
||||
|
||||
getItemText(folder: TFolder): string {
|
||||
return this.toPath(folder) || "/";
|
||||
}
|
||||
|
||||
onChooseItem(folder: TFolder): void {
|
||||
this.onPick(this.toPath(folder));
|
||||
}
|
||||
|
||||
private toPath(folder: TFolder): string {
|
||||
return folder.isRoot() ? "" : folder.path;
|
||||
}
|
||||
}
|
||||
292
surfsense_obsidian/src/main.ts
Normal file
292
surfsense_obsidian/src/main.ts
Normal file
|
|
@ -0,0 +1,292 @@
|
|||
import { Notice, Platform, Plugin } from "obsidian";
|
||||
import { SurfSenseApiClient } from "./api-client";
|
||||
import { PersistentQueue } from "./queue";
|
||||
import { SurfSenseSettingTab } from "./settings";
|
||||
import { StatusBar } from "./status-bar";
|
||||
import { StatusModal } from "./status-modal";
|
||||
import { SyncEngine } from "./sync-engine";
|
||||
import {
|
||||
DEFAULT_SETTINGS,
|
||||
type QueueItem,
|
||||
type StatusState,
|
||||
type SurfsensePluginSettings,
|
||||
} from "./types";
|
||||
import { generateVaultUuid } from "./vault-identity";
|
||||
|
||||
/** SurfSense plugin entry point. */
export default class SurfSensePlugin extends Plugin {
	settings!: SurfsensePluginSettings;
	api!: SurfSenseApiClient;
	queue!: PersistentQueue;
	engine!: SyncEngine;
	private statusBar: StatusBar | null = null;
	// Last status pushed by the engine; mirrored into the status bar and modal.
	lastStatus: StatusState = { kind: "needs-setup", queueDepth: 0 };
	// Capability strings the server reported (see onCapabilities below).
	serverCapabilities: string[] = [];
	private settingTab: SurfSenseSettingTab | null = null;
	// UI listeners (settings tab, status modal) notified on any status change.
	private statusListeners = new Set<() => void>();
	private reconcileTimerId: number | null = null;
	// Debounces auth toasts so repeated 401s don't spam Notices.
	private lastAuthToastAt = 0;

	async onload() {
		await this.loadSettings();
		this.seedIdentity();
		// Persist immediately so a freshly minted vaultId survives a crash.
		await this.saveSettings();

		this.api = new SurfSenseApiClient({
			getServerUrl: () => this.settings.serverUrl,
			getToken: () => this.settings.apiToken,
			onAuthError: () => this.notifyAuthError(),
		});

		// Queue survives restarts by persisting into settings (data.json).
		this.queue = new PersistentQueue(this.settings.queue ?? [], {
			persist: async (items) => {
				this.settings.queue = items;
				await this.saveData(this.settings);
			},
		});

		this.engine = new SyncEngine({
			app: this.app,
			apiClient: this.api,
			queue: this.queue,
			getSettings: () => this.settings,
			saveSettings: async (mut) => {
				mut(this.settings);
				await this.saveSettings();
				this.notifyStatusChange();
			},
			setStatus: (s) => {
				this.lastStatus = s;
				this.statusBar?.update(s);
				this.notifyStatusChange();
			},
			onCapabilities: (caps) => {
				// Copy defensively so later engine mutations can't alias ours.
				this.serverCapabilities = [...caps];
				this.notifyStatusChange();
			},
			onReconcileBackoffChanged: () => {
				this.restartReconcileTimer();
			},
		});

		this.queue.setFlushHandler(() => {
			// Respect the WiFi-only gate before any background network work.
			if (!this.shouldAutoSync()) return;
			void this.engine.flushQueue();
		});

		this.settingTab = new SurfSenseSettingTab(this.app, this);
		this.addSettingTab(this.settingTab);

		const statusHost = this.addStatusBarItem();
		this.statusBar = new StatusBar(statusHost, () => this.openStatusModal());
		this.statusBar.update(this.lastStatus);

		// Forward vault + metadata events to the sync engine.
		this.registerEvent(
			this.app.vault.on("create", (file) => this.engine.onCreate(file)),
		);
		this.registerEvent(
			this.app.vault.on("modify", (file) => this.engine.onModify(file)),
		);
		this.registerEvent(
			this.app.vault.on("delete", (file) => this.engine.onDelete(file)),
		);
		this.registerEvent(
			this.app.vault.on("rename", (file, oldPath) =>
				this.engine.onRename(file, oldPath),
			),
		);
		this.registerEvent(
			this.app.metadataCache.on("changed", (file, data, cache) =>
				this.engine.onMetadataChanged(file, data, cache),
			),
		);

		this.addCommand({
			id: "resync-vault",
			name: "Re-sync entire vault",
			callback: async () => {
				try {
					// `true` forces a reconcile regardless of the idle backoff.
					await this.engine.maybeReconcile(true);
					new Notice("Surfsense: re-sync started.");
				} catch (err) {
					new Notice(`Surfsense: re-sync failed — ${(err as Error).message}`);
				}
			},
		});

		this.addCommand({
			id: "sync-current-note",
			name: "Sync current note",
			checkCallback: (checking) => {
				// Only enabled when a markdown file is active.
				const file = this.app.workspace.getActiveFile();
				if (!file || file.extension.toLowerCase() !== "md") return false;
				if (checking) return true;
				this.queue.enqueueUpsert(file.path);
				void this.engine.flushQueue();
				return true;
			},
		});

		this.addCommand({
			id: "open-status",
			name: "Open sync status",
			callback: () => this.openStatusModal(),
		});

		this.addCommand({
			id: "open-settings",
			name: "Open settings",
			callback: () => {
				// `app.setting` isn't in the d.ts; fall back silently if it moves.
				type SettingHost = {
					open?: () => void;
					openTabById?: (id: string) => void;
				};
				const setting = (this.app as unknown as { setting?: SettingHost }).setting;
				if (setting?.open) setting.open();
				if (setting?.openTabById) setting.openTabById(this.manifest.id);
			},
		});

		// Kick a flush when connectivity returns or the network type changes.
		const onNetChange = () => {
			void this.engine.recoverConnectivityStatus();
			if (this.shouldAutoSync()) void this.engine.flushQueue();
		};
		this.registerDomEvent(window, "online", onNetChange);
		const conn = (navigator as unknown as { connection?: NetworkConnection }).connection;
		if (conn && typeof conn.addEventListener === "function") {
			conn.addEventListener("change", onNetChange);
			this.register(() => conn.removeEventListener?.("change", onNetChange));
		}

		// Wait for layout so the metadataCache is warm before reconcile.
		this.app.workspace.onLayoutReady(() => {
			void this.engine.start();
			this.restartReconcileTimer();
		});
	}

	onunload() {
		// Stop pending flush work; event/interval cleanup is handled by
		// registerEvent/registerInterval automatically.
		this.queue?.cancelFlush();
		this.queue?.requestStop();
	}

	/**
	 * Obsidian fires this when another device rewrites our data.json.
	 * If the synced vault_id differs from ours, adopt it and
	 * re-handshake so the server routes us to the right row.
	 */
	async onExternalSettingsChange(): Promise<void> {
		const previousVaultId = this.settings.vaultId;
		const previousConnectorId = this.settings.connectorId;
		await this.loadSettings();
		const changed =
			this.settings.vaultId !== previousVaultId ||
			this.settings.connectorId !== previousConnectorId;
		if (!changed) return;
		this.engine?.refreshStatus();
		this.notifyStatusChange();
		if (this.settings.searchSpaceId !== null) {
			void this.engine.ensureConnected();
		}
	}

	// Number of items waiting to be synced; 0 before the queue exists.
	get queueDepth(): number {
		return this.queue?.size ?? 0;
	}

	/** Open the sync-status modal (also reachable from the status bar). */
	openStatusModal(): void {
		new StatusModal(this.app, this).open();
	}

	/**
	 * (Re)start the periodic reconcile timer from current settings.
	 * A non-positive interval disables periodic reconcile entirely.
	 */
	restartReconcileTimer(): void {
		if (this.reconcileTimerId !== null) {
			window.clearInterval(this.reconcileTimerId);
			this.reconcileTimerId = null;
		}
		const minutes = this.settings.syncIntervalMinutes ?? 10;
		if (minutes <= 0) return;
		const baseMs = minutes * 60 * 1000;
		// Idle vaults back off (×2 → ×4 → ×8); resets on the first edit or non-empty reconcile.
		const effectiveMs = this.engine?.getReconcileBackoffMs(baseMs) ?? baseMs;
		const id = window.setInterval(
			() => {
				if (!this.shouldAutoSync()) return;
				void this.engine.maybeReconcile();
			},
			effectiveMs,
		);
		this.reconcileTimerId = id;
		// registerInterval ensures Obsidian clears it on unload.
		this.registerInterval(id);
	}

	/** Gate for background network activity; per-edit flush + periodic reconcile both consult this. */
	shouldAutoSync(): boolean {
		if (!this.settings.wifiOnly) return true;
		if (!Platform.isMobileApp) return true;
		// navigator.connection is supported on Android Capacitor; undefined on iOS.
		// When unavailable, behave permissively so iOS users aren't blocked outright.
		const conn = (navigator as unknown as { connection?: NetworkConnection }).connection;
		if (!conn || typeof conn.type !== "string") return true;
		return conn.type === "wifi" || conn.type === "ethernet";
	}

	/** Subscribe a UI component to status changes. */
	onStatusChange(listener: () => void): void {
		this.statusListeners.add(listener);
	}

	/** Unsubscribe a previously registered status listener. */
	offStatusChange(listener: () => void): void {
		this.statusListeners.delete(listener);
	}

	private notifyStatusChange(): void {
		for (const fn of this.statusListeners) fn();
	}

	// Forward 401s to the engine and show at most one toast per 10s.
	private notifyAuthError(): void {
		this.engine?.reportAuthError();
		const now = Date.now();
		if (now - this.lastAuthToastAt < 10_000) return;
		this.lastAuthToastAt = now;
		new Notice("Surfsense: API token expired or invalid. Paste a fresh token in settings.", 8000);
	}

	async loadSettings() {
		const data = (await this.loadData()) as Partial<SurfsensePluginSettings> | null;
		// Deep-copy array/object fields so later mutation never aliases the
		// object returned by loadData().
		this.settings = {
			...DEFAULT_SETTINGS,
			...(data ?? {}),
			queue: (data?.queue ?? []).map((i: QueueItem) => ({ ...i })),
			tombstones: { ...(data?.tombstones ?? {}) },
			includeFolders: [...(data?.includeFolders ?? [])],
			excludeFolders: [...(data?.excludeFolders ?? [])],
			excludePatterns: data?.excludePatterns?.length
				? [...data.excludePatterns]
				: [...DEFAULT_SETTINGS.excludePatterns],
		};
	}

	async saveSettings() {
		await this.saveData(this.settings);
		this.engine?.refreshStatus();
	}

	/**
	 * Mint a tentative vault_id locally on first run. The server's
	 * fingerprint dedup (see /obsidian/connect) may overwrite it on the
	 * first /connect when another device of the same vault has already
	 * registered; we always trust the server's response.
	 */
	private seedIdentity(): void {
		if (!this.settings.vaultId) {
			this.settings.vaultId = generateVaultUuid();
		}
	}
}
|
||||
|
||||
/** Subset of the Network Information API used to detect WiFi vs cellular on Android. */
interface NetworkConnection {
	// e.g. "wifi", "ethernet", "cellular"; undefined where unsupported (iOS).
	type?: string;
	addEventListener?: (event: string, handler: () => void) => void;
	removeEventListener?: (event: string, handler: () => void) => void;
}
|
||||
163
surfsense_obsidian/src/payload.ts
Normal file
163
surfsense_obsidian/src/payload.ts
Normal file
|
|
@ -0,0 +1,163 @@
|
|||
import {
|
||||
type App,
|
||||
type CachedMetadata,
|
||||
type FrontMatterCache,
|
||||
type HeadingCache,
|
||||
type ReferenceCache,
|
||||
type TFile,
|
||||
} from "obsidian";
|
||||
import type { HeadingRef, NotePayload } from "./types";
|
||||
|
||||
/**
 * Build a NotePayload from an Obsidian TFile.
 *
 * Mobile-safety contract:
 * - No top-level `node:fs` / `node:path` / `node:crypto` imports.
 *   File IO uses `vault.cachedRead` (works on the mobile WASM adapter).
 *   Hashing uses Web Crypto `subtle.digest`.
 * - Caller MUST first wait for `metadataCache.changed` before calling
 *   this for a `.md` file, otherwise `frontmatter`/`tags`/`headings`
 *   can lag the actual file contents.
 */
export async function buildNotePayload(
	app: App,
	file: TFile,
	vaultId: string,
): Promise<NotePayload> {
	// cachedRead avoids a disk hit when the file is already in memory.
	const content = await app.vault.cachedRead(file);
	const cache: CachedMetadata | null = app.metadataCache.getFileCache(file);

	const frontmatter = normalizeFrontmatter(cache?.frontmatter);
	const tags = collectTags(cache);
	const headings = collectHeadings(cache?.headings ?? []);
	const aliases = collectAliases(frontmatter);
	const { embeds, internalLinks } = collectLinks(cache);
	const { resolved, unresolved } = resolveLinkTargets(
		app,
		file.path,
		internalLinks,
	);
	// SHA-256 over the raw text; used server-side for change detection.
	const contentHash = await computeContentHash(content);

	return {
		vault_id: vaultId,
		path: file.path,
		name: file.basename,
		extension: file.extension,
		content,
		frontmatter,
		tags,
		headings,
		resolved_links: resolved,
		unresolved_links: unresolved,
		embeds,
		aliases,
		content_hash: contentHash,
		size: file.stat.size,
		mtime: file.stat.mtime,
		ctime: file.stat.ctime,
	};
}
|
||||
|
||||
export async function computeContentHash(content: string): Promise<string> {
|
||||
const bytes = new TextEncoder().encode(content);
|
||||
const digest = await crypto.subtle.digest("SHA-256", bytes);
|
||||
return bufferToHex(digest);
|
||||
}
|
||||
|
||||
function bufferToHex(buf: ArrayBuffer): string {
|
||||
const view = new Uint8Array(buf);
|
||||
let hex = "";
|
||||
for (let i = 0; i < view.length; i++) {
|
||||
hex += (view[i] ?? 0).toString(16).padStart(2, "0");
|
||||
}
|
||||
return hex;
|
||||
}
|
||||
|
||||
function normalizeFrontmatter(
|
||||
fm: FrontMatterCache | undefined,
|
||||
): Record<string, unknown> {
|
||||
if (!fm) return {};
|
||||
// FrontMatterCache extends a plain object; strip the `position` key
|
||||
// the cache adds so the wire payload stays clean.
|
||||
const rest: Record<string, unknown> = { ...(fm as Record<string, unknown>) };
|
||||
delete rest.position;
|
||||
return rest;
|
||||
}
|
||||
|
||||
function collectTags(cache: CachedMetadata | null): string[] {
|
||||
const out = new Set<string>();
|
||||
for (const t of cache?.tags ?? []) {
|
||||
const tag = t.tag.startsWith("#") ? t.tag.slice(1) : t.tag;
|
||||
if (tag) out.add(tag);
|
||||
}
|
||||
const fmTags: unknown =
|
||||
cache?.frontmatter?.tags ?? cache?.frontmatter?.tag;
|
||||
if (Array.isArray(fmTags)) {
|
||||
for (const t of fmTags) {
|
||||
if (typeof t === "string" && t) out.add(t.replace(/^#/, ""));
|
||||
}
|
||||
} else if (typeof fmTags === "string" && fmTags) {
|
||||
for (const t of fmTags.split(/[\s,]+/)) {
|
||||
if (t) out.add(t.replace(/^#/, ""));
|
||||
}
|
||||
}
|
||||
return [...out];
|
||||
}
|
||||
|
||||
function collectHeadings(items: HeadingCache[]): HeadingRef[] {
|
||||
return items.map((h) => ({ heading: h.heading, level: h.level }));
|
||||
}
|
||||
|
||||
function collectAliases(frontmatter: Record<string, unknown>): string[] {
|
||||
const raw = frontmatter.aliases ?? frontmatter.alias;
|
||||
if (Array.isArray(raw)) {
|
||||
return raw.filter((x): x is string => typeof x === "string" && x.length > 0);
|
||||
}
|
||||
if (typeof raw === "string" && raw) return [raw];
|
||||
return [];
|
||||
}
|
||||
|
||||
function collectLinks(cache: CachedMetadata | null): {
|
||||
embeds: string[];
|
||||
internalLinks: ReferenceCache[];
|
||||
} {
|
||||
const linkRefs: ReferenceCache[] = [
|
||||
...((cache?.links) ?? []),
|
||||
...((cache?.embeds as ReferenceCache[] | undefined) ?? []),
|
||||
];
|
||||
const embeds = ((cache?.embeds as ReferenceCache[] | undefined) ?? []).map(
|
||||
(e) => e.link,
|
||||
);
|
||||
return { embeds, internalLinks: linkRefs };
|
||||
}
|
||||
|
||||
function resolveLinkTargets(
|
||||
app: App,
|
||||
sourcePath: string,
|
||||
links: ReferenceCache[],
|
||||
): { resolved: string[]; unresolved: string[] } {
|
||||
const resolved = new Set<string>();
|
||||
const unresolved = new Set<string>();
|
||||
for (const link of links) {
|
||||
const target = app.metadataCache.getFirstLinkpathDest(
|
||||
stripSubpath(link.link),
|
||||
sourcePath,
|
||||
);
|
||||
if (target) {
|
||||
resolved.add(target.path);
|
||||
} else {
|
||||
unresolved.add(link.link);
|
||||
}
|
||||
}
|
||||
return { resolved: [...resolved], unresolved: [...unresolved] };
|
||||
}
|
||||
|
||||
function stripSubpath(link: string): string {
|
||||
const hashIdx = link.indexOf("#");
|
||||
const pipeIdx = link.indexOf("|");
|
||||
let end = link.length;
|
||||
if (hashIdx !== -1) end = Math.min(end, hashIdx);
|
||||
if (pipeIdx !== -1) end = Math.min(end, pipeIdx);
|
||||
return link.slice(0, end);
|
||||
}
|
||||
228
surfsense_obsidian/src/queue.ts
Normal file
228
surfsense_obsidian/src/queue.ts
Normal file
|
|
@ -0,0 +1,228 @@
|
|||
import { type Debouncer, debounce } from "obsidian";
|
||||
import type { QueueItem } from "./types";
|
||||
|
||||
/**
|
||||
* Persistent upload queue.
|
||||
*
|
||||
* Mobile-safety contract:
|
||||
* - Persistence is delegated to a save callback (which the plugin wires
|
||||
* to `plugin.saveData()`); never `node:fs`. Items also live in the
|
||||
* plugin's settings JSON so a crash mid-flight loses nothing.
|
||||
* - No top-level `node:*` imports.
|
||||
*
|
||||
* Behavioural contract:
|
||||
* - Per-file debounce: enqueueing the same path coalesces, the latest
|
||||
* `enqueuedAt` wins so we don't ship a stale snapshot.
|
||||
* - `delete` for a path drops any pending `upsert` for that path
|
||||
* (otherwise we'd resurrect a note the user just deleted).
|
||||
* - `rename` is a first-class op so the backend can update
|
||||
* `unique_identifier_hash` instead of "delete + create" (which would
|
||||
* blow away document versions, citations, and the document_id used
|
||||
* in chat history).
|
||||
* - Drain takes a worker, returns once the worker either succeeds for
|
||||
* every batch or hits a stop signal (transient error, mid-drain
|
||||
* stop request).
|
||||
*/
|
||||
|
||||
/** Pluggable transport: `PersistentQueue.drain` hands batches to this. */
export interface QueueWorker {
	processBatch(batch: QueueItem[]): Promise<BatchResult>;
}

/** Per-batch verdict returned by a QueueWorker. */
export interface BatchResult {
	/** Items that succeeded; they will be ack'd off the queue. */
	acked: QueueItem[];
	/** Items that should be retried; their `attempt` is bumped. */
	retry: QueueItem[];
	/** Items that failed permanently (4xx). They get dropped. */
	dropped: QueueItem[];
	/** If true, the drain loop stops (e.g. transient/network error). */
	stop: boolean;
	/** Optional retry-after for transient errors (ms). */
	backoffMs?: number;
}

/** Construction knobs for PersistentQueue. */
export interface PersistentQueueOptions {
	/** Per-file coalescing window before a scheduled flush fires. */
	debounceMs?: number;
	/** Maximum items handed to the worker per batch. */
	batchSize?: number;
	/** Items whose `attempt` exceeds this are dropped instead of retried. */
	maxAttempts?: number;
	/** Durable sink for queue snapshots (wired to `plugin.saveData()`). */
	persist: (items: QueueItem[]) => Promise<void> | void;
	/** Clock override, for tests. */
	now?: () => number;
}

/** Fallbacks for the optional knobs above. */
const DEFAULTS = {
	debounceMs: 2000,
	batchSize: 15,
	maxAttempts: 8,
};
|
||||
|
||||
/** In-memory queue mirrored to durable storage via the `persist` callback. */
export class PersistentQueue {
	// FIFO of pending ops; replaced wholesale on every mutation so a
	// snapshot handed to `persist` can never be mutated underneath it.
	private items: QueueItem[];
	private readonly opts: Required<
		Omit<PersistentQueueOptions, "persist" | "now">
	> & {
		persist: PersistentQueueOptions["persist"];
		now: () => number;
	};
	// Re-entrancy guard: only one drain loop at a time.
	private draining = false;
	private stopRequested = false;
	private debouncedFlush: Debouncer<[], void> | null = null;

	constructor(initial: QueueItem[], opts: PersistentQueueOptions) {
		// Copy so the caller's array (e.g. loaded settings) isn't aliased.
		this.items = [...initial];
		this.opts = {
			debounceMs: opts.debounceMs ?? DEFAULTS.debounceMs,
			batchSize: opts.batchSize ?? DEFAULTS.batchSize,
			maxAttempts: opts.maxAttempts ?? DEFAULTS.maxAttempts,
			persist: opts.persist,
			now: opts.now ?? (() => Date.now()),
		};
	}

	/** Number of items currently queued. */
	get size(): number {
		return this.items.length;
	}

	/** Shallow per-item copy, safe to hand to persistence or callers. */
	snapshot(): QueueItem[] {
		return this.items.map((i) => ({ ...i }));
	}

	/** Install the debounced flush trigger fired after each enqueue. */
	setFlushHandler(handler: () => void): void {
		// resetTimer: true → each enqueue postpones the flush.
		this.debouncedFlush = debounce(handler, this.opts.debounceMs, true);
	}

	/**
	 * Queue (or re-queue) an upsert for `path`. Any pending upsert for
	 * the same path is replaced so the newest `enqueuedAt` wins.
	 */
	enqueueUpsert(path: string): void {
		const now = this.opts.now();
		this.items = this.items.filter(
			(i) => !(i.op === "upsert" && i.path === path),
		);
		this.items.push({ op: "upsert", path, enqueuedAt: now, attempt: 0 });
		void this.persist();
		this.scheduleFlush();
	}

	/** Queue a delete for `path`, dropping any pending ops on that path. */
	enqueueDelete(path: string): void {
		const now = this.opts.now();
		// A delete supersedes any pending upsert for the same path.
		this.items = this.items.filter(
			(i) =>
				!(
					(i.op === "upsert" && i.path === path) ||
					(i.op === "delete" && i.path === path)
				),
		);
		this.items.push({ op: "delete", path, enqueuedAt: now, attempt: 0 });
		void this.persist();
		this.scheduleFlush();
	}

	/**
	 * Queue a rename, dropping pending upserts on either endpoint and any
	 * duplicate of the same rename, then follow it with a fresh upsert.
	 */
	enqueueRename(oldPath: string, newPath: string): void {
		const now = this.opts.now();
		this.items = this.items.filter(
			(i) =>
				!(
					(i.op === "upsert" && (i.path === oldPath || i.path === newPath)) ||
					(i.op === "rename" && i.oldPath === oldPath && i.newPath === newPath)
				),
		);
		this.items.push({
			op: "rename",
			oldPath,
			newPath,
			enqueuedAt: now,
			attempt: 0,
		});
		// Pair with an upsert — content may have changed alongside the rename.
		this.items.push({ op: "upsert", path: newPath, enqueuedAt: now, attempt: 0 });
		void this.persist();
		this.scheduleFlush();
	}

	/** Ask a running drain to stop after the current batch. */
	requestStop(): void {
		this.stopRequested = true;
	}

	/** Cancel a pending debounced flush (e.g. on unload). */
	cancelFlush(): void {
		this.debouncedFlush?.cancel();
	}

	private scheduleFlush(): void {
		this.debouncedFlush?.();
	}

	/**
	 * Process batches through `worker` until the queue is empty, a stop
	 * is requested, or the worker signals stop. Unclassified items are
	 * retried; items over the retry budget are dropped. The queue is
	 * persisted after every batch so a crash loses at most one batch of
	 * progress accounting, never the items themselves.
	 */
	async drain(worker: QueueWorker): Promise<DrainSummary> {
		// Already draining → report a no-op rather than interleave loops.
		if (this.draining) return { batches: 0, acked: 0, dropped: 0, stopped: false };
		this.draining = true;
		this.stopRequested = false;
		const summary: DrainSummary = {
			batches: 0,
			acked: 0,
			dropped: 0,
			stopped: false,
		};
		try {
			while (this.items.length > 0 && !this.stopRequested) {
				const batch = this.takeBatch();
				summary.batches += 1;

				const result = await worker.processBatch(batch);
				summary.acked += result.acked.length;
				summary.dropped += result.dropped.length;

				const ackKeys = new Set(result.acked.map(itemKey));
				const dropKeys = new Set(result.dropped.map(itemKey));
				const retryKeys = new Set(result.retry.map(itemKey));

				// Items the worker didn't classify get retried — never silently dropped.
				const unhandled = batch.filter(
					(b) =>
						!ackKeys.has(itemKey(b)) &&
						!dropKeys.has(itemKey(b)) &&
						!retryKeys.has(itemKey(b)),
				);
				const retry = [...result.retry, ...unhandled].map((i) => ({
					...i,
					attempt: i.attempt + 1,
				}));
				// Enforce the retry budget; anything over it counts as dropped.
				const survivors = retry.filter((i) => i.attempt <= this.opts.maxAttempts);
				summary.dropped += retry.length - survivors.length;

				// Retries go back to the FRONT so their original order is kept.
				this.items = [...survivors, ...this.items];
				await this.persist();

				if (result.stop) {
					summary.stopped = true;
					// NOTE(review): a backoffMs of 0 is treated as absent here —
					// presumably intentional; confirm against the worker contract.
					if (result.backoffMs) summary.backoffMs = result.backoffMs;
					break;
				}
			}
			if (this.stopRequested) summary.stopped = true;
			return summary;
		} finally {
			this.draining = false;
		}
	}

	/** Pop up to `batchSize` items off the head of the queue. */
	private takeBatch(): QueueItem[] {
		const head = this.items.slice(0, this.opts.batchSize);
		this.items = this.items.slice(this.opts.batchSize);
		return head;
	}

	/** Write the current queue contents to durable storage. */
	private async persist(): Promise<void> {
		await this.opts.persist(this.snapshot());
	}
}
|
||||
|
||||
/** Aggregate result of one `PersistentQueue.drain()` pass. */
export interface DrainSummary {
	/** Number of batches handed to the worker. */
	batches: number;
	/** Total items acknowledged (removed from the queue). */
	acked: number;
	/** Total items dropped (permanent failures + retry budget exhausted). */
	dropped: number;
	/** True when the drain ended early (stop requested or worker-signalled). */
	stopped: boolean;
	/** Suggested wait before the next drain, forwarded from the worker (ms). */
	backoffMs?: number;
}
|
||||
|
||||
export function itemKey(i: QueueItem): string {
|
||||
if (i.op === "rename") return `rename:${i.oldPath}=>${i.newPath}`;
|
||||
return `${i.op}:${i.path}`;
|
||||
}
|
||||
389
surfsense_obsidian/src/settings.ts
Normal file
389
surfsense_obsidian/src/settings.ts
Normal file
|
|
@ -0,0 +1,389 @@
|
|||
import {
|
||||
type App,
|
||||
type ButtonComponent,
|
||||
Notice,
|
||||
Platform,
|
||||
PluginSettingTab,
|
||||
Setting,
|
||||
setIcon,
|
||||
} from "obsidian";
|
||||
import { AuthError } from "./api-client";
|
||||
import { AttachmentsConfirmModal } from "./attachments-confirm-modal";
|
||||
import { normalizeFolder, parseExcludePatterns } from "./excludes";
|
||||
import { FolderSuggestModal } from "./folder-suggest-modal";
|
||||
import type SurfSensePlugin from "./main";
|
||||
import { STATUS_VISUALS } from "./status-visuals";
|
||||
import type { SearchSpace } from "./types";
|
||||
|
||||
/** Plugin settings tab. */
|
||||
|
||||
export class SurfSenseSettingTab extends PluginSettingTab {
	private readonly plugin: SurfSensePlugin;
	// Cached result of the last listSearchSpaces() call; feeds the dropdown.
	private searchSpaces: SearchSpace[] = [];
	private loadingSpaces = false;
	// The small status icon embedded in the "Connection" heading.
	private connectionIndicator: HTMLElement | null = null;
	// Stable callback reference so hide() can unregister exactly what
	// display() registered.
	private readonly onStatusChange = (): void => this.updateConnectionIndicator();

	constructor(app: App, plugin: SurfSensePlugin) {
		super(app, plugin);
		this.plugin = plugin;
	}

	/**
	 * (Re)build the whole tab. Called by Obsidian when the tab opens and
	 * re-invoked by several handlers below to reflect fresh state.
	 *
	 * NOTE(review): each display() call re-registers onStatusChange —
	 * assumes plugin.onStatusChange dedupes repeated registration of the
	 * same callback; verify against main.ts.
	 */
	display(): void {
		const { containerEl } = this;
		containerEl.empty();
		this.plugin.onStatusChange(this.onStatusChange);

		const settings = this.plugin.settings;

		this.renderConnectionHeading(containerEl);

		new Setting(containerEl)
			.setName("Server URL")
			.setDesc(
				"https://surfsense.com for SurfSense Cloud, or your self-hosted URL.",
			)
			.addText((text) =>
				text
					.setPlaceholder("https://surfsense.com")
					.setValue(settings.serverUrl)
					.onChange(async (value) => {
						const next = value.trim();
						const previous = this.plugin.settings.serverUrl;
						// Pointing at a different server invalidates the old
						// search-space/connector selection.
						if (previous !== "" && next !== previous) {
							this.plugin.settings.searchSpaceId = null;
							this.plugin.settings.connectorId = null;
						}
						this.plugin.settings.serverUrl = next;
						await this.plugin.saveSettings();
					}),
			);

		// The Verify button lives in the token row; keep a handle so the
		// token field's onChange can enable/disable it live.
		let verifyButton: ButtonComponent | null = null;
		const updateVerifyDisabled = (): void => {
			verifyButton?.setDisabled(this.plugin.settings.apiToken.trim().length === 0);
		};

		new Setting(containerEl)
			.setName("API token")
			.setDesc(
				"Paste your Surfsense API token (expires after 24 hours; re-paste when you see an auth error).",
			)
			.addText((text) => {
				// Render as a password field; avoid autocomplete/spellcheck leaks.
				text.inputEl.type = "password";
				text.inputEl.autocomplete = "off";
				text.inputEl.spellcheck = false;
				text
					.setPlaceholder("Paste token")
					.setValue(settings.apiToken)
					.onChange(async (value) => {
						const next = value.trim();
						const previous = this.plugin.settings.apiToken;
						// A new token may belong to a different account —
						// drop the previous target selection.
						if (previous !== "" && next !== previous) {
							this.plugin.settings.searchSpaceId = null;
							this.plugin.settings.connectorId = null;
						}
						this.plugin.settings.apiToken = next;
						updateVerifyDisabled();
						await this.plugin.saveSettings();
						// A fresh token lifts any auth block on the client.
						this.plugin.api.resetAuthBlock();
					});
			})
			.addButton((btn) => {
				verifyButton = btn;
				updateVerifyDisabled();
				btn.setButtonText("Verify").setCta().onClick(async () => {
					if (this.plugin.settings.apiToken.trim().length === 0) {
						new Notice("Surfsense: paste an API token before verifying.");
						return;
					}
					btn.setDisabled(true);
					try {
						await this.plugin.api.verifyToken();
						new Notice("Surfsense: token verified.");
						this.plugin.engine.refreshStatus({ force: true });
						// Successful auth → populate the search-space dropdown.
						await this.refreshSearchSpaces();
						this.display();
					} catch (err) {
						this.handleApiError(err);
					} finally {
						updateVerifyDisabled();
					}
				});
			});

		new Setting(containerEl)
			.setName("Search space")
			.setDesc(
				"Which Surfsense search space this vault syncs into. Reload after changing your token.",
			)
			.addDropdown((drop) => {
				// First entry doubles as placeholder / loading indicator.
				drop.addOption("", this.loadingSpaces ? "Loading…" : "Select a search space");
				for (const space of this.searchSpaces) {
					drop.addOption(String(space.id), space.name);
				}
				if (settings.searchSpaceId !== null) {
					drop.setValue(String(settings.searchSpaceId));
				}
				drop.onChange(async (value) => {
					this.plugin.settings.searchSpaceId = value ? Number(value) : null;
					// Connector is per-search-space; force re-registration.
					this.plugin.settings.connectorId = null;
					await this.plugin.saveSettings();
					if (this.plugin.settings.searchSpaceId !== null) {
						try {
							await this.plugin.engine.ensureConnected();
							await this.plugin.engine.maybeReconcile(true);
							new Notice("Surfsense: vault connected.");
							this.display();
						} catch (err) {
							this.handleApiError(err);
						}
					}
				});
			})
			.addExtraButton((btn) =>
				btn
					.setIcon("refresh-ccw")
					.setTooltip("Reload search spaces")
					.onClick(async () => {
						await this.refreshSearchSpaces();
						this.display();
					}),
			);

		new Setting(containerEl).setName("Vault").setHeading();

		new Setting(containerEl)
			.setName("Sync interval")
			.setDesc(
				"How often to check for changes made outside Obsidian.",
			)
			.addDropdown((drop) => {
				// Value is minutes; 0 disables the periodic reconcile.
				const options: Array<[number, string]> = [
					[0, "Off"],
					[5, "5 minutes"],
					[10, "10 minutes"],
					[15, "15 minutes"],
					[30, "30 minutes"],
					[60, "60 minutes"],
					[120, "2 hours"],
					[360, "6 hours"],
					[720, "12 hours"],
					[1440, "24 hours"],
				];
				for (const [value, label] of options) {
					drop.addOption(String(value), label);
				}
				drop.setValue(String(settings.syncIntervalMinutes));
				drop.onChange(async (value) => {
					this.plugin.settings.syncIntervalMinutes = Number(value);
					await this.plugin.saveSettings();
					this.plugin.restartReconcileTimer();
				});
			});

		this.renderFolderList(
			containerEl,
			"Include folders",
			"Folders to sync (leave empty to sync entire vault).",
			settings.includeFolders,
			(next) => {
				this.plugin.settings.includeFolders = next;
			},
		);

		this.renderFolderList(
			containerEl,
			"Exclude folders",
			"Folders to exclude from sync (takes precedence over includes).",
			settings.excludeFolders,
			(next) => {
				this.plugin.settings.excludeFolders = next;
			},
		);

		new Setting(containerEl)
			.setName("Advanced exclude patterns")
			.setDesc(
				"Glob fallback for power users. One pattern per line, supports * and **. Lines starting with # are comments. Applied on top of the folder lists above.",
			)
			.addTextArea((area) => {
				area.inputEl.rows = 4;
				area
					.setPlaceholder(".trash\n_attachments\ntemplates/**")
					.setValue(settings.excludePatterns.join("\n"))
					.onChange(async (value) => {
						this.plugin.settings.excludePatterns = parseExcludePatterns(value);
						await this.plugin.saveSettings();
					});
			});

		new Setting(containerEl)
			.setName("Include attachments")
			.setDesc(
				"Also sync non-Markdown files such as images and PDFs. Other file types are skipped.",
			)
			.addToggle((toggle) =>
				toggle
					.setValue(settings.includeAttachments)
					.onChange(async (value) => {
						const isEnabling =
							value && !this.plugin.settings.includeAttachments;
						// Turning attachments ON requires explicit confirmation
						// (can upload a lot of data); turning OFF does not.
						if (isEnabling) {
							const confirmed = await new AttachmentsConfirmModal(
								this.app,
							).waitForConfirmation();
							if (!confirmed) {
								// Re-render to snap the toggle back to OFF.
								this.display();
								return;
							}
						}
						this.plugin.settings.includeAttachments = value;
						await this.plugin.saveSettings();
					}),
			);

		// WiFi-only gating is only offered where it can be detected (Android).
		if (Platform.isAndroidApp) {
			new Setting(containerEl)
				.setName("Sync only on WiFi")
				.setDesc("Pause automatic syncing on cellular.")
				.addToggle((toggle) =>
					toggle
						.setValue(settings.wifiOnly)
						.onChange(async (value) => {
							this.plugin.settings.wifiOnly = value;
							await this.plugin.saveSettings();
						}),
				);
		}

		new Setting(containerEl)
			.setName("Force sync")
			.setDesc("Manually re-index the entire vault now.")
			.addButton((btn) =>
				btn.setButtonText("Update").onClick(async () => {
					// Disable while running so double-clicks don't stack reconciles.
					btn.setDisabled(true);
					try {
						await this.plugin.engine.maybeReconcile(true);
						new Notice("Surfsense: re-sync requested.");
					} catch (err) {
						this.handleApiError(err);
					} finally {
						btn.setDisabled(false);
					}
				}),
			);

		new Setting(containerEl)
			.addButton((btn) =>
				btn
					.setButtonText("View sync status")
					.setCta()
					.onClick(() => this.plugin.openStatusModal()),
			)
			.addButton((btn) =>
				btn.setButtonText("Open releases").onClick(() => {
					window.open(
						"https://github.com/MODSetter/SurfSense/releases?q=obsidian",
						"_blank",
					);
				}),
			);
	}

	/** Obsidian lifecycle: detach the status listener when the tab closes. */
	hide(): void {
		this.plugin.offStatusChange(this.onStatusChange);
		this.connectionIndicator = null;
	}

	/** "Connection" section heading with an inline live status icon. */
	private renderConnectionHeading(containerEl: HTMLElement): void {
		const heading = new Setting(containerEl).setName("Connection").setHeading();
		heading.nameEl.addClass("surfsense-connection-heading");
		this.connectionIndicator = heading.nameEl.createSpan({
			cls: "surfsense-connection-indicator",
		});
		this.updateConnectionIndicator();
	}

	/** Sync the heading icon/tooltip with the plugin's last known status. */
	private updateConnectionIndicator(): void {
		const indicator = this.connectionIndicator;
		if (!indicator) return;
		const visual = STATUS_VISUALS[this.plugin.lastStatus.kind];
		indicator.empty();
		indicator.removeClass("surfsense-connection-indicator--err");
		if (visual.isError) {
			indicator.addClass("surfsense-connection-indicator--err");
		}
		setIcon(indicator, visual.icon);
		indicator.setAttr("aria-label", visual.label);
		indicator.setAttr("title", visual.label);
	}

	/** Reload the dropdown contents; failures leave an empty list. */
	private async refreshSearchSpaces(): Promise<void> {
		this.loadingSpaces = true;
		try {
			this.searchSpaces = await this.plugin.api.listSearchSpaces();
		} catch (err) {
			this.handleApiError(err);
			this.searchSpaces = [];
		} finally {
			this.loadingSpaces = false;
		}
	}

	/**
	 * Render an editable folder list (shared by include/exclude).
	 * `write` commits the new list into settings; display() re-renders
	 * afterwards, so the `current` closure never goes stale for long.
	 */
	private renderFolderList(
		containerEl: HTMLElement,
		title: string,
		desc: string,
		current: string[],
		write: (next: string[]) => void,
	): void {
		const setting = new Setting(containerEl).setName(title).setDesc(desc);

		// Normalize + dedupe before committing; re-render to reflect it.
		const persist = async (next: string[]): Promise<void> => {
			const dedup = Array.from(new Set(next.map(normalizeFolder)));
			write(dedup);
			await this.plugin.saveSettings();
			this.display();
		};

		setting.addButton((btn) =>
			btn
				.setButtonText("Add folder")
				.setCta()
				.onClick(() => {
					new FolderSuggestModal(
						this.app,
						(picked) => {
							void persist([...current, picked]);
						},
						current,
					).open();
				}),
		);

		// One row per chosen folder with a remove button; "" shows as "/".
		for (const folder of current) {
			new Setting(containerEl).setName(folder || "/").addExtraButton((btn) =>
				btn
					.setIcon("cross")
					.setTooltip("Remove")
					.onClick(() => {
						void persist(current.filter((f) => f !== folder));
					}),
			);
		}
	}

	/**
	 * Central error sink for UI-triggered API calls. Auth errors are
	 * mostly silent (the connection indicator already shows them); other
	 * errors are reported to the engine and surfaced as a Notice.
	 */
	private handleApiError(err: unknown): void {
		if (err instanceof AuthError) {
			if (err.message.startsWith("Missing API token")) {
				new Notice("Surfsense: paste an API token before verifying.");
			}
			return;
		}
		this.plugin.engine.reportError(err);
		new Notice(
			`SurfSense: request failed — ${(err as Error).message ?? "unknown error"}`,
		);
	}
}
|
||||
46
surfsense_obsidian/src/status-bar.ts
Normal file
46
surfsense_obsidian/src/status-bar.ts
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
import { setIcon } from "obsidian";
|
||||
import { STATUS_VISUALS } from "./status-visuals";
|
||||
import type { StatusState } from "./types";
|
||||
|
||||
/**
|
||||
* Tiny status-bar adornment.
|
||||
*
|
||||
* Plain DOM (no HTML strings, no CSS-in-JS) so it stays cheap on mobile
|
||||
* and Obsidian's lint doesn't complain about innerHTML.
|
||||
*/
|
||||
|
||||
export class StatusBar {
|
||||
private readonly el: HTMLElement;
|
||||
private readonly icon: HTMLElement;
|
||||
private readonly text: HTMLElement;
|
||||
|
||||
constructor(host: HTMLElement, onClick?: () => void) {
|
||||
this.el = host;
|
||||
this.el.addClass("surfsense-status");
|
||||
this.icon = this.el.createSpan({ cls: "surfsense-status__icon" });
|
||||
this.text = this.el.createSpan({ cls: "surfsense-status__text" });
|
||||
if (onClick) {
|
||||
this.el.addClass("surfsense-status--clickable");
|
||||
this.el.addEventListener("click", onClick);
|
||||
}
|
||||
this.update({ kind: "idle", queueDepth: 0 });
|
||||
}
|
||||
|
||||
update(state: StatusState): void {
|
||||
const visual = STATUS_VISUALS[state.kind];
|
||||
this.el.removeClass("surfsense-status--err");
|
||||
if (visual.isError) this.el.addClass("surfsense-status--err");
|
||||
setIcon(this.icon, visual.icon);
|
||||
|
||||
let label = `SurfSense: ${visual.label}`;
|
||||
if (state.queueDepth > 0 && state.kind !== "idle") {
|
||||
label += ` (${state.queueDepth})`;
|
||||
}
|
||||
this.text.setText(label);
|
||||
this.el.setAttr(
|
||||
"aria-label",
|
||||
state.detail ? `${label} — ${state.detail}` : label,
|
||||
);
|
||||
this.el.setAttr("title", state.detail ?? label);
|
||||
}
|
||||
}
|
||||
77
surfsense_obsidian/src/status-modal.ts
Normal file
77
surfsense_obsidian/src/status-modal.ts
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
import { type App, Modal, Notice, Setting } from "obsidian";
|
||||
import type SurfSensePlugin from "./main";
|
||||
import { STATUS_VISUALS } from "./status-visuals";
|
||||
|
||||
/** Live status panel reachable from the status bar / command palette. */
|
||||
export class StatusModal extends Modal {
	private readonly plugin: SurfSensePlugin;
	// Stable reference so onClose can unregister exactly what onOpen registered.
	private readonly onChange = (): void => this.render();

	constructor(app: App, plugin: SurfSensePlugin) {
		super(app);
		this.plugin = plugin;
	}

	/** Subscribe to status updates so the panel stays live while open. */
	onOpen(): void {
		this.setTitle("Surfsense status");
		this.plugin.onStatusChange(this.onChange);
		this.render();
	}

	/** Unsubscribe and clear the DOM when the modal closes. */
	onClose(): void {
		this.plugin.offStatusChange(this.onChange);
		this.contentEl.empty();
	}

	/** Rebuild the label/value rows and action buttons from current state. */
	private render(): void {
		const { contentEl, plugin } = this;
		contentEl.empty();
		const s = plugin.settings;

		// [label, display value] pairs; "—" marks "never happened yet".
		const rows: Array<[string, string]> = [
			["Status", STATUS_VISUALS[plugin.lastStatus.kind].label],
			[
				"Last sync",
				s.lastSyncAt ? new Date(s.lastSyncAt).toLocaleString() : "—",
			],
			[
				"Last reconcile",
				s.lastReconcileAt
					? new Date(s.lastReconcileAt).toLocaleString()
					: "—",
			],
			["Files synced", String(s.filesSynced ?? 0)],
			["Queue depth", String(plugin.queueDepth)],
			[
				"Capabilities",
				plugin.serverCapabilities.length
					? plugin.serverCapabilities.join(", ")
					: "(not yet handshaken)",
			],
		];
		for (const [label, value] of rows) {
			new Setting(contentEl).setName(label).setDesc(value);
		}

		new Setting(contentEl)
			.addButton((btn) =>
				btn
					.setButtonText("Re-sync entire vault")
					.setCta()
					.onClick(async () => {
						// Disable while running so repeat clicks don't stack reconciles.
						btn.setDisabled(true);
						try {
							await plugin.engine.maybeReconcile(true);
							new Notice("Surfsense: re-sync requested.");
						} catch (err) {
							new Notice(
								`Surfsense: re-sync failed — ${(err as Error).message}`,
							);
						} finally {
							btn.setDisabled(false);
						}
					}),
			)
			.addButton((btn) => btn.setButtonText("Close").onClick(() => this.close()));
	}
}
|
||||
18
surfsense_obsidian/src/status-visuals.ts
Normal file
18
surfsense_obsidian/src/status-visuals.ts
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
import type { StatusKind } from "./types";
|
||||
|
||||
/** Shared by the status bar and the settings "Connection" heading. */
|
||||
export interface StatusVisual {
|
||||
icon: string;
|
||||
label: string;
|
||||
isError: boolean;
|
||||
}
|
||||
|
||||
export const STATUS_VISUALS: Record<StatusKind, StatusVisual> = {
|
||||
idle: { icon: "check-circle", label: "Synced", isError: false },
|
||||
syncing: { icon: "refresh-ccw", label: "Syncing", isError: false },
|
||||
queued: { icon: "clock", label: "Queued", isError: false },
|
||||
"needs-setup": { icon: "cloud-off", label: "Setup required", isError: false },
|
||||
offline: { icon: "wifi-off", label: "Offline", isError: false },
|
||||
"auth-error": { icon: "alert-circle", label: "Reauthenticate", isError: true },
|
||||
error: { icon: "alert-circle", label: "Error", isError: true },
|
||||
};
|
||||
751
surfsense_obsidian/src/sync-engine.ts
Normal file
751
surfsense_obsidian/src/sync-engine.ts
Normal file
|
|
@ -0,0 +1,751 @@
|
|||
import {
|
||||
type App,
|
||||
type CachedMetadata,
|
||||
type Debouncer,
|
||||
Notice,
|
||||
type TAbstractFile,
|
||||
TFile,
|
||||
debounce,
|
||||
} from "obsidian";
|
||||
import {
|
||||
AuthError,
|
||||
PermanentError,
|
||||
type SurfSenseApiClient,
|
||||
TransientError,
|
||||
VaultNotRegisteredError,
|
||||
} from "./api-client";
|
||||
import { isExcluded, isFolderFiltered } from "./excludes";
|
||||
import { buildNotePayload } from "./payload";
|
||||
import { type BatchResult, PersistentQueue } from "./queue";
|
||||
import type {
|
||||
HealthResponse,
|
||||
ManifestEntry,
|
||||
NotePayload,
|
||||
QueueItem,
|
||||
StatusKind,
|
||||
StatusState,
|
||||
} from "./types";
|
||||
import { computeVaultFingerprint } from "./vault-identity";
|
||||
|
||||
/**
|
||||
* Reconciles vault state with the server.
|
||||
* Start order: connect (or /health) → drain queue → reconcile → subscribe events.
|
||||
*/
|
||||
|
||||
/** Everything the engine needs from the host plugin, injected for testability. */
export interface SyncEngineDeps {
	app: App;
	apiClient: SurfSenseApiClient;
	queue: PersistentQueue;
	/** Live settings accessor — re-read on use so settings-tab edits apply immediately. */
	getSettings: () => SyncEngineSettings;
	/** Mutate-and-save: the callback edits the settings object in place. */
	saveSettings: (mut: (s: SyncEngineSettings) => void) => Promise<void>;
	/** Push a status update to the UI (status bar / settings indicator). */
	setStatus: (s: StatusState) => void;
	/** Reports the server's capability list after a handshake. */
	onCapabilities: (caps: string[]) => void;
	/** Fired when the adaptive backoff multiplier may have changed; main.ts uses it to reschedule. */
	onReconcileBackoffChanged?: () => void;
}

/** Engine-relevant slice of the plugin's persisted settings. */
export interface SyncEngineSettings {
	/** Stable id for this vault; may be replaced by the server's id on connect. */
	vaultId: string;
	apiToken: string;
	/** Server-side connector id; null until the vault is registered. */
	connectorId: number | null;
	/** Target search space; null means "not configured yet". */
	searchSpaceId: number | null;
	includeFolders: string[];
	excludeFolders: string[];
	excludePatterns: string[];
	includeAttachments: boolean;
	/** Epoch ms of the last reconcile; null if it never ran. */
	lastReconcileAt: number | null;
	/** Epoch ms of the last successful sync; null if it never ran. */
	lastSyncAt: number | null;
	filesSynced: number;
	// NOTE(review): presumably path → deletion timestamp (epoch ms), pruned
	// after TOMBSTONE_TTL_MS — the consuming code is not visible here; verify.
	tombstones: Record<string, number>;
}

/** Floor between automatic reconciles, regardless of the user's chosen interval. */
export const RECONCILE_MIN_INTERVAL_MS = 5 * 60 * 1000;
/** How long deletion tombstones are retained. */
const TOMBSTONE_TTL_MS = 24 * 60 * 60 * 1000; // 1 day
/** Coalescing window for per-file markdown edit events. */
const PENDING_DEBOUNCE_MS = 1500;
|
||||
|
||||
export class SyncEngine {
	private readonly deps: SyncEngineDeps;

	/** Capability flags from the last /connect or /health response. */
	private capabilities: string[] = [];

	/** Per-path debouncers so rapid markdown edits collapse into one upsert. */
	private pendingMdEdits = new Map<string, Debouncer<[], void>>();

	/** Consecutive reconciles that found no work; powers the adaptive interval. */
	private idleReconcileStreak = 0;

	/** 2^streak is capped at this value (e.g. 8 → max ×8 backoff). */
	private readonly maxBackoffMultiplier = 8;

	/** Last status kind actually emitted; lets refreshStatus preserve sticky error states. */
	private lastAppliedKind: StatusKind = "needs-setup";

	constructor(deps: SyncEngineDeps) {
		this.deps = deps;
	}

	/** Returns the next-tick interval given the user's base, scaled by the idle streak. */
	getReconcileBackoffMs(baseMs: number): number {
		const multiplier = Math.min(2 ** this.idleReconcileStreak, this.maxBackoffMultiplier);
		return baseMs * multiplier;
	}

	/** Capability flags advertised by the server on the last handshake. */
	getCapabilities(): readonly string[] {
		return this.capabilities;
	}

	/** True when the server advertised the given capability flag. */
	supports(capability: string): boolean {
		return this.capabilities.includes(capability);
	}

	/** Run the onload sequence described in this file's docstring. */
	async start(): Promise<void> {
		this.setStatus("syncing", "Connecting to SurfSense…");

		const settings = this.deps.getSettings();
		if (!settings.searchSpaceId) {
			// No target yet — /health still surfaces auth/network errors.
			try {
				const health = await this.deps.apiClient.health();
				this.applyHealth(health);
			} catch (err) {
				this.handleStartupError(err);
				return;
			}
			this.setStatus("idle");
			return;
		}

		// Re-announce so the backend sees the latest vault_name + last_connect_at.
		// flushQueue gates on connectorId, so a failed connect leaves the queue intact.
		await this.ensureConnected();

		await this.flushQueue();
		await this.maybeReconcile();
		this.setStatus(this.queueStatusKind(), undefined);
	}

	/**
	 * (Re)register the vault. Adopts server's `vault_id` in case fingerprint
	 * dedup routed us to an existing row from another device.
	 *
	 * @returns true when the handshake succeeded and connectorId is persisted.
	 */
	async ensureConnected(): Promise<boolean> {
		const settings = this.deps.getSettings();
		if (!settings.searchSpaceId) {
			this.setStatus("idle");
			return false;
		}
		this.setStatus("syncing", "Connecting to SurfSense");
		try {
			const fingerprint = await computeVaultFingerprint(this.deps.app);
			const resp = await this.deps.apiClient.connect({
				searchSpaceId: settings.searchSpaceId,
				vaultId: settings.vaultId,
				vaultName: this.deps.app.vault.getName(),
				vaultFingerprint: fingerprint,
			});
			this.applyHealth(resp);
			// Persist the server-assigned identifiers so future batches target the right row.
			await this.deps.saveSettings((s) => {
				s.vaultId = resp.vault_id;
				s.connectorId = resp.connector_id;
			});
			this.setStatus(this.queueStatusKind(), this.statusDetail());
			return true;
		} catch (err) {
			this.handleStartupError(err);
			return false;
		}
	}

	/** Cache the server's capability list and forward it to the plugin shell. */
	applyHealth(h: HealthResponse): void {
		this.capabilities = Array.isArray(h.capabilities) ? [...h.capabilities] : [];
		this.deps.onCapabilities(this.capabilities);
	}

	// ---- vault event handlers --------------------------------------------

	/** New file appeared: debounce markdown, enqueue attachments immediately. */
	onCreate(file: TAbstractFile): void {
		if (!this.shouldTrack(file)) return;
		const settings = this.deps.getSettings();
		if (this.isExcluded(file.path, settings)) return;
		this.resetIdleStreak();
		if (this.isMarkdown(file)) {
			this.scheduleMdUpsert(file.path);
			return;
		}
		this.deps.queue.enqueueUpsert(file.path);
	}

	/** File content changed. */
	onModify(file: TAbstractFile): void {
		if (!this.shouldTrack(file)) return;
		const settings = this.deps.getSettings();
		if (this.isExcluded(file.path, settings)) return;
		this.resetIdleStreak();
		if (this.isMarkdown(file)) {
			// Wait for metadataCache.changed so the payload sees fresh metadata.
			this.scheduleMdUpsert(file.path);
			return;
		}
		this.deps.queue.enqueueUpsert(file.path);
	}

	/** File removed locally: queue the remote delete and record a tombstone. */
	onDelete(file: TAbstractFile): void {
		if (!this.shouldTrack(file)) return;
		this.resetIdleStreak();
		this.deps.queue.enqueueDelete(file.path);
		void this.deps.saveSettings((s) => {
			s.tombstones[file.path] = Date.now();
		});
	}

	/**
	 * File moved/renamed. A rename INTO an excluded location degrades to a
	 * delete of the old remote path (plus tombstone).
	 */
	onRename(file: TAbstractFile, oldPath: string): void {
		if (!this.shouldTrack(file)) return;
		this.resetIdleStreak();
		const settings = this.deps.getSettings();
		if (this.isExcluded(file.path, settings)) {
			this.deps.queue.enqueueDelete(oldPath);
			void this.deps.saveSettings((s) => {
				s.tombstones[oldPath] = Date.now();
			});
			return;
		}
		this.deps.queue.enqueueRename(oldPath, file.path);
	}

	/** metadataCache finished parsing the edited note; safe to enqueue its upsert now. */
	onMetadataChanged(file: TFile, _data: string, _cache: CachedMetadata): void {
		if (!this.shouldTrack(file)) return;
		const settings = this.deps.getSettings();
		if (this.isExcluded(file.path, settings)) return;
		if (!this.isMarkdown(file)) return;
		// Metadata is fresh now — cancel the deferred upsert and enqueue immediately.
		const pending = this.pendingMdEdits.get(file.path);
		if (pending) {
			pending.cancel();
			this.pendingMdEdits.delete(file.path);
		}
		this.deps.queue.enqueueUpsert(file.path);
	}

	/** Fallback path for markdown edits whose metadata event never fires. */
	private scheduleMdUpsert(path: string): void {
		let pending = this.pendingMdEdits.get(path);
		if (!pending) {
			// resetTimer: true → each edit pushes the upsert out by another PENDING_DEBOUNCE_MS.
			pending = debounce(
				() => {
					this.pendingMdEdits.delete(path);
					this.deps.queue.enqueueUpsert(path);
				},
				PENDING_DEBOUNCE_MS,
				true,
			);
			this.pendingMdEdits.set(path, pending);
		}
		pending();
	}

	// ---- queue draining ---------------------------------------------------

	/** Drain the persistent queue (renames → deletes → upserts per batch). */
	async flushQueue(): Promise<void> {
		if (this.deps.queue.size === 0) {
			await this.recoverStatusIfNeeded();
			return;
		}
		// Shared gate for every flush trigger so the first /sync can't race /connect.
		if (!this.deps.getSettings().connectorId) {
			const connected = await this.ensureConnected();
			if (!connected) return;
			if (!this.deps.getSettings().connectorId) return;
		}
		this.setStatus("syncing", `Syncing ${this.deps.queue.size} item(s)…`);
		const summary = await this.deps.queue.drain({
			processBatch: (batch) => this.processBatch(batch),
		});
		if (summary.acked > 0) {
			await this.deps.saveSettings((s) => {
				s.lastSyncAt = Date.now();
				s.filesSynced = (s.filesSynced ?? 0) + summary.acked;
			});
		}
		this.setStatus(this.queueStatusKind(), this.statusDetail());
	}

	/**
	 * Lightweight status recovery path used after network-change signals.
	 * Clears stale offline/auth/error only when connectivity/auth is explicitly re-validated.
	 */
	async recoverConnectivityStatus(): Promise<void> {
		const settings = this.deps.getSettings();
		if (!settings.apiToken) {
			this.refreshStatus({ force: true });
			return;
		}
		if (!settings.searchSpaceId) {
			try {
				const health = await this.deps.apiClient.health();
				this.applyHealth(health);
				this.refreshStatus({ force: true });
			} catch (err) {
				this.handleStartupError(err);
			}
			return;
		}
		const connected = await this.ensureConnected();
		if (!connected) return;
		this.refreshStatus({ force: true });
	}

	/**
	 * Process one drained batch. Each op group gets its own API call; per-item
	 * failures go to `retry`, batch-level errors are classified into
	 * retry/drop/stop. `stop: true` aborts the drain (auth/offline).
	 */
	private async processBatch(batch: QueueItem[]): Promise<BatchResult> {
		const settings = this.deps.getSettings();
		const upserts = batch.filter((b): b is QueueItem & { op: "upsert" } => b.op === "upsert");
		const renames = batch.filter((b): b is QueueItem & { op: "rename" } => b.op === "rename");
		const deletes = batch.filter((b): b is QueueItem & { op: "delete" } => b.op === "delete");

		const acked: QueueItem[] = [];
		const retry: QueueItem[] = [];
		const dropped: QueueItem[] = [];

		// Renames first so paths line up before content upserts.
		if (renames.length > 0) {
			try {
				const resp = await this.deps.apiClient.renameBatch({
					vaultId: settings.vaultId,
					renames: renames.map((r) => ({ oldPath: r.oldPath, newPath: r.newPath })),
				});
				// NUL-joined key: paths can't contain \u0000, so the pair key is unambiguous.
				const failed = new Set(
					resp.failed.map((f) => `${f.oldPath}\u0000${f.newPath}`),
				);
				for (const r of renames) {
					if (failed.has(`${r.oldPath}\u0000${r.newPath}`)) retry.push(r);
					else acked.push(r);
				}
			} catch (err) {
				if (await this.handleVaultNotRegistered(err)) {
					retry.push(...renames);
				} else {
					const verdict = this.classify(err);
					if (verdict === "stop") return { acked, retry: [...retry, ...renames], dropped, stop: true };
					if (verdict === "retry") retry.push(...renames);
					else dropped.push(...renames);
				}
			}
		}

		if (deletes.length > 0) {
			try {
				const resp = await this.deps.apiClient.deleteBatch({
					vaultId: settings.vaultId,
					paths: deletes.map((d) => d.path),
				});
				const failed = new Set(resp.failed);
				for (const d of deletes) {
					if (failed.has(d.path)) retry.push(d);
					else acked.push(d);
				}
			} catch (err) {
				if (await this.handleVaultNotRegistered(err)) {
					retry.push(...deletes);
				} else {
					const verdict = this.classify(err);
					if (verdict === "stop") return { acked, retry: [...retry, ...deletes], dropped, stop: true };
					if (verdict === "retry") retry.push(...deletes);
					else dropped.push(...deletes);
				}
			}
		}

		if (upserts.length > 0) {
			const payloads: NotePayload[] = [];
			for (const item of upserts) {
				const file = this.deps.app.vault.getFileByPath(item.path);
				if (!file) {
					// Vanished — ack now; the delete event will follow if needed.
					acked.push(item);
					continue;
				}
				try {
					const payload = this.isMarkdown(file)
						? await buildNotePayload(this.deps.app, file, settings.vaultId)
						: await this.buildBinaryPayload(file, settings.vaultId);
					payloads.push(payload);
				} catch (err) {
					console.error("SurfSense: failed to build payload", item.path, err);
					retry.push(item);
				}
			}

			if (payloads.length > 0) {
				try {
					const resp = await this.deps.apiClient.syncBatch({
						vaultId: settings.vaultId,
						notes: payloads,
					});
					// Per-note failures retry; queue maxAttempts drops poison pills.
					const failed = new Set(resp.failed);
					for (const item of upserts) {
						// Skip items already marked for retry by the payload-build step above.
						if (retry.find((r) => r === item)) continue;
						if (failed.has(item.path)) retry.push(item);
						else acked.push(item);
					}
				} catch (err) {
					if (await this.handleVaultNotRegistered(err)) {
						for (const item of upserts) {
							if (retry.find((r) => r === item)) continue;
							retry.push(item);
						}
					} else {
						const verdict = this.classify(err);
						if (verdict === "stop")
							return { acked, retry: [...retry, ...upserts], dropped, stop: true };
						if (verdict === "retry") retry.push(...upserts);
						else dropped.push(...upserts);
					}
				}
			}
		}

		return { acked, retry, dropped, stop: false };
	}

	/** Build the sync payload for a non-markdown attachment. */
	private async buildBinaryPayload(file: TFile, vaultId: string): Promise<NotePayload> {
		// Attachments skip buildNotePayload (no markdown metadata) but still
		// need raw bytes + hash + stat so the backend can ETL-extract text
		// and manifest diff still works.
		const buf = await this.deps.app.vault.readBinary(file);
		const digest = await crypto.subtle.digest("SHA-256", buf);
		const hash = bufferToHex(digest);
		const binaryBase64 = arrayBufferToBase64(buf);
		return {
			vault_id: vaultId,
			path: file.path,
			name: file.basename,
			extension: file.extension,
			content: "",
			frontmatter: {},
			tags: [],
			headings: [],
			resolved_links: [],
			unresolved_links: [],
			embeds: [],
			aliases: [],
			content_hash: hash,
			size: file.stat.size,
			mtime: file.stat.mtime,
			ctime: file.stat.ctime,
			is_binary: true,
			binary_base64: binaryBase64,
			mime_type: mimeTypeFor(file.extension),
		};
	}

	// ---- reconcile --------------------------------------------------------

	/**
	 * Full local-vs-server diff. Skipped when run less than
	 * RECONCILE_MIN_INTERVAL_MS after the previous one, unless `force`.
	 */
	async maybeReconcile(force = false): Promise<void> {
		const settings = this.deps.getSettings();
		if (!settings.connectorId) return;
		if (!force && settings.lastReconcileAt) {
			if (Date.now() - settings.lastReconcileAt < RECONCILE_MIN_INTERVAL_MS) return;
		}

		// Re-handshake first: if the vault grew enough to match another
		// device's fingerprint, the server merges and routes us to the
		// survivor row, which the /manifest call below then uses.
		const connected = await this.ensureConnected();
		if (!connected) return;
		const refreshed = this.deps.getSettings();
		if (!refreshed.connectorId) return;

		this.setStatus("syncing", "Reconciling vault with server…");
		try {
			const manifest = await this.deps.apiClient.getManifest(refreshed.vaultId);
			const remote = manifest.items ?? {};
			const enqueued = this.diffAndQueue(refreshed, remote);
			await this.deps.saveSettings((s) => {
				s.lastReconcileAt = Date.now();
				s.tombstones = pruneTombstones(s.tombstones);
			});
			this.updateIdleStreak(enqueued);
			await this.flushQueue();
			this.refreshStatus({ force: true });
		} catch (err) {
			this.classifyAndStatus(err, "Reconcile failed");
		}
	}

	/**
	 * Diff local vault vs server manifest and enqueue work. Skips disk reads
	 * on idle reconciles by short-circuiting on `mtime + size`; false positives
	 * collapse to a no-op upsert via the server's `content_hash` check.
	 * Returns the enqueued count to drive adaptive backoff.
	 */
	private diffAndQueue(
		settings: SyncEngineSettings,
		remote: Record<string, ManifestEntry>,
	): number {
		const localFiles = this.deps.app.vault.getFiles().filter((f) => {
			if (!this.shouldTrack(f)) return false;
			if (this.isExcluded(f.path, settings)) return false;
			return true;
		});
		const localPaths = new Set(localFiles.map((f) => f.path));
		let enqueued = 0;

		for (const file of localFiles) {
			const remoteEntry = remote[file.path];
			if (!remoteEntry) {
				this.deps.queue.enqueueUpsert(file.path);
				enqueued++;
				continue;
			}
			const remoteMtimeMs = toMillis(remoteEntry.mtime);
			// +1000ms slop absorbs filesystems with coarse mtime resolution.
			const mtimeMatches = file.stat.mtime <= remoteMtimeMs + 1000;
			// Older server rows lack `size` — treat as unknown and re-upsert.
			const sizeMatches =
				typeof remoteEntry.size === "number" && file.stat.size === remoteEntry.size;
			if (mtimeMatches && sizeMatches) continue;
			this.deps.queue.enqueueUpsert(file.path);
			enqueued++;
		}

		// Remote-only → delete, unless a fresh tombstone is already in the queue.
		for (const path of Object.keys(remote)) {
			if (localPaths.has(path)) continue;
			const tombstone = settings.tombstones[path];
			if (tombstone && Date.now() - tombstone < TOMBSTONE_TTL_MS) continue;
			this.deps.queue.enqueueDelete(path);
			enqueued++;
		}

		return enqueued;
	}

	/** Bump (idle) or reset (active) the streak; notify only when the capped multiplier changes. */
	private updateIdleStreak(enqueued: number): void {
		const previousStreak = this.idleReconcileStreak;
		if (enqueued === 0) this.idleReconcileStreak++;
		else this.idleReconcileStreak = 0;
		const cap = Math.log2(this.maxBackoffMultiplier);
		const cappedPrev = Math.min(previousStreak, cap);
		const cappedNow = Math.min(this.idleReconcileStreak, cap);
		if (cappedPrev !== cappedNow) this.deps.onReconcileBackoffChanged?.();
	}

	/** Vault edit — drop back to base interval immediately. */
	private resetIdleStreak(): void {
		if (this.idleReconcileStreak === 0) return;
		this.idleReconcileStreak = 0;
		this.deps.onReconcileBackoffChanged?.();
	}

	// ---- status helpers ---------------------------------------------------

	/**
	 * Conservative by default: real errors are preserved while setup is
	 * complete, so unrelated edits don't optimistically clear the indicator.
	 * Pass `force: true` after an explicit verify/reconcile confirmation.
	 */
	refreshStatus(opts: { force?: boolean } = {}): void {
		if (!opts.force) {
			const last = this.lastAppliedKind;
			if (last === "syncing") return;
			const isError =
				last === "auth-error" || last === "offline" || last === "error";
			const s = this.deps.getSettings();
			const setupComplete = !!(s.apiToken && s.searchSpaceId && s.connectorId);
			if (isError && setupComplete) return;
		}
		this.setStatus(this.queueStatusKind(), this.statusDetail());
	}

	/** Explicit auth failure reported by a caller (e.g. settings-tab verify). */
	reportAuthError(message?: string): void {
		this.setStatus("auth-error", message ?? "API token expired or invalid");
	}

	// NOTE(review): mirrors handleStartupError — consider delegating to it.
	reportError(err: unknown): void {
		if (err instanceof AuthError) {
			this.reportAuthError(err.message);
			return;
		}
		if (err instanceof TransientError) {
			this.setStatus("offline", err.message);
			return;
		}
		this.setStatus("error", (err as Error).message ?? "Unknown error");
	}

	/**
	 * Single choke point for status. Downgrades any non-error kind to
	 * "needs-setup" while token/space/connector are missing.
	 */
	private setStatus(kind: StatusKind, detail?: string): void {
		const s = this.deps.getSettings();
		if (!s.apiToken) {
			kind = "needs-setup";
			detail = this.setupHint(s);
		} else if (kind !== "auth-error" && kind !== "offline" && kind !== "error") {
			if (!s.searchSpaceId || !s.connectorId) {
				kind = "needs-setup";
				detail = this.setupHint(s);
			}
		}
		this.lastAppliedKind = kind;
		this.deps.setStatus({ kind, detail, queueDepth: this.deps.queue.size });
	}

	/** User-facing hint for the next missing setup step. */
	private setupHint(s: SyncEngineSettings): string {
		if (!s.apiToken) return "Paste your API token in settings.";
		if (!s.searchSpaceId) return "Pick a search space in settings.";
		return "Connecting…";
	}

	/** "queued" while items wait, "idle" otherwise. */
	private queueStatusKind(): StatusKind {
		if (this.deps.queue.size > 0) return "queued";
		return "idle";
	}

	/** Human-readable "Last sync …" suffix, if a sync has ever succeeded. */
	private statusDetail(): string | undefined {
		const settings = this.deps.getSettings();
		if (settings.lastSyncAt) {
			return `Last sync ${formatRelative(settings.lastSyncAt)}`;
		}
		return undefined;
	}

	/** Map startup/handshake errors onto the status indicator. */
	private handleStartupError(err: unknown): void {
		if (err instanceof AuthError) {
			this.setStatus("auth-error", err.message);
			return;
		}
		if (err instanceof TransientError) {
			this.setStatus("offline", err.message);
			return;
		}
		this.setStatus("error", (err as Error).message ?? "Unknown error");
	}

	/** Re-connect on VAULT_NOT_REGISTERED so the next drain sees the new row. */
	private async handleVaultNotRegistered(err: unknown): Promise<boolean> {
		if (!(err instanceof VaultNotRegisteredError)) return false;
		console.warn("SurfSense: vault not registered, re-connecting before retry", err);
		await this.ensureConnected();
		return true;
	}

	/**
	 * Map a batch-level error to a drain verdict.
	 * NOTE(review): "ack" is in the return type but never produced here — confirm it's still needed.
	 */
	private classify(err: unknown): "ack" | "retry" | "drop" | "stop" {
		if (err instanceof AuthError) {
			this.setStatus("auth-error", err.message);
			return "stop";
		}
		if (err instanceof TransientError) {
			this.setStatus("offline", err.message);
			return "stop";
		}
		if (err instanceof PermanentError) {
			console.warn("SurfSense: permanent error, dropping batch", err);
			new Notice(`Surfsense: ${err.message}`);
			return "drop";
		}
		console.error("SurfSense: unknown error", err);
		return "retry";
	}

	/** classify(), then surface a prefixed detail unless classify already set a sticky status. */
	private classifyAndStatus(err: unknown, prefix: string): void {
		const verdict = this.classify(err);
		if (verdict === "stop") return;
		this.setStatus(this.queueStatusKind(), `${prefix}: ${(err as Error).message}`);
	}

	/** Only attempt status recovery when we're actually stuck in an error kind. */
	private async recoverStatusIfNeeded(): Promise<void> {
		if (!this.isRecoverableErrorState()) return;
		await this.recoverConnectivityStatus();
	}

	private isRecoverableErrorState(): boolean {
		return (
			this.lastAppliedKind === "offline" ||
			this.lastAppliedKind === "auth-error" ||
			this.lastAppliedKind === "error"
		);
	}

	// ---- predicates -------------------------------------------------------

	/** Markdown is always tracked; attachments only when enabled AND whitelisted. */
	private shouldTrack(file: TAbstractFile): boolean {
		if (!isTFile(file)) return false;
		if (this.isMarkdown(file)) return true;
		const settings = this.deps.getSettings();
		if (!settings.includeAttachments) return false;
		return ALLOWED_ATTACHMENT_EXTENSIONS.has(file.extension.toLowerCase());
	}

	/** True when folder filters or glob-style exclude patterns reject the path. */
	private isExcluded(path: string, settings: SyncEngineSettings): boolean {
		if (isFolderFiltered(path, settings.includeFolders, settings.excludeFolders)) {
			return true;
		}
		return isExcluded(path, settings.excludePatterns);
	}

	private isMarkdown(file: TAbstractFile): boolean {
		return isTFile(file) && file.extension.toLowerCase() === "md";
	}
}
|
||||
|
||||
/** Type guard: narrows a vault entry to a concrete file (folders fail the check). */
function isTFile(f: TAbstractFile): f is TFile {
	return f instanceof TFile;
}
|
||||
|
||||
function bufferToHex(buf: ArrayBuffer): string {
|
||||
const view = new Uint8Array(buf);
|
||||
let hex = "";
|
||||
for (let i = 0; i < view.length; i++) hex += (view[i] ?? 0).toString(16).padStart(2, "0");
|
||||
return hex;
|
||||
}
|
||||
|
||||
function arrayBufferToBase64(buf: ArrayBuffer): string {
|
||||
const bytes = new Uint8Array(buf);
|
||||
const chunkSize = 0x8000;
|
||||
let binary = "";
|
||||
for (let i = 0; i < bytes.length; i += chunkSize) {
|
||||
const chunk = bytes.subarray(i, i + chunkSize);
|
||||
binary += String.fromCharCode(...Array.from(chunk));
|
||||
}
|
||||
return btoa(binary);
|
||||
}
|
||||
|
||||
/** Source of truth for the attachment whitelist. Mirrors ATTACHMENT_MIME_TYPES on the backend. */
|
||||
export const MIME_BY_EXTENSION = {
|
||||
pdf: "application/pdf",
|
||||
png: "image/png",
|
||||
jpg: "image/jpeg",
|
||||
jpeg: "image/jpeg",
|
||||
gif: "image/gif",
|
||||
webp: "image/webp",
|
||||
svg: "image/svg+xml",
|
||||
txt: "text/plain",
|
||||
} as const satisfies Record<string, string>;
|
||||
|
||||
export const ALLOWED_ATTACHMENT_EXTENSIONS: ReadonlySet<string> = new Set(
|
||||
Object.keys(MIME_BY_EXTENSION),
|
||||
);
|
||||
|
||||
function mimeTypeFor(extension: string): string {
|
||||
const ext = extension.toLowerCase() as keyof typeof MIME_BY_EXTENSION;
|
||||
const mime = MIME_BY_EXTENSION[ext];
|
||||
if (!mime) {
|
||||
throw new Error(`Unsupported attachment extension: .${extension}`);
|
||||
}
|
||||
return mime;
|
||||
}
|
||||
|
||||
function formatRelative(ts: number): string {
|
||||
const diff = Date.now() - ts;
|
||||
if (diff < 60_000) return "just now";
|
||||
if (diff < 3600_000) return `${Math.round(diff / 60_000)}m ago`;
|
||||
if (diff < 86_400_000) return `${Math.round(diff / 3600_000)}h ago`;
|
||||
return `${Math.round(diff / 86_400_000)}d ago`;
|
||||
}
|
||||
|
||||
/** Manifest mtimes arrive as ISO strings, vault stats as epoch ms — normalise. */
|
||||
function toMillis(value: number | string | Date): number {
|
||||
if (typeof value === "number") return value;
|
||||
if (value instanceof Date) return value.getTime();
|
||||
const parsed = Date.parse(value);
|
||||
return Number.isFinite(parsed) ? parsed : 0;
|
||||
}
|
||||
|
||||
function pruneTombstones(tombstones: Record<string, number>): Record<string, number> {
|
||||
const out: Record<string, number> = {};
|
||||
const cutoff = Date.now() - TOMBSTONE_TTL_MS;
|
||||
for (const [k, v] of Object.entries(tombstones)) {
|
||||
if (v >= cutoff) out[k] = v;
|
||||
}
|
||||
return out;
|
||||
}
|
||||
202
surfsense_obsidian/src/types.ts
Normal file
202
surfsense_obsidian/src/types.ts
Normal file
|
|
@ -0,0 +1,202 @@
|
|||
/** Shared types for the SurfSense Obsidian plugin. Leaf module — no src/ imports. */

export interface SurfsensePluginSettings {
	serverUrl: string;
	apiToken: string;
	searchSpaceId: number | null;
	connectorId: number | null;
	/** UUID for the vault — lives here so Obsidian Sync replicates it across devices. */
	vaultId: string;
	/** 0 disables periodic reconcile (Force sync still works). */
	syncIntervalMinutes: number;
	/** Mobile-only: pause auto-sync when on cellular. iOS can't detect network type, so the toggle is a no-op there. */
	wifiOnly: boolean;
	includeFolders: string[];
	excludeFolders: string[];
	excludePatterns: string[];
	includeAttachments: boolean;
	lastSyncAt: number | null;
	lastReconcileAt: number | null;
	filesSynced: number;
	// Pending queue items persisted with settings — presumably survives plugin reloads; verify against PersistentQueue.
	queue: QueueItem[];
	// path → local-deletion epoch ms.
	tombstones: Record<string, number>;
}

export const DEFAULT_SETTINGS: SurfsensePluginSettings = {
	serverUrl: "https://surfsense.com",
	apiToken: "",
	searchSpaceId: null,
	connectorId: null,
	vaultId: "",
	syncIntervalMinutes: 10,
	wifiOnly: false,
	includeFolders: [],
	excludeFolders: [],
	excludePatterns: [".trash", "_attachments", "templates"],
	includeAttachments: false,
	lastSyncAt: null,
	lastReconcileAt: null,
	filesSynced: 0,
	queue: [],
	tombstones: {},
};

// ---- queue --------------------------------------------------------------

export type QueueOp = "upsert" | "delete" | "rename";

export interface UpsertItem {
	op: "upsert";
	path: string;
	enqueuedAt: number;
	attempt: number;
}

export interface DeleteItem {
	op: "delete";
	path: string;
	enqueuedAt: number;
	attempt: number;
}

export interface RenameItem {
	op: "rename";
	oldPath: string;
	newPath: string;
	enqueuedAt: number;
	attempt: number;
}

/** Discriminated union on `op`. */
export type QueueItem = UpsertItem | DeleteItem | RenameItem;

// ---- sync payloads ------------------------------------------------------

interface NotePayloadBase {
	vault_id: string;
	path: string;
	name: string;
	extension: string;
	content: string;
	frontmatter: Record<string, unknown>;
	tags: string[];
	headings: HeadingRef[];
	resolved_links: string[];
	unresolved_links: string[];
	embeds: string[];
	aliases: string[];
	content_hash: string;
	/** Byte size of the local file; pairs with mtime for the reconcile short-circuit. */
	size: number;
	mtime: number;
	ctime: number;
}

export interface MarkdownNotePayload extends NotePayloadBase {
	is_binary?: false;
}

export interface BinaryNotePayload extends NotePayloadBase {
	/** Non-markdown attachment marker; enables backend ETL path. */
	is_binary: true;
	/** Base64-encoded file bytes for binary attachments. */
	binary_base64: string;
	/** Canonical MIME type for the extension; required by the backend. */
	mime_type: string;
}

export type NotePayload = MarkdownNotePayload | BinaryNotePayload;

export interface HeadingRef {
	heading: string;
	level: number;
}

// ---- API responses ------------------------------------------------------

export interface SearchSpace {
	id: number;
	name: string;
	description?: string;
	[key: string]: unknown;
}

export interface ConnectResponse {
	connector_id: number;
	vault_id: string;
	search_space_id: number;
	capabilities: string[];
	server_time_utc: string;
	[key: string]: unknown;
}

export interface HealthResponse {
	capabilities: string[];
	server_time_utc: string;
	[key: string]: unknown;
}

export interface ManifestEntry {
	hash: string;
	mtime: number;
	/** Optional: byte size of stored content. Enables mtime+size short-circuit; falls back to upsert when missing. */
	size?: number;
	[key: string]: unknown;
}

export interface ManifestResponse {
	vault_id: string;
	items: Record<string, ManifestEntry>;
	[key: string]: unknown;
}

/** Per-item ack shapes — mirror `app/schemas/obsidian_plugin.py` 1:1. */
export interface SyncAckItem {
	path: string;
	status: "ok" | "queued" | "error";
	document_id?: number;
	error?: string;
}

export interface SyncAck {
	vault_id: string;
	indexed: number;
	failed: number;
	items: SyncAckItem[];
}

export interface RenameAckItem {
	old_path: string;
	new_path: string;
	status: "ok" | "error" | "missing";
	document_id?: number;
	error?: string;
}

export interface RenameAck {
	vault_id: string;
	renamed: number;
	missing: number;
	items: RenameAckItem[];
}

export interface DeleteAckItem {
	path: string;
	status: "ok" | "error" | "missing";
	error?: string;
}

export interface DeleteAck {
	vault_id: string;
	deleted: number;
	missing: number;
	items: DeleteAckItem[];
}

// ---- status indicator ---------------------------------------------------

export type StatusKind =
	| "idle"
	| "syncing"
	| "queued"
	| "needs-setup"
	| "offline"
	| "auth-error"
	| "error";

export interface StatusState {
	kind: StatusKind;
	detail?: string;
	queueDepth: number;
}
|
||||
43
surfsense_obsidian/src/vault-identity.ts
Normal file
43
surfsense_obsidian/src/vault-identity.ts
Normal file
|
|
@ -0,0 +1,43 @@
|
|||
import type { App } from "obsidian";
|
||||
|
||||
/**
|
||||
* Deterministic SHA-256 over the vault name + sorted markdown paths.
|
||||
*
|
||||
* Two devices observing the same vault content compute the same value,
|
||||
* regardless of how it was synced (iCloud, Syncthing, Obsidian Sync, …).
|
||||
* The server uses this as the cross-device dedup key on /connect.
|
||||
*/
|
||||
export async function computeVaultFingerprint(app: App): Promise<string> {
|
||||
const vaultName = app.vault.getName();
|
||||
const paths = app.vault
|
||||
.getMarkdownFiles()
|
||||
.map((f) => f.path)
|
||||
.sort();
|
||||
const payload = `${vaultName}\n${paths.join("\n")}`;
|
||||
const bytes = new TextEncoder().encode(payload);
|
||||
const digest = await crypto.subtle.digest("SHA-256", bytes);
|
||||
return bufferToHex(digest);
|
||||
}
|
||||
|
||||
function bufferToHex(buf: ArrayBuffer): string {
|
||||
const view = new Uint8Array(buf);
|
||||
let hex = "";
|
||||
for (let i = 0; i < view.length; i++) {
|
||||
hex += (view[i] ?? 0).toString(16).padStart(2, "0");
|
||||
}
|
||||
return hex;
|
||||
}
|
||||
|
||||
export function generateVaultUuid(): string {
|
||||
const c = globalThis.crypto;
|
||||
if (c?.randomUUID) return c.randomUUID();
|
||||
const buf = new Uint8Array(16);
|
||||
c.getRandomValues(buf);
|
||||
buf[6] = ((buf[6] ?? 0) & 0x0f) | 0x40;
|
||||
buf[8] = ((buf[8] ?? 0) & 0x3f) | 0x80;
|
||||
const hex = Array.from(buf, (b) => b.toString(16).padStart(2, "0")).join("");
|
||||
return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(
|
||||
16,
|
||||
20,
|
||||
)}-${hex.slice(20)}`;
|
||||
}
|
||||
48
surfsense_obsidian/styles.css
Normal file
48
surfsense_obsidian/styles.css
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
/*
|
||||
* SurfSense Obsidian plugin styles. Status-bar widget only — the settings
|
||||
* tab uses Obsidian's stock Setting rows, no custom CSS needed.
|
||||
*/
|
||||
|
||||
.surfsense-status {
|
||||
gap: 6px;
|
||||
}
|
||||
|
||||
.surfsense-status--clickable {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.surfsense-status__icon {
|
||||
display: inline-flex;
|
||||
width: 14px;
|
||||
height: 14px;
|
||||
}
|
||||
|
||||
.surfsense-status__icon svg {
|
||||
width: 14px;
|
||||
height: 14px;
|
||||
}
|
||||
|
||||
.surfsense-status--err .surfsense-status__icon {
|
||||
color: var(--color-red);
|
||||
}
|
||||
|
||||
.surfsense-connection-indicator {
|
||||
display: inline-flex;
|
||||
width: 14px;
|
||||
height: 14px;
|
||||
}
|
||||
|
||||
.surfsense-connection-heading {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.surfsense-connection-indicator svg {
|
||||
width: 14px;
|
||||
height: 14px;
|
||||
}
|
||||
|
||||
.surfsense-connection-indicator--err {
|
||||
color: var(--color-red);
|
||||
}
|
||||
30
surfsense_obsidian/tsconfig.json
Normal file
30
surfsense_obsidian/tsconfig.json
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"baseUrl": "src",
|
||||
"inlineSourceMap": true,
|
||||
"inlineSources": true,
|
||||
"module": "ESNext",
|
||||
"target": "ES6",
|
||||
"allowJs": true,
|
||||
"noImplicitAny": true,
|
||||
"noImplicitThis": true,
|
||||
"noImplicitReturns": true,
|
||||
"moduleResolution": "node",
|
||||
"importHelpers": true,
|
||||
"noUncheckedIndexedAccess": true,
|
||||
"isolatedModules": true,
|
||||
"strictNullChecks": true,
|
||||
"strictBindCallApply": true,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"useUnknownInCatchVariables": true,
|
||||
"lib": [
|
||||
"DOM",
|
||||
"ES5",
|
||||
"ES6",
|
||||
"ES7"
|
||||
]
|
||||
},
|
||||
"include": [
|
||||
"src/**/*.ts"
|
||||
]
|
||||
}
|
||||
17
surfsense_obsidian/version-bump.mjs
Normal file
17
surfsense_obsidian/version-bump.mjs
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
import { readFileSync, writeFileSync } from "fs";

// Version injected by npm (`npm version …` / package scripts).
const targetVersion = process.env.npm_package_version;
if (!targetVersion) {
	// Running the script outside npm would otherwise write the literal
	// string "undefined" into manifest.json; fail fast with a clear message.
	throw new Error("npm_package_version is not set; run this script via npm (e.g. `npm version patch`).");
}

// read minAppVersion from manifest.json and bump version to target version
const manifest = JSON.parse(readFileSync("manifest.json", "utf8"));
const { minAppVersion } = manifest;
manifest.version = targetVersion;
writeFileSync("manifest.json", JSON.stringify(manifest, null, "\t"));

// update versions.json with target version and minAppVersion from manifest.json
// but only if the target version is not already in versions.json
const versions = JSON.parse(readFileSync("versions.json", "utf8"));
if (!Object.values(versions).includes(minAppVersion)) {
	versions[targetVersion] = minAppVersion;
	writeFileSync("versions.json", JSON.stringify(versions, null, "\t"));
}
|
||||
3
surfsense_obsidian/versions.json
Normal file
3
surfsense_obsidian/versions.json
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"0.1.0": "1.5.4"
|
||||
}
|
||||
|
|
@ -1,4 +1,8 @@
|
|||
NEXT_PUBLIC_FASTAPI_BACKEND_URL=http://localhost:8000
|
||||
|
||||
# Server-only. Internal backend URL used by Next.js server code.
|
||||
FASTAPI_BACKEND_INTERNAL_URL=https://your-internal-backend.example.com
|
||||
|
||||
NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE=LOCAL or GOOGLE
|
||||
NEXT_PUBLIC_ETL_SERVICE=UNSTRUCTURED or LLAMACLOUD or DOCLING
|
||||
NEXT_PUBLIC_ZERO_CACHE_URL=http://localhost:4848
|
||||
|
|
|
|||
70
surfsense_web/app/api/v1/[...path]/route.ts
Normal file
70
surfsense_web/app/api/v1/[...path]/route.ts
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
import type { NextRequest } from "next/server";
|
||||
|
||||
export const dynamic = "force-dynamic";
|
||||
|
||||
// Hop-by-hop headers (RFC 9110 §7.6.1): connection-level metadata that a
// proxy must consume rather than forward to the client.
const HOP_BY_HOP_HEADERS = new Set([
	"connection",
	"keep-alive",
	"proxy-authenticate",
	"proxy-authorization",
	"te",
	"trailer",
	"transfer-encoding",
	"upgrade",
]);
|
||||
|
||||
function getBackendBaseUrl() {
|
||||
const base = process.env.FASTAPI_BACKEND_INTERNAL_URL || "http://localhost:8000";
|
||||
return base.endsWith("/") ? base.slice(0, -1) : base;
|
||||
}
|
||||
|
||||
function toUpstreamHeaders(headers: Headers) {
|
||||
const nextHeaders = new Headers(headers);
|
||||
nextHeaders.delete("host");
|
||||
nextHeaders.delete("content-length");
|
||||
return nextHeaders;
|
||||
}
|
||||
|
||||
function toClientHeaders(headers: Headers) {
|
||||
const nextHeaders = new Headers(headers);
|
||||
for (const header of HOP_BY_HOP_HEADERS) {
|
||||
nextHeaders.delete(header);
|
||||
}
|
||||
return nextHeaders;
|
||||
}
|
||||
|
||||
/**
 * Forward an incoming /api/v1/* request to the FastAPI backend and stream
 * the response back (status, statusText, filtered headers, raw body).
 *
 * Exported below under every HTTP method name so the whole catch-all
 * route proxies transparently.
 */
async function proxy(request: NextRequest, context: { params: Promise<{ path?: string[] }> }) {
	const params = await context.params;
	const path = params.path?.join("/") || "";
	const upstreamUrl = new URL(`${getBackendBaseUrl()}/api/v1/${path}`);
	// Forward the original query string verbatim.
	upstreamUrl.search = request.nextUrl.search;

	// Per the Fetch spec, GET/HEAD requests must not carry a body.
	const hasBody = request.method !== "GET" && request.method !== "HEAD";

	const response = await fetch(upstreamUrl, {
		method: request.method,
		headers: toUpstreamHeaders(request.headers),
		body: hasBody ? request.body : undefined,
		// `duplex: "half"` is required by the Fetch spec when streaming a
		// ReadableStream as the request body. Avoids buffering uploads in heap.
		// @ts-expect-error - `duplex` is not yet in lib.dom RequestInit types.
		duplex: hasBody ? "half" : undefined,
		// Pass 3xx responses through untouched so the client handles them.
		redirect: "manual",
	});

	return new Response(response.body, {
		status: response.status,
		statusText: response.statusText,
		headers: toClientHeaders(response.headers),
	});
}

// Next.js route handlers: one named export per supported HTTP method.
export {
	proxy as GET,
	proxy as POST,
	proxy as PUT,
	proxy as PATCH,
	proxy as DELETE,
	proxy as OPTIONS,
	proxy as HEAD,
};
|
||||
|
|
@ -3,7 +3,7 @@
|
|||
import { Check, Copy, Info } from "lucide-react";
|
||||
import { useTranslations } from "next-intl";
|
||||
import { useCallback, useRef, useState } from "react";
|
||||
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
|
||||
import { Alert, AlertDescription } from "@/components/ui/alert";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip";
|
||||
import { useApiKey } from "@/hooks/use-api-key";
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ import {
|
|||
DownloadIcon,
|
||||
ExternalLink,
|
||||
Globe,
|
||||
MessageSquare,
|
||||
MessageCircleReply,
|
||||
MoreHorizontalIcon,
|
||||
RefreshCwIcon,
|
||||
} from "lucide-react";
|
||||
|
|
@ -657,7 +657,7 @@ export const AssistantMessage: FC = () => {
|
|||
: "text-muted-foreground hover:text-foreground hover:bg-muted"
|
||||
)}
|
||||
>
|
||||
<MessageSquare className={cn("size-3.5", hasComments && "fill-current")} />
|
||||
<MessageCircleReply className={cn("size-3.5", hasComments && "fill-current")} />
|
||||
{hasComments ? (
|
||||
<span>
|
||||
{commentCount} {commentCount === 1 ? "comment" : "comments"}
|
||||
|
|
|
|||
|
|
@ -1,311 +1,187 @@
|
|||
"use client";
|
||||
|
||||
import { zodResolver } from "@hookform/resolvers/zod";
|
||||
import { Info } from "lucide-react";
|
||||
import type { FC } from "react";
|
||||
import { useRef, useState } from "react";
|
||||
import { useForm } from "react-hook-form";
|
||||
import * as z from "zod";
|
||||
import { Check, Copy, Info } from "lucide-react";
|
||||
import { type FC, useCallback, useRef, useState } from "react";
|
||||
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
|
||||
import {
|
||||
Form,
|
||||
FormControl,
|
||||
FormDescription,
|
||||
FormField,
|
||||
FormItem,
|
||||
FormLabel,
|
||||
FormMessage,
|
||||
} from "@/components/ui/form";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "@/components/ui/select";
|
||||
import { Switch } from "@/components/ui/switch";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { EnumConnectorName } from "@/contracts/enums/connector";
|
||||
import { useApiKey } from "@/hooks/use-api-key";
|
||||
import { copyToClipboard as copyToClipboardUtil } from "@/lib/utils";
|
||||
import { getConnectorBenefits } from "../connector-benefits";
|
||||
import type { ConnectFormProps } from "../index";
|
||||
|
||||
const obsidianConnectorFormSchema = z.object({
|
||||
name: z.string().min(3, {
|
||||
message: "Connector name must be at least 3 characters.",
|
||||
}),
|
||||
vault_path: z.string().min(1, {
|
||||
message: "Vault path is required.",
|
||||
}),
|
||||
vault_name: z.string().min(1, {
|
||||
message: "Vault name is required.",
|
||||
}),
|
||||
exclude_folders: z.string().optional(),
|
||||
include_attachments: z.boolean(),
|
||||
});
|
||||
const PLUGIN_RELEASES_URL =
|
||||
"https://github.com/MODSetter/SurfSense/releases?q=obsidian&expanded=true";
|
||||
|
||||
type ObsidianConnectorFormValues = z.infer<typeof obsidianConnectorFormSchema>;
|
||||
const BACKEND_URL = process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL ?? "https://surfsense.com";
|
||||
|
||||
export const ObsidianConnectForm: FC<ConnectFormProps> = ({ onSubmit, isSubmitting }) => {
|
||||
const isSubmittingRef = useRef(false);
|
||||
const [periodicEnabled, setPeriodicEnabled] = useState(true);
|
||||
const [frequencyMinutes, setFrequencyMinutes] = useState("60");
|
||||
const form = useForm<ObsidianConnectorFormValues>({
|
||||
resolver: zodResolver(obsidianConnectorFormSchema),
|
||||
defaultValues: {
|
||||
name: "Obsidian Vault",
|
||||
vault_path: "",
|
||||
vault_name: "",
|
||||
exclude_folders: ".obsidian,.trash",
|
||||
include_attachments: false,
|
||||
},
|
||||
});
|
||||
/**
|
||||
* Obsidian connect form for the plugin-only architecture.
|
||||
*
|
||||
* The legacy `vault_path` form was removed because it only worked on
|
||||
* self-hosted with a server-side bind mount and broke for everyone else.
|
||||
* The plugin pushes data over HTTPS so this UI is purely instructional —
|
||||
* there is no backend create call here. The connector row is created
|
||||
* server-side the first time the plugin calls `POST /obsidian/connect`.
|
||||
*
|
||||
* The footer "Connect" button in `ConnectorConnectView` triggers this
|
||||
* form's submit; we just close the dialog (`onBack()`) since there's
|
||||
* nothing to validate or persist from this side.
|
||||
*/
|
||||
export const ObsidianConnectForm: FC<ConnectFormProps> = ({ onBack }) => {
|
||||
const { apiKey, isLoading, copied, copyToClipboard } = useApiKey();
|
||||
const [copiedUrl, setCopiedUrl] = useState(false);
|
||||
const urlCopyTimerRef = useRef<ReturnType<typeof setTimeout> | undefined>(undefined);
|
||||
|
||||
const handleSubmit = async (values: ObsidianConnectorFormValues) => {
|
||||
// Prevent multiple submissions
|
||||
if (isSubmittingRef.current || isSubmitting) {
|
||||
return;
|
||||
}
|
||||
const copyServerUrl = useCallback(async () => {
|
||||
const ok = await copyToClipboardUtil(BACKEND_URL);
|
||||
if (!ok) return;
|
||||
setCopiedUrl(true);
|
||||
if (urlCopyTimerRef.current) clearTimeout(urlCopyTimerRef.current);
|
||||
urlCopyTimerRef.current = setTimeout(() => setCopiedUrl(false), 2000);
|
||||
}, []);
|
||||
|
||||
isSubmittingRef.current = true;
|
||||
try {
|
||||
// Parse exclude_folders into an array
|
||||
const excludeFolders = values.exclude_folders
|
||||
? values.exclude_folders
|
||||
.split(",")
|
||||
.map((f) => f.trim())
|
||||
.filter(Boolean)
|
||||
: [".obsidian", ".trash"];
|
||||
|
||||
await onSubmit({
|
||||
name: values.name,
|
||||
connector_type: EnumConnectorName.OBSIDIAN_CONNECTOR,
|
||||
config: {
|
||||
vault_path: values.vault_path,
|
||||
vault_name: values.vault_name,
|
||||
exclude_folders: excludeFolders,
|
||||
include_attachments: values.include_attachments,
|
||||
},
|
||||
is_indexable: true,
|
||||
is_active: true,
|
||||
last_indexed_at: null,
|
||||
periodic_indexing_enabled: periodicEnabled,
|
||||
indexing_frequency_minutes: periodicEnabled ? Number.parseInt(frequencyMinutes, 10) : null,
|
||||
next_scheduled_at: null,
|
||||
periodicEnabled,
|
||||
frequencyMinutes,
|
||||
});
|
||||
} finally {
|
||||
isSubmittingRef.current = false;
|
||||
}
|
||||
const handleSubmit = (event: React.FormEvent<HTMLFormElement>) => {
|
||||
event.preventDefault();
|
||||
onBack();
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="space-y-6 pb-6">
|
||||
<Alert className="bg-purple-500/10 dark:bg-purple-500/10 border-purple-500/30 p-2 sm:p-3">
|
||||
{/* Form is intentionally empty so the footer Connect button is a no-op
|
||||
that just closes the dialog (see component-level docstring). */}
|
||||
<form id="obsidian-connect-form" onSubmit={handleSubmit} />
|
||||
|
||||
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 p-2 sm:p-3">
|
||||
<Info className="size-4 shrink-0 text-purple-500" />
|
||||
<AlertTitle className="text-xs sm:text-sm">Self-Hosted Only</AlertTitle>
|
||||
<AlertTitle className="text-xs sm:text-sm">Plugin-based sync</AlertTitle>
|
||||
<AlertDescription className="text-[10px] sm:text-xs">
|
||||
This connector requires direct file system access and only works with self-hosted
|
||||
SurfSense installations.
|
||||
SurfSense now syncs Obsidian via an official plugin that runs inside Obsidian itself.
|
||||
Works on desktop and mobile, in cloud and self-hosted deployments.
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
|
||||
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
|
||||
<Form {...form}>
|
||||
<form
|
||||
id="obsidian-connect-form"
|
||||
onSubmit={form.handleSubmit(handleSubmit)}
|
||||
className="space-y-4 sm:space-y-6"
|
||||
>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="name"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel className="text-xs sm:text-sm">Connector Name</FormLabel>
|
||||
<FormControl>
|
||||
<Input
|
||||
placeholder="My Obsidian Vault"
|
||||
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
|
||||
disabled={isSubmitting}
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormDescription className="text-[10px] sm:text-xs">
|
||||
A friendly name to identify this connector.
|
||||
</FormDescription>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="vault_path"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel className="text-xs sm:text-sm">Vault Path</FormLabel>
|
||||
<FormControl>
|
||||
<Input
|
||||
placeholder="/path/to/your/obsidian/vault"
|
||||
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40 font-mono"
|
||||
disabled={isSubmitting}
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormDescription className="text-[10px] sm:text-xs">
|
||||
The absolute path to your Obsidian vault on the server. This must be accessible
|
||||
from the SurfSense backend.
|
||||
</FormDescription>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="vault_name"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel className="text-xs sm:text-sm">Vault Name</FormLabel>
|
||||
<FormControl>
|
||||
<Input
|
||||
placeholder="My Knowledge Base"
|
||||
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
|
||||
disabled={isSubmitting}
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormDescription className="text-[10px] sm:text-xs">
|
||||
A display name for your vault. This will be used in search results.
|
||||
</FormDescription>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="exclude_folders"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel className="text-xs sm:text-sm">Exclude Folders</FormLabel>
|
||||
<FormControl>
|
||||
<Input
|
||||
placeholder=".obsidian,.trash,templates"
|
||||
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40 font-mono"
|
||||
disabled={isSubmitting}
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormDescription className="text-[10px] sm:text-xs">
|
||||
Comma-separated list of folder names to exclude from indexing.
|
||||
</FormDescription>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="include_attachments"
|
||||
render={({ field }) => (
|
||||
<FormItem className="flex flex-row items-center justify-between rounded-lg border border-slate-400/20 p-3">
|
||||
<div className="space-y-0.5">
|
||||
<FormLabel className="text-xs sm:text-sm">Include Attachments</FormLabel>
|
||||
<FormDescription className="text-[10px] sm:text-xs">
|
||||
Index attachment folders and embedded files (images, PDFs, etc.)
|
||||
</FormDescription>
|
||||
</div>
|
||||
<FormControl>
|
||||
<Switch
|
||||
checked={field.value}
|
||||
onCheckedChange={field.onChange}
|
||||
disabled={isSubmitting}
|
||||
/>
|
||||
</FormControl>
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
{/* Indexing Configuration */}
|
||||
<div className="space-y-4 pt-4 border-t border-slate-400/20">
|
||||
<h3 className="text-sm sm:text-base font-medium">Indexing Configuration</h3>
|
||||
|
||||
{/* Periodic Sync Config */}
|
||||
<div className="rounded-xl bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6">
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="space-y-1">
|
||||
<h3 className="font-medium text-sm sm:text-base">Enable Periodic Sync</h3>
|
||||
<p className="text-xs sm:text-sm text-muted-foreground">
|
||||
Automatically re-index at regular intervals
|
||||
</p>
|
||||
</div>
|
||||
<Switch
|
||||
checked={periodicEnabled}
|
||||
onCheckedChange={setPeriodicEnabled}
|
||||
disabled={isSubmitting}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{periodicEnabled && (
|
||||
<div className="mt-4 pt-4 border-t border-slate-400/20 space-y-3">
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="frequency" className="text-xs sm:text-sm">
|
||||
Sync Frequency
|
||||
</Label>
|
||||
<Select
|
||||
value={frequencyMinutes}
|
||||
onValueChange={setFrequencyMinutes}
|
||||
disabled={isSubmitting}
|
||||
>
|
||||
<SelectTrigger
|
||||
id="frequency"
|
||||
className="w-full bg-slate-400/5 dark:bg-slate-400/5 border-slate-400/20 text-xs sm:text-sm"
|
||||
>
|
||||
<SelectValue placeholder="Select frequency" />
|
||||
</SelectTrigger>
|
||||
<SelectContent className="z-100">
|
||||
<SelectItem value="5" className="text-xs sm:text-sm">
|
||||
Every 5 minutes
|
||||
</SelectItem>
|
||||
<SelectItem value="15" className="text-xs sm:text-sm">
|
||||
Every 15 minutes
|
||||
</SelectItem>
|
||||
<SelectItem value="60" className="text-xs sm:text-sm">
|
||||
Every hour
|
||||
</SelectItem>
|
||||
<SelectItem value="360" className="text-xs sm:text-sm">
|
||||
Every 6 hours
|
||||
</SelectItem>
|
||||
<SelectItem value="720" className="text-xs sm:text-sm">
|
||||
Every 12 hours
|
||||
</SelectItem>
|
||||
<SelectItem value="1440" className="text-xs sm:text-sm">
|
||||
Daily
|
||||
</SelectItem>
|
||||
<SelectItem value="10080" className="text-xs sm:text-sm">
|
||||
Weekly
|
||||
</SelectItem>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
<section className="rounded-xl border border-border bg-slate-400/5 p-3 sm:p-6 dark:bg-white/5">
|
||||
<div className="space-y-5 sm:space-y-6">
|
||||
{/* Step 1 — Install plugin */}
|
||||
<article>
|
||||
<header className="mb-3 flex items-center gap-2">
|
||||
<div className="flex size-7 items-center justify-center rounded-md border border-slate-400/30 text-xs font-medium">
|
||||
1
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</Form>
|
||||
</div>
|
||||
<h3 className="text-sm font-medium sm:text-base">Install the plugin</h3>
|
||||
</header>
|
||||
<p className="mb-3 text-[11px] text-muted-foreground sm:text-xs">
|
||||
Grab the latest SurfSense plugin release. Once it's in the community store, you'll
|
||||
also be able to install it from{" "}
|
||||
<span className="font-medium">Settings → Community plugins</span> inside Obsidian.
|
||||
</p>
|
||||
<a
|
||||
href={PLUGIN_RELEASES_URL}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="inline-flex"
|
||||
>
|
||||
<Button
|
||||
type="button"
|
||||
variant="secondary"
|
||||
size="sm"
|
||||
className="gap-2 text-xs sm:text-sm"
|
||||
>
|
||||
Open plugin releases
|
||||
</Button>
|
||||
</a>
|
||||
</article>
|
||||
|
||||
<div className="h-px bg-border/60" />
|
||||
|
||||
{/* Step 2 — Copy API key */}
|
||||
<article>
|
||||
<header className="mb-3 flex items-center gap-2">
|
||||
<div className="flex size-7 items-center justify-center rounded-md border border-slate-400/30 text-xs font-medium">
|
||||
2
|
||||
</div>
|
||||
<h3 className="text-sm font-medium sm:text-base">Copy your API key</h3>
|
||||
</header>
|
||||
<p className="mb-3 text-[11px] text-muted-foreground sm:text-xs">
|
||||
Paste this into the plugin's <span className="font-medium">API token</span> setting.
|
||||
The token expires after 24 hours. Long-lived personal access tokens are coming in a
|
||||
future release.
|
||||
</p>
|
||||
|
||||
{isLoading ? (
|
||||
<div className="h-10 w-full animate-pulse rounded-md border border-border/60 bg-muted/30" />
|
||||
) : apiKey ? (
|
||||
<div className="flex items-center gap-2 rounded-md border border-border/60 bg-muted/30 px-2.5 py-1.5">
|
||||
<div className="min-w-0 flex-1 overflow-x-auto scrollbar-hide">
|
||||
<p className="cursor-text select-all whitespace-nowrap font-mono text-[10px] text-muted-foreground">
|
||||
{apiKey}
|
||||
</p>
|
||||
</div>
|
||||
<Button
|
||||
type="button"
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
onClick={copyToClipboard}
|
||||
className="size-7 shrink-0 text-muted-foreground hover:text-foreground"
|
||||
aria-label={copied ? "Copied" : "Copy API key"}
|
||||
>
|
||||
{copied ? (
|
||||
<Check className="size-3.5 text-green-500" />
|
||||
) : (
|
||||
<Copy className="size-3.5" />
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
) : (
|
||||
<p className="text-center text-xs text-muted-foreground/60">
|
||||
No API key available — try refreshing the page.
|
||||
</p>
|
||||
)}
|
||||
</article>
|
||||
|
||||
<div className="h-px bg-border/60" />
|
||||
|
||||
{/* Step 3 — Server URL */}
|
||||
<article>
|
||||
<header className="mb-3 flex items-center gap-2">
|
||||
<div className="flex size-7 items-center justify-center rounded-md border border-slate-400/30 text-xs font-medium">
|
||||
3
|
||||
</div>
|
||||
<h3 className="text-sm font-medium sm:text-base">Point the plugin at this server</h3>
|
||||
</header>
|
||||
<p className="text-[11px] text-muted-foreground sm:text-xs">
|
||||
For SurfSense Cloud, use the default{" "}
|
||||
<span className="font-medium">surfsense.com</span>. If you are self-hosting, set the
|
||||
plugin's <span className="font-medium">Server URL</span> to your frontend domain.
|
||||
</p>
|
||||
</article>
|
||||
|
||||
<div className="h-px bg-border/60" />
|
||||
|
||||
{/* Step 4 — Pick search space */}
|
||||
<article>
|
||||
<header className="mb-3 flex items-center gap-2">
|
||||
<div className="flex size-7 items-center justify-center rounded-md border border-slate-400/30 text-xs font-medium">
|
||||
4
|
||||
</div>
|
||||
<h3 className="text-sm font-medium sm:text-base">Pick this search space</h3>
|
||||
</header>
|
||||
<p className="text-[11px] text-muted-foreground sm:text-xs">
|
||||
In the plugin's <span className="font-medium">Search space</span> setting, choose the
|
||||
search space you want this vault to sync into. The connector will appear here
|
||||
automatically once the plugin makes its first sync.
|
||||
</p>
|
||||
</article>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
{/* What you get section */}
|
||||
{getConnectorBenefits(EnumConnectorName.OBSIDIAN_CONNECTOR) && (
|
||||
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 px-3 sm:px-6 py-4 space-y-2">
|
||||
<h4 className="text-xs sm:text-sm font-medium">
|
||||
<div className="space-y-2 rounded-xl border border-border bg-slate-400/5 px-3 py-4 sm:px-6 dark:bg-white/5">
|
||||
<h4 className="text-xs font-medium sm:text-sm">
|
||||
What you get with Obsidian integration:
|
||||
</h4>
|
||||
<ul className="list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
|
||||
<ul className="list-disc space-y-1 pl-5 text-[10px] text-muted-foreground sm:text-xs">
|
||||
{getConnectorBenefits(EnumConnectorName.OBSIDIAN_CONNECTOR)?.map((benefit) => (
|
||||
<li key={benefit}>{benefit}</li>
|
||||
))}
|
||||
|
|
|
|||
|
|
@ -104,11 +104,11 @@ export function getConnectorBenefits(connectorType: string): string[] | null {
|
|||
"No manual indexing required - meetings are added automatically",
|
||||
],
|
||||
OBSIDIAN_CONNECTOR: [
|
||||
"Search through all your Obsidian notes and knowledge base",
|
||||
"Access note content with YAML frontmatter metadata preserved",
|
||||
"Wiki-style links ([[note]]) and #tags are indexed",
|
||||
"Connect your personal knowledge base directly to your search space",
|
||||
"Incremental sync - only changed files are re-indexed",
|
||||
"Search through all of your Obsidian notes",
|
||||
"Realtime sync as you create, edit, rename, or delete notes",
|
||||
"YAML frontmatter, [[wiki links]], and #tags are preserved and indexed",
|
||||
"Open any chat citation straight back in Obsidian via deep links",
|
||||
"Each device is identifiable, so you can revoke a vault from one machine",
|
||||
"Full support for your vault's folder structure",
|
||||
],
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,167 +1,162 @@
|
|||
"use client";
|
||||
|
||||
import type { FC } from "react";
|
||||
import { useState } from "react";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import { Switch } from "@/components/ui/switch";
|
||||
import { AlertTriangle, Info } from "lucide-react";
|
||||
import { type FC, useEffect, useMemo, useState } from "react";
|
||||
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
|
||||
import { connectorsApiService, type ObsidianStats } from "@/lib/apis/connectors-api.service";
|
||||
import type { ConnectorConfigProps } from "../index";
|
||||
|
||||
export interface ObsidianConfigProps extends ConnectorConfigProps {
|
||||
onNameChange?: (name: string) => void;
|
||||
const OBSIDIAN_SETUP_DOCS_URL = "/docs/connectors/obsidian";
|
||||
|
||||
function formatTimestamp(value: unknown): string {
|
||||
if (typeof value !== "string" || !value) return "—";
|
||||
const d = new Date(value);
|
||||
if (Number.isNaN(d.getTime())) return value;
|
||||
return d.toLocaleString();
|
||||
}
|
||||
|
||||
export const ObsidianConfig: FC<ObsidianConfigProps> = ({
|
||||
connector,
|
||||
onConfigChange,
|
||||
onNameChange,
|
||||
}) => {
|
||||
const [vaultPath, setVaultPath] = useState<string>(
|
||||
(connector.config?.vault_path as string) || ""
|
||||
);
|
||||
const [vaultName, setVaultName] = useState<string>(
|
||||
(connector.config?.vault_name as string) || ""
|
||||
);
|
||||
const [excludeFolders, setExcludeFolders] = useState<string>(() => {
|
||||
const folders = connector.config?.exclude_folders;
|
||||
if (Array.isArray(folders)) {
|
||||
return folders.join(", ");
|
||||
}
|
||||
return (folders as string) || ".obsidian, .trash";
|
||||
});
|
||||
const [includeAttachments, setIncludeAttachments] = useState<boolean>(
|
||||
(connector.config?.include_attachments as boolean) || false
|
||||
);
|
||||
const [name, setName] = useState<string>(connector.name || "");
|
||||
/**
|
||||
* Obsidian connector config view.
|
||||
*
|
||||
* Read-only on purpose: the plugin owns vault identity, so the connector's
|
||||
* display name is auto-derived from `payload.vault_name` server-side on
|
||||
* every `/connect` (see `obsidian_plugin_routes.obsidian_connect`). The
|
||||
* web UI doesn't expose a Name input or a Save button for Obsidian (the
|
||||
* latter is suppressed in `connector-edit-view.tsx`).
|
||||
*
|
||||
* Renders one of three modes depending on the connector's `config`:
|
||||
*
|
||||
* 1. **Plugin connector** (`config.source === "plugin"`) — read-only stats
|
||||
* panel showing what the plugin most recently reported.
|
||||
* 2. **Legacy server-path connector** (`config.legacy === true`, set by the
|
||||
* migration) — migration warning + docs link + explicit disconnect data-loss
|
||||
* warning so users move to the plugin flow safely.
|
||||
* 3. **Unknown** — fallback for rows that escaped migration; suggests a
|
||||
* clean re-install.
|
||||
*/
|
||||
export const ObsidianConfig: FC<ConnectorConfigProps> = ({ connector }) => {
|
||||
const config = (connector.config ?? {}) as Record<string, unknown>;
|
||||
const isLegacy = config.legacy === true;
|
||||
const isPlugin = config.source === "plugin";
|
||||
|
||||
const handleVaultPathChange = (value: string) => {
|
||||
setVaultPath(value);
|
||||
if (onConfigChange) {
|
||||
onConfigChange({
|
||||
...connector.config,
|
||||
vault_path: value,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const handleVaultNameChange = (value: string) => {
|
||||
setVaultName(value);
|
||||
if (onConfigChange) {
|
||||
onConfigChange({
|
||||
...connector.config,
|
||||
vault_name: value,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const handleExcludeFoldersChange = (value: string) => {
|
||||
setExcludeFolders(value);
|
||||
const foldersArray = value
|
||||
.split(",")
|
||||
.map((f) => f.trim())
|
||||
.filter(Boolean);
|
||||
if (onConfigChange) {
|
||||
onConfigChange({
|
||||
...connector.config,
|
||||
exclude_folders: foldersArray,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const handleIncludeAttachmentsChange = (value: boolean) => {
|
||||
setIncludeAttachments(value);
|
||||
if (onConfigChange) {
|
||||
onConfigChange({
|
||||
...connector.config,
|
||||
include_attachments: value,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const handleNameChange = (value: string) => {
|
||||
setName(value);
|
||||
if (onNameChange) {
|
||||
onNameChange(value);
|
||||
}
|
||||
};
|
||||
if (isLegacy) return <LegacyBanner />;
|
||||
if (isPlugin) return <PluginStats config={config} />;
|
||||
return <UnknownConnectorState />;
|
||||
};
|
||||
|
||||
const LegacyBanner: FC = () => {
|
||||
return (
|
||||
<div className="space-y-6">
|
||||
{/* Connector Name */}
|
||||
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
|
||||
<div className="space-y-2">
|
||||
<Label className="text-xs sm:text-sm">Connector Name</Label>
|
||||
<Input
|
||||
value={name}
|
||||
onChange={(e) => handleNameChange(e.target.value)}
|
||||
placeholder="My Obsidian Vault"
|
||||
className="border-slate-400/20 focus-visible:border-slate-400/40"
|
||||
/>
|
||||
<p className="text-[10px] sm:text-xs text-muted-foreground">
|
||||
A friendly name to identify this connector.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<Alert className="border-amber-500/40 bg-amber-500/10">
|
||||
<AlertTriangle className="size-4 shrink-0 text-amber-500" />
|
||||
<AlertTitle className="text-xs sm:text-sm">
|
||||
Sync stopped, install the plugin to migrate
|
||||
</AlertTitle>
|
||||
<AlertDescription className="text-[11px] sm:text-xs leading-relaxed">
|
||||
This Obsidian connector used the legacy server-path scanner, which has been removed. The
|
||||
notes already indexed remain searchable, but they no longer reflect changes made in your
|
||||
vault.
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
|
||||
{/* Configuration */}
|
||||
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
|
||||
<div className="space-y-1 sm:space-y-2">
|
||||
<h3 className="font-medium text-sm sm:text-base flex items-center gap-2">
|
||||
Vault Configuration
|
||||
</h3>
|
||||
</div>
|
||||
|
||||
<div className="space-y-4">
|
||||
<div className="space-y-2">
|
||||
<Label className="text-xs sm:text-sm">Vault Path</Label>
|
||||
<Input
|
||||
value={vaultPath}
|
||||
onChange={(e) => handleVaultPathChange(e.target.value)}
|
||||
placeholder="/path/to/your/obsidian/vault"
|
||||
className="border-slate-400/20 focus-visible:border-slate-400/40 font-mono"
|
||||
/>
|
||||
<p className="text-[10px] sm:text-xs text-muted-foreground">
|
||||
The absolute path to your Obsidian vault on the server.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="space-y-2">
|
||||
<Label className="text-xs sm:text-sm">Vault Name</Label>
|
||||
<Input
|
||||
value={vaultName}
|
||||
onChange={(e) => handleVaultNameChange(e.target.value)}
|
||||
placeholder="My Knowledge Base"
|
||||
className="border-slate-400/20 focus-visible:border-slate-400/40"
|
||||
/>
|
||||
<p className="text-[10px] sm:text-xs text-muted-foreground">
|
||||
A display name for your vault in search results.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="space-y-2">
|
||||
<Label className="text-xs sm:text-sm">Exclude Folders</Label>
|
||||
<Input
|
||||
value={excludeFolders}
|
||||
onChange={(e) => handleExcludeFoldersChange(e.target.value)}
|
||||
placeholder=".obsidian, .trash, templates"
|
||||
className="border-slate-400/20 focus-visible:border-slate-400/40 font-mono"
|
||||
/>
|
||||
<p className="text-[10px] sm:text-xs text-muted-foreground">
|
||||
Comma-separated list of folder names to exclude from indexing.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center justify-between rounded-lg border border-slate-400/20 p-3">
|
||||
<div className="space-y-0.5">
|
||||
<Label className="text-xs sm:text-sm">Include Attachments</Label>
|
||||
<p className="text-[10px] sm:text-xs text-muted-foreground">
|
||||
Index attachment folders and embedded files
|
||||
</p>
|
||||
</div>
|
||||
<Switch checked={includeAttachments} onCheckedChange={handleIncludeAttachmentsChange} />
|
||||
</div>
|
||||
</div>
|
||||
<div className="rounded-xl border border-border bg-slate-400/5 p-3 sm:p-6 dark:bg-white/5">
|
||||
<h3 className="mb-3 text-sm font-medium sm:text-base">Migration required</h3>
|
||||
<p className="mb-3 text-[11px] leading-relaxed text-muted-foreground sm:text-xs">
|
||||
Follow the{" "}
|
||||
<a
|
||||
href={OBSIDIAN_SETUP_DOCS_URL}
|
||||
className="font-medium text-primary underline underline-offset-4 hover:text-primary/80"
|
||||
>
|
||||
Obsidian setup guide
|
||||
</a>{" "}
|
||||
to reconnect this vault through the plugin.
|
||||
</p>
|
||||
<p className="text-[11px] leading-relaxed text-amber-600 dark:text-amber-400 sm:text-xs">
|
||||
Heads up: Disconnect also deletes every document this connector previously indexed.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
const PluginStats: FC<{ config: Record<string, unknown> }> = ({ config }) => {
|
||||
const vaultId = typeof config.vault_id === "string" ? config.vault_id : null;
|
||||
const [stats, setStats] = useState<ObsidianStats | null>(null);
|
||||
const [statsError, setStatsError] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
if (!vaultId) return;
|
||||
let cancelled = false;
|
||||
setStats(null);
|
||||
setStatsError(false);
|
||||
connectorsApiService
|
||||
.getObsidianStats(vaultId)
|
||||
.then((result) => {
|
||||
if (!cancelled) setStats(result);
|
||||
})
|
||||
.catch((err) => {
|
||||
if (!cancelled) {
|
||||
console.error("Failed to fetch Obsidian stats", err);
|
||||
setStatsError(true);
|
||||
}
|
||||
});
|
||||
return () => {
|
||||
cancelled = true;
|
||||
};
|
||||
}, [vaultId]);
|
||||
|
||||
const tileRows = useMemo(() => {
|
||||
const placeholder = statsError ? "—" : stats ? null : "…";
|
||||
return [
|
||||
{ label: "Vault name", value: (config.vault_name as string) || "—" },
|
||||
{
|
||||
label: "Last sync",
|
||||
value: placeholder ?? formatTimestamp(stats?.last_sync_at ?? null),
|
||||
},
|
||||
{
|
||||
label: "Files synced",
|
||||
value:
|
||||
placeholder ??
|
||||
(typeof stats?.files_synced === "number" ? stats.files_synced.toLocaleString() : "—"),
|
||||
},
|
||||
];
|
||||
}, [config.vault_name, stats, statsError]);
|
||||
|
||||
return (
|
||||
<div className="space-y-4">
|
||||
<Alert className="border-emerald-500/30 bg-emerald-500/10">
|
||||
<Info className="size-4 shrink-0 text-emerald-500" />
|
||||
<AlertTitle className="text-xs sm:text-sm">Plugin connected</AlertTitle>
|
||||
<AlertDescription className="text-[11px] sm:text-xs">
|
||||
Your notes stay synced automatically. To stop syncing, disable or uninstall the plugin in
|
||||
Obsidian, or delete this connector.
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
|
||||
<div className="rounded-xl bg-slate-400/5 p-3 sm:p-6 dark:bg-white/5">
|
||||
<h3 className="mb-3 text-sm font-medium sm:text-base">Vault Status</h3>
|
||||
<dl className="grid grid-cols-1 gap-3 sm:grid-cols-2">
|
||||
{tileRows.map((stat) => (
|
||||
<div key={stat.label} className="rounded-lg bg-background/50 p-3">
|
||||
<dt className="text-xs tracking-wide text-muted-foreground sm:text-sm">
|
||||
{stat.label}
|
||||
</dt>
|
||||
<dd className="mt-1 truncate text-xs font-medium sm:text-sm">{stat.value}</dd>
|
||||
</div>
|
||||
))}
|
||||
</dl>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
const UnknownConnectorState: FC = () => (
|
||||
<Alert>
|
||||
<Info className="size-4 shrink-0" />
|
||||
<AlertTitle className="text-xs sm:text-sm">Unrecognized config</AlertTitle>
|
||||
<AlertDescription className="text-[11px] sm:text-xs">
|
||||
This connector has neither plugin metadata nor a legacy marker. It may predate migration — you
|
||||
can safely delete it and re-install the SurfSense Obsidian plugin to resume syncing.
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
);
|
||||
|
|
|
|||
|
|
@ -111,7 +111,9 @@ export const ConnectorConnectView: FC<ConnectorConnectViewProps> = ({
|
|||
: getConnectorTypeDisplay(connectorType)}
|
||||
</h2>
|
||||
<p className="text-xs sm:text-base text-muted-foreground mt-1">
|
||||
Enter your connection details
|
||||
{connectorType === "OBSIDIAN_CONNECTOR"
|
||||
? "Follow the plugin setup steps below"
|
||||
: "Enter your connection details"}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
|
@ -149,7 +151,9 @@ export const ConnectorConnectView: FC<ConnectorConnectViewProps> = ({
|
|||
<span className={isSubmitting ? "opacity-0" : ""}>
|
||||
{connectorType === "MCP_CONNECTOR"
|
||||
? "Connect"
|
||||
: `Connect ${getConnectorTypeDisplay(connectorType)}`}
|
||||
: connectorType === "OBSIDIAN_CONNECTOR"
|
||||
? "Done"
|
||||
: `Connect ${getConnectorTypeDisplay(connectorType)}`}
|
||||
</span>
|
||||
{isSubmitting && <Spinner size="sm" className="absolute" />}
|
||||
</Button>
|
||||
|
|
|
|||
|
|
@ -1,12 +1,13 @@
|
|||
"use client";
|
||||
|
||||
import { useAtomValue } from "jotai";
|
||||
import { ArrowLeft, Info, RefreshCw, Trash2 } from "lucide-react";
|
||||
import { ArrowLeft, Info, RefreshCw } from "lucide-react";
|
||||
import { type FC, useCallback, useEffect, useMemo, useRef, useState } from "react";
|
||||
import { toast } from "sonner";
|
||||
import { activeSearchSpaceIdAtom } from "@/atoms/search-spaces/search-space-query.atoms";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Spinner } from "@/components/ui/spinner";
|
||||
import { EnumConnectorName } from "@/contracts/enums/connector";
|
||||
import { getConnectorIcon } from "@/contracts/enums/connectorIcons";
|
||||
import type { SearchSourceConnector } from "@/contracts/types/connector.types";
|
||||
import { authenticatedFetch } from "@/lib/auth-utils";
|
||||
|
|
@ -18,7 +19,15 @@ import { VisionLLMConfig } from "../../components/vision-llm-config";
|
|||
import { LIVE_CONNECTOR_TYPES, getReauthEndpoint } from "../../constants/connector-constants";
|
||||
import { getConnectorDisplayName } from "../../tabs/all-connectors-tab";
|
||||
import { MCPServiceConfig } from "../components/mcp-service-config";
|
||||
import { type ConnectorConfigProps, getConnectorConfigComponent } from "../index";
|
||||
import { getConnectorConfigComponent } from "../index";
|
||||
|
||||
const VISION_LLM_CONNECTOR_TYPES = new Set<SearchSourceConnector["connector_type"]>([
|
||||
EnumConnectorName.GOOGLE_DRIVE_CONNECTOR,
|
||||
EnumConnectorName.COMPOSIO_GOOGLE_DRIVE_CONNECTOR,
|
||||
EnumConnectorName.DROPBOX_CONNECTOR,
|
||||
EnumConnectorName.ONEDRIVE_CONNECTOR,
|
||||
EnumConnectorName.OBSIDIAN_CONNECTOR,
|
||||
]);
|
||||
|
||||
interface ConnectorEditViewProps {
|
||||
connector: SearchSourceConnector;
|
||||
|
|
@ -75,6 +84,9 @@ export const ConnectorEditView: FC<ConnectorEditViewProps> = ({
|
|||
const isAuthExpired = connector.config?.auth_expired === true;
|
||||
const reauthEndpoint = getReauthEndpoint(connector);
|
||||
const [reauthing, setReauthing] = useState(false);
|
||||
const supportsVisionLlm = VISION_LLM_CONNECTOR_TYPES.has(connector.connector_type);
|
||||
const showsAiToggles =
|
||||
connector.is_indexable || connector.connector_type === EnumConnectorName.OBSIDIAN_CONNECTOR;
|
||||
|
||||
const handleReauth = useCallback(async () => {
|
||||
const spaceId = searchSpaceId ?? searchSpaceIdAtom;
|
||||
|
|
@ -264,25 +276,23 @@ export const ConnectorEditView: FC<ConnectorEditViewProps> = ({
|
|||
/>
|
||||
)}
|
||||
|
||||
{/* Summary and sync settings - hidden for live connectors */}
|
||||
{connector.is_indexable && !isLive && (
|
||||
{/* Summary + vision toggles (Obsidian is plugin-push, non-indexable by design) */}
|
||||
{showsAiToggles && !isLive && (
|
||||
<>
|
||||
{/* AI Summary toggle */}
|
||||
<SummaryConfig enabled={enableSummary} onEnabledChange={onEnableSummaryChange} />
|
||||
|
||||
{/* Vision LLM toggle - only for file-based connectors */}
|
||||
{(connector.connector_type === "GOOGLE_DRIVE_CONNECTOR" ||
|
||||
connector.connector_type === "COMPOSIO_GOOGLE_DRIVE_CONNECTOR" ||
|
||||
connector.connector_type === "DROPBOX_CONNECTOR" ||
|
||||
connector.connector_type === "ONEDRIVE_CONNECTOR") && (
|
||||
{/* Vision LLM toggle for file/attachment connectors */}
|
||||
{supportsVisionLlm && (
|
||||
<VisionLLMConfig
|
||||
enabled={enableVisionLlm}
|
||||
onEnabledChange={onEnableVisionLlmChange}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Date range selector - not shown for file-based connectors (Drive, Dropbox, OneDrive), Webcrawler, GitHub, or Local Folder */}
|
||||
{connector.connector_type !== "GOOGLE_DRIVE_CONNECTOR" &&
|
||||
{/* Date-range and periodic sync stay indexable-only */}
|
||||
{connector.is_indexable &&
|
||||
connector.connector_type !== "GOOGLE_DRIVE_CONNECTOR" &&
|
||||
connector.connector_type !== "COMPOSIO_GOOGLE_DRIVE_CONNECTOR" &&
|
||||
connector.connector_type !== "DROPBOX_CONNECTOR" &&
|
||||
connector.connector_type !== "ONEDRIVE_CONNECTOR" &&
|
||||
|
|
@ -302,37 +312,40 @@ export const ConnectorEditView: FC<ConnectorEditViewProps> = ({
|
|||
/>
|
||||
)}
|
||||
|
||||
{(() => {
|
||||
const isGoogleDrive = connector.connector_type === "GOOGLE_DRIVE_CONNECTOR";
|
||||
const isComposioGoogleDrive =
|
||||
connector.connector_type === "COMPOSIO_GOOGLE_DRIVE_CONNECTOR";
|
||||
const requiresFolderSelection = isGoogleDrive || isComposioGoogleDrive;
|
||||
const selectedFolders =
|
||||
(connector.config?.selected_folders as
|
||||
| Array<{ id: string; name: string }>
|
||||
| undefined) || [];
|
||||
const selectedFiles =
|
||||
(connector.config?.selected_files as
|
||||
| Array<{ id: string; name: string }>
|
||||
| undefined) || [];
|
||||
const hasItemsSelected = selectedFolders.length > 0 || selectedFiles.length > 0;
|
||||
const isDisabled = requiresFolderSelection && !hasItemsSelected;
|
||||
{connector.is_indexable &&
|
||||
(() => {
|
||||
const isGoogleDrive =
|
||||
connector.connector_type === "GOOGLE_DRIVE_CONNECTOR";
|
||||
const isComposioGoogleDrive =
|
||||
connector.connector_type === "COMPOSIO_GOOGLE_DRIVE_CONNECTOR";
|
||||
const requiresFolderSelection = isGoogleDrive || isComposioGoogleDrive;
|
||||
const selectedFolders =
|
||||
(connector.config?.selected_folders as
|
||||
| Array<{ id: string; name: string }>
|
||||
| undefined) || [];
|
||||
const selectedFiles =
|
||||
(connector.config?.selected_files as
|
||||
| Array<{ id: string; name: string }>
|
||||
| undefined) || [];
|
||||
const hasItemsSelected =
|
||||
selectedFolders.length > 0 || selectedFiles.length > 0;
|
||||
const isDisabled = requiresFolderSelection && !hasItemsSelected;
|
||||
|
||||
return (
|
||||
<PeriodicSyncConfig
|
||||
enabled={periodicEnabled}
|
||||
frequencyMinutes={frequencyMinutes}
|
||||
onEnabledChange={onPeriodicEnabledChange}
|
||||
onFrequencyChange={onFrequencyChange}
|
||||
disabled={isDisabled}
|
||||
disabledMessage={
|
||||
isDisabled
|
||||
? "Select at least one folder or file above to enable periodic sync"
|
||||
: undefined
|
||||
}
|
||||
/>
|
||||
);
|
||||
})()}
|
||||
return (
|
||||
<PeriodicSyncConfig
|
||||
enabled={periodicEnabled}
|
||||
frequencyMinutes={frequencyMinutes}
|
||||
onEnabledChange={onPeriodicEnabledChange}
|
||||
onFrequencyChange={onFrequencyChange}
|
||||
disabled={isDisabled}
|
||||
disabledMessage={
|
||||
isDisabled
|
||||
? "Select at least one folder or file above to enable periodic sync"
|
||||
: undefined
|
||||
}
|
||||
/>
|
||||
);
|
||||
})()}
|
||||
</>
|
||||
)}
|
||||
|
||||
|
|
@ -403,7 +416,6 @@ export const ConnectorEditView: FC<ConnectorEditViewProps> = ({
|
|||
disabled={isSaving || isDisconnecting}
|
||||
className="text-xs sm:text-sm flex-1 sm:flex-initial h-12 sm:h-auto py-3 sm:py-2"
|
||||
>
|
||||
<Trash2 className="mr-2 h-4 w-4" />
|
||||
Disconnect
|
||||
</Button>
|
||||
)}
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ import { ArrowLeft, Check, Info } from "lucide-react";
|
|||
import { type FC, useCallback, useEffect, useMemo, useRef, useState } from "react";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Spinner } from "@/components/ui/spinner";
|
||||
import { EnumConnectorName } from "@/contracts/enums/connector";
|
||||
import type { SearchSourceConnector } from "@/contracts/types/connector.types";
|
||||
import { getConnectorTypeDisplay } from "@/lib/connectors/utils";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
|
@ -15,6 +16,14 @@ import { LIVE_CONNECTOR_TYPES, type IndexingConfigState } from "../../constants/
|
|||
import { getConnectorDisplayName } from "../../tabs/all-connectors-tab";
|
||||
import { getConnectorConfigComponent } from "../index";
|
||||
|
||||
const VISION_LLM_CONNECTOR_TYPES = new Set<string>([
|
||||
"GOOGLE_DRIVE_CONNECTOR",
|
||||
"COMPOSIO_GOOGLE_DRIVE_CONNECTOR",
|
||||
"DROPBOX_CONNECTOR",
|
||||
"ONEDRIVE_CONNECTOR",
|
||||
"OBSIDIAN_CONNECTOR",
|
||||
]);
|
||||
|
||||
interface IndexingConfigurationViewProps {
|
||||
config: IndexingConfigState;
|
||||
connector?: SearchSourceConnector;
|
||||
|
|
@ -65,6 +74,9 @@ export const IndexingConfigurationView: FC<IndexingConfigurationViewProps> = ({
|
|||
() => (connector ? getConnectorConfigComponent(connector.connector_type) : null),
|
||||
[connector]
|
||||
);
|
||||
const showsAiToggles =
|
||||
(connector?.is_indexable ?? false) ||
|
||||
connector?.connector_type === EnumConnectorName.OBSIDIAN_CONNECTOR;
|
||||
const [isScrolled, setIsScrolled] = useState(false);
|
||||
const [hasMoreContent, setHasMoreContent] = useState(false);
|
||||
const scrollContainerRef = useRef<HTMLDivElement>(null);
|
||||
|
|
@ -161,25 +173,23 @@ export const IndexingConfigurationView: FC<IndexingConfigurationViewProps> = ({
|
|||
<ConnectorConfigComponent connector={connector} onConfigChange={onConfigChange} />
|
||||
)}
|
||||
|
||||
{/* Summary and sync settings - hidden for live connectors */}
|
||||
{connector?.is_indexable && !isLive && (
|
||||
{/* Summary + vision toggles (Obsidian is plugin-push, non-indexable by design) */}
|
||||
{showsAiToggles && !isLive && (
|
||||
<>
|
||||
{/* AI Summary toggle */}
|
||||
<SummaryConfig enabled={enableSummary} onEnabledChange={onEnableSummaryChange} />
|
||||
|
||||
{/* Vision LLM toggle - only for file-based connectors */}
|
||||
{(config.connectorType === "GOOGLE_DRIVE_CONNECTOR" ||
|
||||
config.connectorType === "COMPOSIO_GOOGLE_DRIVE_CONNECTOR" ||
|
||||
config.connectorType === "DROPBOX_CONNECTOR" ||
|
||||
config.connectorType === "ONEDRIVE_CONNECTOR") && (
|
||||
{/* Vision LLM toggle for file/attachment connectors */}
|
||||
{VISION_LLM_CONNECTOR_TYPES.has(config.connectorType) && (
|
||||
<VisionLLMConfig
|
||||
enabled={enableVisionLlm}
|
||||
onEnabledChange={onEnableVisionLlmChange}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Date range selector - not shown for file-based connectors (Drive, Dropbox, OneDrive), Webcrawler, GitHub, or Local Folder */}
|
||||
{config.connectorType !== "GOOGLE_DRIVE_CONNECTOR" &&
|
||||
{/* Date-range and periodic sync stay indexable-only */}
|
||||
{connector?.is_indexable &&
|
||||
config.connectorType !== "GOOGLE_DRIVE_CONNECTOR" &&
|
||||
config.connectorType !== "COMPOSIO_GOOGLE_DRIVE_CONNECTOR" &&
|
||||
config.connectorType !== "DROPBOX_CONNECTOR" &&
|
||||
config.connectorType !== "ONEDRIVE_CONNECTOR" &&
|
||||
|
|
@ -199,7 +209,8 @@ export const IndexingConfigurationView: FC<IndexingConfigurationViewProps> = ({
|
|||
/>
|
||||
)}
|
||||
|
||||
{config.connectorType !== "GOOGLE_DRIVE_CONNECTOR" &&
|
||||
{connector?.is_indexable &&
|
||||
config.connectorType !== "GOOGLE_DRIVE_CONNECTOR" &&
|
||||
config.connectorType !== "COMPOSIO_GOOGLE_DRIVE_CONNECTOR" &&
|
||||
config.connectorType !== "DROPBOX_CONNECTOR" &&
|
||||
config.connectorType !== "ONEDRIVE_CONNECTOR" && (
|
||||
|
|
|
|||
|
|
@ -200,7 +200,7 @@ export const OTHER_CONNECTORS = [
|
|||
{
|
||||
id: "obsidian-connector",
|
||||
title: "Obsidian",
|
||||
description: "Index your Obsidian vault (Local folder scan on Desktop)",
|
||||
description: "Sync your Obsidian vault on desktop or mobile",
|
||||
connectorType: EnumConnectorName.OBSIDIAN_CONNECTOR,
|
||||
},
|
||||
] as const;
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import { format } from "date-fns";
|
||||
import { useAtom, useAtomValue, useSetAtom } from "jotai";
|
||||
import { useAtom, useAtomValue } from "jotai";
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { toast } from "sonner";
|
||||
import { connectorDialogOpenAtom } from "@/atoms/connector-dialog/connector-dialog.atoms";
|
||||
|
|
@ -10,17 +10,11 @@ import {
|
|||
updateConnectorMutationAtom,
|
||||
} from "@/atoms/connectors/connector-mutation.atoms";
|
||||
import { connectorsAtom } from "@/atoms/connectors/connector-query.atoms";
|
||||
import {
|
||||
folderWatchDialogOpenAtom,
|
||||
folderWatchInitialFolderAtom,
|
||||
} from "@/atoms/folder-sync/folder-sync.atoms";
|
||||
import { activeSearchSpaceIdAtom } from "@/atoms/search-spaces/search-space-query.atoms";
|
||||
import { EnumConnectorName } from "@/contracts/enums/connector";
|
||||
import type { SearchSourceConnector } from "@/contracts/types/connector.types";
|
||||
import { searchSourceConnector } from "@/contracts/types/connector.types";
|
||||
import { usePlatform } from "@/hooks/use-platform";
|
||||
import { authenticatedFetch } from "@/lib/auth-utils";
|
||||
import { isSelfHosted } from "@/lib/env-config";
|
||||
import {
|
||||
trackConnectorConnected,
|
||||
trackConnectorDeleted,
|
||||
|
|
@ -71,10 +65,6 @@ export const useConnectorDialog = () => {
|
|||
const { mutateAsync: updateConnector } = useAtomValue(updateConnectorMutationAtom);
|
||||
const { mutateAsync: deleteConnector } = useAtomValue(deleteConnectorMutationAtom);
|
||||
const { mutateAsync: createConnector } = useAtomValue(createConnectorMutationAtom);
|
||||
const setFolderWatchOpen = useSetAtom(folderWatchDialogOpenAtom);
|
||||
const setFolderWatchInitialFolder = useSetAtom(folderWatchInitialFolderAtom);
|
||||
const { isDesktop } = usePlatform();
|
||||
const selfHosted = isSelfHosted();
|
||||
|
||||
// Use global atom for dialog open state so it can be controlled from anywhere
|
||||
const [isOpen, setIsOpen] = useAtom(connectorDialogOpenAtom);
|
||||
|
|
@ -439,6 +429,7 @@ export const useConnectorDialog = () => {
|
|||
indexing_frequency_minutes: null,
|
||||
next_scheduled_at: null,
|
||||
enable_summary: false,
|
||||
enable_vision_llm: false,
|
||||
},
|
||||
queryParams: {
|
||||
search_space_id: searchSpaceId,
|
||||
|
|
@ -487,31 +478,16 @@ export const useConnectorDialog = () => {
|
|||
}
|
||||
}, [searchSpaceId, createConnector, refetchAllConnectors, setIsOpen]);
|
||||
|
||||
// Handle connecting non-OAuth connectors (like Tavily API)
|
||||
// Handle connecting non-OAuth connectors (like Tavily API, Obsidian plugin, etc.)
|
||||
const handleConnectNonOAuth = useCallback(
|
||||
(connectorType: string) => {
|
||||
if (!searchSpaceId) return;
|
||||
|
||||
trackConnectorSetupStarted(Number(searchSpaceId), connectorType, "non_oauth_click");
|
||||
|
||||
// Handle Obsidian specifically on Desktop & Cloud
|
||||
if (connectorType === EnumConnectorName.OBSIDIAN_CONNECTOR && !selfHosted && isDesktop) {
|
||||
setIsOpen(false);
|
||||
setFolderWatchInitialFolder(null);
|
||||
setFolderWatchOpen(true);
|
||||
return;
|
||||
}
|
||||
|
||||
setConnectingConnectorType(connectorType);
|
||||
},
|
||||
[
|
||||
searchSpaceId,
|
||||
selfHosted,
|
||||
isDesktop,
|
||||
setIsOpen,
|
||||
setFolderWatchOpen,
|
||||
setFolderWatchInitialFolder,
|
||||
]
|
||||
[searchSpaceId]
|
||||
);
|
||||
|
||||
// Handle submitting connect form
|
||||
|
|
@ -555,6 +531,7 @@ export const useConnectorDialog = () => {
|
|||
is_active: true,
|
||||
next_scheduled_at: connectorData.next_scheduled_at as string | null,
|
||||
enable_summary: false,
|
||||
enable_vision_llm: false,
|
||||
},
|
||||
queryParams: {
|
||||
search_space_id: searchSpaceId,
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
"use client";
|
||||
|
||||
import { useAtomValue, useSetAtom } from "jotai";
|
||||
import { MessageSquare } from "lucide-react";
|
||||
import { MessageCircleReply } from "lucide-react";
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import { clearTargetCommentIdAtom, targetCommentIdAtom } from "@/atoms/chat/current-thread.atom";
|
||||
import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar";
|
||||
|
|
@ -216,7 +216,7 @@ export function CommentItem({
|
|||
className="mt-1 h-7 w-fit px-2 text-xs text-muted-foreground hover:text-foreground"
|
||||
onClick={() => onReply(comment.id)}
|
||||
>
|
||||
<MessageSquare className="mr-1 size-3" />
|
||||
<MessageCircleReply className="mr-1 size-3" />
|
||||
Reply
|
||||
</Button>
|
||||
)}
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
"use client";
|
||||
|
||||
import { MessageSquare } from "lucide-react";
|
||||
import { MessageCircleReply } from "lucide-react";
|
||||
import {
|
||||
Drawer,
|
||||
DrawerContent,
|
||||
|
|
@ -30,7 +30,7 @@ export function CommentSheet({
|
|||
<DrawerHandle />
|
||||
<DrawerHeader className="px-4 pb-3 pt-2">
|
||||
<DrawerTitle className="flex items-center gap-2 text-base font-semibold">
|
||||
<MessageSquare className="size-5" />
|
||||
<MessageCircleReply className="size-5" />
|
||||
Comments
|
||||
{commentCount > 0 && (
|
||||
<span className="rounded-full bg-primary/10 px-2 py-0.5 text-xs font-medium text-primary">
|
||||
|
|
@ -56,7 +56,7 @@ export function CommentSheet({
|
|||
>
|
||||
<SheetHeader className="flex-shrink-0 px-4 py-4">
|
||||
<SheetTitle className="flex items-center gap-2 text-base font-semibold">
|
||||
<MessageSquare className="size-5" />
|
||||
<MessageCircleReply className="size-5" />
|
||||
Comments
|
||||
{commentCount > 0 && (
|
||||
<span className="rounded-full bg-primary/10 px-2 py-0.5 text-xs font-medium text-primary">
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
"use client";
|
||||
|
||||
import { ChevronDown, ChevronRight, MessageSquare } from "lucide-react";
|
||||
import { ChevronDown, ChevronRight, MessageCircleReply } from "lucide-react";
|
||||
import { useState } from "react";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { CommentComposer } from "../comment-composer/comment-composer";
|
||||
|
|
@ -143,7 +143,7 @@ export function CommentThread({
|
|||
</div>
|
||||
) : (
|
||||
<Button variant="ghost" size="sm" className="h-7 px-2 text-xs" onClick={handleReply}>
|
||||
<MessageSquare className="mr-1 size-3" />
|
||||
<MessageCircleReply className="mr-1 size-3" />
|
||||
Reply
|
||||
</Button>
|
||||
)}
|
||||
|
|
@ -155,7 +155,7 @@ export function CommentThread({
|
|||
{!hasReplies && !isReplyComposerOpen && (
|
||||
<div className="ml-7 mt-1">
|
||||
<Button variant="ghost" size="sm" className="h-7 px-2 text-xs" onClick={handleReply}>
|
||||
<MessageSquare className="mr-1 size-3" />
|
||||
<MessageCircleReply className="mr-1 size-3" />
|
||||
Reply
|
||||
</Button>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -84,7 +84,7 @@ export function DocumentsFilters({
|
|||
<TooltipTrigger asChild>
|
||||
<ToggleGroupItem
|
||||
value="folder"
|
||||
className="h-9 w-9 shrink-0 border-sidebar-border text-muted-foreground hover:text-foreground hover:border-sidebar-border bg-sidebar"
|
||||
className="h-9 w-9 shrink-0 border bg-muted/50 text-muted-foreground transition-colors hover:bg-muted/80 hover:text-foreground"
|
||||
onClick={(e) => {
|
||||
e.preventDefault();
|
||||
onCreateFolder();
|
||||
|
|
@ -104,11 +104,11 @@ export function DocumentsFilters({
|
|||
value="ai-sort"
|
||||
disabled={aiSortBusy}
|
||||
className={cn(
|
||||
"h-9 w-9 shrink-0 border-sidebar-border bg-sidebar",
|
||||
"h-9 w-9 shrink-0 border bg-muted/50 transition-colors",
|
||||
"disabled:pointer-events-none disabled:opacity-50",
|
||||
aiSortEnabled
|
||||
? "bg-accent text-accent-foreground"
|
||||
: "text-muted-foreground hover:text-foreground hover:border-sidebar-border"
|
||||
? "bg-accent text-accent-foreground hover:bg-accent"
|
||||
: "text-muted-foreground hover:bg-muted/80 hover:text-foreground"
|
||||
)}
|
||||
onClick={(e) => {
|
||||
e.preventDefault();
|
||||
|
|
@ -142,11 +142,11 @@ export function DocumentsFilters({
|
|||
<PopoverTrigger asChild>
|
||||
<ToggleGroupItem
|
||||
value="filter"
|
||||
className="relative h-9 w-9 shrink-0 border-sidebar-border text-muted-foreground hover:text-foreground hover:border-sidebar-border bg-sidebar overflow-visible"
|
||||
className="relative h-9 w-9 shrink-0 border bg-muted/50 text-muted-foreground transition-colors hover:bg-muted/80 hover:text-foreground overflow-visible"
|
||||
>
|
||||
<ListFilter size={14} />
|
||||
{activeTypes.length > 0 && (
|
||||
<span className="absolute -top-1 -right-1 flex h-4 w-4 items-center justify-center rounded-full bg-sidebar-border text-[9px] font-medium text-sidebar-foreground">
|
||||
<span className="absolute -top-1 -right-1 flex h-4 w-4 items-center justify-center rounded-full bg-neutral-300 text-[9px] font-medium text-neutral-700 dark:bg-neutral-700 dark:text-neutral-200">
|
||||
{activeTypes.length}
|
||||
</span>
|
||||
)}
|
||||
|
|
@ -226,13 +226,13 @@ export function DocumentsFilters({
|
|||
|
||||
{/* Search Input */}
|
||||
<div className="relative flex-1 min-w-0">
|
||||
<div className="pointer-events-none absolute inset-y-0 left-0 flex items-center pl-3 text-muted-foreground">
|
||||
<div className="pointer-events-none absolute inset-y-0 left-0 flex items-center pl-3">
|
||||
<Search size={14} aria-hidden="true" />
|
||||
</div>
|
||||
<Input
|
||||
id={`${id}-input`}
|
||||
ref={inputRef}
|
||||
className="peer h-9 w-full pl-9 pr-9 text-sm bg-sidebar border-border/60 select-none focus:select-text"
|
||||
className="h-9 w-full pl-9 pr-8 text-sm select-none focus:select-text"
|
||||
value={searchValue}
|
||||
onChange={(e) => onSearch(e.target.value)}
|
||||
placeholder="Search docs"
|
||||
|
|
@ -242,7 +242,7 @@ export function DocumentsFilters({
|
|||
{Boolean(searchValue) && (
|
||||
<button
|
||||
type="button"
|
||||
className="absolute inset-y-0 right-0 flex h-full w-9 items-center justify-center rounded-r-md text-muted-foreground hover:text-foreground transition-colors"
|
||||
className="absolute right-1 top-1/2 -translate-y-1/2 inline-flex h-6 w-6 items-center justify-center rounded-sm text-muted-foreground hover:bg-accent hover:text-accent-foreground transition-colors"
|
||||
aria-label="Clear filter"
|
||||
onClick={() => {
|
||||
onSearch("");
|
||||
|
|
@ -260,7 +260,7 @@ export function DocumentsFilters({
|
|||
onClick={handleUpload}
|
||||
variant="outline"
|
||||
size="sm"
|
||||
className="h-9 shrink-0 gap-1.5 bg-white text-gray-700 border-white hover:bg-gray-50 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100"
|
||||
className="h-9 shrink-0 gap-1.5 border-0 shadow-none bg-white text-gray-700 hover:bg-gray-50 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100"
|
||||
>
|
||||
<Upload size={14} />
|
||||
<span>Upload</span>
|
||||
|
|
|
|||
|
|
@ -379,7 +379,7 @@ export function EditorPanelContent({
|
|||
</div>
|
||||
</div>
|
||||
<div className="flex h-10 items-center justify-between gap-2 border-t px-4">
|
||||
<div className="min-w-0 flex-1">
|
||||
<div className="min-w-0 flex flex-1 items-center gap-2">
|
||||
<p className="truncate text-sm text-muted-foreground">{displayTitle}</p>
|
||||
</div>
|
||||
<div className="flex items-center gap-1 shrink-0">
|
||||
|
|
@ -410,6 +410,12 @@ export function EditorPanelContent({
|
|||
</>
|
||||
) : (
|
||||
<>
|
||||
{!isLocalFileMode && editorDoc?.document_type && documentId && (
|
||||
<VersionHistoryButton
|
||||
documentId={documentId}
|
||||
documentType={editorDoc.document_type}
|
||||
/>
|
||||
)}
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
|
|
@ -441,15 +447,12 @@ export function EditorPanelContent({
|
|||
)}
|
||||
</>
|
||||
)}
|
||||
{!showEditingActions && !isLocalFileMode && editorDoc?.document_type && documentId && (
|
||||
<VersionHistoryButton documentId={documentId} documentType={editorDoc.document_type} />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<div className="flex h-14 items-center justify-between border-b px-4 shrink-0">
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="flex flex-1 min-w-0 items-center gap-2">
|
||||
<h2 className="text-sm font-semibold truncate">{displayTitle}</h2>
|
||||
</div>
|
||||
<div className="flex items-center gap-1 shrink-0">
|
||||
|
|
@ -480,6 +483,12 @@ export function EditorPanelContent({
|
|||
</>
|
||||
) : (
|
||||
<>
|
||||
{!isLocalFileMode && editorDoc?.document_type && documentId && (
|
||||
<VersionHistoryButton
|
||||
documentId={documentId}
|
||||
documentType={editorDoc.document_type}
|
||||
/>
|
||||
)}
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
|
|
@ -509,12 +518,6 @@ export function EditorPanelContent({
|
|||
<span className="sr-only">Edit document</span>
|
||||
</Button>
|
||||
)}
|
||||
{!isLocalFileMode && editorDoc?.document_type && documentId && (
|
||||
<VersionHistoryButton
|
||||
documentId={documentId}
|
||||
documentType={editorDoc.document_type}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
|
|
@ -559,7 +562,7 @@ export function EditorPanelContent({
|
|||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
className="shrink-0 gap-1.5"
|
||||
className="relative shrink-0"
|
||||
disabled={downloading}
|
||||
onClick={async () => {
|
||||
setDownloading(true);
|
||||
|
|
@ -591,12 +594,13 @@ export function EditorPanelContent({
|
|||
}
|
||||
}}
|
||||
>
|
||||
{downloading ? (
|
||||
<Spinner size="xs" />
|
||||
) : (
|
||||
<span
|
||||
className={`flex items-center gap-1.5 ${downloading ? "opacity-0" : ""}`}
|
||||
>
|
||||
<Download className="size-3.5" />
|
||||
)}
|
||||
{downloading ? "Preparing..." : "Download .md"}
|
||||
Download .md
|
||||
</span>
|
||||
{downloading && <Spinner size="sm" className="absolute" />}
|
||||
</Button>
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
|
|
|
|||
|
|
@ -1829,10 +1829,13 @@ function AnonymousDocumentsSidebar({
|
|||
type="button"
|
||||
onClick={handleAnonUploadClick}
|
||||
disabled={isUploading}
|
||||
className="flex w-full items-center justify-center gap-2 rounded-lg border-2 border-dashed border-primary/30 px-4 py-6 text-sm text-primary transition-colors hover:border-primary/60 hover:bg-primary/5 cursor-pointer disabled:opacity-50 disabled:pointer-events-none"
|
||||
className="relative flex w-full items-center justify-center rounded-lg border-2 border-dashed border-primary/30 px-4 py-6 text-sm text-primary transition-colors hover:border-primary/60 hover:bg-primary/5 cursor-pointer disabled:opacity-50 disabled:pointer-events-none"
|
||||
>
|
||||
<Upload className="size-4" />
|
||||
{isUploading ? "Uploading..." : "Upload a document"}
|
||||
<span className={`flex items-center gap-2 ${isUploading ? "opacity-0" : ""}`}>
|
||||
<Upload className="size-4" />
|
||||
Upload a document
|
||||
</span>
|
||||
{isUploading && <Spinner size="sm" className="absolute" />}
|
||||
</button>
|
||||
<p className="mt-2 text-[11px] text-muted-foreground leading-relaxed">
|
||||
Text, code, CSV, and HTML files only. Create an account for PDFs, images, and 30+
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ import {
|
|||
Inbox,
|
||||
LayoutGrid,
|
||||
ListFilter,
|
||||
MessageSquare,
|
||||
MessageCircleReply,
|
||||
Search,
|
||||
X,
|
||||
} from "lucide-react";
|
||||
|
|
@ -847,7 +847,7 @@ export function InboxSidebarContent({
|
|||
<TabsList stretch showBottomBorder size="sm">
|
||||
<TabsTrigger value="comments">
|
||||
<span className="inline-flex items-center gap-1.5">
|
||||
<MessageSquare className="h-4 w-4" />
|
||||
<MessageCircleReply className="h-4 w-4" />
|
||||
<span>{t("comments") || "Comments"}</span>
|
||||
<span className="inline-flex items-center justify-center min-w-5 h-5 px-1.5 rounded-full bg-primary/20 text-muted-foreground text-xs font-medium">
|
||||
{formatInboxCount(comments.unreadCount)}
|
||||
|
|
@ -1032,7 +1032,7 @@ export function InboxSidebarContent({
|
|||
) : (
|
||||
<div className="text-center py-8">
|
||||
{activeTab === "comments" ? (
|
||||
<MessageSquare className="h-12 w-12 mx-auto text-muted-foreground mb-3" />
|
||||
<MessageCircleReply className="h-12 w-12 mx-auto text-muted-foreground mb-3" />
|
||||
) : (
|
||||
<History className="h-12 w-12 mx-auto text-muted-foreground mb-3" />
|
||||
)}
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
"use client";
|
||||
|
||||
import { Download, FileQuestionMark, FileText, Loader2, Pencil, RefreshCw } from "lucide-react";
|
||||
import { Download, FileQuestionMark, FileText, Pencil, RefreshCw } from "lucide-react";
|
||||
import { useRouter } from "next/navigation";
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { toast } from "sonner";
|
||||
|
|
@ -8,6 +8,7 @@ import { PlateEditor } from "@/components/editor/plate-editor";
|
|||
import { MarkdownViewer } from "@/components/markdown-viewer";
|
||||
import { Alert, AlertDescription } from "@/components/ui/alert";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Spinner } from "@/components/ui/spinner";
|
||||
import { authenticatedFetch, getBearerToken, redirectToLogin } from "@/lib/auth-utils";
|
||||
|
||||
const LARGE_DOCUMENT_THRESHOLD = 2 * 1024 * 1024; // 2MB
|
||||
|
|
@ -278,7 +279,7 @@ export function DocumentTabContent({ documentId, searchSpaceId, title }: Documen
|
|||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
className="shrink-0 gap-1.5"
|
||||
className="relative shrink-0"
|
||||
disabled={downloading}
|
||||
onClick={async () => {
|
||||
setDownloading(true);
|
||||
|
|
@ -307,12 +308,13 @@ export function DocumentTabContent({ documentId, searchSpaceId, title }: Documen
|
|||
}
|
||||
}}
|
||||
>
|
||||
{downloading ? (
|
||||
<Loader2 className="size-3.5 animate-spin" />
|
||||
) : (
|
||||
<span
|
||||
className={`flex items-center gap-1.5 ${downloading ? "opacity-0" : ""}`}
|
||||
>
|
||||
<Download className="size-3.5" />
|
||||
)}
|
||||
{downloading ? "Preparing..." : "Download .md"}
|
||||
Download .md
|
||||
</span>
|
||||
{downloading && <Spinner size="sm" className="absolute" />}
|
||||
</Button>
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ import {
|
|||
ChevronLeft,
|
||||
ChevronRight,
|
||||
ChevronUp,
|
||||
Edit3,
|
||||
Pencil,
|
||||
ImageIcon,
|
||||
Layers,
|
||||
Plus,
|
||||
|
|
@ -320,6 +320,30 @@ export function ModelSelector({
|
|||
[isMobile]
|
||||
);
|
||||
|
||||
const scrollProviderSidebar = useCallback(
|
||||
(direction: "backward" | "forward") => {
|
||||
const el = providerSidebarRef.current;
|
||||
if (!el) return;
|
||||
const delta = isMobile
|
||||
? Math.max(56, Math.floor(el.clientWidth * 0.5))
|
||||
: Math.max(44, Math.floor(el.clientHeight * 0.4));
|
||||
|
||||
if (isMobile) {
|
||||
el.scrollBy({
|
||||
left: direction === "backward" ? -delta : delta,
|
||||
behavior: "smooth",
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
el.scrollBy({
|
||||
top: direction === "backward" ? -delta : delta,
|
||||
behavior: "smooth",
|
||||
});
|
||||
},
|
||||
[isMobile]
|
||||
);
|
||||
|
||||
// Cmd/Ctrl+M shortcut (desktop only)
|
||||
useEffect(() => {
|
||||
if (isMobile) return;
|
||||
|
|
@ -716,17 +740,40 @@ export function ModelSelector({
|
|||
return (
|
||||
<div
|
||||
className={cn(
|
||||
"shrink-0 border-border/50 flex",
|
||||
isMobile ? "flex-row items-center border-b border-border/40" : "flex-col w-10 border-r"
|
||||
"shrink-0 border-border/50 flex relative",
|
||||
isMobile
|
||||
? "flex-row items-center border-b border-border/40"
|
||||
: "flex-col w-10 border-r"
|
||||
)}
|
||||
>
|
||||
{!isMobile && sidebarScrollPos !== "top" && (
|
||||
<div className="flex items-center justify-center py-0.5 pointer-events-none">
|
||||
<ChevronUp className="size-3 text-muted-foreground" />
|
||||
{!isMobile && (
|
||||
<div
|
||||
className={cn(
|
||||
"absolute top-0 left-0 right-0 z-10 h-5 flex items-center justify-center transition-all duration-200 ease-out",
|
||||
sidebarScrollPos === "top"
|
||||
? "opacity-0 -translate-y-1 pointer-events-none"
|
||||
: "opacity-100 translate-y-0 pointer-events-auto"
|
||||
)}
|
||||
>
|
||||
<button
|
||||
type="button"
|
||||
aria-label="Scroll providers up"
|
||||
onClick={() => scrollProviderSidebar("backward")}
|
||||
className="flex h-4 w-4 items-center justify-center rounded-sm text-muted-foreground/90 hover:text-foreground hover:bg-accent/60 transition-colors"
|
||||
>
|
||||
<ChevronUp className="size-3" />
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
{isMobile && sidebarScrollPos !== "top" && (
|
||||
<div className="flex items-center justify-center px-0.5 shrink-0 pointer-events-none">
|
||||
{isMobile && (
|
||||
<div
|
||||
className={cn(
|
||||
"absolute left-0 top-0 bottom-0 z-10 w-5 flex items-center justify-center transition-all duration-200 ease-out pointer-events-none",
|
||||
sidebarScrollPos === "top"
|
||||
? "opacity-0 -translate-x-1"
|
||||
: "opacity-100 translate-x-0"
|
||||
)}
|
||||
>
|
||||
<ChevronLeft className="size-3 text-muted-foreground" />
|
||||
</div>
|
||||
)}
|
||||
|
|
@ -802,13 +849,34 @@ export function ModelSelector({
|
|||
);
|
||||
})}
|
||||
</div>
|
||||
{!isMobile && sidebarScrollPos !== "bottom" && (
|
||||
<div className="flex items-center justify-center py-0.5 pointer-events-none">
|
||||
<ChevronDown className="size-3 text-muted-foreground" />
|
||||
{!isMobile && (
|
||||
<div
|
||||
className={cn(
|
||||
"absolute bottom-0 left-0 right-0 z-10 h-5 flex items-center justify-center transition-all duration-200 ease-out",
|
||||
sidebarScrollPos === "bottom"
|
||||
? "opacity-0 translate-y-1 pointer-events-none"
|
||||
: "opacity-100 translate-y-0 pointer-events-auto"
|
||||
)}
|
||||
>
|
||||
<button
|
||||
type="button"
|
||||
aria-label="Scroll providers down"
|
||||
onClick={() => scrollProviderSidebar("forward")}
|
||||
className="flex h-4 w-4 items-center justify-center rounded-sm text-muted-foreground/90 hover:text-foreground hover:bg-accent/60 transition-colors"
|
||||
>
|
||||
<ChevronDown className="size-3" />
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
{isMobile && sidebarScrollPos !== "bottom" && (
|
||||
<div className="flex items-center justify-center px-0.5 shrink-0 pointer-events-none">
|
||||
{isMobile && (
|
||||
<div
|
||||
className={cn(
|
||||
"absolute right-0 top-0 bottom-0 z-10 w-5 flex items-center justify-center transition-all duration-200 ease-out pointer-events-none",
|
||||
sidebarScrollPos === "bottom"
|
||||
? "opacity-0 translate-x-1"
|
||||
: "opacity-100 translate-x-0"
|
||||
)}
|
||||
>
|
||||
<ChevronRight className="size-3 text-muted-foreground" />
|
||||
</div>
|
||||
)}
|
||||
|
|
@ -923,7 +991,7 @@ export function ModelSelector({
|
|||
className="size-7 rounded-md hover:bg-muted opacity-0 group-hover:opacity-100 transition-opacity"
|
||||
onClick={(e) => handleEditItem(e, item)}
|
||||
>
|
||||
<Edit3 className="size-3.5 text-muted-foreground" />
|
||||
<Pencil className="size-3.5 text-muted-foreground" />
|
||||
</Button>
|
||||
)}
|
||||
{isSelected && <Check className="size-4 text-primary shrink-0" />}
|
||||
|
|
|
|||
|
|
@ -79,8 +79,11 @@ export function PublicChatSnapshotRow({
|
|||
variant="ghost"
|
||||
size="icon"
|
||||
className={cn(
|
||||
"absolute right-0 h-6 w-6 shrink-0 hover:bg-transparent",
|
||||
dropdownOpen ? "opacity-100" : "sm:opacity-0 sm:group-hover:opacity-100"
|
||||
"absolute right-0 h-6 w-6 shrink-0",
|
||||
"hover:bg-accent",
|
||||
dropdownOpen
|
||||
? "opacity-100 bg-accent hover:bg-accent"
|
||||
: "sm:opacity-0 sm:group-hover:opacity-100"
|
||||
)}
|
||||
>
|
||||
<MoreHorizontal className="h-3.5 w-3.5 text-muted-foreground" />
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
import { ZoomInIcon, ZoomOutIcon } from "lucide-react";
|
||||
import type { PDFDocumentProxy, RenderTask } from "pdfjs-dist";
|
||||
import * as pdfjsLib from "pdfjs-dist";
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { type ReactNode, useCallback, useEffect, useRef, useState } from "react";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Spinner } from "@/components/ui/spinner";
|
||||
import { getAuthHeaders } from "@/lib/auth-utils";
|
||||
|
|
@ -16,6 +16,8 @@ pdfjsLib.GlobalWorkerOptions.workerSrc = new URL(
|
|||
interface PdfViewerProps {
|
||||
pdfUrl: string;
|
||||
isPublic?: boolean;
|
||||
/** Extra actions rendered on the right side of the zoom toolbar (e.g. download, version switcher) */
|
||||
toolbarActions?: ReactNode;
|
||||
}
|
||||
|
||||
interface PageDimensions {
|
||||
|
|
@ -30,7 +32,7 @@ const PAGE_GAP = 12;
|
|||
const SCROLL_DEBOUNCE_MS = 30;
|
||||
const BUFFER_PAGES = 1;
|
||||
|
||||
export function PdfViewer({ pdfUrl, isPublic = false }: PdfViewerProps) {
|
||||
export function PdfViewer({ pdfUrl, isPublic = false, toolbarActions }: PdfViewerProps) {
|
||||
const [numPages, setNumPages] = useState(0);
|
||||
const [scale, setScale] = useState(1);
|
||||
const [loading, setLoading] = useState(true);
|
||||
|
|
@ -286,29 +288,33 @@ export function PdfViewer({ pdfUrl, isPublic = false }: PdfViewerProps) {
|
|||
<div className="flex flex-col h-full">
|
||||
{numPages > 0 && (
|
||||
<div
|
||||
className={`flex items-center justify-center gap-2 px-4 py-2 border-b shrink-0 select-none ${isPublic ? "bg-main-panel" : "bg-sidebar"}`}
|
||||
className={`flex items-center px-4 py-2 border-b shrink-0 select-none ${isPublic ? "bg-main-panel" : "bg-sidebar"}`}
|
||||
>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
onClick={zoomOut}
|
||||
disabled={scale <= MIN_ZOOM}
|
||||
className="size-7"
|
||||
>
|
||||
<ZoomOutIcon className="size-4" />
|
||||
</Button>
|
||||
<span className="text-xs text-muted-foreground tabular-nums min-w-[40px] text-center">
|
||||
{Math.round(scale * 100)}%
|
||||
</span>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
onClick={zoomIn}
|
||||
disabled={scale >= MAX_ZOOM}
|
||||
className="size-7"
|
||||
>
|
||||
<ZoomInIcon className="size-4" />
|
||||
</Button>
|
||||
<div className="flex-1" aria-hidden="true" />
|
||||
<div className="flex items-center justify-center gap-2">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
onClick={zoomOut}
|
||||
disabled={scale <= MIN_ZOOM}
|
||||
className="size-7"
|
||||
>
|
||||
<ZoomOutIcon className="size-4" />
|
||||
</Button>
|
||||
<span className="text-xs text-muted-foreground tabular-nums min-w-[40px] text-center">
|
||||
{Math.round(scale * 100)}%
|
||||
</span>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
onClick={zoomIn}
|
||||
disabled={scale >= MAX_ZOOM}
|
||||
className="size-7"
|
||||
>
|
||||
<ZoomInIcon className="size-4" />
|
||||
</Button>
|
||||
</div>
|
||||
<div className="flex flex-1 items-center justify-end gap-1">{toolbarActions}</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
"use client";
|
||||
|
||||
import { useAtomValue, useSetAtom } from "jotai";
|
||||
import { Check, ChevronDownIcon, Copy, Pencil, XIcon } from "lucide-react";
|
||||
import { Check, ChevronDownIcon, Copy, Download, Pencil, XIcon } from "lucide-react";
|
||||
import dynamic from "next/dynamic";
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { toast } from "sonner";
|
||||
|
|
@ -309,6 +309,7 @@ export function ReportPanelContent({
|
|||
const isResume = reportContent?.content_type === "typst";
|
||||
const showReportEditingTier = !isResume;
|
||||
const hasUnsavedChanges = editedMarkdown !== null;
|
||||
const showDesktopHeader = !!onClose;
|
||||
|
||||
const handleCancelEditing = useCallback(() => {
|
||||
setEditedMarkdown(null);
|
||||
|
|
@ -316,153 +317,177 @@ export function ReportPanelContent({
|
|||
setIsEditing(false);
|
||||
}, []);
|
||||
|
||||
const exportButton = !isEditing && (
|
||||
<>
|
||||
{isResume ? (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
className="size-6"
|
||||
onClick={() => handleExport("pdf")}
|
||||
disabled={isLoading || !reportContent?.content || exporting !== null}
|
||||
>
|
||||
{exporting === "pdf" ? <Spinner size="xs" /> : <Download className="size-3.5" />}
|
||||
<span className="sr-only">Download report</span>
|
||||
</Button>
|
||||
) : (
|
||||
<DropdownMenu modal={insideDrawer ? false : undefined}>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
className="size-6"
|
||||
disabled={isLoading || !reportContent?.content}
|
||||
>
|
||||
<Download className="size-3.5" />
|
||||
<span className="sr-only">Export report</span>
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent
|
||||
align="end"
|
||||
className={`min-w-[200px] select-none${insideDrawer ? " z-[100]" : ""}`}
|
||||
>
|
||||
<ExportDropdownItems
|
||||
onExport={handleExport}
|
||||
exporting={exporting}
|
||||
showAllFormats={!shareToken}
|
||||
/>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
|
||||
const versionSwitcher = !isEditing && versions.length > 1 && (
|
||||
<DropdownMenu modal={insideDrawer ? false : undefined}>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button variant="ghost" size="sm" className="h-6 gap-1 px-1.5 text-xs">
|
||||
v{activeVersionIndex + 1}
|
||||
<ChevronDownIcon className="size-3" />
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent
|
||||
align="end"
|
||||
className={`min-w-[120px] select-none${insideDrawer ? " z-[100]" : ""}`}
|
||||
>
|
||||
{versions.map((v, i) => (
|
||||
<DropdownMenuItem
|
||||
key={v.id}
|
||||
onClick={() => setActiveReportId(v.id)}
|
||||
className={v.id === activeReportId ? "bg-accent font-medium" : ""}
|
||||
>
|
||||
Version {i + 1}
|
||||
</DropdownMenuItem>
|
||||
))}
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
);
|
||||
|
||||
const copyButton = !isEditing && showReportEditingTier && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
className="size-6"
|
||||
onClick={() => {
|
||||
void handleCopy();
|
||||
}}
|
||||
disabled={isLoading || !reportContent?.content}
|
||||
>
|
||||
{copied ? <Check className="size-3.5" /> : <Copy className="size-3.5" />}
|
||||
<span className="sr-only">{copied ? "Copied report content" : "Copy report content"}</span>
|
||||
</Button>
|
||||
);
|
||||
|
||||
const editingActions = showReportEditingTier &&
|
||||
!isReadOnly &&
|
||||
(isEditing ? (
|
||||
<>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="h-6 px-2 text-xs"
|
||||
onClick={handleCancelEditing}
|
||||
disabled={saving}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="sm"
|
||||
className="relative h-6 w-[56px] px-0 text-xs"
|
||||
onClick={async () => {
|
||||
const saveSucceeded = await handleSave();
|
||||
if (saveSucceeded) setIsEditing(false);
|
||||
}}
|
||||
disabled={saving || !hasUnsavedChanges}
|
||||
>
|
||||
<span className={saving ? "opacity-0" : ""}>Save</span>
|
||||
{saving && <Spinner size="xs" className="absolute" />}
|
||||
</Button>
|
||||
</>
|
||||
) : (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
className="size-6"
|
||||
onClick={() => {
|
||||
setEditedMarkdown(null);
|
||||
changeCountRef.current = 0;
|
||||
setIsEditing(true);
|
||||
}}
|
||||
>
|
||||
<Pencil className="size-3.5" />
|
||||
<span className="sr-only">Edit report</span>
|
||||
</Button>
|
||||
));
|
||||
|
||||
return (
|
||||
<>
|
||||
{/* Action bar — always visible; buttons are disabled while loading */}
|
||||
<div className="flex h-14 items-center justify-between px-4 shrink-0">
|
||||
<div className="flex items-center gap-2">
|
||||
{/* Export — plain button for resume (typst), dropdown for others */}
|
||||
{reportContent?.content_type === "typst" ? (
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={() => handleExport("pdf")}
|
||||
disabled={isLoading || !reportContent?.content || exporting !== null}
|
||||
className={`h-8 min-w-[100px] px-3.5 py-4 text-[15px] ${isPublic ? "bg-main-panel" : "bg-sidebar"} select-none`}
|
||||
>
|
||||
{exporting === "pdf" ? <Spinner size="xs" /> : "Download"}
|
||||
</Button>
|
||||
) : (
|
||||
<DropdownMenu modal={insideDrawer ? false : undefined}>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
disabled={isLoading || !reportContent?.content}
|
||||
className={`h-8 px-3.5 py-4 text-[15px] gap-1.5 ${isPublic ? "bg-main-panel" : "bg-sidebar"} select-none`}
|
||||
>
|
||||
Export
|
||||
<ChevronDownIcon className="size-3" />
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent
|
||||
align="start"
|
||||
className={`min-w-[200px] select-none${insideDrawer ? " z-[100]" : ""}`}
|
||||
>
|
||||
<ExportDropdownItems
|
||||
onExport={handleExport}
|
||||
exporting={exporting}
|
||||
showAllFormats={!shareToken}
|
||||
/>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
)}
|
||||
|
||||
{/* Version switcher — only shown when multiple versions exist */}
|
||||
{versions.length > 1 && (
|
||||
<DropdownMenu modal={insideDrawer ? false : undefined}>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
className={`h-8 px-3.5 py-4 text-[15px] gap-1.5 ${isPublic ? "bg-main-panel" : "bg-sidebar"} select-none`}
|
||||
>
|
||||
v{activeVersionIndex + 1}
|
||||
<ChevronDownIcon className="size-3" />
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent
|
||||
align="start"
|
||||
className={`min-w-[120px] select-none${insideDrawer ? " z-[100]" : ""}`}
|
||||
>
|
||||
{versions.map((v, i) => (
|
||||
<DropdownMenuItem
|
||||
key={v.id}
|
||||
onClick={() => setActiveReportId(v.id)}
|
||||
className={v.id === activeReportId ? "bg-accent font-medium" : ""}
|
||||
>
|
||||
Version {i + 1}
|
||||
</DropdownMenuItem>
|
||||
))}
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
)}
|
||||
</div>
|
||||
{onClose && (
|
||||
<Button variant="ghost" size="icon" onClick={onClose} className="size-7 shrink-0">
|
||||
<XIcon className="size-4" />
|
||||
<span className="sr-only">Close report panel</span>
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{showReportEditingTier && (
|
||||
<div className="flex h-10 items-center justify-between gap-2 border-t border-b px-4 shrink-0">
|
||||
<div className="min-w-0 flex-1">
|
||||
<p className="truncate text-sm text-muted-foreground">
|
||||
{reportContent?.title || title}
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex items-center gap-1 shrink-0">
|
||||
{!isEditing && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
className="size-6"
|
||||
onClick={() => {
|
||||
void handleCopy();
|
||||
}}
|
||||
disabled={isLoading || !reportContent?.content}
|
||||
>
|
||||
{copied ? <Check className="size-3.5" /> : <Copy className="size-3.5" />}
|
||||
<span className="sr-only">
|
||||
{copied ? "Copied report content" : "Copy report content"}
|
||||
</span>
|
||||
{showDesktopHeader ? (
|
||||
<>
|
||||
{/* Header — matches the editor panel "File" header pattern */}
|
||||
<div className="flex h-14 items-center justify-between px-4 shrink-0">
|
||||
<h2 className="text-lg font-medium text-muted-foreground select-none">
|
||||
{isResume ? "Resume" : "Report"}
|
||||
</h2>
|
||||
{onClose && (
|
||||
<Button variant="ghost" size="icon" onClick={onClose} className="size-7 shrink-0">
|
||||
<XIcon className="size-4" />
|
||||
<span className="sr-only">Close report panel</span>
|
||||
</Button>
|
||||
)}
|
||||
{!isReadOnly &&
|
||||
(isEditing ? (
|
||||
<>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="h-6 px-2 text-xs"
|
||||
onClick={handleCancelEditing}
|
||||
disabled={saving}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="sm"
|
||||
className="relative h-6 w-[56px] px-0 text-xs"
|
||||
onClick={async () => {
|
||||
const saveSucceeded = await handleSave();
|
||||
if (saveSucceeded) setIsEditing(false);
|
||||
}}
|
||||
disabled={saving || !hasUnsavedChanges}
|
||||
>
|
||||
<span className={saving ? "opacity-0" : ""}>Save</span>
|
||||
{saving && <Spinner size="xs" className="absolute" />}
|
||||
</Button>
|
||||
</>
|
||||
) : (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="icon"
|
||||
className="size-6"
|
||||
onClick={() => {
|
||||
setEditedMarkdown(null);
|
||||
changeCountRef.current = 0;
|
||||
setIsEditing(true);
|
||||
}}
|
||||
>
|
||||
<Pencil className="size-3.5" />
|
||||
<span className="sr-only">Edit report</span>
|
||||
</Button>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{!isResume && (
|
||||
<div className="flex h-10 items-center justify-between gap-2 border-t border-b px-4 shrink-0">
|
||||
<div className="min-w-0 flex-1">
|
||||
<p className="truncate text-sm text-muted-foreground">
|
||||
{reportContent?.title || title}
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex items-center gap-1 shrink-0">
|
||||
{versionSwitcher}
|
||||
{exportButton}
|
||||
{copyButton}
|
||||
{editingActions}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
) : (
|
||||
!isResume && (
|
||||
<div className="flex h-14 items-center justify-between border-b px-4 shrink-0">
|
||||
<div className="flex-1 min-w-0">
|
||||
<h2 className="text-sm font-semibold truncate">{reportContent?.title || title}</h2>
|
||||
</div>
|
||||
<div className="flex items-center gap-1 shrink-0">
|
||||
{versionSwitcher}
|
||||
{exportButton}
|
||||
{copyButton}
|
||||
{editingActions}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
)}
|
||||
|
||||
{/* Report content — skeleton/error/viewer/editor shown only in this area */}
|
||||
|
|
@ -480,6 +505,12 @@ export function ReportPanelContent({
|
|||
<PdfViewer
|
||||
pdfUrl={`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}${shareToken ? `/api/v1/public/${shareToken}/reports/${activeReportId}/preview` : `/api/v1/reports/${activeReportId}/preview`}`}
|
||||
isPublic={isPublic}
|
||||
toolbarActions={
|
||||
<>
|
||||
{versionSwitcher}
|
||||
{exportButton}
|
||||
</>
|
||||
}
|
||||
/>
|
||||
) : reportContent.content ? (
|
||||
isReadOnly ? (
|
||||
|
|
|
|||
|
|
@ -4,10 +4,9 @@ import { useAtomValue } from "jotai";
|
|||
import {
|
||||
AlertCircle,
|
||||
Dot,
|
||||
Edit3,
|
||||
FileText,
|
||||
Info,
|
||||
MessageSquareQuote,
|
||||
Pencil,
|
||||
RefreshCw,
|
||||
Trash2,
|
||||
} from "lucide-react";
|
||||
|
|
@ -288,7 +287,7 @@ export function AgentModelManager({ searchSpaceId }: AgentModelManagerProps) {
|
|||
onClick={() => openEditDialog(config)}
|
||||
className="h-7 w-7 rounded-lg text-muted-foreground hover:text-foreground"
|
||||
>
|
||||
<Edit3 className="h-3 w-3" />
|
||||
<Pencil className="h-3 w-3" />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>Edit</TooltipContent>
|
||||
|
|
@ -323,7 +322,6 @@ export function AgentModelManager({ searchSpaceId }: AgentModelManagerProps) {
|
|||
variant="secondary"
|
||||
className="text-[10px] px-1.5 py-0.5 border-0 text-muted-foreground bg-muted"
|
||||
>
|
||||
<MessageSquareQuote className="h-2.5 w-2.5 mr-1" />
|
||||
Citations
|
||||
</Badge>
|
||||
)}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
"use client";
|
||||
|
||||
import { useAtomValue } from "jotai";
|
||||
import { AlertCircle, Dot, Edit3, Info, RefreshCw, Trash2 } from "lucide-react";
|
||||
import { AlertCircle, Dot, Info, Pencil, RefreshCw, Trash2 } from "lucide-react";
|
||||
import { useMemo, useState } from "react";
|
||||
import { deleteImageGenConfigMutationAtom } from "@/atoms/image-gen-config/image-gen-config-mutation.atoms";
|
||||
import {
|
||||
|
|
@ -116,8 +116,8 @@ export function ImageModelManager({ searchSpaceId }: ImageModelManagerProps) {
|
|||
|
||||
return (
|
||||
<div className="space-y-4 md:space-y-6">
|
||||
{/* Header */}
|
||||
<div className="flex flex-col space-y-4 sm:flex-row sm:items-center sm:justify-between sm:space-y-0">
|
||||
{/* Header actions */}
|
||||
<div className="flex items-center justify-between">
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="sm"
|
||||
|
|
@ -284,7 +284,7 @@ export function ImageModelManager({ searchSpaceId }: ImageModelManagerProps) {
|
|||
onClick={() => openEditDialog(config)}
|
||||
className="h-7 w-7 rounded-lg text-muted-foreground hover:text-foreground"
|
||||
>
|
||||
<Edit3 className="h-3 w-3" />
|
||||
<Pencil className="h-3 w-3" />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>Edit</TooltipContent>
|
||||
|
|
|
|||
|
|
@ -4,21 +4,25 @@ import { useQuery } from "@tanstack/react-query";
|
|||
import { useAtomValue } from "jotai";
|
||||
import {
|
||||
Bot,
|
||||
ChevronDown,
|
||||
Edit2,
|
||||
ChevronRight,
|
||||
ScanEye,
|
||||
Pencil,
|
||||
FileText,
|
||||
Globe,
|
||||
Earth,
|
||||
Image,
|
||||
Logs,
|
||||
type LucideIcon,
|
||||
MessageCircle,
|
||||
MessageCircleReply,
|
||||
MessageSquare,
|
||||
Mic,
|
||||
MoreHorizontal,
|
||||
Plug,
|
||||
Unplug,
|
||||
Settings,
|
||||
Shield,
|
||||
SlidersHorizontal,
|
||||
Trash2,
|
||||
Users,
|
||||
Video,
|
||||
} from "lucide-react";
|
||||
import { useCallback, useEffect, useMemo, useState } from "react";
|
||||
import { toast } from "sonner";
|
||||
|
|
@ -88,7 +92,7 @@ const CATEGORY_CONFIG: Record<
|
|||
},
|
||||
comments: {
|
||||
label: "Comments",
|
||||
icon: MessageCircle,
|
||||
icon: MessageCircleReply,
|
||||
description: "Add annotations to documents",
|
||||
order: 3,
|
||||
},
|
||||
|
|
@ -98,6 +102,24 @@ const CATEGORY_CONFIG: Record<
|
|||
description: "Configure AI model settings",
|
||||
order: 4,
|
||||
},
|
||||
image_generations: {
|
||||
label: "Image Models",
|
||||
icon: Image,
|
||||
description: "Configure image generation model settings",
|
||||
order: 4.1,
|
||||
},
|
||||
vision_configs: {
|
||||
label: "Vision Models",
|
||||
icon: ScanEye,
|
||||
description: "Configure vision model settings",
|
||||
order: 4.2,
|
||||
},
|
||||
video_presentations: {
|
||||
label: "Video Presentations",
|
||||
icon: Video,
|
||||
description: "Generate and manage video presentations",
|
||||
order: 4.3,
|
||||
},
|
||||
podcasts: {
|
||||
label: "Podcasts",
|
||||
icon: Mic,
|
||||
|
|
@ -105,8 +127,8 @@ const CATEGORY_CONFIG: Record<
|
|||
order: 5,
|
||||
},
|
||||
connectors: {
|
||||
label: "Integrations",
|
||||
icon: Plug,
|
||||
label: "Connectors",
|
||||
icon: Unplug,
|
||||
description: "Connect external data sources",
|
||||
order: 6,
|
||||
},
|
||||
|
|
@ -136,10 +158,16 @@ const CATEGORY_CONFIG: Record<
|
|||
},
|
||||
public_sharing: {
|
||||
label: "Public Chat Sharing",
|
||||
icon: Globe,
|
||||
icon: Earth,
|
||||
description: "Share chats publicly via links",
|
||||
order: 11,
|
||||
},
|
||||
general: {
|
||||
label: "General",
|
||||
icon: SlidersHorizontal,
|
||||
description: "General search space permissions",
|
||||
order: 12,
|
||||
},
|
||||
};
|
||||
|
||||
const ACTION_LABELS: Record<string, string> = {
|
||||
|
|
@ -434,12 +462,11 @@ function RolesContent({
|
|||
|
||||
return (
|
||||
<div key={role.id} className="rounded-lg border border-border/60 overflow-hidden">
|
||||
<div className="flex items-center gap-4 p-4 transition-colors hover:bg-muted/30">
|
||||
<button
|
||||
type="button"
|
||||
className="flex-1 min-w-0 text-left cursor-pointer"
|
||||
onClick={() => setExpandedRoleId(isExpanded ? null : role.id)}
|
||||
>
|
||||
<div
|
||||
className="flex items-center gap-4 p-4 transition-colors hover:bg-muted/30 cursor-pointer"
|
||||
onClick={() => setExpandedRoleId(isExpanded ? null : role.id)}
|
||||
>
|
||||
<div className="flex-1 min-w-0 text-left">
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="font-medium text-sm">{role.name}</span>
|
||||
{role.is_system_role && (
|
||||
|
|
@ -458,14 +485,14 @@ function RolesContent({
|
|||
{role.description}
|
||||
</p>
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="shrink-0">
|
||||
<PermissionsBadge permissions={role.permissions} />
|
||||
</div>
|
||||
|
||||
{!role.is_system_role && (
|
||||
<div className="shrink-0" role="none">
|
||||
<div className="shrink-0" role="none" onClick={(e) => e.stopPropagation()}>
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button variant="ghost" size="icon" className="h-8 w-8">
|
||||
|
|
@ -475,7 +502,7 @@ function RolesContent({
|
|||
<DropdownMenuContent align="end" onCloseAutoFocus={(e) => e.preventDefault()}>
|
||||
{canUpdate && (
|
||||
<DropdownMenuItem onClick={() => setEditingRoleId(role.id)}>
|
||||
<Edit2 className="h-4 w-4 mr-2" />
|
||||
<Pencil className="h-4 w-4 mr-2" />
|
||||
Edit Role
|
||||
</DropdownMenuItem>
|
||||
)}
|
||||
|
|
@ -515,18 +542,14 @@ function RolesContent({
|
|||
</div>
|
||||
)}
|
||||
|
||||
<button
|
||||
type="button"
|
||||
className="shrink-0 p-1 cursor-pointer"
|
||||
onClick={() => setExpandedRoleId(isExpanded ? null : role.id)}
|
||||
>
|
||||
<ChevronDown
|
||||
<div className="shrink-0 p-1">
|
||||
<ChevronRight
|
||||
className={cn(
|
||||
"h-4 w-4 text-muted-foreground transition-transform duration-200",
|
||||
isExpanded && "rotate-180"
|
||||
isExpanded && "rotate-90"
|
||||
)}
|
||||
/>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{isExpanded && (
|
||||
|
|
@ -659,52 +682,30 @@ function PermissionsEditor({
|
|||
|
||||
return (
|
||||
<div key={category} className="rounded-lg border border-border/60 overflow-hidden">
|
||||
<div className="flex items-center justify-between px-3 py-2.5 hover:bg-muted/40 transition-colors">
|
||||
<button
|
||||
type="button"
|
||||
className="flex-1 flex items-center gap-2.5 cursor-pointer"
|
||||
onClick={() => toggleCategoryExpanded(category)}
|
||||
>
|
||||
<div
|
||||
className="flex items-center justify-between px-3 py-2.5 hover:bg-muted/40 transition-colors cursor-pointer"
|
||||
onClick={() => toggleCategoryExpanded(category)}
|
||||
>
|
||||
<div className="flex-1 flex items-center gap-2.5">
|
||||
<IconComponent className="h-4 w-4 text-muted-foreground shrink-0" />
|
||||
<span className="font-medium text-sm">{config.label}</span>
|
||||
<span className="text-[11px] text-muted-foreground tabular-nums">
|
||||
{stats.selected}/{stats.total}
|
||||
</span>
|
||||
</button>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<Checkbox
|
||||
checked={stats.allSelected}
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
onCheckedChange={() => onToggleCategory(category)}
|
||||
aria-label={`Select all ${config.label} permissions`}
|
||||
/>
|
||||
<button
|
||||
type="button"
|
||||
className="cursor-pointer"
|
||||
onClick={() => toggleCategoryExpanded(category)}
|
||||
>
|
||||
<div
|
||||
className={cn(
|
||||
"transition-transform duration-200",
|
||||
isExpanded && "rotate-180"
|
||||
)}
|
||||
>
|
||||
<svg
|
||||
className="h-4 w-4 text-muted-foreground"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke="currentColor"
|
||||
aria-hidden="true"
|
||||
>
|
||||
<title>Toggle</title>
|
||||
<path
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
strokeWidth={2}
|
||||
d="M19 9l-7 7-7-7"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
</button>
|
||||
<ChevronRight
|
||||
className={cn(
|
||||
"h-4 w-4 text-muted-foreground transition-transform duration-200",
|
||||
isExpanded && "rotate-90"
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
|
@ -726,7 +727,7 @@ function PermissionsEditor({
|
|||
>
|
||||
<button
|
||||
type="button"
|
||||
className="flex-1 min-w-0 text-left cursor-pointer"
|
||||
className="flex-1 min-w-0 text-left cursor-pointer focus:outline-none focus-visible:outline-none"
|
||||
onClick={() => onTogglePermission(perm.value)}
|
||||
>
|
||||
<span className="text-sm font-medium">{actionLabel}</span>
|
||||
|
|
@ -855,7 +856,8 @@ function CreateRoleDialog({
|
|||
type="button"
|
||||
onClick={() => applyPreset(key as keyof typeof ROLE_PRESETS)}
|
||||
className={cn(
|
||||
"p-3 rounded-lg border text-left transition-colors hover:bg-muted/40",
|
||||
"p-3 rounded-lg border transition-colors hover:bg-muted/40",
|
||||
"flex items-center justify-center text-center sm:block sm:text-left",
|
||||
selectedPermissions.length > 0 &&
|
||||
preset.permissions.every((p) => selectedPermissions.includes(p))
|
||||
? "border-foreground/30 bg-muted/40"
|
||||
|
|
@ -863,7 +865,7 @@ function CreateRoleDialog({
|
|||
)}
|
||||
>
|
||||
<span className="font-medium text-sm">{preset.name}</span>
|
||||
<p className="text-xs text-muted-foreground mt-0.5 line-clamp-2">
|
||||
<p className="hidden sm:block text-xs text-muted-foreground mt-0.5 line-clamp-2">
|
||||
{preset.description}
|
||||
</p>
|
||||
</button>
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
"use client";
|
||||
|
||||
import { useAtomValue } from "jotai";
|
||||
import { AlertCircle, Dot, Edit3, Info, RefreshCw, Trash2 } from "lucide-react";
|
||||
import { AlertCircle, Dot, Info, Pencil, RefreshCw, Trash2 } from "lucide-react";
|
||||
import { useMemo, useState } from "react";
|
||||
import { membersAtom, myAccessAtom } from "@/atoms/members/members-query.atoms";
|
||||
import { deleteVisionLLMConfigMutationAtom } from "@/atoms/vision-llm-config/vision-llm-config-mutation.atoms";
|
||||
|
|
@ -121,7 +121,7 @@ export function VisionModelManager({ searchSpaceId }: VisionModelManagerProps) {
|
|||
|
||||
return (
|
||||
<div className="space-y-4 md:space-y-6">
|
||||
<div className="flex flex-col space-y-4 sm:flex-row sm:items-center sm:justify-between sm:space-y-0">
|
||||
<div className="flex items-center justify-between">
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="sm"
|
||||
|
|
@ -282,7 +282,7 @@ export function VisionModelManager({ searchSpaceId }: VisionModelManagerProps) {
|
|||
onClick={() => openEditDialog(config)}
|
||||
className="h-6 w-6 text-muted-foreground hover:text-foreground"
|
||||
>
|
||||
<Edit3 className="h-3 w-3" />
|
||||
<Pencil className="h-3 w-3" />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>Edit</TooltipContent>
|
||||
|
|
|
|||
|
|
@ -764,22 +764,16 @@ export function DocumentUploadTab({
|
|||
</div>
|
||||
|
||||
<Button
|
||||
className="w-full"
|
||||
className="w-full relative"
|
||||
onClick={handleUpload}
|
||||
disabled={isAnyUploading || fileCount === 0}
|
||||
>
|
||||
{isAnyUploading ? (
|
||||
<span className="flex items-center gap-2">
|
||||
<Spinner size="sm" />
|
||||
{t("uploading")}
|
||||
</span>
|
||||
) : (
|
||||
<span className="flex items-center gap-2">
|
||||
{folderUpload
|
||||
? t("upload_folder_button", { count: fileCount })
|
||||
: t("upload_button", { count: fileCount })}
|
||||
</span>
|
||||
)}
|
||||
<span className={isAnyUploading ? "opacity-0" : ""}>
|
||||
{folderUpload
|
||||
? t("upload_folder_button", { count: fileCount })
|
||||
: t("upload_button", { count: fileCount })}
|
||||
</span>
|
||||
{isAnyUploading && <Spinner size="sm" className="absolute" />}
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
|
|
|||
|
|
@ -137,10 +137,9 @@ function ReportCard({
|
|||
const autoOpenedRef = useRef(false);
|
||||
const [metadata, setMetadata] = useState<{
|
||||
title: string;
|
||||
wordCount: number | null;
|
||||
versionLabel: string | null;
|
||||
content: string | null;
|
||||
}>({ title, wordCount: wordCount ?? null, versionLabel: null, content: null });
|
||||
}>({ title, versionLabel: null, content: null });
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
|
|
@ -169,10 +168,8 @@ function ReportCard({
|
|||
}
|
||||
}
|
||||
const resolvedTitle = parsed.data.title || title;
|
||||
const resolvedWordCount = parsed.data.report_metadata?.word_count ?? wordCount ?? null;
|
||||
setMetadata({
|
||||
title: resolvedTitle,
|
||||
wordCount: resolvedWordCount,
|
||||
versionLabel,
|
||||
content: parsed.data.content ?? null,
|
||||
});
|
||||
|
|
@ -182,7 +179,7 @@ function ReportCard({
|
|||
openPanel({
|
||||
reportId,
|
||||
title: resolvedTitle,
|
||||
wordCount: resolvedWordCount ?? undefined,
|
||||
wordCount: parsed.data.report_metadata?.word_count ?? wordCount ?? undefined,
|
||||
shareToken,
|
||||
});
|
||||
}
|
||||
|
|
@ -210,7 +207,6 @@ function ReportCard({
|
|||
openPanel({
|
||||
reportId,
|
||||
title: metadata.title,
|
||||
wordCount: metadata.wordCount ?? undefined,
|
||||
shareToken,
|
||||
});
|
||||
};
|
||||
|
|
@ -233,10 +229,8 @@ function ReportCard({
|
|||
<span className="inline-block h-3 w-24 rounded bg-muted/60 animate-pulse" />
|
||||
) : (
|
||||
<>
|
||||
{metadata.wordCount != null && `${metadata.wordCount.toLocaleString()} words`}
|
||||
{metadata.wordCount != null && metadata.versionLabel && (
|
||||
<Dot className="inline size-4" />
|
||||
)}
|
||||
Markdown
|
||||
{metadata.versionLabel && <Dot className="inline size-4" />}
|
||||
{metadata.versionLabel}
|
||||
</>
|
||||
)}
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
import type { ToolCallMessagePartProps } from "@assistant-ui/react";
|
||||
import { useAtomValue, useSetAtom } from "jotai";
|
||||
import { Dot } from "lucide-react";
|
||||
import { useParams, usePathname } from "next/navigation";
|
||||
import * as pdfjsLib from "pdfjs-dist";
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
|
|
@ -9,6 +10,7 @@ import { z } from "zod";
|
|||
import { openReportPanelAtom, reportPanelAtom } from "@/atoms/chat/report-panel.atom";
|
||||
import { TextShimmerLoader } from "@/components/prompt-kit/loader";
|
||||
import { useMediaQuery } from "@/hooks/use-media-query";
|
||||
import { baseApiService } from "@/lib/apis/base-api.service";
|
||||
import { getAuthHeaders } from "@/lib/auth-utils";
|
||||
|
||||
pdfjsLib.GlobalWorkerOptions.workerSrc = new URL(
|
||||
|
|
@ -32,6 +34,18 @@ const GenerateResumeResultSchema = z.object({
|
|||
error: z.string().nullish(),
|
||||
});
|
||||
|
||||
const ResumeVersionsResponseSchema = z.object({
|
||||
id: z.number(),
|
||||
versions: z
|
||||
.array(
|
||||
z.object({
|
||||
id: z.number(),
|
||||
created_at: z.string().nullish(),
|
||||
})
|
||||
)
|
||||
.nullish(),
|
||||
});
|
||||
|
||||
type GenerateResumeArgs = z.infer<typeof GenerateResumeArgsSchema>;
|
||||
type GenerateResumeResult = z.infer<typeof GenerateResumeResultSchema>;
|
||||
|
||||
|
|
@ -201,6 +215,7 @@ function ResumeCard({
|
|||
const autoOpenedRef = useRef(false);
|
||||
const [pdfUrl, setPdfUrl] = useState<string | null>(null);
|
||||
const [thumbState, setThumbState] = useState<"loading" | "ready" | "error">("loading");
|
||||
const [versionLabel, setVersionLabel] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
const previewPath = shareToken
|
||||
|
|
@ -219,6 +234,35 @@ function ResumeCard({
|
|||
}
|
||||
}, [reportId, title, shareToken, autoOpen, isDesktop, openPanel]);
|
||||
|
||||
useEffect(() => {
|
||||
let cancelled = false;
|
||||
const fetchVersions = async () => {
|
||||
try {
|
||||
const url = shareToken
|
||||
? `/api/v1/public/${shareToken}/reports/${reportId}/content`
|
||||
: `/api/v1/reports/${reportId}/content`;
|
||||
const rawData = await baseApiService.get<unknown>(url);
|
||||
if (cancelled) return;
|
||||
const parsed = ResumeVersionsResponseSchema.safeParse(rawData);
|
||||
if (parsed.success) {
|
||||
const versions = parsed.data.versions;
|
||||
if (versions && versions.length > 1) {
|
||||
const idx = versions.findIndex((v) => v.id === reportId);
|
||||
if (idx >= 0) {
|
||||
setVersionLabel(`version ${idx + 1}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// silently ignore — version label is non-critical
|
||||
}
|
||||
};
|
||||
fetchVersions();
|
||||
return () => {
|
||||
cancelled = true;
|
||||
};
|
||||
}, [reportId, shareToken]);
|
||||
|
||||
const onThumbLoad = useCallback(() => setThumbState("ready"), []);
|
||||
const onThumbError = useCallback(() => setThumbState("error"), []);
|
||||
|
||||
|
|
@ -243,8 +287,12 @@ function ResumeCard({
|
|||
className="w-full text-left transition-colors hover:bg-muted/50 focus:outline-none focus-visible:outline-none cursor-pointer select-none"
|
||||
>
|
||||
<div className="px-5 pt-5 pb-4">
|
||||
<p className="text-base font-semibold text-foreground line-clamp-2">{title}</p>
|
||||
<p className="text-sm text-muted-foreground mt-0.5">PDF</p>
|
||||
<p className="text-sm font-semibold text-foreground line-clamp-2">{title}</p>
|
||||
<p className="text-xs text-muted-foreground mt-0.5">
|
||||
PDF
|
||||
{versionLabel && <Dot className="inline size-4" />}
|
||||
{versionLabel}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="mx-5 h-px bg-border/50" />
|
||||
|
|
|
|||
|
|
@ -105,7 +105,7 @@ Connect SurfSense to your favorite tools and services. Browse the available inte
|
|||
/>
|
||||
<Card
|
||||
title="Obsidian"
|
||||
description="Connect your Obsidian vault to SurfSense"
|
||||
description="Sync your Obsidian vault using the SurfSense plugin"
|
||||
href="/docs/connectors/obsidian"
|
||||
/>
|
||||
<Card
|
||||
|
|
|
|||
|
|
@ -1,143 +1,73 @@
|
|||
---
|
||||
title: Obsidian
|
||||
description: Connect your Obsidian vault to SurfSense
|
||||
description: Sync your Obsidian vault with the SurfSense plugin
|
||||
---
|
||||
|
||||
# Obsidian Integration Setup Guide
|
||||
# Obsidian Plugin Setup Guide
|
||||
|
||||
This guide walks you through connecting your Obsidian vault to SurfSense for note search and AI-powered insights.
|
||||
|
||||
<Callout type="warn">
|
||||
This connector requires direct file system access and only works with self-hosted SurfSense installations.
|
||||
</Callout>
|
||||
SurfSense integrates with Obsidian through the SurfSense Obsidian plugin.
|
||||
|
||||
## How it works
|
||||
|
||||
The Obsidian connector scans your local Obsidian vault directory and indexes all Markdown files. It preserves your note structure and extracts metadata from YAML frontmatter.
|
||||
The plugin runs inside your Obsidian app and pushes note updates to SurfSense over HTTPS.
|
||||
This works for cloud and self-hosted deployments, including desktop and mobile clients.
|
||||
|
||||
- For follow-up indexing runs, the connector uses content hashing to skip unchanged files for faster sync.
|
||||
- Indexing should be configured to run periodically, so updates should appear in your search results within minutes.
|
||||
|
||||
---
|
||||
|
||||
## What Gets Indexed
|
||||
## What gets indexed
|
||||
|
||||
| Content Type | Description |
|
||||
|--------------|-------------|
|
||||
| Markdown Files | All `.md` files in your vault |
|
||||
| Frontmatter | YAML metadata (title, tags, aliases, dates) |
|
||||
| Wiki Links | Links between notes (`[[note]]`) |
|
||||
| Inline Tags | Tags throughout your notes (`#tag`) |
|
||||
| Note Content | Full content with intelligent chunking |
|
||||
| Markdown files | Note content (`.md`) |
|
||||
| Frontmatter | YAML metadata like title, tags, aliases, dates |
|
||||
| Wiki links | Linked notes (`[[note]]`) |
|
||||
| Tags | Inline and frontmatter tags |
|
||||
| Vault metadata | Vault and path metadata used for deep links and sync state |
|
||||
|
||||
## Quick start
|
||||
|
||||
1. Open **Connectors** in SurfSense and choose **Obsidian**.
|
||||
2. Install the plugin (recommended via BRAT) using the steps below.
|
||||
3. In Obsidian, open **Settings → SurfSense**.
|
||||
4. Paste your SurfSense API token from the user settings section.
|
||||
5. Paste your Server URL in the plugin setting: either your SurfSense main domain (if `/api/v1` rewrites are enabled) or your direct backend URL.
|
||||
6. Choose the Search Space in the plugin, then the first sync should run automatically.
|
||||
7. Confirm the connector appears as **Obsidian - <vault>** in SurfSense.
|
||||
|
||||
## Install via BRAT (recommended)
|
||||
|
||||
1. In Obsidian, open **Settings → Community plugins** and install **[BRAT](obsidian://show-plugin?id=obsidian42-brat)**.
|
||||
2. Open BRAT settings and click **Add beta plugin** button.
|
||||
3. Paste the repository: `https://github.com/MODSetter/SurfSense/`.
|
||||
4. Select the latest plugin version, then click "Add plugin".
|
||||
5. Open **Settings → SurfSense** to finish setup.
|
||||
|
||||
## Migrating from the legacy connector
|
||||
|
||||
If you previously used the legacy Obsidian connector architecture, migrate to the plugin flow:
|
||||
|
||||
1. Delete the old legacy Obsidian connector from SurfSense.
|
||||
2. Install and configure the SurfSense Obsidian plugin using the quick start above.
|
||||
3. Run the first plugin sync and verify the new **Obsidian - <vault>** connector is active.
|
||||
|
||||
<Callout type="warn">
|
||||
Binary files and attachments are not indexed by default. Enable "Include Attachments" to index embedded files.
|
||||
Deleting the legacy connector also deletes all documents that were indexed by that connector. Always finish and verify plugin sync before deleting the old connector.
|
||||
</Callout>
|
||||
|
||||
---
|
||||
|
||||
## Quick Start (Local Installation)
|
||||
|
||||
1. Navigate to **Connectors** → **Add Connector** → **Obsidian**
|
||||
2. Enter your vault path: `/Users/yourname/Documents/MyVault`
|
||||
3. Enter a vault name (e.g., `Personal Notes`)
|
||||
4. Click **Connect Obsidian**
|
||||
|
||||
<Callout type="info">
|
||||
Find your vault path: In Obsidian, right-click any note → "Reveal in Finder" (macOS) or "Show in Explorer" (Windows).
|
||||
</Callout>
|
||||
|
||||
<Callout type="info" title="Periodic Sync">
|
||||
Enable periodic sync to automatically re-index notes when content changes. Available frequencies: Every 5 minutes, 15 minutes, hourly, every 6 hours, daily, or weekly.
|
||||
</Callout>
|
||||
|
||||
---
|
||||
|
||||
## Docker Setup
|
||||
|
||||
For Docker deployments, you need to mount your Obsidian vault as a volume.
|
||||
|
||||
### Step 1: Update docker-compose.yml
|
||||
|
||||
Add your vault as a volume mount to the SurfSense backend service:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
surfsense:
|
||||
# ... other config
|
||||
volumes:
|
||||
- /path/to/your/obsidian/vault:/app/obsidian_vaults/my-vault:ro
|
||||
```
|
||||
|
||||
<Callout type="info">
|
||||
The `:ro` flag mounts the vault as read-only, which is recommended for security.
|
||||
</Callout>
|
||||
|
||||
### Step 2: Configure the Connector
|
||||
|
||||
Use the **container path** (not your local path) when setting up the connector:
|
||||
|
||||
| Your Local Path | Container Path (use this) |
|
||||
|-----------------|---------------------------|
|
||||
| `/Users/john/Documents/MyVault` | `/app/obsidian_vaults/my-vault` |
|
||||
| `C:\Users\john\Documents\MyVault` | `/app/obsidian_vaults/my-vault` |
|
||||
|
||||
### Example: Multiple Vaults
|
||||
|
||||
```yaml
|
||||
volumes:
|
||||
- /Users/john/Documents/PersonalNotes:/app/obsidian_vaults/personal:ro
|
||||
- /Users/john/Documents/WorkNotes:/app/obsidian_vaults/work:ro
|
||||
```
|
||||
|
||||
Then create separate connectors for each vault using `/app/obsidian_vaults/personal` and `/app/obsidian_vaults/work`.
|
||||
|
||||
---
|
||||
|
||||
## Connector Configuration
|
||||
|
||||
| Field | Description | Required |
|
||||
|-------|-------------|----------|
|
||||
| **Connector Name** | A friendly name to identify this connector | Yes |
|
||||
| **Vault Path** | Absolute path to your vault (container path for Docker) | Yes |
|
||||
| **Vault Name** | Display name for your vault in search results | Yes |
|
||||
| **Exclude Folders** | Comma-separated folder names to skip | No |
|
||||
| **Include Attachments** | Index embedded files (images, PDFs) | No |
|
||||
|
||||
---
|
||||
|
||||
## Recommended Exclusions
|
||||
|
||||
Common folders to exclude from indexing:
|
||||
|
||||
| Folder | Reason |
|
||||
|--------|--------|
|
||||
| `.obsidian` | Obsidian config files (always exclude) |
|
||||
| `.trash` | Obsidian's trash folder |
|
||||
| `templates` | Template files you don't want searchable |
|
||||
| `daily-notes` | If you want to exclude daily notes |
|
||||
| `attachments` | If not using "Include Attachments" |
|
||||
|
||||
Default exclusions: `.obsidian,.trash`
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
**Vault not found / Permission denied**
|
||||
- Verify the path exists and is accessible
|
||||
- For Docker: ensure the volume is mounted correctly in `docker-compose.yml`
|
||||
- Check file permissions: SurfSense needs read access to the vault directory
|
||||
**Plugin connects but no files appear**
|
||||
- Verify the plugin is pointed to the correct Search Space.
|
||||
- Trigger a manual sync from the plugin settings.
|
||||
- Confirm your API token is valid and not expired.
|
||||
|
||||
**No notes indexed**
|
||||
- Ensure your vault contains `.md` files
|
||||
- Check that notes aren't in excluded folders
|
||||
- Verify the path points to the vault root (contains `.obsidian` folder)
|
||||
**Self-hosted URL issues**
|
||||
- Use a public or LAN backend URL that your Obsidian device can reach.
|
||||
- If your instance is behind TLS, ensure the URL/certificate is valid for the device running Obsidian.
|
||||
|
||||
**Changes not appearing**
|
||||
- Wait for the next sync cycle, or manually trigger re-indexing
|
||||
- For Docker: restart the container if you modified volume mounts
|
||||
**Unauthorized / 401 errors**
|
||||
- Regenerate and paste a fresh API token from SurfSense.
|
||||
- Ensure the token belongs to the same account and workspace you are syncing into.
|
||||
|
||||
**Docker: "path not found" error**
|
||||
- Use the container path (`/app/obsidian_vaults/...`), not your local path
|
||||
- Verify the volume mount in `docker-compose.yml` matches
|
||||
**Cannot reach server URL**
|
||||
- Check that the backend URL is reachable from the Obsidian device.
|
||||
- For self-hosted setups, verify firewall and reverse proxy rules.
|
||||
- Avoid using localhost unless SurfSense and Obsidian run on the same machine.
|
||||
|
|
|
|||
|
|
@ -427,6 +427,19 @@ class ConnectorsApiService {
|
|||
body: { tool_name: toolName },
|
||||
});
|
||||
};
|
||||
|
||||
/** Live stats for the Obsidian connector tile. */
|
||||
getObsidianStats = async (vaultId: string): Promise<ObsidianStats> => {
|
||||
return baseApiService.get<ObsidianStats>(
|
||||
`/api/v1/obsidian/stats?vault_id=${encodeURIComponent(vaultId)}`
|
||||
);
|
||||
};
|
||||
}
|
||||
|
||||
export interface ObsidianStats {
|
||||
vault_id: string;
|
||||
files_synced: number;
|
||||
last_sync_at: string | null;
|
||||
}
|
||||
|
||||
export type { SlackChannel, DiscordChannel };
|
||||
|
|
|
|||
3
versions.json
Normal file
3
versions.json
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
{
|
||||
"0.1.0": "1.5.4"
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue