diff --git a/.github/workflows/obsidian-plugin-lint.yml b/.github/workflows/obsidian-plugin-lint.yml new file mode 100644 index 000000000..42bd099b1 --- /dev/null +++ b/.github/workflows/obsidian-plugin-lint.yml @@ -0,0 +1,39 @@ +name: Obsidian Plugin Lint + +# Lints + type-checks + builds the Obsidian plugin on every push/PR that +# touches its sources. The official obsidian-sample-plugin template ships +# its own ESLint+esbuild setup; we run that here instead of folding the +# plugin into the monorepo's Biome-based code-quality.yml so the tooling +# stays aligned with what `obsidianmd/eslint-plugin-obsidianmd` checks +# against. + +on: + push: + branches: ["**"] + paths: + - "surfsense_obsidian/**" + - ".github/workflows/obsidian-plugin-lint.yml" + pull_request: + branches: ["**"] + paths: + - "surfsense_obsidian/**" + - ".github/workflows/obsidian-plugin-lint.yml" + +jobs: + lint: + runs-on: ubuntu-latest + defaults: + run: + working-directory: surfsense_obsidian + steps: + - uses: actions/checkout@v6 + + - uses: actions/setup-node@v6 + with: + node-version: 22.x + cache: npm + cache-dependency-path: surfsense_obsidian/package-lock.json + + - run: npm ci + - run: npm run lint + - run: npm run build diff --git a/.github/workflows/release-obsidian-plugin.yml b/.github/workflows/release-obsidian-plugin.yml new file mode 100644 index 000000000..dfe15e7d6 --- /dev/null +++ b/.github/workflows/release-obsidian-plugin.yml @@ -0,0 +1,119 @@ +name: Release Obsidian Plugin + +# Tag format: `obsidian-v` and `` must match `surfsense_obsidian/manifest.json` exactly. 
+on: + push: + tags: + - "obsidian-v*" + workflow_dispatch: + inputs: + publish: + description: "Publish to GitHub Releases" + required: true + type: choice + options: + - never + - always + default: "never" + +permissions: + contents: write + +jobs: + build-and-release: + runs-on: ubuntu-latest + defaults: + run: + working-directory: surfsense_obsidian + + steps: + - uses: actions/checkout@v6 + with: + # Need write access for the manifest/versions.json mirror commit + # back to main further down. + fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} + + - uses: actions/setup-node@v6 + with: + node-version: 22.x + cache: npm + cache-dependency-path: surfsense_obsidian/package-lock.json + + - name: Resolve plugin version + id: version + run: | + manifest_version=$(node -p "require('./manifest.json').version") + if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then + # Manual runs derive the release version from manifest.json. + version="$manifest_version" + tag="obsidian-v$version" + else + tag="${GITHUB_REF_NAME}" + if [ -z "$tag" ] || [[ "$tag" != obsidian-v* ]]; then + echo "::error::Invalid tag '$tag'. 
Expected format: obsidian-v" + exit 1 + fi + version="${tag#obsidian-v}" + if [ "$version" != "$manifest_version" ]; then + echo "::error::Tag version '$version' does not match manifest version '$manifest_version'" + exit 1 + fi + fi + echo "tag=$tag" >> "$GITHUB_OUTPUT" + echo "version=$version" >> "$GITHUB_OUTPUT" + + - name: Resolve publish mode + id: release_mode + run: | + if [ "${{ github.event_name }}" = "push" ] || [ "${{ inputs.publish }}" = "always" ]; then + echo "should_publish=true" >> "$GITHUB_OUTPUT" + else + echo "should_publish=false" >> "$GITHUB_OUTPUT" + fi + + - run: npm ci + + - run: npm run lint + + - run: npm run build + + - name: Verify build artifacts + run: | + for f in main.js manifest.json styles.css; do + test -f "$f" || (echo "::error::Missing release artifact: $f" && exit 1) + done + + - name: Mirror manifest.json + versions.json to repo root + if: steps.release_mode.outputs.should_publish == 'true' + working-directory: ${{ github.workspace }} + run: | + cp surfsense_obsidian/manifest.json manifest.json + cp surfsense_obsidian/versions.json versions.json + if git diff --quiet manifest.json versions.json; then + echo "Root manifest/versions already up to date." + exit 0 + fi + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + git add manifest.json versions.json + git commit -m "chore(obsidian-plugin): mirror manifest+versions for ${{ steps.version.outputs.tag }}" + # Push to the default branch so Obsidian can fetch raw files from HEAD. + if ! git push origin HEAD:${{ github.event.repository.default_branch }}; then + echo "::warning::Failed to push mirrored manifest/versions to default branch (likely branch protection). Continuing release." + fi + + # Publish release under bare `manifest.json` version (no `obsidian-v` prefix) for BRAT/store compatibility. 
+ # `make_latest: "false"` keeps the desktop app's `v*` release headlined since Obsidian and BRAT resolve plugins via getReleaseByTag, not the latest flag. + - name: Create GitHub release + if: steps.release_mode.outputs.should_publish == 'true' + uses: softprops/action-gh-release@v3 + with: + tag_name: ${{ steps.version.outputs.version }} + name: SurfSense Obsidian Plugin ${{ steps.version.outputs.version }} + generate_release_notes: true + make_latest: "false" + files: | + surfsense_obsidian/main.js + surfsense_obsidian/manifest.json + surfsense_obsidian/styles.css diff --git a/manifest.json b/manifest.json new file mode 100644 index 000000000..d03a5b650 --- /dev/null +++ b/manifest.json @@ -0,0 +1,10 @@ +{ + "id": "surfsense-obsidian", + "name": "SurfSense", + "version": "0.1.0", + "minAppVersion": "1.5.4", + "description": "Turn your vault into a searchable second brain with SurfSense.", + "author": "SurfSense", + "authorUrl": "https://www.surfsense.com", + "isDesktopOnly": false +} diff --git a/surfsense_backend/alembic/versions/129_obsidian_plugin_vault_identity.py b/surfsense_backend/alembic/versions/129_obsidian_plugin_vault_identity.py new file mode 100644 index 000000000..0c0e3dbe5 --- /dev/null +++ b/surfsense_backend/alembic/versions/129_obsidian_plugin_vault_identity.py @@ -0,0 +1,106 @@ +"""129_obsidian_plugin_vault_identity + +Revision ID: 129 +Revises: 128 +Create Date: 2026-04-21 + +Locks down vault identity for the Obsidian plugin connector: + +- Deactivates pre-plugin OBSIDIAN_CONNECTOR rows. +- Partial unique index on ``(user_id, (config->>'vault_id'))`` for the + ``/obsidian/connect`` upsert fast path. +- Partial unique index on ``(user_id, (config->>'vault_fingerprint'))`` + so two devices observing the same vault content can never produce + two connector rows. Collisions are caught by the route handler and + routed through the merge path. 
+""" + +from __future__ import annotations + +from collections.abc import Sequence + +import sqlalchemy as sa + +from alembic import op + +revision: str = "129" +down_revision: str | None = "128" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None + + +def upgrade() -> None: + conn = op.get_bind() + + conn.execute( + sa.text( + """ + UPDATE search_source_connectors + SET + is_indexable = false, + periodic_indexing_enabled = false, + next_scheduled_at = NULL, + config = COALESCE(config, '{}'::json)::jsonb + || jsonb_build_object( + 'legacy', true, + 'deactivated_at', to_char( + now() AT TIME ZONE 'UTC', + 'YYYY-MM-DD"T"HH24:MI:SS"Z"' + ) + ) + WHERE connector_type = 'OBSIDIAN_CONNECTOR' + AND COALESCE((config::jsonb)->>'source', '') <> 'plugin' + """ + ) + ) + + conn.execute( + sa.text( + """ + CREATE UNIQUE INDEX IF NOT EXISTS + search_source_connectors_obsidian_plugin_vault_uniq + ON search_source_connectors (user_id, ((config->>'vault_id'))) + WHERE connector_type = 'OBSIDIAN_CONNECTOR' + AND config->>'source' = 'plugin' + AND config->>'vault_id' IS NOT NULL + """ + ) + ) + + conn.execute( + sa.text( + """ + CREATE UNIQUE INDEX IF NOT EXISTS + search_source_connectors_obsidian_plugin_fingerprint_uniq + ON search_source_connectors (user_id, ((config->>'vault_fingerprint'))) + WHERE connector_type = 'OBSIDIAN_CONNECTOR' + AND config->>'source' = 'plugin' + AND config->>'vault_fingerprint' IS NOT NULL + """ + ) + ) + + +def downgrade() -> None: + conn = op.get_bind() + conn.execute( + sa.text( + "DROP INDEX IF EXISTS " + "search_source_connectors_obsidian_plugin_fingerprint_uniq" + ) + ) + conn.execute( + sa.text( + "DROP INDEX IF EXISTS search_source_connectors_obsidian_plugin_vault_uniq" + ) + ) + conn.execute( + sa.text( + """ + UPDATE search_source_connectors + SET config = (config::jsonb - 'legacy' - 'deactivated_at')::json + WHERE connector_type = 'OBSIDIAN_CONNECTOR' + AND (config::jsonb) ? 
'legacy' + """ + ) + ) diff --git a/surfsense_backend/app/agents/new_chat/middleware/file_intent.py b/surfsense_backend/app/agents/new_chat/middleware/file_intent.py index 1e5fd0ede..4bf5dcfe4 100644 --- a/surfsense_backend/app/agents/new_chat/middleware/file_intent.py +++ b/surfsense_backend/app/agents/new_chat/middleware/file_intent.py @@ -109,37 +109,6 @@ def _sanitize_path_segment(value: str) -> str: return segment -def _infer_text_file_extension(user_text: str) -> str: - lowered = user_text.lower() - if any(token in lowered for token in ("json", ".json")): - return ".json" - if any(token in lowered for token in ("yaml", "yml", ".yaml", ".yml")): - return ".yaml" - if any(token in lowered for token in ("csv", ".csv")): - return ".csv" - if any(token in lowered for token in ("python", ".py")): - return ".py" - if any(token in lowered for token in ("typescript", ".ts", ".tsx")): - return ".ts" - if any(token in lowered for token in ("javascript", ".js", ".mjs", ".cjs")): - return ".js" - if any(token in lowered for token in ("html", ".html")): - return ".html" - if any(token in lowered for token in ("css", ".css")): - return ".css" - if any(token in lowered for token in ("sql", ".sql")): - return ".sql" - if any(token in lowered for token in ("toml", ".toml")): - return ".toml" - if any(token in lowered for token in ("ini", ".ini")): - return ".ini" - if any(token in lowered for token in ("xml", ".xml")): - return ".xml" - if any(token in lowered for token in ("markdown", ".md", "readme")): - return ".md" - return ".md" - - def _normalize_directory(value: str) -> str: raw = value.strip().replace("\\", "/") raw = raw.strip("/") @@ -193,7 +162,6 @@ def _fallback_path( suggested_path: str | None = None, user_text: str, ) -> str: - default_extension = _infer_text_file_extension(user_text) inferred_dir = _infer_directory_from_user_text(user_text) sanitized_filename = "" @@ -202,9 +170,9 @@ def _fallback_path( if sanitized_filename.lower().endswith(".txt"): 
sanitized_filename = f"{sanitized_filename[:-4]}.md" if not sanitized_filename: - sanitized_filename = f"notes{default_extension}" + sanitized_filename = "notes.md" elif "." not in sanitized_filename: - sanitized_filename = f"{sanitized_filename}{default_extension}" + sanitized_filename = f"{sanitized_filename}.md" normalized_suggested_path = ( _normalize_file_path(suggested_path) if suggested_path else "" diff --git a/surfsense_backend/app/agents/new_chat/middleware/filesystem.py b/surfsense_backend/app/agents/new_chat/middleware/filesystem.py index 1706e3705..8dfa89ef2 100644 --- a/surfsense_backend/app/agents/new_chat/middleware/filesystem.py +++ b/surfsense_backend/app/agents/new_chat/middleware/filesystem.py @@ -7,6 +7,7 @@ This middleware customizes prompts and persists write/edit operations for from __future__ import annotations import asyncio +import json import logging import re import secrets @@ -141,6 +142,31 @@ IMPORTANT: content. """ +SURFSENSE_MOVE_FILE_TOOL_DESCRIPTION = """Moves or renames a file or folder. + +Use absolute paths for both source and destination. + +Notes: +- In local-folder mode, paths should use mount prefixes (e.g., //foo.txt). +- Rename is a special case of move (same folder, different filename). +- Cross-mount moves are not supported. +""" + +SURFSENSE_LIST_TREE_TOOL_DESCRIPTION = """Lists files/folders recursively in a single bounded call. + +Use this in desktop local-folder mode to discover nested files at scale. + +Args: +- path: absolute mount-prefixed path (e.g., //src) or "/" for mount roots. +- max_depth: recursion depth limit (default 8). +- page_size: maximum number of entries returned (max 1000). +- include_files/include_dirs: filter returned entry types. + +Returns JSON with: +- entries: [{path, is_dir, size, modified_at, depth}] +- truncated: true when additional entries were omitted due to page_size +""" + SURFSENSE_GLOB_TOOL_DESCRIPTION = """Find files matching a glob pattern. 
Supports standard glob patterns: `*`, `**`, `?`. @@ -222,11 +248,14 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): ) if filesystem_mode == FilesystemMode.DESKTOP_LOCAL_FOLDER: system_prompt += ( + "\n- move_file: move or rename files/folders in local-folder mode." + "\n- list_tree: recursively list nested local paths in one bounded response." "\n\n## Local Folder Mode" "\n\nThis chat is running in desktop local-folder mode." " Keep all file operations local. Do not use save_document." " Always use mount-prefixed absolute paths like //file.ext." " If you are unsure which mounts are available, call ls('/') first." + " For big trees: use list_tree, then grep, then read_file." ) super().__init__( @@ -237,6 +266,8 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): "read_file": SURFSENSE_READ_FILE_TOOL_DESCRIPTION, "write_file": SURFSENSE_WRITE_FILE_TOOL_DESCRIPTION, "edit_file": SURFSENSE_EDIT_FILE_TOOL_DESCRIPTION, + "move_file": SURFSENSE_MOVE_FILE_TOOL_DESCRIPTION, + "list_tree": SURFSENSE_LIST_TREE_TOOL_DESCRIPTION, "glob": SURFSENSE_GLOB_TOOL_DESCRIPTION, "grep": SURFSENSE_GREP_TOOL_DESCRIPTION, }, @@ -244,6 +275,9 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): max_execute_timeout=self._MAX_EXECUTE_TIMEOUT, ) self.tools = [t for t in self.tools if t.name != "execute"] + if self._filesystem_mode == FilesystemMode.DESKTOP_LOCAL_FOLDER: + self.tools.append(self._create_move_file_tool()) + self.tools.append(self._create_list_tree_tool()) if self._should_persist_documents(): self.tools.append(self._create_save_document_tool()) if self._sandbox_available: @@ -776,35 +810,97 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): """Only cloud mode persists file content to Document/Chunk tables.""" return self._filesystem_mode == FilesystemMode.CLOUD - def _default_mount_prefix(self, runtime: ToolRuntime[None, FilesystemState]) -> str: - backend = self._get_backend(runtime) - if isinstance(backend, 
MultiRootLocalFolderBackend): - return f"/{backend.default_mount()}" - return "" + @staticmethod + def _normalize_absolute_path(candidate: str) -> str: + normalized = re.sub(r"/+", "/", candidate.strip().replace("\\", "/")) + if not normalized: + return "/" + if normalized.startswith("/"): + return normalized + return f"/{normalized.lstrip('/')}" + + @staticmethod + def _extract_mount_from_path(path: str, mounts: tuple[str, ...]) -> str | None: + rel = path.lstrip("/") + if not rel: + return None + mount, _, _ = rel.partition("/") + if mount in mounts: + return mount + return None + + @staticmethod + def _local_parent_path(path: str) -> str: + rel = path.lstrip("/") + if "/" not in rel: + return "/" + parent = rel.rsplit("/", 1)[0].strip("/") + if not parent: + return "/" + return f"/{parent}" + + @staticmethod + def _path_exists_under_mount( + backend: MultiRootLocalFolderBackend, + mount: str, + local_path: str, + ) -> bool: + result = backend.list_tree( + f"/{mount}{local_path}", + max_depth=0, + page_size=1, + include_files=True, + include_dirs=True, + ) + return not bool(result.get("error")) def _normalize_local_mount_path( - self, candidate: str, runtime: ToolRuntime[None, FilesystemState] + self, + candidate: str, + runtime: ToolRuntime[None, FilesystemState], ) -> str: + normalized = self._normalize_absolute_path(candidate) backend = self._get_backend(runtime) - mount_prefix = self._default_mount_prefix(runtime) - normalized_candidate = re.sub(r"/+", "/", candidate.strip().replace("\\", "/")) - if not mount_prefix or not isinstance(backend, MultiRootLocalFolderBackend): - if normalized_candidate.startswith("/"): - return normalized_candidate - return f"/{normalized_candidate.lstrip('/')}" + if not isinstance(backend, MultiRootLocalFolderBackend): + return normalized - mount_names = set(backend.list_mounts()) - if normalized_candidate.startswith("/"): - first_segment = normalized_candidate.lstrip("/").split("/", 1)[0] - if first_segment in mount_names: - 
return normalized_candidate - return f"{mount_prefix}{normalized_candidate}" + mounts = backend.list_mounts() + explicit_mount = self._extract_mount_from_path(normalized, mounts) + if explicit_mount: + return normalized - relative = normalized_candidate.lstrip("/") - first_segment = relative.split("/", 1)[0] - if first_segment in mount_names: - return f"/{relative}" - return f"{mount_prefix}/{relative}" + if len(mounts) == 1: + return f"/{mounts[0]}{normalized}" + + suggested_mount: str | None = None + contract = runtime.state.get("file_operation_contract") or {} + suggested_path = contract.get("suggested_path") + if isinstance(suggested_path, str) and suggested_path.strip(): + normalized_suggested = self._normalize_absolute_path(suggested_path) + suggested_mount = self._extract_mount_from_path(normalized_suggested, mounts) + + matching_mounts = [ + mount + for mount in mounts + if self._path_exists_under_mount(backend, mount, normalized) + ] + if len(matching_mounts) == 1: + return f"/{matching_mounts[0]}{normalized}" + + parent_path = self._local_parent_path(normalized) + if parent_path != "/": + parent_matching_mounts = [ + mount + for mount in mounts + if self._path_exists_under_mount(backend, mount, parent_path) + ] + if len(parent_matching_mounts) == 1: + return f"/{parent_matching_mounts[0]}{normalized}" + + if suggested_mount: + return f"/{suggested_mount}{normalized}" + + return f"/{backend.default_mount()}{normalized}" def _get_contract_suggested_path( self, runtime: ToolRuntime[None, FilesystemState] @@ -812,14 +908,7 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): contract = runtime.state.get("file_operation_contract") or {} suggested = contract.get("suggested_path") if isinstance(suggested, str) and suggested.strip(): - cleaned = suggested.strip() - if self._filesystem_mode == FilesystemMode.DESKTOP_LOCAL_FOLDER: - return self._normalize_local_mount_path(cleaned, runtime) - return cleaned - if self._filesystem_mode == 
FilesystemMode.DESKTOP_LOCAL_FOLDER: - mount_prefix = self._default_mount_prefix(runtime) - if mount_prefix: - return f"{mount_prefix}/notes.md" + return self._normalize_absolute_path(suggested) return "/notes.md" def _resolve_write_target_path( @@ -836,6 +925,34 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): return f"/{candidate.lstrip('/')}" return candidate + def _resolve_move_target_path( + self, + file_path: str, + runtime: ToolRuntime[None, FilesystemState], + ) -> str: + candidate = file_path.strip() + if not candidate: + return "" + if self._filesystem_mode == FilesystemMode.DESKTOP_LOCAL_FOLDER: + return self._normalize_local_mount_path(candidate, runtime) + if not candidate.startswith("/"): + return f"/{candidate.lstrip('/')}" + return candidate + + def _resolve_list_target_path( + self, + path: str, + runtime: ToolRuntime[None, FilesystemState], + ) -> str: + candidate = path.strip() or "/" + if candidate == "/": + return "/" + if self._filesystem_mode == FilesystemMode.DESKTOP_LOCAL_FOLDER: + return self._normalize_local_mount_path(candidate, runtime) + if not candidate.startswith("/"): + return f"/{candidate.lstrip('/')}" + return candidate + @staticmethod def _is_error_text(value: str) -> bool: return value.startswith("Error:") @@ -930,6 +1047,246 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): ) return None, updated_content + def _create_move_file_tool(self) -> BaseTool: + """Create move_file for desktop local-folder mode.""" + tool_description = ( + self._custom_tool_descriptions.get("move_file") + or SURFSENSE_MOVE_FILE_TOOL_DESCRIPTION + ) + + def sync_move_file( + source_path: Annotated[ + str, + "Absolute source path to move from.", + ], + destination_path: Annotated[ + str, + "Absolute destination path to move to.", + ], + runtime: ToolRuntime[None, FilesystemState], + *, + overwrite: Annotated[ + bool, + "If True, replace an existing destination file. 
Defaults to False.", + ] = False, + ) -> Command | str: + if self._filesystem_mode != FilesystemMode.DESKTOP_LOCAL_FOLDER: + return "Error: move_file is only available in desktop local-folder mode." + + if not source_path.strip() or not destination_path.strip(): + return "Error: source_path and destination_path are required." + + resolved_backend = self._get_backend(runtime) + source_target = self._resolve_move_target_path(source_path, runtime) + destination_target = self._resolve_move_target_path(destination_path, runtime) + try: + validated_source = validate_path(source_target) + validated_destination = validate_path(destination_target) + except ValueError as exc: + return f"Error: {exc}" + res: WriteResult = resolved_backend.move( + validated_source, + validated_destination, + overwrite=overwrite, + ) + if res.error: + return res.error + if res.files_update is not None: + return Command( + update={ + "files": res.files_update, + "messages": [ + ToolMessage( + content=( + f"Moved '{validated_source}' to " + f"'{res.path or validated_destination}'" + ), + tool_call_id=runtime.tool_call_id, + ) + ], + } + ) + return f"Moved '{validated_source}' to '{res.path or validated_destination}'" + + async def async_move_file( + source_path: Annotated[ + str, + "Absolute source path to move from.", + ], + destination_path: Annotated[ + str, + "Absolute destination path to move to.", + ], + runtime: ToolRuntime[None, FilesystemState], + *, + overwrite: Annotated[ + bool, + "If True, replace an existing destination file. Defaults to False.", + ] = False, + ) -> Command | str: + if self._filesystem_mode != FilesystemMode.DESKTOP_LOCAL_FOLDER: + return "Error: move_file is only available in desktop local-folder mode." + + if not source_path.strip() or not destination_path.strip(): + return "Error: source_path and destination_path are required." 
+ + resolved_backend = self._get_backend(runtime) + source_target = self._resolve_move_target_path(source_path, runtime) + destination_target = self._resolve_move_target_path(destination_path, runtime) + try: + validated_source = validate_path(source_target) + validated_destination = validate_path(destination_target) + except ValueError as exc: + return f"Error: {exc}" + res: WriteResult = await resolved_backend.amove( + validated_source, + validated_destination, + overwrite=overwrite, + ) + if res.error: + return res.error + if res.files_update is not None: + return Command( + update={ + "files": res.files_update, + "messages": [ + ToolMessage( + content=( + f"Moved '{validated_source}' to " + f"'{res.path or validated_destination}'" + ), + tool_call_id=runtime.tool_call_id, + ) + ], + } + ) + return f"Moved '{validated_source}' to '{res.path or validated_destination}'" + + return StructuredTool.from_function( + name="move_file", + description=tool_description, + func=sync_move_file, + coroutine=async_move_file, + ) + + def _create_list_tree_tool(self) -> BaseTool: + """Create list_tree for desktop local-folder mode.""" + tool_description = ( + self._custom_tool_descriptions.get("list_tree") + or SURFSENSE_LIST_TREE_TOOL_DESCRIPTION + ) + + def sync_list_tree( + runtime: ToolRuntime[None, FilesystemState], + *, + path: Annotated[ + str, + "Absolute path to list from. Use '/' for mount roots.", + ] = "/", + max_depth: Annotated[ + int, + "Maximum recursion depth to traverse. Defaults to 8.", + ] = 8, + page_size: Annotated[ + int, + "Maximum number of entries to return. Defaults to 500 (max 1000).", + ] = 500, + include_files: Annotated[ + bool, + "Whether file entries should be included.", + ] = True, + include_dirs: Annotated[ + bool, + "Whether directory entries should be included.", + ] = True, + ) -> str: + if self._filesystem_mode != FilesystemMode.DESKTOP_LOCAL_FOLDER: + return "Error: list_tree is only available in desktop local-folder mode." 
+ if max_depth < 0: + return "Error: max_depth must be >= 0." + if page_size < 1: + return "Error: page_size must be >= 1." + if not include_files and not include_dirs: + return "Error: include_files and include_dirs cannot both be false." + + resolved_backend = self._get_backend(runtime) + target_path = self._resolve_list_target_path(path, runtime) + try: + validated_path = validate_path(target_path) + except ValueError as exc: + return f"Error: {exc}" + + result = resolved_backend.list_tree( + validated_path, + max_depth=max_depth, + page_size=page_size, + include_files=include_files, + include_dirs=include_dirs, + ) + error = result.get("error") if isinstance(result, dict) else None + if isinstance(error, str) and error: + return error + return json.dumps(result, ensure_ascii=True) + + async def async_list_tree( + runtime: ToolRuntime[None, FilesystemState], + *, + path: Annotated[ + str, + "Absolute path to list from. Use '/' for mount roots.", + ] = "/", + max_depth: Annotated[ + int, + "Maximum recursion depth to traverse. Defaults to 8.", + ] = 8, + page_size: Annotated[ + int, + "Maximum number of entries to return. Defaults to 500 (max 1000).", + ] = 500, + include_files: Annotated[ + bool, + "Whether file entries should be included.", + ] = True, + include_dirs: Annotated[ + bool, + "Whether directory entries should be included.", + ] = True, + ) -> str: + if self._filesystem_mode != FilesystemMode.DESKTOP_LOCAL_FOLDER: + return "Error: list_tree is only available in desktop local-folder mode." + if max_depth < 0: + return "Error: max_depth must be >= 0." + if page_size < 1: + return "Error: page_size must be >= 1." + if not include_files and not include_dirs: + return "Error: include_files and include_dirs cannot both be false." 
+ + resolved_backend = self._get_backend(runtime) + target_path = self._resolve_list_target_path(path, runtime) + try: + validated_path = validate_path(target_path) + except ValueError as exc: + return f"Error: {exc}" + + result = await resolved_backend.alist_tree( + validated_path, + max_depth=max_depth, + page_size=page_size, + include_files=include_files, + include_dirs=include_dirs, + ) + error = result.get("error") if isinstance(result, dict) else None + if isinstance(error, str) and error: + return error + return json.dumps(result, ensure_ascii=True) + + return StructuredTool.from_function( + name="list_tree", + description=tool_description, + func=sync_list_tree, + coroutine=async_list_tree, + ) + def _create_edit_file_tool(self) -> BaseTool: """Create edit_file with DB persistence (skipped for KB documents).""" tool_description = ( diff --git a/surfsense_backend/app/agents/new_chat/middleware/local_folder_backend.py b/surfsense_backend/app/agents/new_chat/middleware/local_folder_backend.py index 60d967053..0cee3e007 100644 --- a/surfsense_backend/app/agents/new_chat/middleware/local_folder_backend.py +++ b/surfsense_backend/app/agents/new_chat/middleware/local_folder_backend.py @@ -6,7 +6,10 @@ import asyncio import fnmatch import os import threading +from collections import deque +from contextlib import ExitStack from pathlib import Path +from typing import Any from deepagents.backends.protocol import ( EditResult, @@ -71,6 +74,44 @@ class LocalFolderBackend: temp_path.write_text(content, encoding="utf-8") os.replace(temp_path, path) + def _acquire_path_locks(self, *paths: str) -> ExitStack: + ordered_paths = sorted(set(paths)) + stack = ExitStack() + for path in ordered_paths: + stack.enter_context(self._lock_for(path)) + return stack + + @staticmethod + def _clamp_page_size(page_size: int) -> int: + return max(1, min(page_size, 1000)) + + def _read_dir_entries(self, directory_path: str) -> list[dict[str, Any]]: + directory = Path(directory_path) + try: + 
children = sorted( + directory.iterdir(), + key=lambda p: (not p.is_dir(), p.name.lower()), + ) + except OSError: + return [] + + entries: list[dict[str, Any]] = [] + for child in children: + try: + stat_result = child.stat() + except OSError: + continue + entries.append( + { + "path": self._to_virtual(child, self._root), + "is_dir": child.is_dir(), + "size": stat_result.st_size if child.is_file() else 0, + "modified_at": str(stat_result.st_mtime), + "absolute_path": str(child), + } + ) + return entries + def ls_info(self, path: str) -> list[FileInfo]: try: target = self._resolve_virtual(path, allow_root=True) @@ -139,12 +180,178 @@ class LocalFolderBackend: "Read and then make an edit, or write to a new path." ) ) + parent = path.parent + if not parent.exists() or not parent.is_dir(): + return WriteResult( + error=( + f"Error: parent directory for '{file_path}' does not exist. " + "Create the folder first or write to an existing directory." + ) + ) self._write_text_atomic(path, content) return WriteResult(path=file_path, files_update=None) async def awrite(self, file_path: str, content: str) -> WriteResult: return await asyncio.to_thread(self.write, file_path, content) + def list_tree( + self, + path: str = "/", + *, + max_depth: int | None = 8, + page_size: int = 500, + include_files: bool = True, + include_dirs: bool = True, + ) -> dict[str, Any]: + if not include_files and not include_dirs: + return { + "entries": [], + "truncated": False, + } + + normalized_depth = None if max_depth is None else max(0, int(max_depth)) + page_limit = self._clamp_page_size(int(page_size)) + try: + start = self._resolve_virtual(path, allow_root=True) + except ValueError: + return {"error": f"Error: invalid path '{path}'"} + if not start.exists(): + return {"error": f"Error: path '{path}' not found"} + if start.is_file(): + stat_result = start.stat() + if include_files: + return { + "entries": [ + { + "path": self._to_virtual(start, self._root), + "is_dir": False, + "size": 
stat_result.st_size, + "modified_at": str(stat_result.st_mtime), + "depth": 0, + } + ], + "truncated": False, + } + return { + "entries": [], + "truncated": False, + } + + pending_dirs: deque[tuple[str, int]] = deque([(str(start), 0)]) + entries: list[dict[str, Any]] = [] + truncated = False + while pending_dirs and not truncated: + next_dir_path, next_depth = pending_dirs.popleft() + active_entries = self._read_dir_entries(next_dir_path) + for item in active_entries: + item_depth = next_depth + 1 + if normalized_depth is not None and item_depth > normalized_depth: + continue + if item["is_dir"]: + if normalized_depth is None or item_depth <= normalized_depth: + pending_dirs.append((item["absolute_path"], item_depth)) + if include_dirs: + entries.append( + { + "path": item["path"], + "is_dir": True, + "size": 0, + "modified_at": item["modified_at"], + "depth": item_depth, + } + ) + elif include_files: + entries.append( + { + "path": item["path"], + "is_dir": False, + "size": item["size"], + "modified_at": item["modified_at"], + "depth": item_depth, + } + ) + if len(entries) >= page_limit: + truncated = True + break + + return { + "entries": entries, + "truncated": truncated, + } + + async def alist_tree( + self, + path: str = "/", + *, + max_depth: int | None = 8, + page_size: int = 500, + include_files: bool = True, + include_dirs: bool = True, + ) -> dict[str, Any]: + return await asyncio.to_thread( + self.list_tree, + path, + max_depth=max_depth, + page_size=page_size, + include_files=include_files, + include_dirs=include_dirs, + ) + + def move( + self, + source_path: str, + destination_path: str, + overwrite: bool = False, + ) -> WriteResult: + try: + source = self._resolve_virtual(source_path) + destination = self._resolve_virtual(destination_path) + except ValueError: + return WriteResult( + error=( + f"Error: invalid source '{source_path}' or destination " + f"'{destination_path}' path" + ) + ) + if source == destination: + return WriteResult(error="Error: 
source and destination paths are the same") + with self._acquire_path_locks(source_path, destination_path): + if not source.exists(): + return WriteResult(error=f"Error: source path '{source_path}' not found") + if destination.exists(): + if not overwrite: + return WriteResult( + error=( + f"Error: destination path '{destination_path}' already exists. " + "Set overwrite=True to replace files." + ) + ) + if source.is_dir() or destination.is_dir(): + return WriteResult( + error=( + "Error: overwrite=True is only supported for file-to-file moves." + ) + ) + destination.parent.mkdir(parents=True, exist_ok=True) + try: + if overwrite: + os.replace(source, destination) + else: + source.rename(destination) + except OSError as exc: + return WriteResult(error=f"Error: failed to move '{source_path}': {exc}") + return WriteResult(path=self._to_virtual(destination, self._root), files_update=None) + + async def amove( + self, + source_path: str, + destination_path: str, + overwrite: bool = False, + ) -> WriteResult: + return await asyncio.to_thread( + self.move, source_path, destination_path, overwrite + ) + def edit( self, file_path: str, diff --git a/surfsense_backend/app/agents/new_chat/middleware/multi_root_local_folder_backend.py b/surfsense_backend/app/agents/new_chat/middleware/multi_root_local_folder_backend.py index 12632f00f..82914f9ce 100644 --- a/surfsense_backend/app/agents/new_chat/middleware/multi_root_local_folder_backend.py +++ b/surfsense_backend/app/agents/new_chat/middleware/multi_root_local_folder_backend.py @@ -132,6 +132,82 @@ class MultiRootLocalFolderBackend: async def als_info(self, path: str) -> list[FileInfo]: return await asyncio.to_thread(self.ls_info, path) + def list_tree( + self, + path: str = "/", + *, + max_depth: int | None = 8, + page_size: int = 500, + include_files: bool = True, + include_dirs: bool = True, + ) -> dict[str, Any]: + if path == "/": + entries = [ + { + "path": f"/{mount}", + "is_dir": True, + "size": 0, + "modified_at": "0", 
+ "depth": 0, + } + for mount in self._mount_order + ] + return { + "entries": entries if include_dirs else [], + "truncated": False, + } + + try: + mount, local_path = self._split_mount_path(path) + except ValueError as exc: + return {"error": f"Error: {exc}"} + + result = self._mount_to_backend[mount].list_tree( + local_path, + max_depth=max_depth, + page_size=page_size, + include_files=include_files, + include_dirs=include_dirs, + ) + if result.get("error"): + return result + + entries: list[dict[str, Any]] = [] + for entry in result.get("entries", []): + raw_path = self._get_str(entry, "path") + entries.append( + { + "path": self._prefix_mount_path(mount, raw_path), + "is_dir": self._get_bool(entry, "is_dir"), + "size": self._get_int(entry, "size"), + "modified_at": self._get_str(entry, "modified_at"), + "depth": self._get_int(entry, "depth"), + } + ) + + return { + "entries": entries, + "truncated": self._get_bool(result, "truncated"), + } + + async def alist_tree( + self, + path: str = "/", + *, + max_depth: int | None = 8, + page_size: int = 500, + include_files: bool = True, + include_dirs: bool = True, + ) -> dict[str, Any]: + return await asyncio.to_thread( + self.list_tree, + path, + max_depth=max_depth, + page_size=page_size, + include_files=include_files, + include_dirs=include_dirs, + ) + def read(self, file_path: str, offset: int = 0, limit: int = 2000) -> str: try: mount, local_path = self._split_mount_path(file_path) @@ -165,6 +241,48 @@ class MultiRootLocalFolderBackend: async def awrite(self, file_path: str, content: str) -> WriteResult: return await asyncio.to_thread(self.write, file_path, content) + def move( + self, + source_path: str, + destination_path: str, + overwrite: bool = False, + ) -> WriteResult: + try: + source_mount, source_local_path = self._split_mount_path(source_path) + destination_mount, destination_local_path = self._split_mount_path( + destination_path + ) + except ValueError as exc: + return WriteResult(error=f"Error: 
{exc}") + if source_mount != destination_mount: + return WriteResult( + error=( + "Error: cross-mount moves are not supported. " + "Source and destination must be under the same mounted root." + ) + ) + result = self._mount_to_backend[source_mount].move( + source_local_path, + destination_local_path, + overwrite=overwrite, + ) + if result.path: + result.path = self._prefix_mount_path(source_mount, result.path) + return result + + async def amove( + self, + source_path: str, + destination_path: str, + overwrite: bool = False, + ) -> WriteResult: + return await asyncio.to_thread( + self.move, + source_path, + destination_path, + overwrite, + ) + def edit( self, file_path: str, diff --git a/surfsense_backend/app/celery_app.py b/surfsense_backend/app/celery_app.py index e3a520c48..58a8b0f39 100644 --- a/surfsense_backend/app/celery_app.py +++ b/surfsense_backend/app/celery_app.py @@ -90,6 +90,7 @@ celery_app = Celery( "app.tasks.celery_tasks.podcast_tasks", "app.tasks.celery_tasks.video_presentation_tasks", "app.tasks.celery_tasks.connector_tasks", + "app.tasks.celery_tasks.obsidian_tasks", "app.tasks.celery_tasks.schedule_checker_task", "app.tasks.celery_tasks.document_reindex_tasks", "app.tasks.celery_tasks.stale_notification_cleanup_task", @@ -144,8 +145,8 @@ celery_app.conf.update( "index_elasticsearch_documents": {"queue": CONNECTORS_QUEUE}, "index_crawled_urls": {"queue": CONNECTORS_QUEUE}, "index_bookstack_pages": {"queue": CONNECTORS_QUEUE}, - "index_obsidian_vault": {"queue": CONNECTORS_QUEUE}, "index_composio_connector": {"queue": CONNECTORS_QUEUE}, + "index_obsidian_attachment": {"queue": CONNECTORS_QUEUE}, # Everything else (document processing, podcasts, reindexing, # schedule checker, cleanup) stays on the default fast queue. 
}, diff --git a/surfsense_backend/app/db.py b/surfsense_backend/app/db.py index 16b40983e..e16590afc 100644 --- a/surfsense_backend/app/db.py +++ b/surfsense_backend/app/db.py @@ -1510,6 +1510,31 @@ class SearchSourceConnector(BaseModel, TimestampMixin): "name", name="uq_searchspace_user_connector_type_name", ), + # Mirrors migration 129; backs the ``/obsidian/connect`` upsert. + Index( + "search_source_connectors_obsidian_plugin_vault_uniq", + "user_id", + text("(config->>'vault_id')"), + unique=True, + postgresql_where=text( + "connector_type = 'OBSIDIAN_CONNECTOR' " + "AND config->>'source' = 'plugin' " + "AND config->>'vault_id' IS NOT NULL" + ), + ), + # Cross-device dedup: same vault content from different devices + # cannot produce two connector rows. + Index( + "search_source_connectors_obsidian_plugin_fingerprint_uniq", + "user_id", + text("(config->>'vault_fingerprint')"), + unique=True, + postgresql_where=text( + "connector_type = 'OBSIDIAN_CONNECTOR' " + "AND config->>'source' = 'plugin' " + "AND config->>'vault_fingerprint' IS NOT NULL" + ), + ), ) name = Column(String(100), nullable=False, index=True) diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py index 9464a7ded..fafd4d356 100644 --- a/surfsense_backend/app/routes/__init__.py +++ b/surfsense_backend/app/routes/__init__.py @@ -37,6 +37,7 @@ from .new_llm_config_routes import router as new_llm_config_router from .notes_routes import router as notes_router from .notifications_routes import router as notifications_router from .notion_add_connector_route import router as notion_add_connector_router +from .obsidian_plugin_routes import router as obsidian_plugin_router from .onedrive_add_connector_route import router as onedrive_add_connector_router from .podcasts_routes import router as podcasts_router from .prompts_routes import router as prompts_router @@ -84,6 +85,7 @@ router.include_router(notion_add_connector_router) 
router.include_router(slack_add_connector_router) router.include_router(teams_add_connector_router) router.include_router(onedrive_add_connector_router) +router.include_router(obsidian_plugin_router) # Obsidian plugin push API router.include_router(discord_add_connector_router) router.include_router(jira_add_connector_router) router.include_router(confluence_add_connector_router) diff --git a/surfsense_backend/app/routes/obsidian_plugin_routes.py b/surfsense_backend/app/routes/obsidian_plugin_routes.py new file mode 100644 index 000000000..0dae7a463 --- /dev/null +++ b/surfsense_backend/app/routes/obsidian_plugin_routes.py @@ -0,0 +1,706 @@ +"""Obsidian plugin ingestion routes (``/api/v1/obsidian/*``). + +Wire surface for the ``surfsense_obsidian/`` plugin. Versioning anchor is +the ``/api/v1/`` URL prefix; additive feature detection rides the +``capabilities`` array on /health and /connect. +""" + +from __future__ import annotations + +import logging +from datetime import UTC, datetime + +from fastapi import APIRouter, Depends, HTTPException, Query, status +from sqlalchemy import and_, case, func +from sqlalchemy.dialects.postgresql import insert as pg_insert +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.db import ( + Document, + DocumentType, + SearchSourceConnector, + SearchSourceConnectorType, + SearchSpace, + User, + get_async_session, +) +from app.schemas.obsidian_plugin import ( + ALLOWED_ATTACHMENT_EXTENSIONS, + ATTACHMENT_MIME_TYPES, + ConnectRequest, + ConnectResponse, + DeleteAck, + DeleteAckItem, + DeleteBatchRequest, + HealthResponse, + ManifestResponse, + RenameAck, + RenameAckItem, + RenameBatchRequest, + StatsResponse, + SyncAck, + SyncAckItem, + SyncBatchRequest, +) +from app.services.notification_service import NotificationService +from app.services.obsidian_plugin_indexer import ( + delete_note, + get_manifest, + merge_obsidian_connectors, + rename_note, + upsert_note, +) +from 
app.tasks.celery_tasks.obsidian_tasks import index_obsidian_attachment_task +from app.users import current_active_user + +logger = logging.getLogger(__name__) + +router = APIRouter(prefix="/obsidian", tags=["obsidian-plugin"]) + + +# Plugins feature-gate on these. Add entries, never rename or remove. +OBSIDIAN_CAPABILITIES: list[str] = ["sync", "rename", "delete", "manifest", "stats"] + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _build_handshake() -> dict[str, object]: + return {"capabilities": list(OBSIDIAN_CAPABILITIES)} + + +def _connector_type_value(connector: SearchSourceConnector) -> str: + connector_type = connector.connector_type + if hasattr(connector_type, "value"): + return str(connector_type.value) + return str(connector_type) + + +async def _start_obsidian_sync_notification( + session: AsyncSession, + *, + user: User, + connector: SearchSourceConnector, + total_count: int, +): + """Create/update the rolling inbox item for Obsidian plugin sync. + + Obsidian sync is continuous and batched, so we keep one stable + operation_id per connector instead of creating a new notification per batch. 
+ """ + handler = NotificationService.connector_indexing + operation_id = f"obsidian_sync_connector_{connector.id}" + connector_name = connector.name or "Obsidian" + notification = await handler.find_or_create_notification( + session=session, + user_id=user.id, + operation_id=operation_id, + title=f"Syncing: {connector_name}", + message="Syncing from Obsidian plugin", + search_space_id=connector.search_space_id, + initial_metadata={ + "connector_id": connector.id, + "connector_name": connector_name, + "connector_type": _connector_type_value(connector), + "sync_stage": "processing", + "indexed_count": 0, + "failed_count": 0, + "total_count": total_count, + "source": "obsidian_plugin", + }, + ) + return await handler.update_notification( + session=session, + notification=notification, + status="in_progress", + metadata_updates={ + "sync_stage": "processing", + "total_count": total_count, + }, + ) + + +async def _finish_obsidian_sync_notification( + session: AsyncSession, + *, + notification, + indexed: int, + failed: int, +): + """Mark the rolling Obsidian sync inbox item complete or failed.""" + handler = NotificationService.connector_indexing + connector_name = notification.notification_metadata.get( + "connector_name", "Obsidian" + ) + if failed > 0 and indexed == 0: + title = f"Failed: {connector_name}" + message = ( + f"Sync failed: {failed} file(s) failed" + if failed > 1 + else "Sync failed: 1 file failed" + ) + status_value = "failed" + stage = "failed" + else: + title = f"Ready: {connector_name}" + if failed > 0: + message = f"Partially synced: {indexed} file(s) synced, {failed} failed." + elif indexed == 0: + message = "Already up to date!" + elif indexed == 1: + message = "Now searchable! 1 file synced." + else: + message = f"Now searchable! {indexed} files synced." 
+ status_value = "completed" + stage = "completed" + + await handler.update_notification( + session=session, + notification=notification, + title=title, + message=message, + status=status_value, + metadata_updates={ + "indexed_count": indexed, + "failed_count": failed, + "sync_stage": stage, + }, + ) + + +async def _resolve_vault_connector( + session: AsyncSession, + *, + user: User, + vault_id: str, +) -> SearchSourceConnector: + """Find the OBSIDIAN_CONNECTOR row that owns ``vault_id`` for this user.""" + # ``config`` is core ``JSON`` (not ``JSONB``); ``as_string()`` is the + # cross-dialect equivalent of ``.astext`` and compiles to ``->>``. + stmt = select(SearchSourceConnector).where( + and_( + SearchSourceConnector.user_id == user.id, + SearchSourceConnector.connector_type + == SearchSourceConnectorType.OBSIDIAN_CONNECTOR, + SearchSourceConnector.config["vault_id"].as_string() == vault_id, + SearchSourceConnector.config["source"].as_string() == "plugin", + ) + ) + + connector = (await session.execute(stmt)).scalars().first() + if connector is not None: + return connector + + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail={ + "code": "VAULT_NOT_REGISTERED", + "message": ( + "No Obsidian plugin connector found for this vault. " + "Call POST /obsidian/connect first." 
+ ), + "vault_id": vault_id, + }, + ) + + +def _queue_obsidian_attachment( + *, connector_id: int, note_payload: dict, user_id: str +) -> None: + """Enqueue one non-markdown Obsidian note for background ETL/indexing.""" + index_obsidian_attachment_task.delay( + connector_id=connector_id, + payload_data=note_payload, + user_id=user_id, + ) + + +async def _ensure_search_space_access( + session: AsyncSession, + *, + user: User, + search_space_id: int, +) -> SearchSpace: + """Owner-only access to the search space (shared spaces are a follow-up).""" + result = await session.execute( + select(SearchSpace).where( + and_(SearchSpace.id == search_space_id, SearchSpace.user_id == user.id) + ) + ) + space = result.scalars().first() + if space is None: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail={ + "code": "SEARCH_SPACE_FORBIDDEN", + "message": "You don't own that search space.", + }, + ) + return space + + +# --------------------------------------------------------------------------- +# Endpoints +# --------------------------------------------------------------------------- + + +@router.get("/health", response_model=HealthResponse) +async def obsidian_health( + user: User = Depends(current_active_user), +) -> HealthResponse: + """Return the API contract handshake; plugin caches it per onload.""" + return HealthResponse( + **_build_handshake(), + server_time_utc=datetime.now(UTC), + ) + + +async def _find_by_vault_id( + session: AsyncSession, *, user_id, vault_id: str +) -> SearchSourceConnector | None: + stmt = select(SearchSourceConnector).where( + and_( + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type + == SearchSourceConnectorType.OBSIDIAN_CONNECTOR, + SearchSourceConnector.config["source"].as_string() == "plugin", + SearchSourceConnector.config["vault_id"].as_string() == vault_id, + ) + ) + return (await session.execute(stmt)).scalars().first() + + +async def _find_by_fingerprint( + session: AsyncSession, *, 
user_id, vault_fingerprint: str +) -> SearchSourceConnector | None: + stmt = select(SearchSourceConnector).where( + and_( + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type + == SearchSourceConnectorType.OBSIDIAN_CONNECTOR, + SearchSourceConnector.config["source"].as_string() == "plugin", + SearchSourceConnector.config["vault_fingerprint"].as_string() + == vault_fingerprint, + ) + ) + return (await session.execute(stmt)).scalars().first() + + +def _build_config(payload: ConnectRequest, *, now_iso: str) -> dict[str, object]: + return { + "vault_id": payload.vault_id, + "vault_name": payload.vault_name, + "vault_fingerprint": payload.vault_fingerprint, + "source": "plugin", + "last_connect_at": now_iso, + } + + +def _display_name(vault_name: str) -> str: + return f"Obsidian - {vault_name}" + + +@router.post("/connect", response_model=ConnectResponse) +async def obsidian_connect( + payload: ConnectRequest, + user: User = Depends(current_active_user), + session: AsyncSession = Depends(get_async_session), +) -> ConnectResponse: + """Register a vault, refresh an existing one, or adopt another device's row. + + Resolution order: + 1. ``(user_id, vault_id)`` → known device, refresh metadata. + 2. ``(user_id, vault_fingerprint)`` → another device of the same vault, + caller adopts the surviving ``vault_id``. + 3. Insert a new row. + + Fingerprint collisions on (1) trigger ``merge_obsidian_connectors`` so + the partial unique index can never produce two live rows for one vault. 
+ """ + await _ensure_search_space_access( + session, user=user, search_space_id=payload.search_space_id + ) + + now_iso = datetime.now(UTC).isoformat() + cfg = _build_config(payload, now_iso=now_iso) + display_name = _display_name(payload.vault_name) + + existing_by_vid = await _find_by_vault_id( + session, user_id=user.id, vault_id=payload.vault_id + ) + if existing_by_vid is not None: + collision = await _find_by_fingerprint( + session, user_id=user.id, vault_fingerprint=payload.vault_fingerprint + ) + if collision is not None and collision.id != existing_by_vid.id: + await merge_obsidian_connectors( + session, source=existing_by_vid, target=collision + ) + collision_cfg = dict(collision.config or {}) + collision_cfg["vault_name"] = payload.vault_name + collision_cfg["last_connect_at"] = now_iso + collision.config = collision_cfg + collision.name = _display_name(payload.vault_name) + response = ConnectResponse( + connector_id=collision.id, + vault_id=collision_cfg["vault_id"], + search_space_id=collision.search_space_id, + server_time_utc=datetime.now(UTC), + **_build_handshake(), + ) + await session.commit() + return response + + existing_by_vid.name = display_name + existing_by_vid.config = cfg + existing_by_vid.search_space_id = payload.search_space_id + existing_by_vid.is_indexable = False + response = ConnectResponse( + connector_id=existing_by_vid.id, + vault_id=payload.vault_id, + search_space_id=existing_by_vid.search_space_id, + server_time_utc=datetime.now(UTC), + **_build_handshake(), + ) + await session.commit() + return response + + existing_by_fp = await _find_by_fingerprint( + session, user_id=user.id, vault_fingerprint=payload.vault_fingerprint + ) + if existing_by_fp is not None: + survivor_cfg = dict(existing_by_fp.config or {}) + survivor_cfg["vault_name"] = payload.vault_name + survivor_cfg["last_connect_at"] = now_iso + existing_by_fp.config = survivor_cfg + existing_by_fp.name = display_name + response = ConnectResponse( + 
connector_id=existing_by_fp.id, + vault_id=survivor_cfg["vault_id"], + search_space_id=existing_by_fp.search_space_id, + server_time_utc=datetime.now(UTC), + **_build_handshake(), + ) + await session.commit() + return response + + # ON CONFLICT DO NOTHING matches any unique index (vault_id OR + # fingerprint), so concurrent first-time connects from two devices + # of the same vault never raise IntegrityError — the loser just + # gets an empty RETURNING and falls through to re-fetch the winner. + insert_stmt = ( + pg_insert(SearchSourceConnector) + .values( + name=display_name, + connector_type=SearchSourceConnectorType.OBSIDIAN_CONNECTOR, + is_indexable=False, + config=cfg, + user_id=user.id, + search_space_id=payload.search_space_id, + ) + .on_conflict_do_nothing() + .returning( + SearchSourceConnector.id, + SearchSourceConnector.search_space_id, + ) + ) + inserted = (await session.execute(insert_stmt)).first() + if inserted is not None: + response = ConnectResponse( + connector_id=inserted.id, + vault_id=payload.vault_id, + search_space_id=inserted.search_space_id, + server_time_utc=datetime.now(UTC), + **_build_handshake(), + ) + await session.commit() + return response + + winner = await _find_by_fingerprint( + session, user_id=user.id, vault_fingerprint=payload.vault_fingerprint + ) + if winner is None: + winner = await _find_by_vault_id( + session, user_id=user.id, vault_id=payload.vault_id + ) + if winner is None: + raise HTTPException( + status_code=status.HTTP_409_CONFLICT, + detail="vault registration conflicted but winning row could not be located", + ) + response = ConnectResponse( + connector_id=winner.id, + vault_id=(winner.config or {})["vault_id"], + search_space_id=winner.search_space_id, + server_time_utc=datetime.now(UTC), + **_build_handshake(), + ) + await session.commit() + return response + + +@router.post("/sync", response_model=SyncAck) +async def obsidian_sync( + payload: SyncBatchRequest, + user: User = Depends(current_active_user), + 
session: AsyncSession = Depends(get_async_session), +) -> SyncAck: + """Batch-upsert notes; returns per-note ack so the plugin can dequeue/retry.""" + connector = await _resolve_vault_connector( + session, user=user, vault_id=payload.vault_id + ) + notification = None + try: + notification = await _start_obsidian_sync_notification( + session, user=user, connector=connector, total_count=len(payload.notes) + ) + except Exception: + logger.warning( + "obsidian sync notification start failed connector=%s user=%s", + connector.id, + user.id, + exc_info=True, + ) + + items: list[SyncAckItem] = [] + indexed = 0 + failed = 0 + + for note in payload.notes: + try: + if note.is_binary: + ext = note.extension.lstrip(".").lower() + if ext not in ALLOWED_ATTACHMENT_EXTENSIONS: + failed += 1 + items.append( + SyncAckItem( + path=note.path, + status="error", + error=f"unsupported attachment extension: .{ext}", + ) + ) + continue + expected_mime = ATTACHMENT_MIME_TYPES[ext] + if note.mime_type != expected_mime: + failed += 1 + items.append( + SyncAckItem( + path=note.path, + status="error", + error=( + f"mime_type '{note.mime_type}' does not match " + f"extension .{ext}" + ), + ) + ) + continue + _queue_obsidian_attachment( + connector_id=connector.id, + note_payload=note.model_dump(mode="json"), + user_id=str(user.id), + ) + indexed += 1 + items.append(SyncAckItem(path=note.path, status="queued")) + continue + + doc = await upsert_note( + session, connector=connector, payload=note, user_id=str(user.id) + ) + indexed += 1 + items.append(SyncAckItem(path=note.path, status="ok", document_id=doc.id)) + except HTTPException: + raise + except Exception as exc: + failed += 1 + logger.exception( + "obsidian /sync failed for path=%s vault=%s", + note.path, + payload.vault_id, + ) + items.append( + SyncAckItem(path=note.path, status="error", error=str(exc)[:300]) + ) + + if notification is not None: + try: + await _finish_obsidian_sync_notification( + session, + notification=notification, + 
indexed=indexed, + failed=failed, + ) + except Exception: + logger.warning( + "obsidian sync notification finish failed connector=%s user=%s", + connector.id, + user.id, + exc_info=True, + ) + + return SyncAck( + vault_id=payload.vault_id, + indexed=indexed, + failed=failed, + items=items, + ) + + +@router.post("/rename", response_model=RenameAck) +async def obsidian_rename( + payload: RenameBatchRequest, + user: User = Depends(current_active_user), + session: AsyncSession = Depends(get_async_session), +) -> RenameAck: + """Apply a batch of vault rename events.""" + connector = await _resolve_vault_connector( + session, user=user, vault_id=payload.vault_id + ) + + items: list[RenameAckItem] = [] + renamed = 0 + missing = 0 + + for item in payload.renames: + try: + doc = await rename_note( + session, + connector=connector, + old_path=item.old_path, + new_path=item.new_path, + vault_id=payload.vault_id, + ) + if doc is None: + missing += 1 + items.append( + RenameAckItem( + old_path=item.old_path, + new_path=item.new_path, + status="missing", + ) + ) + else: + renamed += 1 + items.append( + RenameAckItem( + old_path=item.old_path, + new_path=item.new_path, + status="ok", + document_id=doc.id, + ) + ) + except Exception as exc: + logger.exception( + "obsidian /rename failed for old=%s new=%s vault=%s", + item.old_path, + item.new_path, + payload.vault_id, + ) + items.append( + RenameAckItem( + old_path=item.old_path, + new_path=item.new_path, + status="error", + error=str(exc)[:300], + ) + ) + + return RenameAck( + vault_id=payload.vault_id, + renamed=renamed, + missing=missing, + items=items, + ) + + +@router.delete("/notes", response_model=DeleteAck) +async def obsidian_delete_notes( + payload: DeleteBatchRequest, + user: User = Depends(current_active_user), + session: AsyncSession = Depends(get_async_session), +) -> DeleteAck: + """Soft-delete a batch of notes by vault-relative path.""" + connector = await _resolve_vault_connector( + session, user=user, 
vault_id=payload.vault_id + ) + + deleted = 0 + missing = 0 + items: list[DeleteAckItem] = [] + for path in payload.paths: + try: + ok = await delete_note( + session, + connector=connector, + vault_id=payload.vault_id, + path=path, + ) + if ok: + deleted += 1 + items.append(DeleteAckItem(path=path, status="ok")) + else: + missing += 1 + items.append(DeleteAckItem(path=path, status="missing")) + except Exception as exc: + logger.exception( + "obsidian DELETE /notes failed for path=%s vault=%s", + path, + payload.vault_id, + ) + items.append(DeleteAckItem(path=path, status="error", error=str(exc)[:300])) + + return DeleteAck( + vault_id=payload.vault_id, + deleted=deleted, + missing=missing, + items=items, + ) + + +@router.get("/manifest", response_model=ManifestResponse) +async def obsidian_manifest( + vault_id: str = Query(..., description="Plugin-side stable vault UUID"), + user: User = Depends(current_active_user), + session: AsyncSession = Depends(get_async_session), +) -> ManifestResponse: + """Return ``{path: {hash, mtime}}`` for the plugin's onload reconcile diff.""" + connector = await _resolve_vault_connector(session, user=user, vault_id=vault_id) + return await get_manifest(session, connector=connector, vault_id=vault_id) + + +@router.get("/stats", response_model=StatsResponse) +async def obsidian_stats( + vault_id: str = Query(..., description="Plugin-side stable vault UUID"), + user: User = Depends(current_active_user), + session: AsyncSession = Depends(get_async_session), +) -> StatsResponse: + """Active-note count + last sync time for the web tile. + + ``files_synced`` excludes tombstones so it matches ``/manifest``; + ``last_sync_at`` includes them so deletes advance the freshness signal. 
+ """ + connector = await _resolve_vault_connector(session, user=user, vault_id=vault_id) + + is_active = Document.document_metadata["deleted_at"].as_string().is_(None) + + row = ( + await session.execute( + select( + func.count(case((is_active, 1))).label("files_synced"), + func.max(Document.updated_at).label("last_sync_at"), + ).where( + and_( + Document.connector_id == connector.id, + Document.document_type == DocumentType.OBSIDIAN_CONNECTOR, + ) + ) + ) + ).first() + + return StatsResponse( + vault_id=vault_id, + files_synced=int(row[0] or 0), + last_sync_at=row[1], + ) diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py index b8142c192..d42a7fa1a 100644 --- a/surfsense_backend/app/routes/search_source_connectors_routes.py +++ b/surfsense_backend/app/routes/search_source_connectors_routes.py @@ -81,6 +81,36 @@ _heartbeat_redis_client: redis.Redis | None = None # Redis key TTL - notification is stale if no heartbeat in this time HEARTBEAT_TTL_SECONDS = 120 # 2 minutes +# How often the background loop refreshes the Redis key. Must be < TTL so +# the key cannot expire between refreshes when the indexing function is +# doing blocking work (e.g. gitingest in Phase 1) that doesn't trigger +# on_heartbeat_callback. +HEARTBEAT_REFRESH_INTERVAL = 60 + + +async def _run_indexing_heartbeat_loop(notification_id: int) -> None: + """Background coroutine that refreshes the Redis heartbeat every + HEARTBEAT_REFRESH_INTERVAL seconds while a connector indexing task is + running. + + Mirrors `_run_heartbeat_loop` in app/tasks/celery_tasks/document_tasks.py. + Cancelled via heartbeat_task.cancel() when the indexing call returns + (success or failure). If the worker dies, the coroutine dies with it + and the Redis key expires naturally on its TTL. 
+ """ + key = _get_heartbeat_key(notification_id) + try: + while True: + await asyncio.sleep(HEARTBEAT_REFRESH_INTERVAL) + try: + get_heartbeat_redis_client().setex(key, HEARTBEAT_TTL_SECONDS, "alive") + except Exception as e: + logger.warning( + f"Failed to refresh Redis heartbeat for notification " + f"{notification_id}: {e}" + ) + except asyncio.CancelledError: + pass # Normal cancellation when the indexing task completes def get_heartbeat_redis_client() -> redis.Redis: @@ -1028,25 +1058,6 @@ async def index_connector_content( ) response_message = "Web page indexing started in the background." - elif connector.connector_type == SearchSourceConnectorType.OBSIDIAN_CONNECTOR: - from app.config import config as app_config - from app.tasks.celery_tasks.connector_tasks import index_obsidian_vault_task - - # Obsidian connector only available in self-hosted mode - if not app_config.is_self_hosted(): - raise HTTPException( - status_code=400, - detail="Obsidian connector is only available in self-hosted mode", - ) - - logger.info( - f"Triggering Obsidian vault indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_obsidian_vault_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Obsidian vault indexing started in the background." - elif ( connector.connector_type == SearchSourceConnectorType.COMPOSIO_GOOGLE_DRIVE_CONNECTOR @@ -1284,6 +1295,7 @@ async def _run_indexing_with_notifications( notification = None connector_lock_acquired = False + heartbeat_task: asyncio.Task | None = None # Track indexed count for retry notifications and heartbeat current_indexed_count = 0 @@ -1329,6 +1341,16 @@ async def _run_indexing_with_notifications( except Exception as e: logger.warning(f"Failed to set initial Redis heartbeat: {e}") + # Start a background coroutine that refreshes the + # heartbeat every HEARTBEAT_REFRESH_INTERVAL seconds. 
+ # Without this the cleanup_stale_indexing_notifications + # task can mark the doc failed when on_heartbeat_callback + # doesn't fire — for example during the GitHub + # connector's Phase 1 gitingest blocking call (#1295). + heartbeat_task = asyncio.create_task( + _run_indexing_heartbeat_loop(notification.id) + ) + # Update notification to fetching stage if notification: await NotificationService.connector_indexing.notify_indexing_progress( @@ -1619,6 +1641,13 @@ async def _run_indexing_with_notifications( except Exception as notif_error: logger.error(f"Failed to update notification: {notif_error!s}") finally: + # Stop the background heartbeat refresher BEFORE deleting the + # Redis key, so the loop cannot race and re-create the key + # after we delete it. + if heartbeat_task is not None: + heartbeat_task.cancel() + with suppress(Exception): + await asyncio.gather(heartbeat_task, return_exceptions=True) # Clean up Redis heartbeat key when task completes (success or failure) if notification: try: @@ -2501,59 +2530,6 @@ async def run_bookstack_indexing( ) -# Add new helper functions for Obsidian indexing -async def run_obsidian_indexing_with_new_session( - connector_id: int, - search_space_id: int, - user_id: str, - start_date: str, - end_date: str, -): - """Wrapper to run Obsidian indexing with its own database session.""" - logger.info( - f"Background task started: Indexing Obsidian connector {connector_id} into space {search_space_id} from {start_date} to {end_date}" - ) - async with async_session_maker() as session: - await run_obsidian_indexing( - session, connector_id, search_space_id, user_id, start_date, end_date - ) - logger.info(f"Background task finished: Indexing Obsidian connector {connector_id}") - - -async def run_obsidian_indexing( - session: AsyncSession, - connector_id: int, - search_space_id: int, - user_id: str, - start_date: str, - end_date: str, -): - """ - Background task to run Obsidian vault indexing. 
- - Args: - session: Database session - connector_id: ID of the Obsidian connector - search_space_id: ID of the search space - user_id: ID of the user - start_date: Start date for indexing - end_date: End date for indexing - """ - from app.tasks.connector_indexers import index_obsidian_vault - - await _run_indexing_with_notifications( - session=session, - connector_id=connector_id, - search_space_id=search_space_id, - user_id=user_id, - start_date=start_date, - end_date=end_date, - indexing_function=index_obsidian_vault, - update_timestamp_func=_update_connector_timestamp_by_id, - supports_heartbeat_callback=True, - ) - - async def run_composio_indexing_with_new_session( connector_id: int, search_space_id: int, diff --git a/surfsense_backend/app/schemas/obsidian_auth_credentials.py b/surfsense_backend/app/schemas/obsidian_auth_credentials.py deleted file mode 100644 index ab178eac8..000000000 --- a/surfsense_backend/app/schemas/obsidian_auth_credentials.py +++ /dev/null @@ -1,59 +0,0 @@ -""" -Obsidian Connector Credentials Schema. - -Obsidian is a local-first note-taking app that stores notes as markdown files. -This connector supports indexing from local file system (self-hosted only). -""" - -from pydantic import BaseModel, field_validator - - -class ObsidianAuthCredentialsBase(BaseModel): - """ - Credentials/configuration for the Obsidian connector. - - Since Obsidian vaults are local directories, this schema primarily - holds the vault path and configuration options rather than API tokens. 
- """ - - vault_path: str - vault_name: str | None = None - exclude_folders: list[str] | None = None - include_attachments: bool = False - - @field_validator("vault_path") - @classmethod - def validate_vault_path(cls, v: str) -> str: - """Ensure vault path is provided and stripped of whitespace.""" - if not v or not v.strip(): - raise ValueError("Vault path is required") - return v.strip() - - @field_validator("exclude_folders", mode="before") - @classmethod - def parse_exclude_folders(cls, v): - """Parse exclude_folders from string if needed.""" - if v is None: - return [".trash", ".obsidian", "templates"] - if isinstance(v, str): - return [f.strip() for f in v.split(",") if f.strip()] - return v - - def to_dict(self) -> dict: - """Convert credentials to dictionary for storage.""" - return { - "vault_path": self.vault_path, - "vault_name": self.vault_name, - "exclude_folders": self.exclude_folders, - "include_attachments": self.include_attachments, - } - - @classmethod - def from_dict(cls, data: dict) -> "ObsidianAuthCredentialsBase": - """Create credentials from dictionary.""" - return cls( - vault_path=data.get("vault_path", ""), - vault_name=data.get("vault_name"), - exclude_folders=data.get("exclude_folders"), - include_attachments=data.get("include_attachments", False), - ) diff --git a/surfsense_backend/app/schemas/obsidian_plugin.py b/surfsense_backend/app/schemas/obsidian_plugin.py new file mode 100644 index 000000000..89be08c8e --- /dev/null +++ b/surfsense_backend/app/schemas/obsidian_plugin.py @@ -0,0 +1,234 @@ +"""Wire schemas spoken between the SurfSense Obsidian plugin and the backend. + +All schemas inherit ``extra='ignore'`` from :class:`_PluginBase` so additive +field changes never break either side; hard breaks live behind a new URL +prefix (``/api/v2/...``). 
+""" + +from __future__ import annotations + +from datetime import datetime +from typing import Any, Literal + +from pydantic import BaseModel, ConfigDict, Field, model_validator + +_PLUGIN_MODEL_CONFIG = ConfigDict(extra="ignore") + + +# Source of truth for the attachment whitelist. Mirrors MIME_BY_EXTENSION in +# surfsense_obsidian/src/sync-engine.ts — keep in sync. +ATTACHMENT_MIME_TYPES: dict[str, str] = { + "pdf": "application/pdf", + "png": "image/png", + "jpg": "image/jpeg", + "jpeg": "image/jpeg", + "gif": "image/gif", + "webp": "image/webp", + "svg": "image/svg+xml", + "txt": "text/plain", +} +ALLOWED_ATTACHMENT_EXTENSIONS: frozenset[str] = frozenset(ATTACHMENT_MIME_TYPES) + + +class _PluginBase(BaseModel): + """Base schema carrying the shared forward-compatibility config.""" + + model_config = _PLUGIN_MODEL_CONFIG + + +class HeadingRef(_PluginBase): + """One markdown heading extracted from Obsidian metadata cache.""" + + heading: str + level: int = Field(ge=1, le=6) + + +class NotePayload(_PluginBase): + """One Obsidian note as pushed by the plugin (the source of truth).""" + + vault_id: str = Field( + ..., description="Stable plugin-generated UUID for this vault" + ) + path: str = Field(..., description="Vault-relative path, e.g. 
'notes/foo.md'") + name: str = Field(..., description="File stem (no extension)") + extension: str = Field( + default="md", description="File extension without leading dot" + ) + content: str = Field(default="", description="Raw markdown body (post-frontmatter)") + + frontmatter: dict[str, Any] = Field(default_factory=dict) + tags: list[str] = Field(default_factory=list) + headings: list[HeadingRef] = Field(default_factory=list) + resolved_links: list[str] = Field(default_factory=list) + unresolved_links: list[str] = Field(default_factory=list) + embeds: list[str] = Field(default_factory=list) + aliases: list[str] = Field(default_factory=list) + + content_hash: str = Field( + ..., description="Plugin-computed SHA-256 of the raw content" + ) + is_binary: bool = Field( + default=False, + description=( + "True when payload represents a non-markdown attachment. " + "If set, the plugin may include binary_base64 for ETL extraction." + ), + ) + binary_base64: str | None = Field( + default=None, + description=( + "Base64-encoded raw file bytes for binary attachments. " + "Used by the backend ETL pipeline." + ), + ) + mime_type: str | None = Field( + default=None, + description="Optional MIME type hint for binary attachments.", + ) + size: int | None = Field( + default=None, + ge=0, + description="Byte size of the local file (mtime+size short-circuit signal). 
Optional for forward compatibility.", + ) + mtime: datetime + ctime: datetime + + @model_validator(mode="after") + def _enforce_binary_invariants(self) -> NotePayload: + if self.is_binary: + if not self.binary_base64: + raise ValueError("binary_base64 is required when is_binary is True") + if not self.mime_type: + raise ValueError("mime_type is required when is_binary is True") + elif self.binary_base64 is not None or self.mime_type is not None: + raise ValueError( + "binary_base64 and mime_type must be omitted when is_binary is False", + ) + return self + + +class SyncBatchRequest(_PluginBase): + """Batch upsert; plugin sends 10-20 notes per request.""" + + vault_id: str + notes: list[NotePayload] = Field(default_factory=list, max_length=100) + + +class RenameItem(_PluginBase): + old_path: str + new_path: str + + +class RenameBatchRequest(_PluginBase): + vault_id: str + renames: list[RenameItem] = Field(default_factory=list, max_length=200) + + +class DeleteBatchRequest(_PluginBase): + vault_id: str + paths: list[str] = Field(default_factory=list, max_length=500) + + +class ManifestEntry(_PluginBase): + hash: str + mtime: datetime + size: int | None = Field( + default=None, + description="Byte size last seen by the server. Enables mtime+size short-circuit; absent when not yet recorded.", + ) + + +class ManifestResponse(_PluginBase): + """Path-keyed manifest of every non-deleted note for a vault.""" + + vault_id: str + items: dict[str, ManifestEntry] = Field(default_factory=dict) + + +class ConnectRequest(_PluginBase): + """Vault registration / heartbeat. Replayed on every plugin onload.""" + + vault_id: str + vault_name: str + search_space_id: int + vault_fingerprint: str = Field( + ..., + description=( + "Deterministic SHA-256 over the sorted markdown paths in the vault " + "(plus vault_name). Same vault content on any device produces the " + "same value; the server uses it to dedup connectors across devices." 
+ ), + ) + + +class ConnectResponse(_PluginBase): + """Carries the same handshake fields as ``HealthResponse`` so the plugin + learns the contract without a separate ``GET /health`` round-trip.""" + + connector_id: int + vault_id: str + search_space_id: int + capabilities: list[str] + server_time_utc: datetime + + +class HealthResponse(_PluginBase): + """API contract handshake. ``capabilities`` is additive-only string list.""" + + capabilities: list[str] + server_time_utc: datetime + + +# Per-item batch ack schemas — wire shape is load-bearing for the plugin +# queue (see api-client.ts / sync-engine.ts:processBatch). + + +class SyncAckItem(_PluginBase): + path: str + status: Literal["ok", "queued", "error"] + document_id: int | None = None + error: str | None = None + + +class SyncAck(_PluginBase): + vault_id: str + indexed: int + failed: int + items: list[SyncAckItem] = Field(default_factory=list) + + +class RenameAckItem(_PluginBase): + old_path: str + new_path: str + # ``missing`` is treated as success client-side (end state reached). 
+ status: Literal["ok", "error", "missing"] + document_id: int | None = None + error: str | None = None + + +class RenameAck(_PluginBase): + vault_id: str + renamed: int + missing: int + items: list[RenameAckItem] = Field(default_factory=list) + + +class DeleteAckItem(_PluginBase): + path: str + status: Literal["ok", "error", "missing"] + error: str | None = None + + +class DeleteAck(_PluginBase): + vault_id: str + deleted: int + missing: int + items: list[DeleteAckItem] = Field(default_factory=list) + + +class StatsResponse(_PluginBase): + """Backs the Obsidian connector tile in the web UI.""" + + vault_id: str + files_synced: int + last_sync_at: datetime | None = None diff --git a/surfsense_backend/app/services/obsidian_plugin_indexer.py b/surfsense_backend/app/services/obsidian_plugin_indexer.py new file mode 100644 index 000000000..8fbdad269 --- /dev/null +++ b/surfsense_backend/app/services/obsidian_plugin_indexer.py @@ -0,0 +1,616 @@ +""" +Obsidian plugin indexer service. + +Bridges the SurfSense Obsidian plugin's HTTP payloads +(see ``app/schemas/obsidian_plugin.py``) into the shared +``IndexingPipelineService``. + +Responsibilities: + +- ``upsert_note`` — push one note through the indexing pipeline; respects + unchanged content (skip) and version-snapshots existing rows before + rewrite. +- ``rename_note`` — rewrite path-derived fields (path metadata, + ``unique_identifier_hash``, ``source_url``) without re-indexing content. +- ``delete_note`` — soft delete with a tombstone in ``document_metadata`` + so reconciliation can distinguish "user explicitly killed this in the UI" + from "plugin hasn't synced yet". +- ``get_manifest`` — return ``{path: {hash, mtime, size}}`` for every + non-deleted note belonging to a vault, used by the plugin's reconcile + pass on ``onload``. 
+ +Design notes +------------ + +The plugin's content hash and the backend's ``content_hash`` are computed +differently (plugin uses raw SHA-256 of the markdown body; backend salts +with ``search_space_id``). We persist the plugin's hash in +``document_metadata['plugin_content_hash']`` so the manifest endpoint can +return what the plugin sent — that's the only number the plugin can +compare without re-downloading content. +""" + +from __future__ import annotations + +import base64 +import contextlib +import logging +import os +import tempfile +from datetime import UTC, datetime +from typing import Any +from urllib.parse import quote + +from sqlalchemy import and_, select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.db import ( + Document, + DocumentStatus, + DocumentType, + SearchSourceConnector, +) +from app.indexing_pipeline.connector_document import ConnectorDocument +from app.indexing_pipeline.indexing_pipeline_service import IndexingPipelineService +from app.schemas.obsidian_plugin import ( + ManifestEntry, + ManifestResponse, + NotePayload, +) +from app.utils.document_converters import generate_unique_identifier_hash +from app.utils.document_versioning import create_version_snapshot + +logger = logging.getLogger(__name__) + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _vault_path_unique_id(vault_id: str, path: str) -> str: + """Stable identifier for a note. Vault-scoped so the same path under two + different vaults doesn't collide.""" + return f"{vault_id}:{path}" + + +def _build_source_url(vault_name: str, path: str) -> str: + """Build the ``obsidian://`` deep link for the web UI's "Open in Obsidian" + button. Both segments are URL-encoded because vault names and paths can + contain spaces, ``#``, ``?``, etc. 
+ """ + return ( + "obsidian://open" + f"?vault={quote(vault_name, safe='')}" + f"&file={quote(path, safe='')}" + ) + + +def _build_metadata( + payload: NotePayload, + *, + vault_name: str, + connector_id: int, + extra: dict[str, Any] | None = None, +) -> dict[str, Any]: + """Flatten the rich plugin payload into the JSONB ``document_metadata`` + column. Keys here are what the chat UI / search UI surface to users. + """ + meta: dict[str, Any] = { + "source": "plugin", + "vault_id": payload.vault_id, + "vault_name": vault_name, + "file_path": payload.path, + "file_name": payload.name, + "extension": payload.extension, + "frontmatter": payload.frontmatter, + "tags": payload.tags, + "headings": [h.model_dump() for h in payload.headings], + "outgoing_links": payload.resolved_links, + "unresolved_links": payload.unresolved_links, + "embeds": payload.embeds, + "aliases": payload.aliases, + "plugin_content_hash": payload.content_hash, + "plugin_file_size": payload.size, + "mtime": payload.mtime.isoformat(), + "ctime": payload.ctime.isoformat(), + "connector_id": connector_id, + "url": _build_source_url(vault_name, payload.path), + } + if payload.is_binary: + meta["is_binary"] = True + meta["mime_type"] = payload.mime_type + if extra: + meta.update(extra) + return meta + + +def _build_document_string( + payload: NotePayload, vault_name: str, *, content_override: str | None = None +) -> str: + """Compose the indexable string the pipeline embeds and chunks. + + Mirrors the legacy obsidian indexer's METADATA + CONTENT framing so + existing search relevance heuristics keep working unchanged. 
+ """ + tags_line = ", ".join(payload.tags) if payload.tags else "None" + links_line = ", ".join(payload.resolved_links) if payload.resolved_links else "None" + body = payload.content if content_override is None else content_override + return ( + "\n" + f"Title: {payload.name}\n" + f"Vault: {vault_name}\n" + f"Path: {payload.path}\n" + f"Tags: {tags_line}\n" + f"Links to: {links_line}\n" + "\n\n" + "\n" + f"{body}\n" + "\n" + ) + + +async def _extract_binary_attachment_markdown( + payload: NotePayload, *, vision_llm +) -> tuple[str, dict[str, Any]]: + try: + raw_bytes = base64.b64decode(payload.binary_base64, validate=True) + except Exception: + logger.warning("obsidian attachment payload had invalid base64: %s", payload.path) + return "", {"attachment_extraction_status": "invalid_binary_payload"} + + suffix = f".{payload.extension.lstrip('.')}" + temp_path: str | None = None + filename = payload.path.rsplit("/", 1)[-1] or payload.name + try: + with tempfile.NamedTemporaryFile(delete=False, suffix=suffix) as tmp: + tmp.write(raw_bytes) + temp_path = tmp.name + + result = await _run_etl_extract( + file_path=temp_path, + filename=filename, + vision_llm=vision_llm, + ) + metadata: dict[str, Any] = { + "attachment_extraction_status": "ok", + "attachment_etl_service": result.etl_service, + "attachment_content_type": result.content_type, + } + return result.markdown_content, metadata + except Exception as exc: + logger.warning( + "obsidian attachment ETL failed for %s: %s", payload.path, exc, exc_info=True + ) + return "", { + "attachment_extraction_status": "etl_failed", + "attachment_extraction_error": str(exc)[:300], + } + finally: + if temp_path and os.path.exists(temp_path): + with contextlib.suppress(Exception): + os.unlink(temp_path) + + +async def _run_etl_extract(*, file_path: str, filename: str, vision_llm): + """Lazy-load ETL dependencies to avoid module-import cycles.""" + from app.etl_pipeline.etl_document import EtlRequest + from 
app.etl_pipeline.etl_pipeline_service import EtlPipelineService + + return await EtlPipelineService(vision_llm=vision_llm).extract( + EtlRequest(file_path=file_path, filename=filename) + ) + + +def _is_image_attachment(payload: NotePayload) -> bool: + ext = payload.extension.lower().lstrip(".") + return ext in {"png", "jpg", "jpeg", "gif", "webp", "svg"} + + +async def _resolve_attachment_vision_llm( + session: AsyncSession, + *, + connector: SearchSourceConnector, + search_space_id: int, + payload: NotePayload, +): + """Match connector indexers: only fetch vision LLM for image attachments + when the connector has vision indexing enabled.""" + if not payload.is_binary: + return None + if not _is_image_attachment(payload): + return None + if not getattr(connector, "enable_vision_llm", False): + return None + + from app.services.llm_service import get_vision_llm + + return await get_vision_llm(session, search_space_id) + + +async def _resolve_summary_llm( + session: AsyncSession, *, user_id: str, search_space_id: int, should_summarize: bool +): + """Fetch summary LLM only when indexing summary is enabled.""" + if not should_summarize: + return None + + from app.services.llm_service import get_user_long_context_llm + + return await get_user_long_context_llm(session, user_id, search_space_id) + + +def _require_extracted_attachment_content( + *, content: str, etl_meta: dict[str, Any], path: str +) -> str: + extracted = content.strip() + if extracted: + return extracted + + status = etl_meta.get("attachment_extraction_status", "unknown") + reason = etl_meta.get("attachment_extraction_error") + if reason: + raise RuntimeError( + f"Attachment extraction failed for {path} ({status}): {reason}" + ) + raise RuntimeError(f"Attachment extraction failed for {path} ({status})") + + +async def _find_existing_document( + session: AsyncSession, + *, + search_space_id: int, + vault_id: str, + path: str, +) -> Document | None: + unique_id = _vault_path_unique_id(vault_id, path) + 
uid_hash = generate_unique_identifier_hash( + DocumentType.OBSIDIAN_CONNECTOR, + unique_id, + search_space_id, + ) + result = await session.execute( + select(Document).where(Document.unique_identifier_hash == uid_hash) + ) + return result.scalars().first() + + +# --------------------------------------------------------------------------- +# Public API +# --------------------------------------------------------------------------- + + +async def upsert_note( + session: AsyncSession, + *, + connector: SearchSourceConnector, + payload: NotePayload, + user_id: str, +) -> Document: + """Index or refresh a single note pushed by the plugin. + + Returns the resulting ``Document`` (whether newly created, updated, or + a skip-because-unchanged hit). + """ + vault_name: str = (connector.config or {}).get("vault_name") or "Vault" + search_space_id = connector.search_space_id + + existing = await _find_existing_document( + session, + search_space_id=search_space_id, + vault_id=payload.vault_id, + path=payload.path, + ) + + plugin_hash = payload.content_hash + if existing is not None: + existing_meta = existing.document_metadata or {} + was_tombstoned = bool(existing_meta.get("deleted_at")) + + if ( + not was_tombstoned + and existing_meta.get("plugin_content_hash") == plugin_hash + and DocumentStatus.is_state(existing.status, DocumentStatus.READY) + ): + return existing + + try: + await create_version_snapshot(session, existing) + except Exception: + logger.debug( + "version snapshot failed for obsidian doc %s", + existing.id, + exc_info=True, + ) + + content_for_index = payload.content + extra_meta: dict[str, Any] = {} + vision_llm = None + if payload.is_binary: + vision_llm = await _resolve_attachment_vision_llm( + session, + connector=connector, + search_space_id=search_space_id, + payload=payload, + ) + content_for_index, etl_meta = await _extract_binary_attachment_markdown( + payload, vision_llm=vision_llm + ) + extra_meta.update(etl_meta) + # Strict KB behavior: do not 
index metadata-only attachments. + content_for_index = _require_extracted_attachment_content( + content=content_for_index, + etl_meta=etl_meta, + path=payload.path, + ) + + llm = await _resolve_summary_llm( + session, + user_id=str(user_id), + search_space_id=search_space_id, + should_summarize=connector.enable_summary, + ) + + document_string = _build_document_string( + payload, vault_name, content_override=content_for_index + ) + metadata = _build_metadata( + payload, + vault_name=vault_name, + connector_id=connector.id, + extra=extra_meta, + ) + + connector_doc = ConnectorDocument( + title=payload.name, + source_markdown=document_string, + unique_id=_vault_path_unique_id(payload.vault_id, payload.path), + document_type=DocumentType.OBSIDIAN_CONNECTOR, + search_space_id=search_space_id, + connector_id=connector.id, + created_by_id=str(user_id), + should_summarize=connector.enable_summary, + fallback_summary=f"Obsidian Note: {payload.name}\n\n{content_for_index}", + metadata=metadata, + ) + + pipeline = IndexingPipelineService(session) + prepared = await pipeline.prepare_for_indexing([connector_doc]) + if not prepared: + if existing is not None: + return existing + raise RuntimeError(f"Indexing pipeline rejected obsidian note {payload.path}") + + document = prepared[0] + + return await pipeline.index(document, connector_doc, llm) + + +async def rename_note( + session: AsyncSession, + *, + connector: SearchSourceConnector, + old_path: str, + new_path: str, + vault_id: str, +) -> Document | None: + """Rewrite path-derived columns without re-indexing content. + + Returns the updated document, or ``None`` if no row matched the + ``old_path`` (this happens when the plugin is renaming a file that was + never synced — safe to ignore, the next ``sync`` will create it under + the new path). 
+ """ + vault_name: str = (connector.config or {}).get("vault_name") or "Vault" + search_space_id = connector.search_space_id + + existing = await _find_existing_document( + session, + search_space_id=search_space_id, + vault_id=vault_id, + path=old_path, + ) + if existing is None: + return None + + new_unique_id = _vault_path_unique_id(vault_id, new_path) + new_uid_hash = generate_unique_identifier_hash( + DocumentType.OBSIDIAN_CONNECTOR, + new_unique_id, + search_space_id, + ) + + collision = await session.execute( + select(Document).where( + and_( + Document.unique_identifier_hash == new_uid_hash, + Document.id != existing.id, + ) + ) + ) + collision_row = collision.scalars().first() + if collision_row is not None: + logger.warning( + "obsidian rename target already exists " + "(vault=%s old=%s new=%s); skipping rename so the next /sync " + "can resolve the conflict via content_hash", + vault_id, + old_path, + new_path, + ) + return existing + + new_filename = new_path.rsplit("/", 1)[-1] + new_stem = new_filename.rsplit(".", 1)[0] if "." in new_filename else new_filename + + existing.unique_identifier_hash = new_uid_hash + existing.title = new_stem + + meta = dict(existing.document_metadata or {}) + meta["file_path"] = new_path + meta["file_name"] = new_stem + meta["url"] = _build_source_url(vault_name, new_path) + existing.document_metadata = meta + existing.updated_at = datetime.now(UTC) + + await session.commit() + return existing + + +async def delete_note( + session: AsyncSession, + *, + connector: SearchSourceConnector, + vault_id: str, + path: str, +) -> bool: + """Soft-delete via tombstone in ``document_metadata``. + + The row is *not* removed and chunks are *not* dropped, so existing + citations in chat threads remain resolvable. The manifest endpoint + filters tombstoned rows out, so the plugin's reconcile pass will not + see this path and won't try to "resurrect" a note the user deleted in + the SurfSense UI. 
+ + Returns True if a row was tombstoned, False if no matching row existed. + """ + existing = await _find_existing_document( + session, + search_space_id=connector.search_space_id, + vault_id=vault_id, + path=path, + ) + if existing is None: + return False + + meta = dict(existing.document_metadata or {}) + if meta.get("deleted_at"): + return True + + meta["deleted_at"] = datetime.now(UTC).isoformat() + meta["deleted_by_source"] = "plugin" + existing.document_metadata = meta + existing.updated_at = datetime.now(UTC) + + await session.commit() + return True + + +async def merge_obsidian_connectors( + session: AsyncSession, + *, + source: SearchSourceConnector, + target: SearchSourceConnector, +) -> None: + """Fold ``source``'s documents into ``target`` and delete ``source``. + + Triggered when the fingerprint dedup detects two plugin connectors + pointing at the same vault (e.g. a mobile install raced with iCloud + hydration and got a partial fingerprint, then caught up). Path + collisions resolve in favour of ``target`` (the surviving row); + ``source``'s duplicate documents are hard-deleted along with their + chunks via the ``cascade='all, delete-orphan'`` on ``Document.chunks``. 
+ """ + if source.id == target.id: + return + + target_vault_id = (target.config or {}).get("vault_id") + target_search_space_id = target.search_space_id + if not target_vault_id: + raise RuntimeError("merge target is missing vault_id") + + target_paths_result = await session.execute( + select(Document).where( + and_( + Document.connector_id == target.id, + Document.document_type == DocumentType.OBSIDIAN_CONNECTOR, + ) + ) + ) + target_paths: set[str] = set() + for doc in target_paths_result.scalars().all(): + meta = doc.document_metadata or {} + path = meta.get("file_path") + if path: + target_paths.add(path) + + source_docs_result = await session.execute( + select(Document).where( + and_( + Document.connector_id == source.id, + Document.document_type == DocumentType.OBSIDIAN_CONNECTOR, + ) + ) + ) + + for doc in source_docs_result.scalars().all(): + meta = dict(doc.document_metadata or {}) + path = meta.get("file_path") + if not path or path in target_paths: + await session.delete(doc) + continue + + new_unique_id = _vault_path_unique_id(target_vault_id, path) + new_uid_hash = generate_unique_identifier_hash( + DocumentType.OBSIDIAN_CONNECTOR, + new_unique_id, + target_search_space_id, + ) + meta["vault_id"] = target_vault_id + meta["connector_id"] = target.id + doc.document_metadata = meta + doc.connector_id = target.id + doc.search_space_id = target_search_space_id + doc.unique_identifier_hash = new_uid_hash + target_paths.add(path) + + await session.flush() + await session.delete(source) + + +async def get_manifest( + session: AsyncSession, + *, + connector: SearchSourceConnector, + vault_id: str, +) -> ManifestResponse: + """Return ``{path: {hash, mtime, size}}`` for every non-deleted note in + this vault. + + The plugin compares this against its local vault on every ``onload`` to + catch up edits made while offline. Rows missing ``plugin_content_hash`` + (e.g. 
tombstoned, or somehow indexed without going through this + service) are excluded so the plugin doesn't get confused by partial + data. + """ + result = await session.execute( + select(Document).where( + and_( + Document.search_space_id == connector.search_space_id, + Document.connector_id == connector.id, + Document.document_type == DocumentType.OBSIDIAN_CONNECTOR, + ) + ) + ) + + items: dict[str, ManifestEntry] = {} + for doc in result.scalars().all(): + meta = doc.document_metadata or {} + if meta.get("deleted_at"): + continue + if meta.get("vault_id") != vault_id: + continue + path = meta.get("file_path") + plugin_hash = meta.get("plugin_content_hash") + mtime_raw = meta.get("mtime") + if not path or not plugin_hash or not mtime_raw: + continue + try: + mtime = datetime.fromisoformat(mtime_raw) + except ValueError: + continue + size_raw = meta.get("plugin_file_size") + size = int(size_raw) if isinstance(size_raw, int) else None + items[path] = ManifestEntry(hash=plugin_hash, mtime=mtime, size=size) + + return ManifestResponse(vault_id=vault_id, items=items) diff --git a/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py b/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py index 141d5ffca..fe1ac19d3 100644 --- a/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py +++ b/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py @@ -536,49 +536,6 @@ async def _index_bookstack_pages( ) -@celery_app.task(name="index_obsidian_vault", bind=True) -def index_obsidian_vault_task( - self, - connector_id: int, - search_space_id: int, - user_id: str, - start_date: str, - end_date: str, -): - """Celery task to index Obsidian vault notes.""" - import asyncio - - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - - try: - loop.run_until_complete( - _index_obsidian_vault( - connector_id, search_space_id, user_id, start_date, end_date - ) - ) - finally: - loop.close() - - -async def _index_obsidian_vault( - connector_id: int, - 
search_space_id: int, - user_id: str, - start_date: str, - end_date: str, -): - """Index Obsidian vault with new session.""" - from app.routes.search_source_connectors_routes import ( - run_obsidian_indexing, - ) - - async with get_celery_session_maker()() as session: - await run_obsidian_indexing( - session, connector_id, search_space_id, user_id, start_date, end_date - ) - - @celery_app.task(name="index_composio_connector", bind=True) def index_composio_connector_task( self, diff --git a/surfsense_backend/app/tasks/celery_tasks/obsidian_tasks.py b/surfsense_backend/app/tasks/celery_tasks/obsidian_tasks.py new file mode 100644 index 000000000..98b107af3 --- /dev/null +++ b/surfsense_backend/app/tasks/celery_tasks/obsidian_tasks.py @@ -0,0 +1,59 @@ +"""Celery tasks for Obsidian plugin background processing.""" + +from __future__ import annotations + +import asyncio +import logging + +from app.celery_app import celery_app +from app.db import SearchSourceConnector +from app.schemas.obsidian_plugin import NotePayload +from app.services.obsidian_plugin_indexer import upsert_note +from app.tasks.celery_tasks import get_celery_session_maker + +logger = logging.getLogger(__name__) + + +@celery_app.task(name="index_obsidian_attachment", bind=True) +def index_obsidian_attachment_task( + self, + connector_id: int, + payload_data: dict, + user_id: str, +) -> None: + """Process one Obsidian non-markdown attachment asynchronously.""" + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + try: + loop.run_until_complete( + _index_obsidian_attachment( + connector_id=connector_id, + payload_data=payload_data, + user_id=user_id, + ) + ) + finally: + loop.close() + + +async def _index_obsidian_attachment( + *, + connector_id: int, + payload_data: dict, + user_id: str, +) -> None: + async with get_celery_session_maker()() as session: + connector = await session.get(SearchSourceConnector, connector_id) + if connector is None: + logger.warning( + "obsidian attachment task 
skipped: connector %s not found", connector_id + ) + return + + payload = NotePayload.model_validate(payload_data) + await upsert_note( + session, + connector=connector, + payload=payload, + user_id=user_id, + ) diff --git a/surfsense_backend/app/tasks/connector_indexers/__init__.py b/surfsense_backend/app/tasks/connector_indexers/__init__.py index 2b0ad7fa0..218f21066 100644 --- a/surfsense_backend/app/tasks/connector_indexers/__init__.py +++ b/surfsense_backend/app/tasks/connector_indexers/__init__.py @@ -14,18 +14,16 @@ from .google_calendar_indexer import index_google_calendar_events from .google_drive_indexer import index_google_drive_files from .google_gmail_indexer import index_google_gmail_messages from .notion_indexer import index_notion_pages -from .obsidian_indexer import index_obsidian_vault from .webcrawler_indexer import index_crawled_urls __all__ = [ "index_bookstack_pages", "index_confluence_pages", + "index_crawled_urls", "index_elasticsearch_documents", "index_github_repos", "index_google_calendar_events", "index_google_drive_files", "index_google_gmail_messages", "index_notion_pages", - "index_obsidian_vault", - "index_crawled_urls", ] diff --git a/surfsense_backend/app/tasks/connector_indexers/obsidian_indexer.py b/surfsense_backend/app/tasks/connector_indexers/obsidian_indexer.py deleted file mode 100644 index 5356ecfb7..000000000 --- a/surfsense_backend/app/tasks/connector_indexers/obsidian_indexer.py +++ /dev/null @@ -1,676 +0,0 @@ -""" -Obsidian connector indexer. - -Indexes markdown notes from a local Obsidian vault. -This connector is only available in self-hosted mode. 
- -Implements 2-phase document status updates for real-time UI feedback: -- Phase 1: Create all documents with 'pending' status (visible in UI immediately) -- Phase 2: Process each document: pending → processing → ready/failed -""" - -import os -import re -import time -from collections.abc import Awaitable, Callable -from datetime import UTC, datetime -from pathlib import Path - -import yaml -from sqlalchemy.exc import SQLAlchemyError -from sqlalchemy.ext.asyncio import AsyncSession - -from app.config import config -from app.db import Document, DocumentStatus, DocumentType, SearchSourceConnectorType -from app.services.llm_service import get_user_long_context_llm -from app.services.task_logging_service import TaskLoggingService -from app.utils.document_converters import ( - create_document_chunks, - embed_text, - generate_content_hash, - generate_document_summary, - generate_unique_identifier_hash, -) - -from .base import ( - build_document_metadata_string, - check_document_by_unique_identifier, - check_duplicate_document_by_hash, - get_connector_by_id, - get_current_timestamp, - logger, - safe_set_chunks, - update_connector_last_indexed, -) - -# Type hint for heartbeat callback -HeartbeatCallbackType = Callable[[int], Awaitable[None]] - -# Heartbeat interval in seconds -HEARTBEAT_INTERVAL_SECONDS = 30 - - -def parse_frontmatter(content: str) -> tuple[dict | None, str]: - """ - Parse YAML frontmatter from markdown content. 
- - Args: - content: The full markdown content - - Returns: - Tuple of (frontmatter dict or None, content without frontmatter) - """ - if not content.startswith("---"): - return None, content - - # Find the closing --- - end_match = re.search(r"\n---\n", content[3:]) - if not end_match: - return None, content - - frontmatter_str = content[3 : end_match.start() + 3] - remaining_content = content[end_match.end() + 3 :] - - try: - frontmatter = yaml.safe_load(frontmatter_str) - return frontmatter, remaining_content.strip() - except yaml.YAMLError: - return None, content - - -def extract_wiki_links(content: str) -> list[str]: - """ - Extract [[wiki-style links]] from content. - - Args: - content: Markdown content - - Returns: - List of linked note names - """ - # Match [[link]] or [[link|alias]] - pattern = r"\[\[([^\]|]+)(?:\|[^\]]+)?\]\]" - matches = re.findall(pattern, content) - return list(set(matches)) - - -def extract_tags(content: str) -> list[str]: - """ - Extract #tags from content (both inline and frontmatter). - - Args: - content: Markdown content - - Returns: - List of tags (without # prefix) - """ - # Match #tag but not ## headers - pattern = r"(? list[dict]: - """ - Scan an Obsidian vault for markdown files. 
- - Args: - vault_path: Path to the Obsidian vault - exclude_folders: List of folder names to exclude - - Returns: - List of file info dicts with path, name, modified time - """ - if exclude_folders is None: - exclude_folders = [".trash", ".obsidian", "templates"] - - vault = Path(vault_path) - if not vault.exists(): - raise ValueError(f"Vault path does not exist: {vault_path}") - - files = [] - for md_file in vault.rglob("*.md"): - # Check if file is in an excluded folder - relative_path = md_file.relative_to(vault) - parts = relative_path.parts - - if any(excluded in parts for excluded in exclude_folders): - continue - - try: - stat = md_file.stat() - files.append( - { - "path": str(md_file), - "relative_path": str(relative_path), - "name": md_file.stem, - "modified_at": datetime.fromtimestamp(stat.st_mtime, tz=UTC), - "created_at": datetime.fromtimestamp(stat.st_ctime, tz=UTC), - "size": stat.st_size, - } - ) - except OSError as e: - logger.warning(f"Could not stat file {md_file}: {e}") - - return files - - -async def index_obsidian_vault( - session: AsyncSession, - connector_id: int, - search_space_id: int, - user_id: str, - start_date: str | None = None, - end_date: str | None = None, - update_last_indexed: bool = True, - on_heartbeat_callback: HeartbeatCallbackType | None = None, -) -> tuple[int, str | None]: - """ - Index notes from a local Obsidian vault. - - This indexer is only available in self-hosted mode as it requires - direct file system access to the user's Obsidian vault. - - Args: - session: Database session - connector_id: ID of the Obsidian connector - search_space_id: ID of the search space to store documents in - user_id: ID of the user - start_date: Start date for filtering (YYYY-MM-DD format) - optional - end_date: End date for filtering (YYYY-MM-DD format) - optional - update_last_indexed: Whether to update the last_indexed_at timestamp - on_heartbeat_callback: Optional callback to update notification during long-running indexing. 
- - Returns: - Tuple containing (number of documents indexed, error message or None) - """ - task_logger = TaskLoggingService(session, search_space_id) - - # Check if self-hosted mode - if not config.is_self_hosted(): - return 0, "Obsidian connector is only available in self-hosted mode" - - # Log task start - log_entry = await task_logger.log_task_start( - task_name="obsidian_vault_indexing", - source="connector_indexing_task", - message=f"Starting Obsidian vault indexing for connector {connector_id}", - metadata={ - "connector_id": connector_id, - "user_id": str(user_id), - "start_date": start_date, - "end_date": end_date, - }, - ) - - try: - # Get the connector - await task_logger.log_task_progress( - log_entry, - f"Retrieving Obsidian connector {connector_id} from database", - {"stage": "connector_retrieval"}, - ) - - connector = await get_connector_by_id( - session, connector_id, SearchSourceConnectorType.OBSIDIAN_CONNECTOR - ) - - if not connector: - await task_logger.log_task_failure( - log_entry, - f"Connector with ID {connector_id} not found or is not an Obsidian connector", - "Connector not found", - {"error_type": "ConnectorNotFound"}, - ) - return ( - 0, - f"Connector with ID {connector_id} not found or is not an Obsidian connector", - ) - - # Get vault path from connector config - vault_path = connector.config.get("vault_path") - if not vault_path: - await task_logger.log_task_failure( - log_entry, - "Vault path not configured for this connector", - "Missing vault path", - {"error_type": "MissingVaultPath"}, - ) - return 0, "Vault path not configured for this connector" - - # Validate vault path exists - if not os.path.exists(vault_path): - await task_logger.log_task_failure( - log_entry, - f"Vault path does not exist: {vault_path}", - "Vault path not found", - {"error_type": "VaultNotFound", "vault_path": vault_path}, - ) - return 0, f"Vault path does not exist: {vault_path}" - - # Get configuration options - exclude_folders = connector.config.get( - 
"exclude_folders", [".trash", ".obsidian", "templates"] - ) - vault_name = connector.config.get("vault_name") or os.path.basename(vault_path) - - await task_logger.log_task_progress( - log_entry, - f"Scanning Obsidian vault: {vault_name}", - {"stage": "vault_scan", "vault_path": vault_path}, - ) - - # Scan vault for markdown files - try: - files = scan_vault(vault_path, exclude_folders) - except Exception as e: - await task_logger.log_task_failure( - log_entry, - f"Failed to scan vault: {e}", - "Vault scan error", - {"error_type": "VaultScanError"}, - ) - return 0, f"Failed to scan vault: {e}" - - logger.info(f"Found {len(files)} markdown files in vault") - - await task_logger.log_task_progress( - log_entry, - f"Found {len(files)} markdown files to process", - {"stage": "files_discovered", "file_count": len(files)}, - ) - - # Filter by date if provided (handle "undefined" string from frontend) - # Also handle inverted dates (start > end) by skipping filtering - start_dt = None - end_dt = None - - if start_date and start_date != "undefined": - start_dt = datetime.strptime(start_date, "%Y-%m-%d").replace(tzinfo=UTC) - - if end_date and end_date != "undefined": - # Make end_date inclusive (end of day) - end_dt = datetime.strptime(end_date, "%Y-%m-%d").replace(tzinfo=UTC) - end_dt = end_dt.replace(hour=23, minute=59, second=59) - - # Only apply date filtering if dates are valid and in correct order - if start_dt and end_dt and start_dt > end_dt: - logger.warning( - f"start_date ({start_date}) is after end_date ({end_date}), skipping date filter" - ) - else: - if start_dt: - files = [f for f in files if f["modified_at"] >= start_dt] - logger.info( - f"After start_date filter ({start_date}): {len(files)} files" - ) - if end_dt: - files = [f for f in files if f["modified_at"] <= end_dt] - logger.info(f"After end_date filter ({end_date}): {len(files)} files") - - logger.info(f"Processing {len(files)} files after date filtering") - - indexed_count = 0 - skipped_count = 0 - 
failed_count = 0 - duplicate_content_count = 0 - - # Heartbeat tracking - update notification periodically to prevent appearing stuck - last_heartbeat_time = time.time() - - # ======================================================================= - # PHASE 1: Analyze all files, create pending documents - # This makes ALL documents visible in the UI immediately with pending status - # ======================================================================= - files_to_process = [] # List of dicts with document and file data - new_documents_created = False - - for file_info in files: - try: - file_path = file_info["path"] - relative_path = file_info["relative_path"] - - # Read file content - try: - with open(file_path, encoding="utf-8") as f: - content = f.read() - except UnicodeDecodeError: - logger.warning(f"Could not decode file {file_path}, skipping") - skipped_count += 1 - continue - - if not content.strip(): - logger.debug(f"Empty file {file_path}, skipping") - skipped_count += 1 - continue - - # Parse frontmatter and extract metadata - frontmatter, body_content = parse_frontmatter(content) - wiki_links = extract_wiki_links(content) - tags = extract_tags(content) - - # Get title from frontmatter or filename - title = file_info["name"] - if frontmatter: - title = frontmatter.get("title", title) - # Also extract tags from frontmatter - fm_tags = frontmatter.get("tags", []) - if isinstance(fm_tags, list): - tags = list({*tags, *fm_tags}) - elif isinstance(fm_tags, str): - tags = list({*tags, fm_tags}) - - # Generate unique identifier using vault name and relative path - unique_identifier = f"{vault_name}:{relative_path}" - unique_identifier_hash = generate_unique_identifier_hash( - DocumentType.OBSIDIAN_CONNECTOR, - unique_identifier, - search_space_id, - ) - - # Generate content hash - content_hash = generate_content_hash(content, search_space_id) - - # Check for existing document - existing_document = await check_document_by_unique_identifier( - session, 
unique_identifier_hash - ) - - if existing_document: - # Document exists - check if content has changed - if existing_document.content_hash == content_hash: - # Ensure status is ready (might have been stuck in processing/pending) - if not DocumentStatus.is_state( - existing_document.status, DocumentStatus.READY - ): - existing_document.status = DocumentStatus.ready() - logger.debug(f"Note {title} unchanged, skipping") - skipped_count += 1 - continue - - # Queue existing document for update (will be set to processing in Phase 2) - files_to_process.append( - { - "document": existing_document, - "is_new": False, - "file_info": file_info, - "content": content, - "body_content": body_content, - "frontmatter": frontmatter, - "wiki_links": wiki_links, - "tags": tags, - "title": title, - "relative_path": relative_path, - "content_hash": content_hash, - "unique_identifier_hash": unique_identifier_hash, - } - ) - continue - - # Document doesn't exist by unique_identifier_hash - # Check if a document with the same content_hash exists (from another connector) - with session.no_autoflush: - duplicate_by_content = await check_duplicate_document_by_hash( - session, content_hash - ) - - if duplicate_by_content: - logger.info( - f"Obsidian note {title} already indexed by another connector " - f"(existing document ID: {duplicate_by_content.id}, " - f"type: {duplicate_by_content.document_type}). Skipping." 
- ) - duplicate_content_count += 1 - skipped_count += 1 - continue - - # Create new document with PENDING status (visible in UI immediately) - document = Document( - search_space_id=search_space_id, - title=title, - document_type=DocumentType.OBSIDIAN_CONNECTOR, - document_metadata={ - "vault_name": vault_name, - "file_path": relative_path, - "connector_id": connector_id, - }, - content="Pending...", # Placeholder until processed - content_hash=unique_identifier_hash, # Temporary unique value - updated when ready - unique_identifier_hash=unique_identifier_hash, - embedding=None, - chunks=[], # Empty at creation - safe for async - status=DocumentStatus.pending(), # Pending until processing starts - updated_at=get_current_timestamp(), - created_by_id=user_id, - connector_id=connector_id, - ) - session.add(document) - new_documents_created = True - - files_to_process.append( - { - "document": document, - "is_new": True, - "file_info": file_info, - "content": content, - "body_content": body_content, - "frontmatter": frontmatter, - "wiki_links": wiki_links, - "tags": tags, - "title": title, - "relative_path": relative_path, - "content_hash": content_hash, - "unique_identifier_hash": unique_identifier_hash, - } - ) - - except Exception as e: - logger.exception( - f"Error in Phase 1 for file {file_info.get('path', 'unknown')}: {e}" - ) - failed_count += 1 - continue - - # Commit all pending documents - they all appear in UI now - if new_documents_created: - logger.info( - f"Phase 1: Committing {len([f for f in files_to_process if f['is_new']])} pending documents" - ) - await session.commit() - - # ======================================================================= - # PHASE 2: Process each document one by one - # Each document transitions: pending → processing → ready/failed - # ======================================================================= - logger.info(f"Phase 2: Processing {len(files_to_process)} documents") - - # Get LLM for summarization - 
long_context_llm = await get_user_long_context_llm( - session, user_id, search_space_id - ) - - for item in files_to_process: - # Send heartbeat periodically - if on_heartbeat_callback: - current_time = time.time() - if current_time - last_heartbeat_time >= HEARTBEAT_INTERVAL_SECONDS: - await on_heartbeat_callback(indexed_count) - last_heartbeat_time = current_time - - document = item["document"] - try: - # Set to PROCESSING and commit - shows "processing" in UI for THIS document only - document.status = DocumentStatus.processing() - await session.commit() - - # Extract data from item - title = item["title"] - relative_path = item["relative_path"] - content = item["content"] - body_content = item["body_content"] - frontmatter = item["frontmatter"] - wiki_links = item["wiki_links"] - tags = item["tags"] - content_hash = item["content_hash"] - file_info = item["file_info"] - - # Build metadata - document_metadata = { - "vault_name": vault_name, - "file_path": relative_path, - "tags": tags, - "outgoing_links": wiki_links, - "frontmatter": frontmatter, - "modified_at": file_info["modified_at"].isoformat(), - "created_at": file_info["created_at"].isoformat(), - "word_count": len(body_content.split()), - } - - # Build document content with metadata - metadata_sections = [ - ( - "METADATA", - [ - f"Title: {title}", - f"Vault: {vault_name}", - f"Path: {relative_path}", - f"Tags: {', '.join(tags) if tags else 'None'}", - f"Links to: {', '.join(wiki_links) if wiki_links else 'None'}", - ], - ), - ("CONTENT", [body_content]), - ] - document_string = build_document_metadata_string(metadata_sections) - - # Generate summary - summary_content = "" - if long_context_llm and connector.enable_summary: - summary_content, _ = await generate_document_summary( - document_string, - long_context_llm, - document_metadata, - ) - - # Generate embedding - embedding = embed_text(document_string) - - # Add URL and summary to metadata - document_metadata["url"] = 
f"obsidian://{vault_name}/{relative_path}" - document_metadata["summary"] = summary_content - document_metadata["connector_id"] = connector_id - - # Create chunks - chunks = await create_document_chunks(document_string) - - # Update document to READY with actual content - document.title = title - document.content = document_string - document.content_hash = content_hash - document.embedding = embedding - document.document_metadata = document_metadata - await safe_set_chunks(session, document, chunks) - document.updated_at = get_current_timestamp() - document.status = DocumentStatus.ready() - - indexed_count += 1 - - # Batch commit every 10 documents (for ready status updates) - if indexed_count % 10 == 0: - logger.info( - f"Committing batch: {indexed_count} Obsidian notes processed so far" - ) - await session.commit() - - except Exception as e: - logger.exception( - f"Error processing file {item.get('file_info', {}).get('path', 'unknown')}: {e}" - ) - # Mark document as failed with reason (visible in UI) - try: - document.status = DocumentStatus.failed(str(e)) - document.updated_at = get_current_timestamp() - except Exception as status_error: - logger.error( - f"Failed to update document status to failed: {status_error}" - ) - failed_count += 1 - continue - - # CRITICAL: Always update timestamp (even if 0 documents indexed) so Zero syncs - await update_connector_last_indexed(session, connector, update_last_indexed) - - # Final commit for any remaining documents not yet committed in batches - logger.info(f"Final commit: Total {indexed_count} Obsidian notes processed") - try: - await session.commit() - logger.info( - "Successfully committed all Obsidian document changes to database" - ) - except Exception as e: - # Handle any remaining integrity errors gracefully (race conditions, etc.) 
- if ( - "duplicate key value violates unique constraint" in str(e).lower() - or "uniqueviolationerror" in str(e).lower() - ): - logger.warning( - f"Duplicate content_hash detected during final commit. " - f"This may occur if the same note was indexed by multiple connectors. " - f"Rolling back and continuing. Error: {e!s}" - ) - await session.rollback() - # Don't fail the entire task - some documents may have been successfully indexed - else: - raise - - # Build warning message if there were issues - warning_parts = [] - if duplicate_content_count > 0: - warning_parts.append(f"{duplicate_content_count} duplicate") - if failed_count > 0: - warning_parts.append(f"{failed_count} failed") - warning_message = ", ".join(warning_parts) if warning_parts else None - - total_processed = indexed_count - - await task_logger.log_task_success( - log_entry, - f"Successfully completed Obsidian vault indexing for connector {connector_id}", - { - "notes_processed": total_processed, - "documents_indexed": indexed_count, - "documents_skipped": skipped_count, - "documents_failed": failed_count, - "duplicate_content_count": duplicate_content_count, - }, - ) - - logger.info( - f"Obsidian vault indexing completed: {indexed_count} ready, " - f"{skipped_count} skipped, {failed_count} failed " - f"({duplicate_content_count} duplicate content)" - ) - return total_processed, warning_message - - except SQLAlchemyError as e: - logger.exception(f"Database error during Obsidian indexing: {e}") - await session.rollback() - await task_logger.log_task_failure( - log_entry, - f"Database error during Obsidian indexing: {e}", - "Database error", - {"error_type": "SQLAlchemyError"}, - ) - return 0, f"Database error: {e}" - - except Exception as e: - logger.exception(f"Error during Obsidian indexing: {e}") - await task_logger.log_task_failure( - log_entry, - f"Error during Obsidian indexing: {e}", - "Unexpected error", - {"error_type": type(e).__name__}, - ) - return 0, str(e) diff --git 
a/surfsense_backend/app/utils/periodic_scheduler.py b/surfsense_backend/app/utils/periodic_scheduler.py index 923f969d5..35e8ad781 100644 --- a/surfsense_backend/app/utils/periodic_scheduler.py +++ b/surfsense_backend/app/utils/periodic_scheduler.py @@ -24,7 +24,6 @@ CONNECTOR_TASK_MAP = { SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR: "index_elasticsearch_documents", SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: "index_crawled_urls", SearchSourceConnectorType.BOOKSTACK_CONNECTOR: "index_bookstack_pages", - SearchSourceConnectorType.OBSIDIAN_CONNECTOR: "index_obsidian_vault", } @@ -81,7 +80,6 @@ def create_periodic_schedule( index_elasticsearch_documents_task, index_github_repos_task, index_notion_pages_task, - index_obsidian_vault_task, ) task_map = { @@ -91,7 +89,6 @@ def create_periodic_schedule( SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR: index_elasticsearch_documents_task, SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: index_crawled_urls_task, SearchSourceConnectorType.BOOKSTACK_CONNECTOR: index_bookstack_pages_task, - SearchSourceConnectorType.OBSIDIAN_CONNECTOR: index_obsidian_vault_task, } # Trigger the first run immediately diff --git a/surfsense_backend/tests/integration/test_obsidian_plugin_routes.py b/surfsense_backend/tests/integration/test_obsidian_plugin_routes.py new file mode 100644 index 000000000..41779a570 --- /dev/null +++ b/surfsense_backend/tests/integration/test_obsidian_plugin_routes.py @@ -0,0 +1,625 @@ +"""Integration tests for the Obsidian plugin HTTP wire contract. + +Three concerns: + +1. The /connect upsert really collapses concurrent first-time connects to + exactly one row. This locks the partial unique index from migration 129 + to its purpose. +2. The fingerprint dedup path: a second device connecting with a fresh + ``vault_id`` but the same ``vault_fingerprint`` adopts the existing + connector instead of creating a duplicate. +3. 
The end-to-end response shapes returned by /connect /sync /rename + /notes /manifest /stats match the schemas the plugin's TypeScript + decoders expect. Each renamed field is a contract change, and a smoke + pass like this is the cheapest way to catch a future drift before it + ships. +""" + +from __future__ import annotations + +import asyncio +import uuid +from datetime import UTC, datetime +from unittest.mock import AsyncMock, patch + +import pytest +import pytest_asyncio +from sqlalchemy import func, select, text +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncSession + +from app.db import ( + SearchSourceConnector, + SearchSourceConnectorType, + SearchSpace, + User, +) +from app.routes.obsidian_plugin_routes import ( + obsidian_connect, + obsidian_delete_notes, + obsidian_manifest, + obsidian_rename, + obsidian_stats, + obsidian_sync, +) +from app.schemas.obsidian_plugin import ( + ConnectRequest, + DeleteAck, + DeleteBatchRequest, + HeadingRef, + ManifestResponse, + NotePayload, + RenameAck, + RenameBatchRequest, + RenameItem, + StatsResponse, + SyncAck, + SyncBatchRequest, +) + +pytestmark = pytest.mark.integration + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _make_note_payload(vault_id: str, path: str, content_hash: str) -> NotePayload: + """Minimal NotePayload that the schema accepts; the indexer is mocked + out so the values don't have to round-trip through the real pipeline.""" + now = datetime.now(UTC) + return NotePayload( + vault_id=vault_id, + path=path, + name=path.rsplit("/", 1)[-1].rsplit(".", 1)[0], + extension="md", + content="# Test\n\nbody", + headings=[HeadingRef(heading="Test", level=1)], + content_hash=content_hash, + mtime=now, + ctime=now, + ) + + +@pytest_asyncio.fixture +async def race_user_and_space(async_engine): + """User + SearchSpace committed via the live 
engine so the two + concurrent /connect sessions in the race test can both see them. + + We can't use the savepoint-trapped ``db_session`` fixture here + because the concurrent sessions need to see committed rows. + """ + user_id = uuid.uuid4() + async with AsyncSession(async_engine) as setup: + user = User( + id=user_id, + email=f"obsidian-race-{uuid.uuid4()}@surfsense.test", + hashed_password="x", + is_active=True, + is_superuser=False, + is_verified=True, + ) + space = SearchSpace(name="Race Space", user_id=user_id) + setup.add_all([user, space]) + await setup.commit() + await setup.refresh(space) + space_id = space.id + + yield user_id, space_id + + async with AsyncSession(async_engine) as cleanup: + # Order matters: connectors -> documents -> space -> user. The + # connectors test creates documents, so we wipe them too. The + # CASCADE on user_id catches anything we missed. + await cleanup.execute( + text("DELETE FROM search_source_connectors WHERE user_id = :uid"), + {"uid": user_id}, + ) + await cleanup.execute( + text("DELETE FROM searchspaces WHERE id = :id"), + {"id": space_id}, + ) + await cleanup.execute( + text('DELETE FROM "user" WHERE id = :uid'), + {"uid": user_id}, + ) + await cleanup.commit() + + +# --------------------------------------------------------------------------- +# /connect race + index enforcement +# --------------------------------------------------------------------------- + + +class TestConnectRace: + async def test_concurrent_first_connects_collapse_to_one_row( + self, async_engine, race_user_and_space + ): + """Two simultaneous /connect calls for the same vault should + produce exactly one row, not two. 
Same vault_id + same + fingerprint funnels through both partial unique indexes; the + loser falls back to the survivor row via the IntegrityError + branch in obsidian_connect.""" + user_id, space_id = race_user_and_space + vault_id = str(uuid.uuid4()) + fingerprint = "fp-" + uuid.uuid4().hex + + async def _call(name_suffix: str) -> None: + async with AsyncSession(async_engine) as s: + fresh_user = await s.get(User, user_id) + payload = ConnectRequest( + vault_id=vault_id, + vault_name=f"My Vault {name_suffix}", + search_space_id=space_id, + vault_fingerprint=fingerprint, + ) + await obsidian_connect(payload, user=fresh_user, session=s) + + results = await asyncio.gather(_call("a"), _call("b"), return_exceptions=True) + for r in results: + assert not isinstance(r, Exception), f"Connect raised: {r!r}" + + async with AsyncSession(async_engine) as verify: + count = ( + await verify.execute( + select(func.count(SearchSourceConnector.id)).where( + SearchSourceConnector.user_id == user_id, + ) + ) + ).scalar_one() + assert count == 1 + + async def test_partial_unique_index_blocks_raw_duplicate( + self, async_engine, race_user_and_space + ): + """Raw INSERTs that bypass the route must still be blocked by + the partial unique indexes from migration 129.""" + user_id, space_id = race_user_and_space + vault_id = str(uuid.uuid4()) + + async with AsyncSession(async_engine) as s: + s.add( + SearchSourceConnector( + name="Obsidian - First", + connector_type=SearchSourceConnectorType.OBSIDIAN_CONNECTOR, + is_indexable=False, + config={ + "vault_id": vault_id, + "vault_name": "First", + "source": "plugin", + "vault_fingerprint": "fp-1", + }, + user_id=user_id, + search_space_id=space_id, + ) + ) + await s.commit() + + with pytest.raises(IntegrityError): + async with AsyncSession(async_engine) as s: + s.add( + SearchSourceConnector( + name="Obsidian - Second", + connector_type=SearchSourceConnectorType.OBSIDIAN_CONNECTOR, + is_indexable=False, + config={ + "vault_id": vault_id, + 
"vault_name": "Second", + "source": "plugin", + "vault_fingerprint": "fp-2", + }, + user_id=user_id, + search_space_id=space_id, + ) + ) + await s.commit() + + async def test_fingerprint_blocks_raw_cross_device_duplicate( + self, async_engine, race_user_and_space + ): + """Two connectors for the same user with different vault_ids but + the same fingerprint cannot coexist.""" + user_id, space_id = race_user_and_space + fingerprint = "fp-" + uuid.uuid4().hex + + async with AsyncSession(async_engine) as s: + s.add( + SearchSourceConnector( + name="Obsidian - Desktop", + connector_type=SearchSourceConnectorType.OBSIDIAN_CONNECTOR, + is_indexable=False, + config={ + "vault_id": str(uuid.uuid4()), + "vault_name": "Vault", + "source": "plugin", + "vault_fingerprint": fingerprint, + }, + user_id=user_id, + search_space_id=space_id, + ) + ) + await s.commit() + + with pytest.raises(IntegrityError): + async with AsyncSession(async_engine) as s: + s.add( + SearchSourceConnector( + name="Obsidian - Mobile", + connector_type=SearchSourceConnectorType.OBSIDIAN_CONNECTOR, + is_indexable=False, + config={ + "vault_id": str(uuid.uuid4()), + "vault_name": "Vault", + "source": "plugin", + "vault_fingerprint": fingerprint, + }, + user_id=user_id, + search_space_id=space_id, + ) + ) + await s.commit() + + async def test_second_device_adopts_existing_connector_via_fingerprint( + self, async_engine, race_user_and_space + ): + """Device A connects with vault_id=A. Device B then connects with + a fresh vault_id=B but the same fingerprint. 
The route must + return A's identity (not create a B row), proving cross-device + dedup happens transparently to the plugin.""" + user_id, space_id = race_user_and_space + vault_id_a = str(uuid.uuid4()) + vault_id_b = str(uuid.uuid4()) + fingerprint = "fp-" + uuid.uuid4().hex + + async with AsyncSession(async_engine) as s: + fresh_user = await s.get(User, user_id) + resp_a = await obsidian_connect( + ConnectRequest( + vault_id=vault_id_a, + vault_name="Shared Vault", + search_space_id=space_id, + vault_fingerprint=fingerprint, + ), + user=fresh_user, + session=s, + ) + + async with AsyncSession(async_engine) as s: + fresh_user = await s.get(User, user_id) + resp_b = await obsidian_connect( + ConnectRequest( + vault_id=vault_id_b, + vault_name="Shared Vault", + search_space_id=space_id, + vault_fingerprint=fingerprint, + ), + user=fresh_user, + session=s, + ) + + assert resp_b.vault_id == vault_id_a + assert resp_b.connector_id == resp_a.connector_id + + async with AsyncSession(async_engine) as verify: + count = ( + await verify.execute( + select(func.count(SearchSourceConnector.id)).where( + SearchSourceConnector.user_id == user_id, + ) + ) + ).scalar_one() + assert count == 1 + + +# --------------------------------------------------------------------------- +# Combined wire-shape smoke test +# --------------------------------------------------------------------------- + + +class TestWireContractSmoke: + """Walks /connect -> /sync -> /rename -> /notes -> /manifest -> /stats + sequentially and asserts each response matches the new schema. With + `response_model=` on every route, FastAPI is already validating the + shape on real traffic; this test mainly guards against accidental + field renames the way the TypeScript decoder would catch them.""" + + async def test_full_flow_returns_typed_payloads( + self, db_session: AsyncSession, db_user: User, db_search_space: SearchSpace + ): + vault_id = str(uuid.uuid4()) + + # 1. 
/connect + connect_resp = await obsidian_connect( + ConnectRequest( + vault_id=vault_id, + vault_name="Smoke Vault", + search_space_id=db_search_space.id, + vault_fingerprint="fp-" + uuid.uuid4().hex, + ), + user=db_user, + session=db_session, + ) + assert connect_resp.connector_id > 0 + assert connect_resp.vault_id == vault_id + assert "sync" in connect_resp.capabilities + assert connect_resp.server_time_utc is not None + + # 2. /sync — stub the indexer so the call doesn't drag the LLM / + # embedding pipeline in. We're testing the wire contract, not the + # indexer itself. + fake_doc = type("FakeDoc", (), {"id": 12345})() + with patch( + "app.routes.obsidian_plugin_routes.upsert_note", + new=AsyncMock(return_value=fake_doc), + ): + sync_resp = await obsidian_sync( + SyncBatchRequest( + vault_id=vault_id, + notes=[ + _make_note_payload(vault_id, "ok.md", "hash-ok"), + _make_note_payload(vault_id, "fail.md", "hash-fail"), + ], + ), + user=db_user, + session=db_session, + ) + + assert isinstance(sync_resp, SyncAck) + assert sync_resp.vault_id == vault_id + assert sync_resp.indexed == 2 + assert sync_resp.failed == 0 + assert len(sync_resp.items) == 2 + assert all(it.status == "ok" for it in sync_resp.items) + # The TypeScript decoder filters on items[].status === "error" and + # extracts .path, so confirm both fields are present and named. + assert {it.path for it in sync_resp.items} == {"ok.md", "fail.md"} + + # 2b. Re-run /sync but force the indexer to raise on one note so + # the per-item failure decoder gets exercised end-to-end. 
+ async def _selective_upsert(session, *, connector, payload, user_id): + if payload.path == "fail.md": + raise RuntimeError("simulated indexing failure") + return fake_doc + + with patch( + "app.routes.obsidian_plugin_routes.upsert_note", + new=AsyncMock(side_effect=_selective_upsert), + ): + sync_resp = await obsidian_sync( + SyncBatchRequest( + vault_id=vault_id, + notes=[ + _make_note_payload(vault_id, "ok.md", "h1"), + _make_note_payload(vault_id, "fail.md", "h2"), + ], + ), + user=db_user, + session=db_session, + ) + assert sync_resp.indexed == 1 + assert sync_resp.failed == 1 + statuses = {it.path: it.status for it in sync_resp.items} + assert statuses == {"ok.md": "ok", "fail.md": "error"} + + # 3. /rename — patch rename_note so we don't need a real Document. + async def _rename(*args, **kwargs) -> object: + if kwargs.get("old_path") == "missing.md": + return None + return fake_doc + + with patch( + "app.routes.obsidian_plugin_routes.rename_note", + new=AsyncMock(side_effect=_rename), + ): + rename_resp = await obsidian_rename( + RenameBatchRequest( + vault_id=vault_id, + renames=[ + RenameItem(old_path="a.md", new_path="b.md"), + RenameItem(old_path="missing.md", new_path="x.md"), + ], + ), + user=db_user, + session=db_session, + ) + assert isinstance(rename_resp, RenameAck) + assert rename_resp.renamed == 1 + assert rename_resp.missing == 1 + assert {it.status for it in rename_resp.items} == {"ok", "missing"} + # snake_case fields are deliberate — the plugin decoder maps them + # to camelCase explicitly. + assert all(it.old_path and it.new_path for it in rename_resp.items) + + # 4. 
/notes DELETE + async def _delete(*args, **kwargs) -> bool: + return kwargs.get("path") != "ghost.md" + + with patch( + "app.routes.obsidian_plugin_routes.delete_note", + new=AsyncMock(side_effect=_delete), + ): + delete_resp = await obsidian_delete_notes( + DeleteBatchRequest(vault_id=vault_id, paths=["b.md", "ghost.md"]), + user=db_user, + session=db_session, + ) + assert isinstance(delete_resp, DeleteAck) + assert delete_resp.deleted == 1 + assert delete_resp.missing == 1 + assert {it.path: it.status for it in delete_resp.items} == { + "b.md": "ok", + "ghost.md": "missing", + } + + # 5. /manifest — empty (no real Documents were created because + # upsert_note was mocked) but the response shape is what we care + # about. + manifest_resp = await obsidian_manifest( + vault_id=vault_id, user=db_user, session=db_session + ) + assert isinstance(manifest_resp, ManifestResponse) + assert manifest_resp.vault_id == vault_id + assert manifest_resp.items == {} + + # 6. /stats — same; row count is 0 because upsert_note was mocked. 
+ stats_resp = await obsidian_stats( + vault_id=vault_id, user=db_user, session=db_session + ) + assert isinstance(stats_resp, StatsResponse) + assert stats_resp.vault_id == vault_id + assert stats_resp.files_synced == 0 + assert stats_resp.last_sync_at is None + + async def test_sync_queues_binary_attachments( + self, db_session: AsyncSession, db_user: User, db_search_space: SearchSpace + ): + vault_id = str(uuid.uuid4()) + await obsidian_connect( + ConnectRequest( + vault_id=vault_id, + vault_name="Queue Vault", + search_space_id=db_search_space.id, + vault_fingerprint="fp-" + uuid.uuid4().hex, + ), + user=db_user, + session=db_session, + ) + + fake_doc = type("FakeDoc", (), {"id": 12345})() + binary_note = _make_note_payload(vault_id, "image.png", "hash-bin") + binary_note.extension = "png" + binary_note.is_binary = True + binary_note.binary_base64 = "aGVsbG8=" + binary_note.mime_type = "image/png" + binary_note.content = "" + + with ( + patch( + "app.routes.obsidian_plugin_routes.upsert_note", + new=AsyncMock(return_value=fake_doc), + ) as upsert_mock, + patch("app.routes.obsidian_plugin_routes._queue_obsidian_attachment") as queue_mock, + ): + sync_resp = await obsidian_sync( + SyncBatchRequest( + vault_id=vault_id, + notes=[ + _make_note_payload(vault_id, "ok.md", "hash-ok"), + binary_note, + ], + ), + user=db_user, + session=db_session, + ) + + assert sync_resp.indexed == 2 + assert sync_resp.failed == 0 + statuses = {it.path: it.status for it in sync_resp.items} + assert statuses == {"ok.md": "ok", "image.png": "queued"} + assert upsert_mock.await_count == 1 + queue_mock.assert_called_once() + + async def test_sync_rejects_unsupported_attachment_extension( + self, db_session: AsyncSession, db_user: User, db_search_space: SearchSpace + ): + vault_id = str(uuid.uuid4()) + await obsidian_connect( + ConnectRequest( + vault_id=vault_id, + vault_name="Reject Vault", + search_space_id=db_search_space.id, + vault_fingerprint="fp-" + uuid.uuid4().hex, + ), + 
user=db_user, + session=db_session, + ) + + fake_doc = type("FakeDoc", (), {"id": 12345})() + bad_note = _make_note_payload(vault_id, "photo.heic", "hash-heic") + bad_note.extension = "heic" + bad_note.is_binary = True + bad_note.binary_base64 = "aGVsbG8=" + bad_note.mime_type = "image/heic" + bad_note.content = "" + + with ( + patch( + "app.routes.obsidian_plugin_routes.upsert_note", + new=AsyncMock(return_value=fake_doc), + ), + patch("app.routes.obsidian_plugin_routes._queue_obsidian_attachment") as queue_mock, + ): + sync_resp = await obsidian_sync( + SyncBatchRequest( + vault_id=vault_id, + notes=[ + _make_note_payload(vault_id, "ok.md", "hash-ok"), + bad_note, + ], + ), + user=db_user, + session=db_session, + ) + + assert sync_resp.indexed == 1 + assert sync_resp.failed == 1 + items_by_path = {it.path: it for it in sync_resp.items} + assert items_by_path["ok.md"].status == "ok" + assert items_by_path["photo.heic"].status == "error" + assert "unsupported attachment extension" in ( + items_by_path["photo.heic"].error or "" + ) + queue_mock.assert_not_called() + + async def test_sync_rejects_mime_extension_mismatch( + self, db_session: AsyncSession, db_user: User, db_search_space: SearchSpace + ): + vault_id = str(uuid.uuid4()) + await obsidian_connect( + ConnectRequest( + vault_id=vault_id, + vault_name="Mismatch Vault", + search_space_id=db_search_space.id, + vault_fingerprint="fp-" + uuid.uuid4().hex, + ), + user=db_user, + session=db_session, + ) + + fake_doc = type("FakeDoc", (), {"id": 12345})() + mismatched = _make_note_payload(vault_id, "image.png", "hash-png") + mismatched.extension = "png" + mismatched.is_binary = True + mismatched.binary_base64 = "aGVsbG8=" + mismatched.mime_type = "application/pdf" + mismatched.content = "" + + with ( + patch( + "app.routes.obsidian_plugin_routes.upsert_note", + new=AsyncMock(return_value=fake_doc), + ), + patch("app.routes.obsidian_plugin_routes._queue_obsidian_attachment") as queue_mock, + ): + sync_resp = await 
obsidian_sync( + SyncBatchRequest( + vault_id=vault_id, + notes=[ + _make_note_payload(vault_id, "ok.md", "hash-ok"), + mismatched, + ], + ), + user=db_user, + session=db_session, + ) + + assert sync_resp.indexed == 1 + assert sync_resp.failed == 1 + items_by_path = {it.path: it for it in sync_resp.items} + assert items_by_path["ok.md"].status == "ok" + assert items_by_path["image.png"].status == "error" + assert "does not match extension" in ( + items_by_path["image.png"].error or "" + ) + queue_mock.assert_not_called() diff --git a/surfsense_backend/tests/unit/middleware/test_file_intent_middleware.py b/surfsense_backend/tests/unit/middleware/test_file_intent_middleware.py index c0281fa29..673331b0a 100644 --- a/surfsense_backend/tests/unit/middleware/test_file_intent_middleware.py +++ b/surfsense_backend/tests/unit/middleware/test_file_intent_middleware.py @@ -79,7 +79,7 @@ async def test_file_write_null_filename_uses_semantic_default_path(): @pytest.mark.asyncio -async def test_file_write_null_filename_infers_json_extension(): +async def test_file_write_null_filename_defaults_to_markdown_path(): llm = _FakeLLM( '{"intent":"file_write","confidence":0.71,"suggested_filename":null}' ) @@ -94,7 +94,7 @@ async def test_file_write_null_filename_infers_json_extension(): assert result is not None contract = result["file_operation_contract"] assert contract["intent"] == FileOperationIntent.FILE_WRITE.value - assert contract["suggested_path"] == "/notes.json" + assert contract["suggested_path"] == "/notes.md" @pytest.mark.asyncio diff --git a/surfsense_backend/tests/unit/middleware/test_filesystem_backends.py b/surfsense_backend/tests/unit/middleware/test_filesystem_backends.py index 9600b7e05..98996d6bc 100644 --- a/surfsense_backend/tests/unit/middleware/test_filesystem_backends.py +++ b/surfsense_backend/tests/unit/middleware/test_filesystem_backends.py @@ -30,6 +30,7 @@ def test_backend_resolver_returns_multi_root_backend_for_single_root(tmp_path: P backend = 
resolver(_RuntimeStub()) assert isinstance(backend, MultiRootLocalFolderBackend) + assert backend.list_mounts() == ("tmp",) def test_backend_resolver_uses_cloud_mode_by_default(): @@ -57,3 +58,4 @@ def test_backend_resolver_returns_multi_root_backend_for_multiple_roots(tmp_path backend = resolver(_RuntimeStub()) assert isinstance(backend, MultiRootLocalFolderBackend) + assert backend.list_mounts() == ("resume", "notes") diff --git a/surfsense_backend/tests/unit/middleware/test_filesystem_verification.py b/surfsense_backend/tests/unit/middleware/test_filesystem_verification.py index 7b4119bb5..d00365032 100644 --- a/surfsense_backend/tests/unit/middleware/test_filesystem_verification.py +++ b/surfsense_backend/tests/unit/middleware/test_filesystem_verification.py @@ -34,6 +34,11 @@ class _RuntimeNoSuggestedPath: state = {"file_operation_contract": {}} +class _RuntimeWithSuggestedPath: + def __init__(self, suggested_path: str) -> None: + self.state = {"file_operation_contract": {"suggested_path": suggested_path}} + + def test_verify_written_content_prefers_raw_sync() -> None: middleware = SurfSenseFilesystemMiddleware.__new__(SurfSenseFilesystemMiddleware) expected = "line1\nline2" @@ -162,3 +167,47 @@ def test_normalize_local_mount_path_prefixes_posix_absolute_path_for_linux_and_m resolved = middleware._normalize_local_mount_path("/var/log/app.log", runtime) # type: ignore[arg-type] assert resolved == "/pc_backups/var/log/app.log" + + +def test_normalize_local_mount_path_prefers_unique_existing_parent_mount( + tmp_path: Path, +) -> None: + root_a = tmp_path / "RootA" + root_b = tmp_path / "RootB" + (root_a / "other").mkdir(parents=True) + (root_b / "nested" / "deep").mkdir(parents=True) + backend = MultiRootLocalFolderBackend( + (("root_a", str(root_a)), ("root_b", str(root_b))) + ) + runtime = _RuntimeNoSuggestedPath() + middleware = SurfSenseFilesystemMiddleware.__new__(SurfSenseFilesystemMiddleware) + middleware._get_backend = lambda _runtime: backend # type: 
ignore[method-assign] + + resolved = middleware._normalize_local_mount_path( # type: ignore[arg-type] + "/nested/deep/new-note.md", + runtime, + ) + + assert resolved == "/root_b/nested/deep/new-note.md" + + +def test_normalize_local_mount_path_uses_suggested_mount_when_ambiguous( + tmp_path: Path, +) -> None: + root_a = tmp_path / "RootA" + root_b = tmp_path / "RootB" + root_a.mkdir(parents=True) + root_b.mkdir(parents=True) + backend = MultiRootLocalFolderBackend( + (("root_a", str(root_a)), ("root_b", str(root_b))) + ) + runtime = _RuntimeWithSuggestedPath("/root_b/notes/context.md") + middleware = SurfSenseFilesystemMiddleware.__new__(SurfSenseFilesystemMiddleware) + middleware._get_backend = lambda _runtime: backend # type: ignore[method-assign] + + resolved = middleware._normalize_local_mount_path( # type: ignore[arg-type] + "/brand-new-note.md", + runtime, + ) + + assert resolved == "/root_b/brand-new-note.md" diff --git a/surfsense_backend/tests/unit/middleware/test_local_folder_backend.py b/surfsense_backend/tests/unit/middleware/test_local_folder_backend.py index 3484a2cc4..7dfc68402 100644 --- a/surfsense_backend/tests/unit/middleware/test_local_folder_backend.py +++ b/surfsense_backend/tests/unit/middleware/test_local_folder_backend.py @@ -9,6 +9,7 @@ pytestmark = pytest.mark.unit def test_local_backend_write_read_edit_roundtrip(tmp_path: Path): backend = LocalFolderBackend(str(tmp_path)) + (tmp_path / "notes").mkdir() write = backend.write("/notes/test.md", "line1\nline2") assert write.error is None @@ -51,9 +52,20 @@ def test_local_backend_glob_and_grep(tmp_path: Path): def test_local_backend_read_raw_returns_exact_content(tmp_path: Path): backend = LocalFolderBackend(str(tmp_path)) + (tmp_path / "notes").mkdir() expected = "# Title\n\nline 1\nline 2\n" write = backend.write("/notes/raw.md", expected) assert write.error is None raw = backend.read_raw("/notes/raw.md") assert raw == expected + + +def 
test_local_backend_write_rejects_missing_parent_directory(tmp_path: Path): + backend = LocalFolderBackend(str(tmp_path)) + + write = backend.write("/tempoo/new-note.md", "# New note") + + assert write.error is not None + assert "parent directory" in write.error + assert not (tmp_path / "tempoo").exists() diff --git a/surfsense_backend/tests/unit/middleware/test_multi_root_local_folder_backend.py b/surfsense_backend/tests/unit/middleware/test_multi_root_local_folder_backend.py index 7afb47e26..43a671178 100644 --- a/surfsense_backend/tests/unit/middleware/test_multi_root_local_folder_backend.py +++ b/surfsense_backend/tests/unit/middleware/test_multi_root_local_folder_backend.py @@ -26,3 +26,12 @@ def test_mount_ids_preserve_client_mapping_order(tmp_path: Path) -> None: ) assert backend.list_mounts() == ("pc_backups", "pc_backups_2", "notes_2026") + + +def test_mount_id_is_authoritative_not_folder_name(tmp_path: Path) -> None: + root = tmp_path / "Resume Folder" + root.mkdir() + + backend = MultiRootLocalFolderBackend((("custom_resume_mount", str(root)),)) + + assert backend.list_mounts() == ("custom_resume_mount",) diff --git a/surfsense_backend/tests/unit/test_obsidian_plugin_indexer.py b/surfsense_backend/tests/unit/test_obsidian_plugin_indexer.py new file mode 100644 index 000000000..7ab3c52e0 --- /dev/null +++ b/surfsense_backend/tests/unit/test_obsidian_plugin_indexer.py @@ -0,0 +1,225 @@ +from __future__ import annotations + +import base64 +from datetime import UTC, datetime + +import pytest +from pydantic import ValidationError + +from app.etl_pipeline.etl_document import EtlResult +from app.schemas.obsidian_plugin import HeadingRef, NotePayload +from app.services.obsidian_plugin_indexer import ( + _build_metadata, + _extract_binary_attachment_markdown, + _is_image_attachment, + _require_extracted_attachment_content, +) + + +_FAKE_PNG_B64 = base64.b64encode(b"\x89PNG\r\n\x1a\n").decode("ascii") + + +def test_build_metadata_serializes_headings_to_plain_json() 
-> None: + now = datetime.now(UTC) + payload = NotePayload( + vault_id="vault-1", + path="notes.md", + name="notes", + extension="md", + content="# Notes", + headings=[HeadingRef(heading="Notes", level=1)], + content_hash="abc123", + mtime=now, + ctime=now, + ) + + metadata = _build_metadata(payload, vault_name="My Vault", connector_id=42) + + assert metadata["headings"] == [{"heading": "Notes", "level": 1}] + + +def test_build_metadata_marks_binary_attachment_fields() -> None: + now = datetime.now(UTC) + payload = NotePayload( + vault_id="vault-1", + path="assets/diagram.png", + name="diagram", + extension="png", + content="", + content_hash="abc123", + mtime=now, + ctime=now, + is_binary=True, + binary_base64=_FAKE_PNG_B64, + mime_type="image/png", + ) + + metadata = _build_metadata(payload, vault_name="My Vault", connector_id=42) + + assert metadata["is_binary"] is True + assert metadata["mime_type"] == "image/png" + + +@pytest.mark.asyncio +async def test_extract_binary_attachment_markdown_handles_invalid_base64() -> None: + now = datetime.now(UTC) + payload = NotePayload( + vault_id="vault-1", + path="assets/diagram.png", + name="diagram", + extension="png", + content="", + content_hash="abc123", + mtime=now, + ctime=now, + is_binary=True, + binary_base64="not-valid-base64!!", + mime_type="image/png", + ) + + content, metadata = await _extract_binary_attachment_markdown( + payload, vision_llm=None + ) + + assert content == "" + assert metadata["attachment_extraction_status"] == "invalid_binary_payload" + + +@pytest.mark.asyncio +async def test_extract_binary_attachment_markdown_uses_etl(monkeypatch) -> None: + now = datetime.now(UTC) + payload = NotePayload( + vault_id="vault-1", + path="assets/spec.pdf", + name="spec", + extension="pdf", + content="", + content_hash="abc123", + mtime=now, + ctime=now, + is_binary=True, + binary_base64=base64.b64encode(b"%PDF-1.7 fake bytes").decode("ascii"), + mime_type="application/pdf", + ) + + async def 
_fake_run_etl_extract( # noqa: ANN001 + *, file_path, filename, vision_llm + ): + assert filename == "spec.pdf" + assert file_path + assert vision_llm is None + return EtlResult( + markdown_content="Extracted content", + etl_service="TEST_ETL", + content_type="document", + ) + + monkeypatch.setattr( + "app.services.obsidian_plugin_indexer._run_etl_extract", + _fake_run_etl_extract, + ) + + content, metadata = await _extract_binary_attachment_markdown( + payload, vision_llm=None + ) + + assert content == "Extracted content" + assert metadata["attachment_extraction_status"] == "ok" + assert metadata["attachment_etl_service"] == "TEST_ETL" + + +def test_is_image_attachment_detects_image_extensions() -> None: + now = datetime.now(UTC) + image_payload = NotePayload( + vault_id="vault-1", + path="assets/screenshot.PNG", + name="screenshot", + extension="PNG", + content="", + content_hash="abc123", + mtime=now, + ctime=now, + is_binary=True, + binary_base64=_FAKE_PNG_B64, + mime_type="image/png", + ) + pdf_payload = NotePayload( + vault_id="vault-1", + path="assets/spec.pdf", + name="spec", + extension="pdf", + content="", + content_hash="abc123", + mtime=now, + ctime=now, + is_binary=True, + binary_base64=_FAKE_PNG_B64, + mime_type="application/pdf", + ) + + assert _is_image_attachment(image_payload) is True + assert _is_image_attachment(pdf_payload) is False + + +def test_note_payload_rejects_binary_without_base64() -> None: + now = datetime.now(UTC) + with pytest.raises(ValidationError, match="binary_base64 is required"): + NotePayload( + vault_id="vault-1", + path="assets/diagram.png", + name="diagram", + extension="png", + content="", + content_hash="abc123", + mtime=now, + ctime=now, + is_binary=True, + mime_type="image/png", + ) + + +def test_note_payload_rejects_binary_without_mime_type() -> None: + now = datetime.now(UTC) + with pytest.raises(ValidationError, match="mime_type is required"): + NotePayload( + vault_id="vault-1", + path="assets/diagram.png", + 
name="diagram", + extension="png", + content="", + content_hash="abc123", + mtime=now, + ctime=now, + is_binary=True, + binary_base64=_FAKE_PNG_B64, + ) + + +def test_note_payload_rejects_markdown_with_binary_fields() -> None: + now = datetime.now(UTC) + with pytest.raises( + ValidationError, + match="binary_base64 and mime_type must be omitted when is_binary is False", + ): + NotePayload( + vault_id="vault-1", + path="notes.md", + name="notes", + extension="md", + content="# Notes", + content_hash="abc123", + mtime=now, + ctime=now, + binary_base64=_FAKE_PNG_B64, + ) + + +def test_require_extracted_attachment_content_rejects_empty_content() -> None: + with pytest.raises( + RuntimeError, match="Attachment extraction failed for assets/img.png" + ): + _require_extracted_attachment_content( + content=" ", + etl_meta={"attachment_extraction_status": "etl_failed"}, + path="assets/img.png", + ) diff --git a/surfsense_desktop/src/ipc/channels.ts b/surfsense_desktop/src/ipc/channels.ts index 1007e3a37..8d2af5107 100644 --- a/surfsense_desktop/src/ipc/channels.ts +++ b/surfsense_desktop/src/ipc/channels.ts @@ -57,6 +57,10 @@ export const IPC_CHANNELS = { // Agent filesystem mode AGENT_FILESYSTEM_GET_SETTINGS: 'agent-filesystem:get-settings', AGENT_FILESYSTEM_GET_MOUNTS: 'agent-filesystem:get-mounts', + AGENT_FILESYSTEM_LIST_FILES: 'agent-filesystem:list-files', + AGENT_FILESYSTEM_TREE_WATCH_START: 'agent-filesystem:tree-watch-start', + AGENT_FILESYSTEM_TREE_WATCH_STOP: 'agent-filesystem:tree-watch-stop', + AGENT_FILESYSTEM_TREE_DIRTY: 'agent-filesystem:tree-dirty', AGENT_FILESYSTEM_SET_SETTINGS: 'agent-filesystem:set-settings', AGENT_FILESYSTEM_PICK_ROOT: 'agent-filesystem:pick-root', } as const; diff --git a/surfsense_desktop/src/ipc/handlers.ts b/surfsense_desktop/src/ipc/handlers.ts index b524a91a1..d918fd90d 100644 --- a/surfsense_desktop/src/ipc/handlers.ts +++ b/surfsense_desktop/src/ipc/handlers.ts @@ -38,6 +38,7 @@ import { trackEvent, } from '../modules/analytics'; 
import { + listAgentFilesystemFiles, readAgentLocalFileText, writeAgentLocalFileText, getAgentFilesystemMounts, @@ -45,6 +46,11 @@ import { pickAgentFilesystemRoot, setAgentFilesystemSettings, } from '../modules/agent-filesystem'; +import { + startAgentFilesystemTreeWatch, + stopAgentFilesystemTreeWatch, + type AgentFilesystemTreeWatchOptions, +} from '../modules/agent-filesystem-tree-watcher'; let authTokens: { bearer: string; refresh: string } | null = null; @@ -136,21 +142,24 @@ export function registerIpcHandlers(): void { readLocalFiles(paths) ); - ipcMain.handle(IPC_CHANNELS.READ_AGENT_LOCAL_FILE_TEXT, async (_event, virtualPath: string) => { + ipcMain.handle( + IPC_CHANNELS.READ_AGENT_LOCAL_FILE_TEXT, + async (_event, virtualPath: string, searchSpaceId?: number | null) => { try { - const result = await readAgentLocalFileText(virtualPath); + const result = await readAgentLocalFileText(virtualPath, searchSpaceId); return { ok: true, path: result.path, content: result.content }; } catch (error) { const message = error instanceof Error ? error.message : 'Failed to read local file'; return { ok: false, path: virtualPath, error: message }; } - }); + } + ); ipcMain.handle( IPC_CHANNELS.WRITE_AGENT_LOCAL_FILE_TEXT, - async (_event, virtualPath: string, content: string) => { + async (_event, virtualPath: string, content: string, searchSpaceId?: number | null) => { try { - const result = await writeAgentLocalFileText(virtualPath, content); + const result = await writeAgentLocalFileText(virtualPath, content, searchSpaceId); return { ok: true, path: result.path }; } catch (error) { const message = error instanceof Error ? 
error.message : 'Failed to write local file'; @@ -233,21 +242,52 @@ export function registerIpcHandlers(): void { }; }); - ipcMain.handle(IPC_CHANNELS.AGENT_FILESYSTEM_GET_SETTINGS, () => - getAgentFilesystemSettings() + ipcMain.handle(IPC_CHANNELS.AGENT_FILESYSTEM_GET_SETTINGS, (_event, searchSpaceId?: number | null) => + getAgentFilesystemSettings(searchSpaceId) ); - ipcMain.handle(IPC_CHANNELS.AGENT_FILESYSTEM_GET_MOUNTS, () => - getAgentFilesystemMounts() + ipcMain.handle(IPC_CHANNELS.AGENT_FILESYSTEM_GET_MOUNTS, (_event, searchSpaceId?: number | null) => + getAgentFilesystemMounts(searchSpaceId) + ); + + ipcMain.handle( + IPC_CHANNELS.AGENT_FILESYSTEM_LIST_FILES, + ( + _event, + options: { + rootPath: string; + searchSpaceId?: number | null; + excludePatterns?: string[] | null; + fileExtensions?: string[] | null; + } + ) => + listAgentFilesystemFiles(options) ); ipcMain.handle( IPC_CHANNELS.AGENT_FILESYSTEM_SET_SETTINGS, - (_event, settings: { mode?: 'cloud' | 'desktop_local_folder'; localRootPaths?: string[] | null }) => - setAgentFilesystemSettings(settings) + ( + _event, + payload: { + searchSpaceId?: number | null; + settings: { mode?: 'cloud' | 'desktop_local_folder'; localRootPaths?: string[] | null }; + } + ) => setAgentFilesystemSettings(payload?.searchSpaceId, payload?.settings ?? 
{})
+  );
+
+  ipcMain.handle(IPC_CHANNELS.AGENT_FILESYSTEM_PICK_ROOT, () =>
+    pickAgentFilesystemRoot()
+  );
+
+  ipcMain.handle(
+    IPC_CHANNELS.AGENT_FILESYSTEM_TREE_WATCH_START,
+    (_event, options: AgentFilesystemTreeWatchOptions) =>
+      startAgentFilesystemTreeWatch(options)
+  );
+
+  ipcMain.handle(
+    IPC_CHANNELS.AGENT_FILESYSTEM_TREE_WATCH_STOP,
+    (_event, searchSpaceId?: number | null) =>
+      stopAgentFilesystemTreeWatch(searchSpaceId)
+  );
+}
diff --git a/surfsense_desktop/src/modules/agent-filesystem-tree-watcher.ts b/surfsense_desktop/src/modules/agent-filesystem-tree-watcher.ts
new file mode 100644
index 000000000..600f84fd5
--- /dev/null
+++ b/surfsense_desktop/src/modules/agent-filesystem-tree-watcher.ts
@@ -0,0 +1,302 @@
+import { BrowserWindow } from 'electron';
+import chokidar, { type FSWatcher } from 'chokidar';
+import { resolve } from 'node:path';
+import { IPC_CHANNELS } from '../ipc/channels';
+import { listAgentFilesystemFiles } from './agent-filesystem';
+
+const SAFETY_POLL_MS = 60_000;
+const EVENT_DEBOUNCE_MS = 700;
+
+export type AgentFilesystemTreeWatchOptions = {
+  searchSpaceId?: number | null;
+  rootPaths: string[];
+  excludePatterns?: string[] | null;
+  fileExtensions?: string[] | null;
+};
+
+type TreeDirtyReason = 'watcher_event' | 'safety_poll';
+
+type TreeDirtyEvent = {
+  searchSpaceId: number | null;
+  reason: TreeDirtyReason;
+  rootPath: string;
+  changedPath: string | null;
+  timestamp: number;
+};
+
+type WatchSession = {
+  searchSpaceId: number | null;
+  optionsSignature: string;
+  rootPaths: string[];
+  excludePatterns: string[];
+  fileExtensions: string[] | null;
+  watchers: FSWatcher[];
+  pollTimer: NodeJS.Timeout | null;
+  emitTimer: NodeJS.Timeout | null;
+  rootSnapshotByPath: Map<string, string>;
+  pendingDirtyByRoot: Map<string, { reason: TreeDirtyReason; changedPath: string | null }>;
+  disposed: boolean;
+};
+
+const sessions = new Map<string, WatchSession>();
+
+function normalizeSearchSpaceId(searchSpaceId?: number | null): number | null {
+  if (typeof searchSpaceId === 'number' && Number.isFinite(searchSpaceId) && searchSpaceId > 
0) {
+    return searchSpaceId;
+  }
+  return null;
+}
+
+function getSessionKey(searchSpaceId?: number | null): string {
+  const normalized = normalizeSearchSpaceId(searchSpaceId);
+  return normalized === null ? 'default' : String(normalized);
+}
+
+function normalizeRootPath(pathValue: string): string {
+  const normalized = resolve(pathValue.trim());
+  return process.platform === 'win32' ? normalized.toLowerCase() : normalized;
+}
+
+function normalizeList(value: string[] | null | undefined): string[] {
+  if (!value || value.length === 0) return [];
+  return value
+    .filter((entry): entry is string => typeof entry === 'string')
+    .map((entry) => entry.trim())
+    .filter(Boolean);
+}
+
+function normalizeExtensions(value: string[] | null | undefined): string[] | null {
+  const normalized = normalizeList(value).map((entry) => entry.toLowerCase());
+  return normalized.length > 0 ? normalized : null;
+}
+
+function buildOptionsSignature(
+  searchSpaceId: number | null,
+  rootPaths: string[],
+  excludePatterns: string[],
+  fileExtensions: string[] | null
+): string {
+  return JSON.stringify({
+    searchSpaceId,
+    rootPaths: [...rootPaths].sort(),
+    excludePatterns: [...excludePatterns].sort(),
+    fileExtensions: fileExtensions ? [...fileExtensions].sort() : null,
+  });
+}
+
+function hashText(input: string, seed: number): number {
+  let hash = seed >>> 0;
+  for (let i = 0; i < input.length; i += 1) {
+    hash ^= input.charCodeAt(i);
+    hash = Math.imul(hash, 16777619);
+    hash >>>= 0;
+  }
+  return hash;
+}
+
+async function buildRootSnapshotSignature(
+  session: WatchSession,
+  rootPath: string
+): Promise<string> {
+  let hash = 2166136261;
+  hash = hashText(`space:${session.searchSpaceId ?? 
'default'}|root:${rootPath}`, hash);
+  const files = await listAgentFilesystemFiles({
+    rootPath,
+    searchSpaceId: session.searchSpaceId,
+    excludePatterns: session.excludePatterns,
+    fileExtensions: session.fileExtensions,
+  });
+  const sortedFiles = [...files].sort((a, b) => a.relativePath.localeCompare(b.relativePath));
+  hash = hashText(`count:${sortedFiles.length}`, hash);
+  for (const file of sortedFiles) {
+    hash = hashText(
+      `${file.relativePath}|${Math.round(file.mtimeMs)}|${file.size}`,
+      hash
+    );
+  }
+  return hash.toString(16);
+}
+
+function sendTreeDirtyEvent(
+  searchSpaceId: number | null,
+  reason: TreeDirtyReason,
+  rootPath: string,
+  changedPath: string | null
+): void {
+  const payload: TreeDirtyEvent = {
+    searchSpaceId,
+    reason,
+    rootPath,
+    changedPath,
+    timestamp: Date.now(),
+  };
+  for (const win of BrowserWindow.getAllWindows()) {
+    if (!win.isDestroyed()) {
+      win.webContents.send(IPC_CHANNELS.AGENT_FILESYSTEM_TREE_DIRTY, payload);
+    }
+  }
+}
+
+function scheduleDirtyEmit(
+  session: WatchSession,
+  reason: TreeDirtyReason,
+  rootPath: string,
+  changedPath: string | null = null
+): void {
+  if (session.disposed) return;
+  const existing = session.pendingDirtyByRoot.get(rootPath);
+  if (!existing || existing.reason === 'safety_poll') {
+    session.pendingDirtyByRoot.set(rootPath, { reason, changedPath });
+  }
+  if (session.emitTimer) {
+    clearTimeout(session.emitTimer);
+  }
+  session.emitTimer = setTimeout(() => {
+    session.emitTimer = null;
+    if (session.disposed) return;
+    const pending = Array.from(session.pendingDirtyByRoot.entries());
+    session.pendingDirtyByRoot.clear();
+    for (const [pendingRootPath, payload] of pending) {
+      sendTreeDirtyEvent(
+        session.searchSpaceId,
+        payload.reason,
+        pendingRootPath,
+        payload.changedPath
+      );
+    }
+  }, EVENT_DEBOUNCE_MS);
+}
+
+async function closeSession(session: WatchSession): Promise<void> {
+  session.disposed = true;
+  if (session.emitTimer) {
+    clearTimeout(session.emitTimer);
+    
session.emitTimer = null; + } + if (session.pollTimer) { + clearInterval(session.pollTimer); + session.pollTimer = null; + } + await Promise.allSettled(session.watchers.map((watcher) => watcher.close())); +} + +export async function startAgentFilesystemTreeWatch( + options: AgentFilesystemTreeWatchOptions +): Promise<{ ok: true }> { + const searchSpaceId = normalizeSearchSpaceId(options.searchSpaceId); + const rootPaths = Array.from( + new Set(normalizeList(options.rootPaths).map((rootPath) => normalizeRootPath(rootPath))) + ); + const excludePatterns = Array.from(new Set(normalizeList(options.excludePatterns))); + const fileExtensions = normalizeExtensions(options.fileExtensions); + const sessionKey = getSessionKey(searchSpaceId); + + if (rootPaths.length === 0) { + await stopAgentFilesystemTreeWatch(searchSpaceId); + return { ok: true }; + } + + const optionsSignature = buildOptionsSignature( + searchSpaceId, + rootPaths, + excludePatterns, + fileExtensions + ); + const existing = sessions.get(sessionKey); + if (existing && existing.optionsSignature === optionsSignature) { + return { ok: true }; + } + if (existing) { + await closeSession(existing); + sessions.delete(sessionKey); + } + + const ignored = [ + /(^|[/\\])\../, + ...excludePatterns.map((pattern) => `**/${pattern}/**`), + ]; + const watchers = rootPaths.map((rootPath) => + chokidar.watch(rootPath, { + persistent: true, + ignoreInitial: true, + awaitWriteFinish: { + stabilityThreshold: 500, + pollInterval: 100, + }, + ignored, + }) + ); + + const session: WatchSession = { + searchSpaceId, + optionsSignature, + rootPaths, + excludePatterns, + fileExtensions, + watchers, + pollTimer: null, + emitTimer: null, + rootSnapshotByPath: new Map(), + pendingDirtyByRoot: new Map(), + disposed: false, + }; + + for (let index = 0; index < watchers.length; index += 1) { + const watcher = watchers[index]; + const rootPath = rootPaths[index]; + watcher.on('add', (filePath) => scheduleDirtyEmit(session, 'watcher_event', 
rootPath, filePath)); + watcher.on('change', (filePath) => + scheduleDirtyEmit(session, 'watcher_event', rootPath, filePath) + ); + watcher.on('unlink', (filePath) => + scheduleDirtyEmit(session, 'watcher_event', rootPath, filePath) + ); + watcher.on('addDir', (filePath) => + scheduleDirtyEmit(session, 'watcher_event', rootPath, filePath) + ); + watcher.on('unlinkDir', (filePath) => + scheduleDirtyEmit(session, 'watcher_event', rootPath, filePath) + ); + } + + for (const rootPath of rootPaths) { + try { + const signature = await buildRootSnapshotSignature(session, rootPath); + session.rootSnapshotByPath.set(rootPath, signature); + } catch { + session.rootSnapshotByPath.set(rootPath, ''); + } + } + + session.pollTimer = setInterval(() => { + void (async () => { + if (session.disposed) return; + for (const rootPath of session.rootPaths) { + try { + const nextSignature = await buildRootSnapshotSignature(session, rootPath); + const previousSignature = session.rootSnapshotByPath.get(rootPath) ?? ''; + if (nextSignature !== previousSignature) { + session.rootSnapshotByPath.set(rootPath, nextSignature); + scheduleDirtyEmit(session, 'safety_poll', rootPath, null); + } + } catch { + // Keep watcher resilient on transient IO errors. 
+        }
+      }
+    })();
+  }, SAFETY_POLL_MS);
+
+  sessions.set(sessionKey, session);
+  return { ok: true };
+}
+
+export async function stopAgentFilesystemTreeWatch(
+  searchSpaceId?: number | null
+): Promise<{ ok: true }> {
+  const sessionKey = getSessionKey(searchSpaceId);
+  const session = sessions.get(sessionKey);
+  if (!session) return { ok: true };
+  sessions.delete(sessionKey);
+  await closeSession(session);
+  return { ok: true };
+}
diff --git a/surfsense_desktop/src/modules/agent-filesystem.ts b/surfsense_desktop/src/modules/agent-filesystem.ts
index 6db5fd6f7..608f8c4a4 100644
--- a/surfsense_desktop/src/modules/agent-filesystem.ts
+++ b/surfsense_desktop/src/modules/agent-filesystem.ts
@@ -1,6 +1,7 @@
 import { app, dialog } from "electron";
-import { access, mkdir, readFile, writeFile } from "node:fs/promises";
-import { dirname, isAbsolute, join, relative, resolve } from "node:path";
+import type { Dirent } from "node:fs";
+import { access, mkdir, readdir, readFile, realpath, stat, writeFile } from "node:fs/promises";
+import { dirname, extname, isAbsolute, join, relative, resolve } from "node:path";
 
 export type AgentFilesystemMode = "cloud" | "desktop_local_folder";
 
@@ -10,8 +11,60 @@ export interface AgentFilesystemSettings {
   updatedAt: string;
 }
 
+type AgentFilesystemSettingsStore = {
+  version: 2;
+  spaces: Record<string, AgentFilesystemSettings>;
+};
+
 const SETTINGS_FILENAME = "agent-filesystem-settings.json";
-const MAX_LOCAL_ROOTS = 5;
+const MAX_LOCAL_ROOTS = 10;
+const DEFAULT_SPACE_KEY = "default";
+let cachedSettingsStore: AgentFilesystemSettingsStore | null = null;
+
+const LOCAL_OPENABLE_TEXT_EXTENSIONS = new Set([
+  ".md",
+  ".markdown",
+  ".txt",
+  ".json",
+  ".yaml",
+  ".yml",
+  ".csv",
+  ".tsv",
+  ".xml",
+  ".html",
+  ".htm",
+  ".css",
+  ".scss",
+  ".sass",
+  ".sql",
+  ".toml",
+  ".ini",
+  ".conf",
+  ".log",
+  ".py",
+  ".js",
+  ".jsx",
+  ".mjs",
+  ".cjs",
+  ".ts",
+  ".tsx",
+  ".java",
+  ".kt",
+  ".kts",
+  ".go",
+  ".rs",
+  ".rb",
+  ".php",
+  ".swift",
+  ".r",
+  ".lua",
+  
".sh", + ".bash", + ".zsh", + ".fish", + ".env", + ".mk", +]); function getSettingsPath(): string { return join(app.getPath("userData"), SETTINGS_FILENAME); @@ -25,14 +78,23 @@ function getDefaultSettings(): AgentFilesystemSettings { }; } +async function canonicalizeRootPath(pathValue: string): Promise { + const resolvedPath = resolve(pathValue); + try { + return await realpath(resolvedPath); + } catch { + return resolvedPath; + } +} + function normalizeLocalRootPaths(paths: unknown): string[] { if (!Array.isArray(paths)) { return []; } const uniquePaths = new Set(); - for (const path of paths) { - if (typeof path !== "string") continue; - const trimmed = path.trim(); + for (const rawPath of paths) { + if (typeof rawPath !== "string") continue; + const trimmed = rawPath.trim(); if (!trimmed) continue; uniquePaths.add(trimmed); if (uniquePaths.size >= MAX_LOCAL_ROOTS) { @@ -42,30 +104,112 @@ function normalizeLocalRootPaths(paths: unknown): string[] { return [...uniquePaths]; } -export async function getAgentFilesystemSettings(): Promise { - try { - const raw = await readFile(getSettingsPath(), "utf8"); - const parsed = JSON.parse(raw) as Partial; - if (parsed.mode !== "cloud" && parsed.mode !== "desktop_local_folder") { - return getDefaultSettings(); +async function normalizeLocalRootPathsCanonical(paths: unknown): Promise { + const normalizedPaths = normalizeLocalRootPaths(paths); + const canonicalizedPaths = await Promise.all( + normalizedPaths.map((pathValue) => canonicalizeRootPath(pathValue)) + ); + const uniquePaths = new Set(); + for (const canonicalPath of canonicalizedPaths) { + uniquePaths.add(canonicalPath); + if (uniquePaths.size >= MAX_LOCAL_ROOTS) { + break; } - return { - mode: parsed.mode, - localRootPaths: normalizeLocalRootPaths(parsed.localRootPaths), - updatedAt: parsed.updatedAt ?? 
new Date().toISOString(), - }; + } + return [...uniquePaths]; +} + +function normalizeSearchSpaceKey(searchSpaceId?: number | null): string { + if (typeof searchSpaceId === "number" && Number.isFinite(searchSpaceId) && searchSpaceId > 0) { + return String(searchSpaceId); + } + return DEFAULT_SPACE_KEY; +} + +function toSettingsFromUnknown(value: unknown): AgentFilesystemSettings | null { + if (!value || typeof value !== "object") { + return null; + } + const parsed = value as Partial; + if (parsed.mode !== "cloud" && parsed.mode !== "desktop_local_folder") { + return null; + } + return { + mode: parsed.mode, + localRootPaths: normalizeLocalRootPaths(parsed.localRootPaths), + updatedAt: parsed.updatedAt ?? new Date().toISOString(), + }; +} + +function getDefaultStore(): AgentFilesystemSettingsStore { + return { version: 2, spaces: {} }; +} + +function getSettingsFromStore( + store: AgentFilesystemSettingsStore, + searchSpaceId?: number | null +): AgentFilesystemSettings { + const key = normalizeSearchSpaceKey(searchSpaceId); + return store.spaces[key] ?? 
getDefaultSettings(); +} + +async function loadAgentFilesystemSettingsStore(): Promise { + if (cachedSettingsStore) { + return cachedSettingsStore; + } + const settingsPath = getSettingsPath(); + try { + const raw = await readFile(settingsPath, "utf8"); + const parsed = JSON.parse(raw) as unknown; + const nextStore = getDefaultStore(); + if ( + parsed && + typeof parsed === "object" && + "version" in parsed && + "spaces" in parsed && + (parsed as { version?: unknown }).version === 2 + ) { + const parsedStore = parsed as { spaces?: Record; version: 2 }; + if (parsedStore.spaces && typeof parsedStore.spaces === "object") { + for (const [spaceKey, rawSettings] of Object.entries(parsedStore.spaces)) { + const normalizedSettings = toSettingsFromUnknown(rawSettings); + if (normalizedSettings) { + nextStore.spaces[String(spaceKey)] = normalizedSettings; + } + } + } + } else { + // Strict migration: reject legacy/non-scoped settings and reset. + await mkdir(dirname(settingsPath), { recursive: true }); + await writeFile(settingsPath, JSON.stringify(nextStore, null, 2), "utf8"); + } + cachedSettingsStore = nextStore; + return nextStore; } catch { - return getDefaultSettings(); + cachedSettingsStore = getDefaultStore(); + await mkdir(dirname(settingsPath), { recursive: true }); + await writeFile(settingsPath, JSON.stringify(cachedSettingsStore, null, 2), "utf8"); + return cachedSettingsStore; } } +export async function getAgentFilesystemSettings( + searchSpaceId?: number | null +): Promise { + const store = await loadAgentFilesystemSettingsStore(); + return getSettingsFromStore(store, searchSpaceId); +} + export async function setAgentFilesystemSettings( + searchSpaceId: number | null | undefined, settings: { mode?: AgentFilesystemMode; localRootPaths?: string[] | null; } ): Promise { - const current = await getAgentFilesystemSettings(); + const store = await loadAgentFilesystemSettingsStore(); + const key = normalizeSearchSpaceKey(searchSpaceId); + const current = 
getSettingsFromStore(store, searchSpaceId); const nextMode = settings.mode === "cloud" || settings.mode === "desktop_local_folder" ? settings.mode @@ -75,13 +219,21 @@ export async function setAgentFilesystemSettings( localRootPaths: settings.localRootPaths === undefined ? current.localRootPaths - : normalizeLocalRootPaths(settings.localRootPaths ?? []), + : await normalizeLocalRootPathsCanonical(settings.localRootPaths ?? []), updatedAt: new Date().toISOString(), }; const settingsPath = getSettingsPath(); await mkdir(dirname(settingsPath), { recursive: true }); - await writeFile(settingsPath, JSON.stringify(next, null, 2), "utf8"); + const nextStore: AgentFilesystemSettingsStore = { + version: 2, + spaces: { + ...store.spaces, + [key]: next, + }, + }; + await writeFile(settingsPath, JSON.stringify(nextStore, null, 2), "utf8"); + cachedSettingsStore = nextStore; return next; } @@ -122,11 +274,35 @@ function toVirtualPath(rootPath: string, absolutePath: string): string { return `/${rel.replace(/\\/g, "/")}`; } +function assertLocalOpenableTextFile(absolutePath: string): void { + const extension = extname(absolutePath).toLowerCase(); + if (!LOCAL_OPENABLE_TEXT_EXTENSIONS.has(extension)) { + throw new Error( + `Unsupported local file type '${extension || "(no extension)"}'. ` + + "Only text/code files can be opened in local mode." 
+ ); + } +} + export type LocalRootMount = { mount: string; rootPath: string; }; +export type AgentFilesystemListOptions = { + rootPath: string; + searchSpaceId?: number | null; + excludePatterns?: string[] | null; + fileExtensions?: string[] | null; +}; + +export type AgentFilesystemFileEntry = { + relativePath: string; + fullPath: string; + size: number; + mtimeMs: number; +}; + function sanitizeMountName(rawMount: string): string { const normalized = rawMount .trim() @@ -155,11 +331,111 @@ function buildRootMounts(rootPaths: string[]): LocalRootMount[] { return mounts; } -export async function getAgentFilesystemMounts(): Promise { - const rootPaths = await resolveCurrentRootPaths(); +export async function getAgentFilesystemMounts( + searchSpaceId?: number | null +): Promise { + const rootPaths = await resolveCurrentRootPaths(searchSpaceId); return buildRootMounts(rootPaths); } +function normalizeComparablePath(pathValue: string): string { + const normalized = resolve(pathValue); + return process.platform === "win32" ? normalized.toLowerCase() : normalized; +} + +function normalizeExtensionSet(fileExtensions: string[] | null | undefined): Set | null { + if (!fileExtensions || fileExtensions.length === 0) { + return null; + } + const set = new Set(); + for (const extension of fileExtensions) { + if (typeof extension !== "string") continue; + const trimmed = extension.trim().toLowerCase(); + if (!trimmed) continue; + set.add(trimmed.startsWith(".") ? trimmed : `.${trimmed}`); + } + return set.size > 0 ? set : null; +} + +function normalizeExcludeSet(excludePatterns: string[] | null | undefined): Set { + const set = new Set(); + for (const pattern of excludePatterns ?? 
[]) { + if (typeof pattern !== "string") continue; + const trimmed = pattern.trim(); + if (!trimmed) continue; + set.add(trimmed); + } + return set; +} + +export async function listAgentFilesystemFiles( + options: AgentFilesystemListOptions +): Promise { + const allowedRootPaths = await resolveCurrentRootPaths(options.searchSpaceId); + const requestedRootPath = await canonicalizeRootPath(options.rootPath); + const normalizedRequestedRoot = normalizeComparablePath(requestedRootPath); + const allowedRoots = new Set( + ( + await Promise.all(allowedRootPaths.map((rootPath) => canonicalizeRootPath(rootPath))) + ).map((rootPath) => normalizeComparablePath(rootPath)) + ); + if (!allowedRoots.has(normalizedRequestedRoot)) { + throw new Error("Selected path is not an allowed local root"); + } + + const excludePatterns = normalizeExcludeSet(options.excludePatterns); + const extensionSet = normalizeExtensionSet(options.fileExtensions); + const files: AgentFilesystemFileEntry[] = []; + const stack: string[] = [requestedRootPath]; + + while (stack.length > 0) { + const currentDir = stack.pop(); + if (!currentDir) continue; + let entries: Dirent[]; + try { + entries = await readdir(currentDir, { withFileTypes: true }); + } catch { + continue; + } + + for (const entry of entries) { + if (entry.name.startsWith(".") || excludePatterns.has(entry.name)) { + continue; + } + const absolutePath = join(currentDir, entry.name); + if (entry.isDirectory()) { + stack.push(absolutePath); + continue; + } + if (!entry.isFile()) { + continue; + } + if (extensionSet) { + const extension = extname(entry.name).toLowerCase(); + if (!extensionSet.has(extension)) { + continue; + } + } + try { + const fileStat = await stat(absolutePath); + if (!fileStat.isFile()) { + continue; + } + files.push({ + relativePath: relative(requestedRootPath, absolutePath).replace(/\\/g, "/"), + fullPath: absolutePath, + size: fileStat.size, + mtimeMs: fileStat.mtimeMs, + }); + } catch { + // Files can disappear while 
scanning. + } + } + } + + return files; +} + function parseMountedVirtualPath( virtualPath: string, mounts: LocalRootMount[] @@ -198,8 +474,8 @@ function toMountedVirtualPath(mount: string, rootPath: string, absolutePath: str return `/${mount}${relativePath}`; } -async function resolveCurrentRootPaths(): Promise { - const settings = await getAgentFilesystemSettings(); +async function resolveCurrentRootPaths(searchSpaceId?: number | null): Promise { + const settings = await getAgentFilesystemSettings(searchSpaceId); if (settings.localRootPaths.length === 0) { throw new Error("No local filesystem roots selected"); } @@ -207,9 +483,10 @@ async function resolveCurrentRootPaths(): Promise { } export async function readAgentLocalFileText( - virtualPath: string + virtualPath: string, + searchSpaceId?: number | null ): Promise<{ path: string; content: string }> { - const rootPaths = await resolveCurrentRootPaths(); + const rootPaths = await resolveCurrentRootPaths(searchSpaceId); const mounts = buildRootMounts(rootPaths); const { mount, subPath } = parseMountedVirtualPath(virtualPath, mounts); const rootMount = findMountByName(mounts, mount); @@ -219,6 +496,7 @@ export async function readAgentLocalFileText( ); } const absolutePath = resolveVirtualPath(rootMount.rootPath, subPath); + assertLocalOpenableTextFile(absolutePath); const content = await readFile(absolutePath, "utf8"); return { path: toMountedVirtualPath(rootMount.mount, rootMount.rootPath, absolutePath), @@ -228,9 +506,10 @@ export async function readAgentLocalFileText( export async function writeAgentLocalFileText( virtualPath: string, - content: string + content: string, + searchSpaceId?: number | null ): Promise<{ path: string }> { - const rootPaths = await resolveCurrentRootPaths(); + const rootPaths = await resolveCurrentRootPaths(searchSpaceId); const mounts = buildRootMounts(rootPaths); const { mount, subPath } = parseMountedVirtualPath(virtualPath, mounts); const rootMount = findMountByName(mounts, 
mount); diff --git a/surfsense_desktop/src/preload.ts b/surfsense_desktop/src/preload.ts index 7ce2cbcf8..7d72e9da5 100644 --- a/surfsense_desktop/src/preload.ts +++ b/surfsense_desktop/src/preload.ts @@ -66,10 +66,10 @@ contextBridge.exposeInMainWorld('electronAPI', { // Browse files via native dialog browseFiles: () => ipcRenderer.invoke(IPC_CHANNELS.BROWSE_FILES), readLocalFiles: (paths: string[]) => ipcRenderer.invoke(IPC_CHANNELS.READ_LOCAL_FILES, paths), - readAgentLocalFileText: (virtualPath: string) => - ipcRenderer.invoke(IPC_CHANNELS.READ_AGENT_LOCAL_FILE_TEXT, virtualPath), - writeAgentLocalFileText: (virtualPath: string, content: string) => - ipcRenderer.invoke(IPC_CHANNELS.WRITE_AGENT_LOCAL_FILE_TEXT, virtualPath, content), + readAgentLocalFileText: (virtualPath: string, searchSpaceId?: number | null) => + ipcRenderer.invoke(IPC_CHANNELS.READ_AGENT_LOCAL_FILE_TEXT, virtualPath, searchSpaceId), + writeAgentLocalFileText: (virtualPath: string, content: string, searchSpaceId?: number | null) => + ipcRenderer.invoke(IPC_CHANNELS.WRITE_AGENT_LOCAL_FILE_TEXT, virtualPath, content, searchSpaceId), // Auth token sync across windows getAuthTokens: () => ipcRenderer.invoke(IPC_CHANNELS.GET_AUTH_TOKENS), @@ -101,13 +101,52 @@ contextBridge.exposeInMainWorld('electronAPI', { ipcRenderer.invoke(IPC_CHANNELS.ANALYTICS_CAPTURE, { event, properties }), getAnalyticsContext: () => ipcRenderer.invoke(IPC_CHANNELS.ANALYTICS_GET_CONTEXT), // Agent filesystem mode - getAgentFilesystemSettings: () => - ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_GET_SETTINGS), - getAgentFilesystemMounts: () => - ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_GET_MOUNTS), + getAgentFilesystemSettings: (searchSpaceId?: number | null) => + ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_GET_SETTINGS, searchSpaceId), + getAgentFilesystemMounts: (searchSpaceId?: number | null) => + ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_GET_MOUNTS, searchSpaceId), + listAgentFilesystemFiles: 
(options: { + rootPath: string; + searchSpaceId?: number | null; + excludePatterns?: string[] | null; + fileExtensions?: string[] | null; + }) => ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_LIST_FILES, options), + startAgentFilesystemTreeWatch: (options: { + searchSpaceId?: number | null; + rootPaths: string[]; + excludePatterns?: string[] | null; + fileExtensions?: string[] | null; + }) => ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_TREE_WATCH_START, options), + stopAgentFilesystemTreeWatch: (searchSpaceId?: number | null) => + ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_TREE_WATCH_STOP, searchSpaceId), + onAgentFilesystemTreeDirty: ( + callback: (data: { + searchSpaceId: number | null; + reason: 'watcher_event' | 'safety_poll'; + rootPath: string; + changedPath: string | null; + timestamp: number; + }) => void + ) => { + const listener = ( + _event: unknown, + data: { + searchSpaceId: number | null; + reason: 'watcher_event' | 'safety_poll'; + rootPath: string; + changedPath: string | null; + timestamp: number; + } + ) => callback(data); + ipcRenderer.on(IPC_CHANNELS.AGENT_FILESYSTEM_TREE_DIRTY, listener); + return () => { + ipcRenderer.removeListener(IPC_CHANNELS.AGENT_FILESYSTEM_TREE_DIRTY, listener); + }; + }, setAgentFilesystemSettings: (settings: { mode?: "cloud" | "desktop_local_folder"; localRootPaths?: string[] | null; - }) => ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_SET_SETTINGS, settings), + }, searchSpaceId?: number | null) => + ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_SET_SETTINGS, { searchSpaceId, settings }), pickAgentFilesystemRoot: () => ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_PICK_ROOT), }); diff --git a/surfsense_obsidian/.editorconfig b/surfsense_obsidian/.editorconfig new file mode 100644 index 000000000..81f3ec354 --- /dev/null +++ b/surfsense_obsidian/.editorconfig @@ -0,0 +1,10 @@ +# top-most EditorConfig file +root = true + +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true 
+indent_style = tab +indent_size = 4 +tab_width = 4 diff --git a/surfsense_obsidian/.gitignore b/surfsense_obsidian/.gitignore new file mode 100644 index 000000000..386ac2bdb --- /dev/null +++ b/surfsense_obsidian/.gitignore @@ -0,0 +1,22 @@ +# vscode +.vscode + +# Intellij +*.iml +.idea + +# npm +node_modules + +# Don't include the compiled main.js file in the repo. +# They should be uploaded to GitHub releases instead. +main.js + +# Exclude sourcemaps +*.map + +# obsidian +data.json + +# Exclude macOS Finder (System Explorer) View States +.DS_Store diff --git a/surfsense_obsidian/.npmrc b/surfsense_obsidian/.npmrc new file mode 100644 index 000000000..b9737525f --- /dev/null +++ b/surfsense_obsidian/.npmrc @@ -0,0 +1 @@ +tag-version-prefix="" \ No newline at end of file diff --git a/surfsense_obsidian/AGENTS.md b/surfsense_obsidian/AGENTS.md new file mode 100644 index 000000000..3f4274ac6 --- /dev/null +++ b/surfsense_obsidian/AGENTS.md @@ -0,0 +1,251 @@ +# Obsidian community plugin + +## Project overview + +- Target: Obsidian Community Plugin (TypeScript → bundled JavaScript). +- Entry point: `main.ts` compiled to `main.js` and loaded by Obsidian. +- Required release artifacts: `main.js`, `manifest.json`, and optional `styles.css`. + +## Environment & tooling + +- Node.js: use current LTS (Node 18+ recommended). +- **Package manager: npm** (required for this sample - `package.json` defines npm scripts and dependencies). +- **Bundler: esbuild** (required for this sample - `esbuild.config.mjs` and build scripts depend on it). Alternative bundlers like Rollup or webpack are acceptable for other projects if they bundle all external dependencies into `main.js`. +- Types: `obsidian` type definitions. + +**Note**: This sample project has specific technical dependencies on npm and esbuild. If you're creating a plugin from scratch, you can choose different tools, but you'll need to replace the build configuration accordingly. 
+ +### Install + +```bash +npm install +``` + +### Dev (watch) + +```bash +npm run dev +``` + +### Production build + +```bash +npm run build +``` + +## Linting + +- To use eslint install eslint from terminal: `npm install -g eslint` +- To use eslint to analyze this project use this command: `eslint main.ts` +- eslint will then create a report with suggestions for code improvement by file and line number. +- If your source code is in a folder, such as `src`, you can use eslint with this command to analyze all files in that folder: `eslint ./src/` + +## File & folder conventions + +- **Organize code into multiple files**: Split functionality across separate modules rather than putting everything in `main.ts`. +- Source lives in `src/`. Keep `main.ts` small and focused on plugin lifecycle (loading, unloading, registering commands). +- **Example file structure**: + ``` + src/ + main.ts # Plugin entry point, lifecycle management + settings.ts # Settings interface and defaults + commands/ # Command implementations + command1.ts + command2.ts + ui/ # UI components, modals, views + modal.ts + view.ts + utils/ # Utility functions, helpers + helpers.ts + constants.ts + types.ts # TypeScript interfaces and types + ``` +- **Do not commit build artifacts**: Never commit `node_modules/`, `main.js`, or other generated files to version control. +- Keep the plugin small. Avoid large dependencies. Prefer browser-compatible packages. +- Generated output should be placed at the plugin root or `dist/` depending on your build setup. Release artifacts must end up at the top level of the plugin folder in the vault (`main.js`, `manifest.json`, `styles.css`). 
+ +## Manifest rules (`manifest.json`) + +- Must include (non-exhaustive): + - `id` (plugin ID; for local dev it should match the folder name) + - `name` + - `version` (Semantic Versioning `x.y.z`) + - `minAppVersion` + - `description` + - `isDesktopOnly` (boolean) + - Optional: `author`, `authorUrl`, `fundingUrl` (string or map) +- Never change `id` after release. Treat it as stable API. +- Keep `minAppVersion` accurate when using newer APIs. +- Canonical requirements are coded here: https://github.com/obsidianmd/obsidian-releases/blob/master/.github/workflows/validate-plugin-entry.yml + +## Testing + +- Manual install for testing: copy `main.js`, `manifest.json`, `styles.css` (if any) to: + ``` + /.obsidian/plugins// + ``` +- Reload Obsidian and enable the plugin in **Settings → Community plugins**. + +## Commands & settings + +- Any user-facing commands should be added via `this.addCommand(...)`. +- If the plugin has configuration, provide a settings tab and sensible defaults. +- Persist settings using `this.loadData()` / `this.saveData()`. +- Use stable command IDs; avoid renaming once released. + +## Versioning & releases + +- Bump `version` in `manifest.json` (SemVer) and update `versions.json` to map plugin version → minimum app version. +- Create a GitHub release whose tag exactly matches `manifest.json`'s `version`. Do not use a leading `v`. +- Attach `manifest.json`, `main.js`, and `styles.css` (if present) to the release as individual assets. +- After the initial release, follow the process to add/update your plugin in the community catalog as required. + +## Security, privacy, and compliance + +Follow Obsidian's **Developer Policies** and **Plugin Guidelines**. In particular: + +- Default to local/offline operation. Only make network requests when essential to the feature. +- No hidden telemetry. If you collect optional analytics or call third-party services, require explicit opt-in and document clearly in `README.md` and in settings. 
+- Never execute remote code, fetch and eval scripts, or auto-update plugin code outside of normal releases. +- Minimize scope: read/write only what's necessary inside the vault. Do not access files outside the vault. +- Clearly disclose any external services used, data sent, and risks. +- Respect user privacy. Do not collect vault contents, filenames, or personal information unless absolutely necessary and explicitly consented. +- Avoid deceptive patterns, ads, or spammy notifications. +- Register and clean up all DOM, app, and interval listeners using the provided `register*` helpers so the plugin unloads safely. + +## UX & copy guidelines (for UI text, commands, settings) + +- Prefer sentence case for headings, buttons, and titles. +- Use clear, action-oriented imperatives in step-by-step copy. +- Use **bold** to indicate literal UI labels. Prefer "select" for interactions. +- Use arrow notation for navigation: **Settings → Community plugins**. +- Keep in-app strings short, consistent, and free of jargon. + +## Performance + +- Keep startup light. Defer heavy work until needed. +- Avoid long-running tasks during `onload`; use lazy initialization. +- Batch disk access and avoid excessive vault scans. +- Debounce/throttle expensive operations in response to file system events. + +## Coding conventions + +- TypeScript with `"strict": true` preferred. +- **Keep `main.ts` minimal**: Focus only on plugin lifecycle (onload, onunload, addCommand calls). Delegate all feature logic to separate modules. +- **Split large files**: If any file exceeds ~200-300 lines, consider breaking it into smaller, focused modules. +- **Use clear module boundaries**: Each file should have a single, well-defined responsibility. +- Bundle everything into `main.js` (no unbundled runtime deps). +- Avoid Node/Electron APIs if you want mobile compatibility; set `isDesktopOnly` accordingly. +- Prefer `async/await` over promise chains; handle errors gracefully. 
+ +## Mobile + +- Where feasible, test on iOS and Android. +- Don't assume desktop-only behavior unless `isDesktopOnly` is `true`. +- Avoid large in-memory structures; be mindful of memory and storage constraints. + +## Agent do/don't + +**Do** +- Add commands with stable IDs (don't rename once released). +- Provide defaults and validation in settings. +- Write idempotent code paths so reload/unload doesn't leak listeners or intervals. +- Use `this.register*` helpers for everything that needs cleanup. + +**Don't** +- Introduce network calls without an obvious user-facing reason and documentation. +- Ship features that require cloud services without clear disclosure and explicit opt-in. +- Store or transmit vault contents unless essential and consented. + +## Common tasks + +### Organize code across multiple files + +**main.ts** (minimal, lifecycle only): +```ts +import { Plugin } from "obsidian"; +import { MySettings, DEFAULT_SETTINGS } from "./settings"; +import { registerCommands } from "./commands"; + +export default class MyPlugin extends Plugin { + settings: MySettings; + + async onload() { + this.settings = Object.assign({}, DEFAULT_SETTINGS, await this.loadData()); + registerCommands(this); + } +} +``` + +**settings.ts**: +```ts +export interface MySettings { + enabled: boolean; + apiKey: string; +} + +export const DEFAULT_SETTINGS: MySettings = { + enabled: true, + apiKey: "", +}; +``` + +**commands/index.ts**: +```ts +import { Plugin } from "obsidian"; +import { doSomething } from "./my-command"; + +export function registerCommands(plugin: Plugin) { + plugin.addCommand({ + id: "do-something", + name: "Do something", + callback: () => doSomething(plugin), + }); +} +``` + +### Add a command + +```ts +this.addCommand({ + id: "your-command-id", + name: "Do the thing", + callback: () => this.doTheThing(), +}); +``` + +### Persist settings + +```ts +interface MySettings { enabled: boolean } +const DEFAULT_SETTINGS: MySettings = { enabled: true }; + +async 
onload() { + this.settings = Object.assign({}, DEFAULT_SETTINGS, await this.loadData()); + await this.saveData(this.settings); +} +``` + +### Register listeners safely + +```ts +this.registerEvent(this.app.workspace.on("file-open", f => { /* ... */ })); +this.registerDomEvent(window, "resize", () => { /* ... */ }); +this.registerInterval(window.setInterval(() => { /* ... */ }, 1000)); +``` + +## Troubleshooting + +- Plugin doesn't load after build: ensure `main.js` and `manifest.json` are at the top level of the plugin folder under `/.obsidian/plugins//`. +- Build issues: if `main.js` is missing, run `npm run build` or `npm run dev` to compile your TypeScript source code. +- Commands not appearing: verify `addCommand` runs after `onload` and IDs are unique. +- Settings not persisting: ensure `loadData`/`saveData` are awaited and you re-render the UI after changes. +- Mobile-only issues: confirm you're not using desktop-only APIs; check `isDesktopOnly` and adjust. + +## References + +- Obsidian sample plugin: https://github.com/obsidianmd/obsidian-sample-plugin +- API documentation: https://docs.obsidian.md +- Developer policies: https://docs.obsidian.md/Developer+policies +- Plugin guidelines: https://docs.obsidian.md/Plugins/Releasing/Plugin+guidelines +- Style guide: https://help.obsidian.md/style-guide diff --git a/surfsense_obsidian/LICENSE b/surfsense_obsidian/LICENSE new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/surfsense_obsidian/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/surfsense_obsidian/README.md b/surfsense_obsidian/README.md new file mode 100644 index 000000000..71cb8566e --- /dev/null +++ b/surfsense_obsidian/README.md @@ -0,0 +1,150 @@ +# SurfSense for Obsidian + +Sync your Obsidian vault to [SurfSense](https://github.com/MODSetter/SurfSense) +so your notes become searchable alongside the rest of your knowledge sources +(GitHub, Slack, Linear, Drive, web pages, etc.) from any SurfSense chat. + +The plugin runs inside Obsidian itself, on desktop and mobile, so it works +the same way for SurfSense Cloud and self-hosted deployments. There is no +server-side vault mount and no Electron-only path; everything goes over HTTPS. 
+ +## What it does + +- Realtime sync as you create, edit, rename, or delete notes +- Initial scan + reconciliation against the server manifest on startup, + so vault edits made while the plugin was offline still show up +- Persistent upload queue, so a crash or offline window never loses changes +- Frontmatter, `[[wiki links]]`, `#tags`, headings, and resolved/unresolved + links are extracted and indexed +- Each chat citation links straight back into Obsidian via the + `obsidian://open?vault=…&file=…` deep link +- Multi-vault aware: each vault you enable the plugin in becomes its own + connector row in SurfSense, named after the vault + +## Install + +### Via [BRAT](https://github.com/TfTHacker/obsidian42-brat) (current) + +1. Install the BRAT community plugin. +2. Run **BRAT: Add a beta plugin for testing**. +3. Paste `MODSetter/SurfSense` and pick the latest release. +4. Enable **SurfSense** in *Settings → Community plugins*. + +### Manual sideload + +1. Download `main.js`, `manifest.json`, and `styles.css` from the latest + GitHub release tagged with the plugin version (e.g. `0.1.0`, with no `v` + prefix, matching the `version` field in `manifest.json`). +2. Copy them into `/.obsidian/plugins/surfsense/`. +3. Restart Obsidian and enable the plugin. + +### Community plugin store + +Submission to the official Obsidian community plugin store is in progress. +Once approved you will be able to install from *Settings → Community plugins* +inside Obsidian. 
+ +## Configure + +Open **Settings → SurfSense** in Obsidian and fill in: + +| Setting | Value | +| --- | --- | +| Server URL | `https://surfsense.com` for SurfSense Cloud, or your self-hosted URL | +| API token | Copy from the *Connectors → Obsidian* dialog in the SurfSense web app | +| Search space | Pick the search space this vault should sync into | +| Vault name | Defaults to your Obsidian vault name; rename if you have multiple vaults | +| Sync mode | *Auto* (recommended) or *Manual* | +| Exclude patterns | Glob patterns of folders/files to skip (e.g. `.trash`, `_attachments`, `templates/**`) | +| Include attachments | Off by default; enable to sync non-`.md` files | + +The connector row appears automatically inside SurfSense the first time the +plugin successfully calls `/obsidian/connect`. You can manage or delete it +from *Connectors → Obsidian* in the web app. + +> **Token lifetime.** The web app currently issues 24-hour JWTs. If you see +> *"token expired"* in the plugin status bar, paste a fresh token from the +> SurfSense web app. Long-lived personal access tokens are coming in a future +> release. + +## Mobile + +The plugin works on Obsidian for iOS and Android. Sync runs whenever the +app is in the foreground and once more on app close. Mobile OSes +aggressively suspend background apps, so mobile sync is near-realtime rather +than instant. Desktop is the source of truth for live editing. + +## Privacy & safety + +The SurfSense backend qualifies as server-side telemetry under Obsidian's +[Developer policies](https://github.com/obsidianmd/obsidian-developer-docs/blob/main/en/Developer%20policies.md), +so here is the full list of what the plugin sends and stores. The +canonical SurfSense privacy policy lives at +<https://www.surfsense.com/privacy>; this section is the plugin-specific +addendum. 
+ +**Sent on `/connect` (once per onload):** + +- `vault_id`: a random UUID minted in the plugin's `data.json` on first run +- `vault_name`: the Obsidian vault folder name +- `search_space_id`: the SurfSense search space you picked + +**Sent per note on `/sync`, `/rename`, `/delete`:** + +- `path`, `name`, `extension` +- `content` (plain text of the note) +- `frontmatter`, `tags`, `headings`, resolved and unresolved links, + `embeds`, `aliases` +- `content_hash` (SHA-256 of the note body), `mtime`, `ctime` + +**Stored server-side per vault:** + +- One connector row keyed by `vault_id` with `{vault_name, source: "plugin", + last_connect_at}`. Nothing per-device, no plugin version, no analytics. +- One `documents` row per note (soft-deleted rather than hard-deleted so + existing chat citations remain valid). + +**What never leaves the plugin:** + +- No remote code loading, no `eval`, no analytics. +- All network traffic goes to your configured **Server URL** only. +- The `Authorization: Bearer …` header is set per-request with the token + you paste; the plugin never reads cookies or other Obsidian state. +- The plugin uses Obsidian's `requestUrl` (no `fetch`, no `node:http`, + no `node:https`) and Web Crypto for hashing, per Obsidian's mobile guidance. + +For retention, deletion, and contact details see +<https://www.surfsense.com/privacy>. + +## Development + +This plugin lives in [`surfsense_obsidian/`](.) inside the SurfSense +monorepo. To work on it locally: + +```sh +cd surfsense_obsidian +npm install +npm run dev # esbuild in watch mode → main.js +``` + +Symlink the folder into a test vault's `.obsidian/plugins/surfsense/`, +enable the plugin, then **Cmd+R** in Obsidian whenever `main.js` rebuilds. + +Lint: + +```sh +npm run lint +``` + +The release pipeline lives at +[`.github/workflows/release-obsidian-plugin.yml`](../.github/workflows/release-obsidian-plugin.yml) +in the repo root and is triggered by tags of the form `obsidian-v0.1.0`. 
+It verifies the tag matches `manifest.json`, builds the plugin, attaches +`main.js` + `manifest.json` + `styles.css` to a GitHub release tagged with +the bare version (e.g. `0.1.0`, the form BRAT and the Obsidian community +store look for), and mirrors `manifest.json` + `versions.json` to the repo +root so Obsidian's community plugin browser can discover them. + +## License + +[Apache-2.0](LICENSE), same as the rest of SurfSense. diff --git a/surfsense_obsidian/esbuild.config.mjs b/surfsense_obsidian/esbuild.config.mjs new file mode 100644 index 000000000..1c74a149e --- /dev/null +++ b/surfsense_obsidian/esbuild.config.mjs @@ -0,0 +1,49 @@ +import esbuild from "esbuild"; +import process from "process"; +import { builtinModules } from 'node:module'; + +const banner = +`/* +THIS IS A GENERATED/BUNDLED FILE BY ESBUILD +if you want to view the source, please visit the github repository of this plugin +*/ +`; + +const prod = (process.argv[2] === "production"); + +const context = await esbuild.context({ + banner: { + js: banner, + }, + entryPoints: ["src/main.ts"], + bundle: true, + external: [ + "obsidian", + "electron", + "@codemirror/autocomplete", + "@codemirror/collab", + "@codemirror/commands", + "@codemirror/language", + "@codemirror/lint", + "@codemirror/search", + "@codemirror/state", + "@codemirror/view", + "@lezer/common", + "@lezer/highlight", + "@lezer/lr", + ...builtinModules], + format: "cjs", + target: "es2018", + logLevel: "info", + sourcemap: prod ? 
false : "inline", + treeShaking: true, + outfile: "main.js", + minify: prod, +}); + +if (prod) { + await context.rebuild(); + process.exit(0); +} else { + await context.watch(); +} diff --git a/surfsense_obsidian/eslint.config.mts b/surfsense_obsidian/eslint.config.mts new file mode 100644 index 000000000..a2615ae6d --- /dev/null +++ b/surfsense_obsidian/eslint.config.mts @@ -0,0 +1,55 @@ +import tseslint from 'typescript-eslint'; +import obsidianmd from "eslint-plugin-obsidianmd"; +import globals from "globals"; +import { globalIgnores } from "eslint/config"; + +export default tseslint.config( + { + languageOptions: { + globals: { + ...globals.browser, + }, + parserOptions: { + projectService: { + allowDefaultProject: [ + 'eslint.config.js', + 'manifest.json' + ] + }, + tsconfigRootDir: import.meta.dirname, + extraFileExtensions: ['.json'] + }, + }, + }, + ...obsidianmd.configs.recommended, + { + plugins: { obsidianmd }, + rules: { + "obsidianmd/ui/sentence-case": [ + "error", + { + brands: [ + "Surfsense", + "iOS", + "iPadOS", + "macOS", + "Windows", + "Android", + "Linux", + "Obsidian", + "Markdown", + ], + }, + ], + }, + }, + globalIgnores([ + "node_modules", + "dist", + "esbuild.config.mjs", + "eslint.config.js", + "version-bump.mjs", + "versions.json", + "main.js", + ]), +); diff --git a/surfsense_obsidian/manifest.json b/surfsense_obsidian/manifest.json new file mode 100644 index 000000000..d03a5b650 --- /dev/null +++ b/surfsense_obsidian/manifest.json @@ -0,0 +1,10 @@ +{ + "id": "surfsense-obsidian", + "name": "SurfSense", + "version": "0.1.0", + "minAppVersion": "1.5.4", + "description": "Turn your vault into a searchable second brain with SurfSense.", + "author": "SurfSense", + "authorUrl": "https://www.surfsense.com", + "isDesktopOnly": false +} diff --git a/surfsense_obsidian/package-lock.json b/surfsense_obsidian/package-lock.json new file mode 100644 index 000000000..e62b89885 --- /dev/null +++ b/surfsense_obsidian/package-lock.json @@ -0,0 +1,5170 @@ 
+{ + "name": "surfsense-obsidian", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "surfsense-obsidian", + "version": "0.1.0", + "license": "Apache-2.0", + "dependencies": { + "obsidian": "latest" + }, + "devDependencies": { + "@eslint/js": "9.30.1", + "@types/node": "^20.19.39", + "esbuild": "0.25.5", + "eslint-plugin-obsidianmd": "0.1.9", + "globals": "14.0.0", + "jiti": "2.6.1", + "tslib": "2.4.0", + "typescript": "^5.8.3", + "typescript-eslint": "8.35.1" + } + }, + "node_modules/@codemirror/state": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.5.0.tgz", + "integrity": "sha512-MwBHVK60IiIHDcoMet78lxt6iw5gJOGSbNbOIVBHWVXIH4/Nq1+GQgLLGgI1KlnN86WDXsPudVaqYHKBIx7Eyw==", + "license": "MIT", + "peer": true, + "dependencies": { + "@marijn/find-cluster-break": "^1.0.0" + } + }, + "node_modules/@codemirror/view": { + "version": "6.38.6", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.38.6.tgz", + "integrity": "sha512-qiS0z1bKs5WOvHIAC0Cybmv4AJSkAXgX5aD6Mqd2epSLlVJsQl8NG23jCVouIgkh4All/mrbdsf2UOLFnJw0tw==", + "license": "MIT", + "peer": true, + "dependencies": { + "@codemirror/state": "^6.5.0", + "crelt": "^1.0.6", + "style-mod": "^4.1.0", + "w3c-keyname": "^2.2.4" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.5.tgz", + "integrity": "sha512-9o3TMmpmftaCMepOdA5k/yDw8SfInyzWWTjYTFCX3kPSDJMROQTb8jg+h9Cnwnmm1vOzvxN7gIfB5V2ewpjtGA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.5.tgz", + "integrity": "sha512-AdJKSPeEHgi7/ZhuIPtcQKr5RQdo6OO2IL87JkianiMYMPbCtot9fxPbrMiBADOWWm3T2si9stAiVsGbTQFkbA==", + "cpu": [ + 
"arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.5.tgz", + "integrity": "sha512-VGzGhj4lJO+TVGV1v8ntCZWJktV7SGCs3Pn1GRWI1SBFtRALoomm8k5E9Pmwg3HOAal2VDc2F9+PM/rEY6oIDg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.5.tgz", + "integrity": "sha512-D2GyJT1kjvO//drbRT3Hib9XPwQeWd9vZoBJn+bu/lVsOZ13cqNdDeqIF/xQ5/VmWvMduP6AmXvylO/PIc2isw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.5.tgz", + "integrity": "sha512-GtaBgammVvdF7aPIgH2jxMDdivezgFu6iKpmT+48+F8Hhg5J/sfnDieg0aeG/jfSvkYQU2/pceFPDKlqZzwnfQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.5.tgz", + "integrity": "sha512-1iT4FVL0dJ76/q1wd7XDsXrSW+oLoquptvh4CLR4kITDtqi2e/xwXwdCVH8hVHU43wgJdsq7Gxuzcs6Iq/7bxQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.5.tgz", + "integrity": 
"sha512-nk4tGP3JThz4La38Uy/gzyXtpkPW8zSAmoUhK9xKKXdBCzKODMc2adkB2+8om9BDYugz+uGV7sLmpTYzvmz6Sw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.5.tgz", + "integrity": "sha512-PrikaNjiXdR2laW6OIjlbeuCPrPaAl0IwPIaRv+SMV8CiM8i2LqVUHFC1+8eORgWyY7yhQY+2U2fA55mBzReaw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.5.tgz", + "integrity": "sha512-cPzojwW2okgh7ZlRpcBEtsX7WBuqbLrNXqLU89GxWbNt6uIg78ET82qifUy3W6OVww6ZWobWub5oqZOVtwolfw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.5.tgz", + "integrity": "sha512-Z9kfb1v6ZlGbWj8EJk9T6czVEjjq2ntSYLY2cw6pAZl4oKtfgQuS4HOq41M/BcoLPzrUbNd+R4BXFyH//nHxVg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.5.tgz", + "integrity": "sha512-sQ7l00M8bSv36GLV95BVAdhJ2QsIbCuCjh/uYrWiMQSUuV+LpXwIqhgJDcvMTj+VsQmqAHL2yYaasENvJ7CDKA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.25.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.5.tgz", + "integrity": "sha512-0ur7ae16hDUC4OL5iEnDb0tZHDxYmuQyhKhsPBV8f99f6Z9KQM02g33f93rNH5A30agMS46u2HP6qTdEt6Q1kg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.5.tgz", + "integrity": "sha512-kB/66P1OsHO5zLz0i6X0RxlQ+3cu0mkxS3TKFvkb5lin6uwZ/ttOkP3Z8lfR9mJOBk14ZwZ9182SIIWFGNmqmg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.5.tgz", + "integrity": "sha512-UZCmJ7r9X2fe2D6jBmkLBMQetXPXIsZjQJCjgwpVDz+YMcS6oFR27alkgGv3Oqkv07bxdvw7fyB71/olceJhkQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.5.tgz", + "integrity": "sha512-kTxwu4mLyeOlsVIFPfQo+fQJAV9mh24xL+y+Bm6ej067sYANjyEw1dNHmvoqxJUCMnkBdKpvOn0Ahql6+4VyeA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.5.tgz", + "integrity": "sha512-K2dSKTKfmdh78uJ3NcWFiqyRrimfdinS5ErLSn3vluHNeHVnBAFWC8a4X5N+7FgVE1EjXS1QDZbpqZBjfrqMTQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + 
"node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.5.tgz", + "integrity": "sha512-uhj8N2obKTE6pSZ+aMUbqq+1nXxNjZIIjCjGLfsWvVpy7gKCOL6rsY1MhRh9zLtUtAI7vpgLMK6DxjO8Qm9lJw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.5.tgz", + "integrity": "sha512-pwHtMP9viAy1oHPvgxtOv+OkduK5ugofNTVDilIzBLpoWAM16r7b/mxBvfpuQDpRQFMfuVr5aLcn4yveGvBZvw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.5.tgz", + "integrity": "sha512-WOb5fKrvVTRMfWFNCroYWWklbnXH0Q5rZppjq0vQIdlsQKuw6mdSihwSo4RV/YdQ5UCKKvBy7/0ZZYLBZKIbwQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.5.tgz", + "integrity": "sha512-7A208+uQKgTxHd0G0uqZO8UjK2R0DDb4fDmERtARjSHWxqMTye4Erz4zZafx7Di9Cv+lNHYuncAkiGFySoD+Mw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.5.tgz", + "integrity": "sha512-G4hE405ErTWraiZ8UiSoesH8DaCsMm0Cay4fsFWOOUcz8b8rC6uCvnagr+gnioEjWn0wC+o1/TAHt+It+MpIMg==", + "cpu": [ + "x64" + ], + "dev": true, + 
"license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.5.tgz", + "integrity": "sha512-l+azKShMy7FxzY0Rj4RCt5VD/q8mG/e+mDivgspo+yL8zW7qEwctQ6YqKX34DTEleFAvCIUviCFX1SDZRSyMQA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.5.tgz", + "integrity": "sha512-O2S7SNZzdcFG7eFKgvwUEZ2VG9D/sn/eIiz8XRZ1Q/DO5a3s76Xv0mdBzVM5j5R639lXQmPmSo0iRpHqUUrsxw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.5.tgz", + "integrity": "sha512-onOJ02pqs9h1iMJ1PQphR+VZv8qBMQ77Klcsqv9CNW2w6yLqoURLcgERAIurY6QE63bbLuqgP9ATqajFLK5AMQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.5.tgz", + "integrity": "sha512-TXv6YnJ8ZMVdX+SXWVBo/0p8LTcrUYngpWjvm91TMjjBQii7Oz11Lw5lbDV5Y0TzuhSJHwiH4hEtC1I42mMS0g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.9.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz", + "integrity": 
"sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.2", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz", + "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/config-array": { + "version": "0.21.1", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz", + "integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/object-schema": "^2.1.7", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/config-helpers": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz", + "integrity": 
"sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.17.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/core": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.17.0.tgz", + "integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz", + "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^10.0.1", + "globals": "^14.0.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/js": { + "version": "9.30.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.30.1.tgz", + "integrity": "sha512-zXhuECFlyep42KZUhWjfvsmXGX39W8K8LFb8AWXM9gSV9dQB+MrJGLKvW6Zw0Ggnbpw0VHTtrhFXYe3Gym18jg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + } + }, + "node_modules/@eslint/json": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@eslint/json/-/json-0.14.0.tgz", + "integrity": "sha512-rvR/EZtvUG3p9uqrSmcDJPYSH7atmWr0RnFWN6m917MAPx82+zQgPUmDu0whPFG6XTyM0vB/hR6c1Q63OaYtCQ==", + "dev": 
true, + "license": "Apache-2.0", + "peer": true, + "dependencies": { + "@eslint/core": "^0.17.0", + "@eslint/plugin-kit": "^0.4.1", + "@humanwhocodes/momoa": "^3.3.10", + "natural-compare": "^1.4.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/object-schema": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz", + "integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/plugin-kit": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz", + "integrity": "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^0.17.0", + "levn": "^0.4.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@humanfs/core": { + "version": "0.19.1", + "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz", + "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node": { + "version": "0.16.7", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.7.tgz", + "integrity": "sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanfs/core": "^0.19.1", + "@humanwhocodes/retry": "^0.4.0" + }, + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/momoa": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/@humanwhocodes/momoa/-/momoa-3.3.10.tgz", + "integrity": "sha512-KWiFQpSAqEIyrTXko3hFNLeQvSK8zXlJQzhhxsyVn58WFRYXST99b3Nqnu+ttOtjds2Pl2grUHGpe2NzhPynuQ==", + "dev": true, + "license": "Apache-2.0", + "peer": true, + "engines": { + "node": ">=18" + } + }, + "node_modules/@humanwhocodes/retry": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@marijn/find-cluster-break": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@marijn/find-cluster-break/-/find-cluster-break-1.0.2.tgz", + "integrity": "sha512-l0h88YhZFyKdXIFNfSWpyjStDjGHwZ/U7iobcK1cQQD8sejsONdQtTVU+1wVN1PBw40PiiHB1vA5S7VTfQiP9g==", + "license": "MIT", + "peer": true + }, + "node_modules/@microsoft/eslint-plugin-sdl": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@microsoft/eslint-plugin-sdl/-/eslint-plugin-sdl-1.1.0.tgz", + "integrity": "sha512-dxdNHOemLnBhfY3eByrujX9KyLigcNtW8sU+axzWv5nLGcsSBeKW2YYyTpfPo1hV8YPOmIGnfA4fZHyKVtWqBQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-plugin-n": "17.10.3", + "eslint-plugin-react": "7.37.3", + "eslint-plugin-security": "1.4.0" + }, + "engines": { + "node": ">=18.0.0" + }, + 
"peerDependencies": { + "eslint": "^9" + } + }, + "node_modules/@microsoft/eslint-plugin-sdl/node_modules/eslint-plugin-security": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-security/-/eslint-plugin-security-1.4.0.tgz", + "integrity": "sha512-xlS7P2PLMXeqfhyf3NpqbvbnW04kN8M9NtmhpR3XGyOvt/vNKS7XPXT5EDbwKW9vCjWH4PpfQvgD/+JgN0VJKA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "safe-regex": "^1.1.0" + } + }, + "node_modules/@microsoft/eslint-plugin-sdl/node_modules/safe-regex": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", + "integrity": "sha512-aJXcif4xnaNUzvUuC5gcb46oTS7zvg4jpMTnuqtrEPlR3vFr4pxtdTwaF1Qs3Enjn9HK+ZlwQui+a7z0SywIzg==", + "dev": true, + "license": "MIT", + "dependencies": { + "ret": "~0.1.10" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + 
"node_modules/@pkgr/core": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.1.2.tgz", + "integrity": "sha512-fdDH1LSGfZdTH2sxdpVMw31BanV28K/Gry0cVFxaNP77neJSkd82mM8ErPNYs9e+0O7SdHBLTDzDgwUuy18RnQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" + } + }, + "node_modules/@rtsao/scc": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz", + "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/codemirror": { + "version": "5.60.8", + "resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-5.60.8.tgz", + "integrity": "sha512-VjFgDF/eB+Aklcy15TtOTLQeMjTo07k7KAjql8OK5Dirr7a6sJY4T1uVBDuTVG9VEmn1uUsohOpYnVfgC6/jyw==", + "license": "MIT", + "dependencies": { + "@types/tern": "*" + } + }, + "node_modules/@types/eslint": { + "version": "8.56.2", + "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.56.2.tgz", + "integrity": "sha512-uQDwm1wFHmbBbCZCqAlq6Do9LYwByNZHWzXppSnay9SuwJ+VRbjkbLABer54kcPnMSlG6Fdiy2yaFXm/z9Z5gw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "*", + "@types/json-schema": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "license": "MIT" + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/json5": { + "version": 
"0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "20.19.39", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.39.tgz", + "integrity": "sha512-orrrD74MBUyK8jOAD/r0+lfa1I2MO6I+vAkmAWzMYbCcgrN4lCrmK52gRFQq/JRxfYPfonkr4b0jcY7Olqdqbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@types/node/node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/tern": { + "version": "0.23.9", + "resolved": "https://registry.npmjs.org/@types/tern/-/tern-0.23.9.tgz", + "integrity": "sha512-ypzHFE/wBzh+BlH6rrBgS5I/Z7RD21pGhZ2rltb/+ZrVM1awdZwjx7hE5XfuYgHWk9uvV5HLZN3SloevCAp3Bw==", + "license": "MIT", + "dependencies": { + "@types/estree": "*" + } + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.35.1.tgz", + "integrity": "sha512-9XNTlo7P7RJxbVeICaIIIEipqxLKguyh+3UbXuT2XQuFp6d8VOeDEGuz5IiX0dgZo8CiI6aOFLg4e8cF71SFVg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/regexpp": "^4.10.0", + "@typescript-eslint/scope-manager": "8.35.1", + "@typescript-eslint/type-utils": "8.35.1", + "@typescript-eslint/utils": "8.35.1", + "@typescript-eslint/visitor-keys": "8.35.1", + "graphemer": "^1.4.0", + "ignore": "^7.0.0", + "natural-compare": "^1.4.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + 
"url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^8.35.1", + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": { + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz", + "integrity": "sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.35.1.tgz", + "integrity": "sha512-3MyiDfrfLeK06bi/g9DqJxP5pV74LNv4rFTyvGDmT3x2p1yp1lOd+qYZfiRPIOf/oON+WRZR5wxxuF85qOar+w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/scope-manager": "8.35.1", + "@typescript-eslint/types": "8.35.1", + "@typescript-eslint/typescript-estree": "8.35.1", + "@typescript-eslint/visitor-keys": "8.35.1", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/project-service": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.35.1.tgz", + "integrity": "sha512-VYxn/5LOpVxADAuP3NrnxxHYfzVtQzLKeldIhDhzC8UHaiQvYlXvKuVho1qLduFbJjjy5U5bkGwa3rUGUb1Q6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/tsconfig-utils": "^8.35.1", + "@typescript-eslint/types": "^8.35.1", + "debug": "^4.3.4" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + 
}, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.35.1.tgz", + "integrity": "sha512-s/Bpd4i7ht2934nG+UoSPlYXd08KYz3bmjLEb7Ye1UVob0d1ENiT3lY8bsCmik4RqfSbPw9xJJHbugpPpP5JUg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.35.1", + "@typescript-eslint/visitor-keys": "8.35.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/tsconfig-utils": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.35.1.tgz", + "integrity": "sha512-K5/U9VmT9dTHoNowWZpz+/TObS3xqC5h0xAIjXPw+MNcKV9qg6eSatEnmeAwkjHijhACH0/N7bkhKvbt1+DXWQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/type-utils": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.35.1.tgz", + "integrity": "sha512-HOrUBlfVRz5W2LIKpXzZoy6VTZzMu2n8q9C2V/cFngIC5U1nStJgv0tMV4sZPzdf4wQm9/ToWUFPMN9Vq9VJQQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/typescript-estree": "8.35.1", + "@typescript-eslint/utils": "8.35.1", + "debug": "^4.3.4", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, 
+ "node_modules/@typescript-eslint/types": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.35.1.tgz", + "integrity": "sha512-q/O04vVnKHfrrhNAscndAn1tuQhIkwqnaW+eu5waD5IPts2eX1dgJxgqcPx5BX109/qAz7IG6VrEPTOYKCNfRQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.35.1.tgz", + "integrity": "sha512-Vvpuvj4tBxIka7cPs6Y1uvM7gJgdF5Uu9F+mBJBPY4MhvjrjWGK4H0lVgLJd/8PWZ23FTqsaJaLEkBCFUk8Y9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/project-service": "8.35.1", + "@typescript-eslint/tsconfig-utils": "8.35.1", + "@typescript-eslint/types": "8.35.1", + "@typescript-eslint/visitor-keys": "8.35.1", + "debug": "^4.3.4", + "fast-glob": "^3.3.2", + "is-glob": "^4.0.3", + "minimatch": "^9.0.4", + "semver": "^7.6.0", + "ts-api-utils": "^2.1.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + 
"integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/utils": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.35.1.tgz", + "integrity": "sha512-lhnwatFmOFcazAsUm3ZnZFpXSxiwoa1Lj50HphnDe1Et01NF4+hrdXONSUHIcbVu2eFb1bAf+5yjXkGVkXBKAQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.7.0", + "@typescript-eslint/scope-manager": "8.35.1", + "@typescript-eslint/types": "8.35.1", + "@typescript-eslint/typescript-estree": "8.35.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.35.1.tgz", + "integrity": "sha512-VRwixir4zBWCSTP/ljEo091lbpypz57PoeAQ9imjG+vbeof9LplljsL1mos4ccG6H9IjfrVGM359RozUnuFhpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/types": "8.35.1", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + 
"funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": 
"sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true, + "license": "Python-2.0" + }, + "node_modules/array-buffer-byte-length": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz", + "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "is-array-buffer": "^3.0.5" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array-includes": { + "version": "3.1.9", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.9.tgz", + "integrity": "sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.24.0", + "es-object-atoms": "^1.1.1", + "get-intrinsic": "^1.3.0", + "is-string": "^1.1.1", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.findlast": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.findlast/-/array.prototype.findlast-1.2.5.tgz", + "integrity": "sha512-CVvd6FHg1Z3POpBLxO6E6zr+rSKEQ9L6rZHAaY7lLfhKsWYUBBOuMs0e9o24oopj6H+geRCX0YJ+TJLBK2eHyQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.findlastindex": { + 
"version": "1.2.6", + "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.6.tgz", + "integrity": "sha512-F/TKATkzseUExPlfvmwQKGITM3DGTK+vkAsCZoDc5daVygbJBnjEUCbgkAvVFsgfXfX4YIqZ/27G3k3tdXrTxQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "es-shim-unscopables": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flat": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.3.tgz", + "integrity": "sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flatmap": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.3.tgz", + "integrity": "sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.tosorted": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.4.tgz", + "integrity": 
"sha512-p6Fx8B7b7ZhL/gmUsAy0D15WhvDccw3mnGNbZpi3pmeJdxtWsj2jEaI4Y6oo3XiHfzuSgPwKc04MYt6KgvC/wA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.3", + "es-errors": "^1.3.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/arraybuffer.prototype.slice": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz", + "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/async-function": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz", + "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + 
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/call-bind": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "resolved": 
"https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz", + "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + 
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, + "license": "MIT" + }, + "node_modules/crelt": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz", + "integrity": "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==", + "license": "MIT", + "peer": true + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/data-view-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz", + "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-length": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz", + "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/inspect-js" + } + }, + "node_modules/data-view-byte-offset": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz", + "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/empathic": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/empathic/-/empathic-2.0.0.tgz", + "integrity": "sha512-i6UzDscO/XfAcNYD75CfICkmfLedpyPDdozrLMmQc5ORaQcdMoc21OnlEylMIqI7U8eniKrPMxxtj8k0vhmJhA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + } + }, + "node_modules/enhanced-resolve": { + "version": "5.18.3", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.3.tgz", + "integrity": "sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww==", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/es-abstract": { + "version": "1.24.0", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz", + "integrity": 
"sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-buffer-byte-length": "^1.0.2", + "arraybuffer.prototype.slice": "^1.0.4", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "data-view-buffer": "^1.0.2", + "data-view-byte-length": "^1.0.2", + "data-view-byte-offset": "^1.0.1", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "es-set-tostringtag": "^2.1.0", + "es-to-primitive": "^1.3.0", + "function.prototype.name": "^1.1.8", + "get-intrinsic": "^1.3.0", + "get-proto": "^1.0.1", + "get-symbol-description": "^1.1.0", + "globalthis": "^1.0.4", + "gopd": "^1.2.0", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "internal-slot": "^1.1.0", + "is-array-buffer": "^3.0.5", + "is-callable": "^1.2.7", + "is-data-view": "^1.0.2", + "is-negative-zero": "^2.0.3", + "is-regex": "^1.2.1", + "is-set": "^2.0.3", + "is-shared-array-buffer": "^1.0.4", + "is-string": "^1.1.1", + "is-typed-array": "^1.1.15", + "is-weakref": "^1.1.1", + "math-intrinsics": "^1.1.0", + "object-inspect": "^1.13.4", + "object-keys": "^1.1.1", + "object.assign": "^4.1.7", + "own-keys": "^1.0.1", + "regexp.prototype.flags": "^1.5.4", + "safe-array-concat": "^1.1.3", + "safe-push-apply": "^1.0.0", + "safe-regex-test": "^1.1.0", + "set-proto": "^1.0.0", + "stop-iteration-iterator": "^1.1.0", + "string.prototype.trim": "^1.2.10", + "string.prototype.trimend": "^1.0.9", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.3", + "typed-array-byte-length": "^1.0.3", + "typed-array-byte-offset": "^1.0.4", + "typed-array-length": "^1.0.7", + "unbox-primitive": "^1.1.0", + "which-typed-array": "^1.1.19" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-iterator-helpers": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.2.1.tgz", + "integrity": "sha512-uDn+FE1yrDzyC0pCo961B2IHbdM8y/ACZsKD4dG6WqrjV53BADjwa7D+1aom2rsNVfLyDgU/eigvlJGJ08OQ4w==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.6", + "es-errors": "^1.3.0", + "es-set-tostringtag": "^2.0.3", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.6", + "globalthis": "^1.0.4", + "gopd": "^1.2.0", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.2.0", + "has-symbols": "^1.1.0", + "internal-slot": "^1.1.0", + "iterator.prototype": "^1.1.4", + "safe-array-concat": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-shim-unscopables": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.1.0.tgz", + "integrity": "sha512-d9T8ucsEhh8Bi1woXCf+TIKDIROLG5WCkxg8geBCbvk22kzwC5G2OnXVMO6FUsvQlgUUXQ2itephWDLqDzbeCw==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-to-primitive": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz", + "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-callable": "^1.2.7", + "is-date-object": "^1.0.5", + "is-symbol": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/esbuild": { + "version": "0.25.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.5.tgz", + "integrity": "sha512-P8OtKZRv/5J5hhz0cUAdu/cLuPIKXpQl1R9pZtvmHWQvrAUVd0UNIPT4IB4W3rNOqVO0rlqHmCIbSwxh/c9yUQ==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.5", + "@esbuild/android-arm": "0.25.5", + "@esbuild/android-arm64": "0.25.5", + "@esbuild/android-x64": "0.25.5", + "@esbuild/darwin-arm64": "0.25.5", + "@esbuild/darwin-x64": "0.25.5", + "@esbuild/freebsd-arm64": "0.25.5", + "@esbuild/freebsd-x64": 
"0.25.5", + "@esbuild/linux-arm": "0.25.5", + "@esbuild/linux-arm64": "0.25.5", + "@esbuild/linux-ia32": "0.25.5", + "@esbuild/linux-loong64": "0.25.5", + "@esbuild/linux-mips64el": "0.25.5", + "@esbuild/linux-ppc64": "0.25.5", + "@esbuild/linux-riscv64": "0.25.5", + "@esbuild/linux-s390x": "0.25.5", + "@esbuild/linux-x64": "0.25.5", + "@esbuild/netbsd-arm64": "0.25.5", + "@esbuild/netbsd-x64": "0.25.5", + "@esbuild/openbsd-arm64": "0.25.5", + "@esbuild/openbsd-x64": "0.25.5", + "@esbuild/sunos-x64": "0.25.5", + "@esbuild/win32-arm64": "0.25.5", + "@esbuild/win32-ia32": "0.25.5", + "@esbuild/win32-x64": "0.25.5" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "9.39.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.1.tgz", + "integrity": "sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.8.0", + "@eslint-community/regexpp": "^4.12.1", + "@eslint/config-array": "^0.21.1", + "@eslint/config-helpers": "^0.4.2", + "@eslint/core": "^0.17.0", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.39.1", + "@eslint/plugin-kit": "^0.4.1", + "@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.2", + "@types/estree": "^1.0.6", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", + "esquery": 
"^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + } + }, + "node_modules/eslint-compat-utils": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/eslint-compat-utils/-/eslint-compat-utils-0.5.1.tgz", + "integrity": "sha512-3z3vFexKIEnjHE3zCMRo6fn/e44U7T1khUjg+Hp0ZQMCigh28rALD0nPFBcGZuiLC5rLZa2ubQHDRln09JfU2Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "eslint": ">=6.0.0" + } + }, + "node_modules/eslint-compat-utils/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint-import-resolver-node": { + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", + "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^3.2.7", + "is-core-module": "^2.13.0", + "resolve": "^1.22.4" + } + }, + "node_modules/eslint-import-resolver-node/node_modules/debug": { + 
"version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-module-utils": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.1.tgz", + "integrity": "sha512-L8jSWTze7K2mTg0vos/RuLRS5soomksDPoJLXIslC7c8Wmut3bx7CPpJijDcBZtxQ5lrbUdM+s0OlNbz0DCDNw==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^3.2.7" + }, + "engines": { + "node": ">=4" + }, + "peerDependenciesMeta": { + "eslint": { + "optional": true + } + } + }, + "node_modules/eslint-module-utils/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-plugin-depend": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-depend/-/eslint-plugin-depend-1.3.1.tgz", + "integrity": "sha512-1uo2rFAr9vzNrCYdp7IBZRB54LiyVxfaIso0R6/QV3t6Dax6DTbW/EV2Hktf0f4UtmGHK8UyzJWI382pwW04jw==", + "dev": true, + "license": "MIT", + "dependencies": { + "empathic": "^2.0.0", + "module-replacements": "^2.8.0", + "semver": "^7.6.3" + } + }, + "node_modules/eslint-plugin-depend/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint-plugin-es-x": { + "version": "7.8.0", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-es-x/-/eslint-plugin-es-x-7.8.0.tgz", + "integrity": "sha512-7Ds8+wAAoV3T+LAKeu39Y5BzXCrGKrcISfgKEqTS4BDN8SFEDQd0S43jiQ8vIa3wUKD07qitZdfzlenSi8/0qQ==", + "dev": true, + "funding": [ + "https://github.com/sponsors/ota-meshi", + "https://opencollective.com/eslint" + ], + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.1.2", + "@eslint-community/regexpp": "^4.11.0", + "eslint-compat-utils": "^0.5.1" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": ">=8" + } + }, + "node_modules/eslint-plugin-import": { + "version": "2.32.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.32.0.tgz", + "integrity": "sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@rtsao/scc": "^1.1.0", + "array-includes": "^3.1.9", + "array.prototype.findlastindex": "^1.2.6", + "array.prototype.flat": "^1.3.3", + "array.prototype.flatmap": "^1.3.3", + "debug": "^3.2.7", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.9", + "eslint-module-utils": "^2.12.1", + "hasown": "^2.0.2", + "is-core-module": "^2.16.1", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.fromentries": "^2.0.8", + "object.groupby": "^1.0.3", + "object.values": "^1.2.1", + "semver": "^6.3.1", + "string.prototype.trimend": "^1.0.9", + "tsconfig-paths": "^3.15.0" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9" + } + }, + "node_modules/eslint-plugin-import/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.1" + } + }, + 
"node_modules/eslint-plugin-json-schema-validator": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-json-schema-validator/-/eslint-plugin-json-schema-validator-5.1.0.tgz", + "integrity": "sha512-ZmVyxRIjm58oqe2kTuy90PpmZPrrKvOjRPXKzq8WCgRgAkidCgm5X8domL2KSfadZ3QFAmifMgGTcVNhZ5ez2g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.3.0", + "ajv": "^8.0.0", + "debug": "^4.3.1", + "eslint-compat-utils": "^0.5.0", + "json-schema-migrate": "^2.0.0", + "jsonc-eslint-parser": "^2.0.0", + "minimatch": "^8.0.0", + "synckit": "^0.9.0", + "toml-eslint-parser": "^0.9.0", + "tunnel-agent": "^0.6.0", + "yaml-eslint-parser": "^1.0.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ota-meshi" + }, + "peerDependencies": { + "eslint": ">=6.0.0" + } + }, + "node_modules/eslint-plugin-json-schema-validator/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/eslint-plugin-json-schema-validator/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/eslint-plugin-json-schema-validator/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, + "node_modules/eslint-plugin-json-schema-validator/node_modules/minimatch": { + "version": "8.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-8.0.4.tgz", + "integrity": "sha512-W0Wvr9HyFXZRGIDgCicunpQ299OKXs9RgZfaukz4qAW/pJhcpUfupc9c+OObPOFueNy8VSrZgEmDtk6Kh4WzDA==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/eslint-plugin-n": { + "version": "17.10.3", + "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-17.10.3.tgz", + "integrity": "sha512-ySZBfKe49nQZWR1yFaA0v/GsH6Fgp8ah6XV0WDz6CN8WO0ek4McMzb7A2xnf4DCYV43frjCygvb9f/wx7UUxRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.4.0", + "enhanced-resolve": "^5.17.0", + "eslint-plugin-es-x": "^7.5.0", + "get-tsconfig": "^4.7.0", + "globals": "^15.8.0", + "ignore": "^5.2.4", + "minimatch": "^9.0.5", + "semver": "^7.5.3" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": ">=8.23.0" + } + }, + "node_modules/eslint-plugin-n/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/eslint-plugin-n/node_modules/globals": { + "version": "15.15.0", + "resolved": 
"https://registry.npmjs.org/globals/-/globals-15.15.0.tgz", + "integrity": "sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint-plugin-n/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/eslint-plugin-n/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint-plugin-obsidianmd": { + "version": "0.1.9", + "resolved": "https://registry.npmjs.org/eslint-plugin-obsidianmd/-/eslint-plugin-obsidianmd-0.1.9.tgz", + "integrity": "sha512-/gyo5vky3Y7re4BtT/8MQbHU5Wes4o6VRqas3YmXE7aTCnMsdV0kfzV1GDXJN9Hrsc9UQPoeKUMiapKL0aGE4g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@microsoft/eslint-plugin-sdl": "^1.1.0", + "@types/eslint": "8.56.2", + "@types/node": "20.12.12", + "eslint": ">=9.0.0 <10.0.0", + "eslint-plugin-depend": "1.3.1", + "eslint-plugin-import": "^2.31.0", + "eslint-plugin-json-schema-validator": "5.1.0", + "eslint-plugin-security": "2.1.1", + "globals": "14.0.0", + "obsidian": "1.8.7", + "typescript": "5.4.5" + }, + "bin": { + "eslint-plugin-obsidian": "dist/lib/index.js" + }, + "engines": { + "node": ">= 18" + }, + 
"peerDependencies": { + "@eslint/js": "^9.30.1", + "@eslint/json": "0.14.0", + "eslint": ">=9.0.0 <10.0.0", + "obsidian": "1.8.7", + "typescript-eslint": "^8.35.1" + } + }, + "node_modules/eslint-plugin-obsidianmd/node_modules/@types/node": { + "version": "20.12.12", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.12.tgz", + "integrity": "sha512-eWLDGF/FOSPtAvEqeRAQ4C8LSA7M1I7i0ky1I8U7kD1J5ITyW3AsRhQrKVoWf5pFKZ2kILsEGJhsI9r93PYnOw==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/eslint-plugin-obsidianmd/node_modules/obsidian": { + "version": "1.8.7", + "resolved": "https://registry.npmjs.org/obsidian/-/obsidian-1.8.7.tgz", + "integrity": "sha512-h4bWwNFAGRXlMlMAzdEiIM2ppTGlrh7uGOJS6w4gClrsjc+ei/3YAtU2VdFUlCiPuTHpY4aBpFJJW75S1Tl/JA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/codemirror": "5.60.8", + "moment": "2.29.4" + }, + "peerDependencies": { + "@codemirror/state": "^6.0.0", + "@codemirror/view": "^6.0.0" + } + }, + "node_modules/eslint-plugin-obsidianmd/node_modules/typescript": { + "version": "5.4.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz", + "integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/eslint-plugin-react": { + "version": "7.37.3", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.37.3.tgz", + "integrity": "sha512-DomWuTQPFYZwF/7c9W2fkKkStqZmBd3uugfqBYLdkZ3Hii23WzZuOLUskGxB8qkSKqftxEeGL1TB2kMhrce0jA==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-includes": "^3.1.8", + "array.prototype.findlast": "^1.2.5", + "array.prototype.flatmap": "^1.3.3", + "array.prototype.tosorted": "^1.1.4", + "doctrine": "^2.1.0", + "es-iterator-helpers": 
"^1.2.1", + "estraverse": "^5.3.0", + "hasown": "^2.0.2", + "jsx-ast-utils": "^2.4.1 || ^3.0.0", + "minimatch": "^3.1.2", + "object.entries": "^1.1.8", + "object.fromentries": "^2.0.8", + "object.values": "^1.2.1", + "prop-types": "^15.8.1", + "resolve": "^2.0.0-next.5", + "semver": "^6.3.1", + "string.prototype.matchall": "^4.0.12", + "string.prototype.repeat": "^1.0.0" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7" + } + }, + "node_modules/eslint-plugin-react/node_modules/resolve": { + "version": "2.0.0-next.5", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz", + "integrity": "sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/eslint-plugin-security": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-security/-/eslint-plugin-security-2.1.1.tgz", + "integrity": "sha512-7cspIGj7WTfR3EhaILzAPcfCo5R9FbeWvbgsPYWivSurTBKW88VQxtP3c4aWMG9Hz/GfJlJVdXEJ3c8LqS+u2w==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "safe-regex": "^2.1.1" + } + }, + "node_modules/eslint-scope": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": 
"4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/@eslint/js": { + "version": "9.39.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.1.tgz", + "integrity": "sha512-S26Stp4zCy88tH94QbBv3XCuzRQiZ9yXofEILmglYTh/Ug/a9/umqvgFtYBAo3Lp0nsI/5/qH1CCrbdK3AP1Tw==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + } + }, + "node_modules/espree": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.15.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.2.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "license": "BSD-2-Clause", + 
"dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + 
"resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz", + "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/fastq": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", + "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/file-entry-cache": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^4.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, 
+ "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz", + "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/for-each": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", + "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/function.prototype.name": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz", + "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "functions-have-names": "^1.2.3", + "hasown": "^2.0.2", + "is-callable": "^1.2.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/generator-function": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/generator-function/-/generator-function-2.0.1.tgz", + "integrity": "sha512-SFdFmIJi+ybC0vjlHN0ZGVGHc3lgE0DxPAT0djjVg+kjOnSqclqmj0KQ7ykTOLP6YxoqOvuAODGdcHJn+43q3g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + 
"engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-symbol-description": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz", + "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-tsconfig": { + "version": "4.13.0", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.0.tgz", + "integrity": "sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/globals": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": 
"sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globalthis": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", + "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-properties": "^1.2.1", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true, + "license": "MIT" + }, + "node_modules/has-bigints": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.1.0.tgz", + "integrity": "sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-proto": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz", + "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": 
{ + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/internal-slot": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", + "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "hasown": "^2.0.2", + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + 
} + }, + "node_modules/is-array-buffer": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz", + "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-async-function": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz", + "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "async-function": "^1.0.0", + "call-bound": "^1.0.3", + "get-proto": "^1.0.1", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-bigint": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz", + "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-bigints": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-boolean-object": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz", + "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-data-view": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz", + "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-date-object": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz", + "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": 
"https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-finalizationregistry": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz", + "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-generator-function": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.2.tgz", + "integrity": "sha512-upqt1SkGkODW9tsGNG5mtXTXtECizwtS2kA161M+gJPc1xdb/Ax629af6YrTwcOeQHbewrPNlE5Dx7kzvXTizA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.4", + "generator-function": "^2.0.0", + "get-proto": "^1.0.1", + "has-tostringtag": "^1.0.2", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-map": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", + "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + 
}, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-negative-zero": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-number-object": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz", + "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-regex": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz", + "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-set": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", + "integrity": 
"sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz", + "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-string": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz", + "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz", + "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "has-symbols": "^1.1.0", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakmap": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", + "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakref": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz", + "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakset": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz", + "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true, + "license": "MIT" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, + "license": "ISC" + }, + 
"node_modules/iterator.prototype": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.5.tgz", + "integrity": "sha512-H0dkQoCa3b2VEeKQBOxFph+JAbcrQdE7KC0UkqwpLmv2EC4P41QXP+rqo9wYodACiG5/WM5s9oDApTU8utwj9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.6", + "get-proto": "^1.0.0", + "has-symbols": "^1.1.0", + "set-function-name": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/jiti": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", + "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/js-yaml": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", + "dev": true, + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-migrate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/json-schema-migrate/-/json-schema-migrate-2.0.0.tgz", + "integrity": 
"sha512-r38SVTtojDRp4eD6WsCqiE0eNDt4v1WalBXb9cyZYw9ai5cGtBwzRNWjHzJl38w6TxFkXAIA7h+fyX3tnrAFhQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^8.0.0" + } + }, + "node_modules/json-schema-migrate/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/json-schema-migrate/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", + "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "dev": true, + 
"license": "MIT", + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } + }, + "node_modules/jsonc-eslint-parser": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/jsonc-eslint-parser/-/jsonc-eslint-parser-2.4.1.tgz", + "integrity": "sha512-uuPNLJkKN8NXAlZlQ6kmUF9qO+T6Kyd7oV4+/7yy8Jz6+MZNyhPq8EdLpdfnPVzUC8qSf1b4j1azKaGnFsjmsw==", + "dev": true, + "license": "MIT", + "dependencies": { + "acorn": "^8.5.0", + "eslint-visitor-keys": "^3.0.0", + "espree": "^9.0.0", + "semver": "^7.3.5" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ota-meshi" + } + }, + "node_modules/jsonc-eslint-parser/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/jsonc-eslint-parser/node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/jsonc-eslint-parser/node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "dev": true, + 
"license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jsx-ast-utils": { + "version": "3.3.5", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz", + "integrity": "sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "array-includes": "^3.1.6", + "array.prototype.flat": "^1.3.1", + "object.assign": "^4.1.4", + "object.values": "^1.1.6" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true, + 
"license": "MIT" + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": 
"sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/module-replacements": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/module-replacements/-/module-replacements-2.10.1.tgz", + "integrity": "sha512-qkKuLpMHDqRSM676OPL7HUpCiiP3NSxgf8NNR1ga2h/iJLNKTsOSjMEwrcT85DMSti2vmOqxknOVBGWj6H6etQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/moment": { + "version": "2.29.4", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz", + "integrity": "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==", + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "dev": true, + "license": "MIT", + 
"engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.assign": { + "version": "4.1.7", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz", + "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0", + "has-symbols": "^1.1.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.entries": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.9.tgz", + "integrity": "sha512-8u/hfXFRBD1O0hPUjioLhoWFHRmt6tKA4/vZPyckBr18l1KE9uHrFaFaUi8MDRTpi4uak2goyPTSNJLXX2k2Hw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.fromentries": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", + "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + 
"url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.groupby": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz", + "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.values": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.1.tgz", + "integrity": "sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/obsidian": { + "version": "1.10.3", + "resolved": "https://registry.npmjs.org/obsidian/-/obsidian-1.10.3.tgz", + "integrity": "sha512-VP+ZSxNMG7y6Z+sU9WqLvJAskCfkFrTz2kFHWmmzis+C+4+ELjk/sazwcTHrHXNZlgCeo8YOlM6SOrAFCynNew==", + "license": "MIT", + "dependencies": { + "@types/codemirror": "5.60.8", + "moment": "2.29.4" + }, + "peerDependencies": { + "@codemirror/state": "6.5.0", + "@codemirror/view": "6.38.6" + } + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/own-keys": 
{ + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz", + "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "get-intrinsic": "^1.2.6", + "object-keys": "^1.1.1", + "safe-push-apply": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + 
"engines": { + "node": ">=8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/possible-typed-array-names": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", + "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prop-types": { + "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", + "dev": true, + "license": 
"MIT", + "dependencies": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/reflect.getprototypeof": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", + "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.9", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.7", + "get-proto": "^1.0.1", + "which-builtin-type": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/regexp-tree": { + "version": "0.1.27", + "resolved": 
"https://registry.npmjs.org/regexp-tree/-/regexp-tree-0.1.27.tgz", + "integrity": "sha512-iETxpjK6YoRWJG5o6hXLwvjYAoW+FEZn9os0PD/b6AP6xQwsa/Y7lCVgIixBbUPMfhu+i2LtdeAqVTgGlQarfA==", + "dev": true, + "license": "MIT", + "bin": { + "regexp-tree": "bin/regexp-tree" + } + }, + "node_modules/regexp.prototype.flags": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz", + "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "set-function-name": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": 
"sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/ret": { + "version": "0.1.15", + "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", + "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-array-concat": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz", + "integrity": 
"sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "get-intrinsic": "^1.2.6", + "has-symbols": "^1.1.0", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">=0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safe-push-apply": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz", + "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safe-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-2.1.1.tgz", + "integrity": "sha512-rx+x8AMzKb5Q5lQ95Zoi6ZbJqwCLkqi3XuJXp5P3rT8OEc6sZCJG5AE5dU3lsgRr/F4Bs31jSlVN+j5KrsGu9A==", + "dev": true, + "license": "MIT", + "dependencies": { + "regexp-tree": "~0.1.1" + } + }, + "node_modules/safe-regex-test": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz", + "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==", + "dev": 
true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-regex": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-function-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-proto": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz", + "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0" + }, + 
"engines": { + "node": ">= 0.4" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": 
"sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/stop-iteration-iterator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz", + "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "internal-slot": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/string.prototype.matchall": { + "version": "4.0.12", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.12.tgz", + "integrity": "sha512-6CC9uyBL+/48dYizRf7H7VAYCMCNTBeM78x/VTUe9bFEaxBepPJDa1Ow99LqI/1yF7kuy7Q3cQsYMrcjGUcskA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.3", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.6", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.6", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + 
"internal-slot": "^1.1.0", + "regexp.prototype.flags": "^1.5.3", + "set-function-name": "^2.0.2", + "side-channel": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.repeat": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/string.prototype.repeat/-/string.prototype.repeat-1.0.0.tgz", + "integrity": "sha512-0u/TldDbKD8bFCQ/4f5+mNRrXwZ8hg2w7ZR8wa16e8z9XpePWl3eGEcUD0OXpEH/VJH/2G3gjUtR3ZOiBe2S/w==", + "dev": true, + "license": "MIT", + "dependencies": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + } + }, + "node_modules/string.prototype.trim": { + "version": "1.2.10", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz", + "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-data-property": "^1.1.4", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.5", + "es-object-atoms": "^1.0.0", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimend": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz", + "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "call-bound": "^1.0.2", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimstart": { + "version": "1.0.8", + "resolved": 
"https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/style-mod": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.3.tgz", + "integrity": "sha512-i/n8VsZydrugj3Iuzll8+x/00GH2vnYsk1eomD8QiRrSAeW6ItbCQDtfXCeJHd0iwiNagqjQkvpvREEPtW3IoQ==", + "license": "MIT", + "peer": true + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/synckit": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.9.3.tgz", + "integrity": "sha512-JJoOEKTfL1urb1mDoEblhD9NhEbWmq9jHEMEnxoC4ujUaZ4itA8vKgwkFAyNClgxplLi9tsUKX+EduK0p/l7sg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@pkgr/core": "^0.1.0", + "tslib": "^2.6.2" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/unts" + } + }, + "node_modules/synckit/node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "dev": true, + "license": "0BSD" + }, + "node_modules/tapable": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", + "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toml-eslint-parser": { + "version": "0.9.3", + "resolved": 
"https://registry.npmjs.org/toml-eslint-parser/-/toml-eslint-parser-0.9.3.tgz", + "integrity": "sha512-moYoCvkNUAPCxSW9jmHmRElhm4tVJpHL8ItC/+uYD0EpPSFXbck7yREz9tNdJVTSpHVod8+HoipcpbQ0oE6gsw==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.0.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ota-meshi" + } + }, + "node_modules/toml-eslint-parser/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/ts-api-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz", + "integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18.12" + }, + "peerDependencies": { + "typescript": ">=4.8.4" + } + }, + "node_modules/tsconfig-paths": { + "version": "3.15.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", + "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/json5": "^0.0.29", + "json5": "^1.0.2", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + } + }, + "node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==", + "dev": true, + "license": "0BSD" + }, + 
"node_modules/tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/typed-array-buffer": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz", + "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/typed-array-byte-length": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz", + "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.14" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-byte-offset": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz", + "integrity": 
"sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-proto": "^1.2.0", + "is-typed-array": "^1.1.15", + "reflect.getprototypeof": "^1.0.9" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-length": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz", + "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0", + "reflect.getprototypeof": "^1.0.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typescript": { + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/typescript-eslint": { + "version": "8.35.1", + "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.35.1.tgz", + "integrity": "sha512-xslJjFzhOmHYQzSB/QTeASAHbjmxOGEP6Coh93TXmUBFQoJ1VU35UHIDmG06Jd6taf3wqqC1ntBnCMeymy5Ovw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/eslint-plugin": "8.35.1", + "@typescript-eslint/parser": "8.35.1", + "@typescript-eslint/utils": "8.35.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + 
}, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/unbox-primitive": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz", + "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "has-bigints": "^1.0.2", + "has-symbols": "^1.1.0", + "which-boxed-primitive": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "dev": true, + "license": "MIT" + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/w3c-keyname": { + "version": "2.2.8", + "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz", + "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==", + "license": "MIT", + "peer": true + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": 
"bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which-boxed-primitive": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz", + "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-bigint": "^1.1.0", + "is-boolean-object": "^1.2.1", + "is-number-object": "^1.1.1", + "is-string": "^1.1.1", + "is-symbol": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-builtin-type": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz", + "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "function.prototype.name": "^1.1.6", + "has-tostringtag": "^1.0.2", + "is-async-function": "^2.0.0", + "is-date-object": "^1.1.0", + "is-finalizationregistry": "^1.1.0", + "is-generator-function": "^1.0.10", + "is-regex": "^1.2.1", + "is-weakref": "^1.0.2", + "isarray": "^2.0.5", + "which-boxed-primitive": "^1.1.0", + "which-collection": "^1.0.2", + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-collection": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", + "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-map": "^2.0.3", + "is-set": "^2.0.3", + "is-weakmap": "^2.0.2", + "is-weakset": "^2.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-typed-array": { + "version": "1.1.19", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", + "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", + "dev": true, + "license": "MIT", + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + "call-bound": "^1.0.4", + "for-each": "^0.3.5", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/yaml": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", + "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==", + "dev": true, + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + } + }, + "node_modules/yaml-eslint-parser": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/yaml-eslint-parser/-/yaml-eslint-parser-1.3.0.tgz", + "integrity": "sha512-E/+VitOorXSLiAqtTd7Yqax0/pAS3xaYMP+AUUJGOK1OZG3rhcj9fcJOM5HJ2VrP1FrStVCWr1muTfQCdj4tAA==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.0.0", + "yaml": "^2.0.0" + }, + "engines": { + "node": "^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ota-meshi" + } + }, + "node_modules/yaml-eslint-parser/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": 
"https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/surfsense_obsidian/package.json b/surfsense_obsidian/package.json new file mode 100644 index 000000000..21504cd5a --- /dev/null +++ b/surfsense_obsidian/package.json @@ -0,0 +1,34 @@ +{ + "name": "surfsense-obsidian", + "version": "0.1.0", + "description": "SurfSense plugin for Obsidian: sync your vault to SurfSense for AI-powered search.", + "main": "main.js", + "type": "module", + "scripts": { + "dev": "node esbuild.config.mjs", + "build": "tsc -noEmit -skipLibCheck && node esbuild.config.mjs production", + "version": "node version-bump.mjs && git add manifest.json versions.json", + "lint": "eslint ." 
+ }, + "keywords": [ + "obsidian", + "surfsense", + "sync", + "search" + ], + "license": "Apache-2.0", + "devDependencies": { + "@eslint/js": "9.30.1", + "@types/node": "^20.19.39", + "esbuild": "0.25.5", + "eslint-plugin-obsidianmd": "0.1.9", + "globals": "14.0.0", + "jiti": "2.6.1", + "tslib": "2.4.0", + "typescript": "^5.8.3", + "typescript-eslint": "8.35.1" + }, + "dependencies": { + "obsidian": "latest" + } +} diff --git a/surfsense_obsidian/src/api-client.ts b/surfsense_obsidian/src/api-client.ts new file mode 100644 index 000000000..37f5ebb65 --- /dev/null +++ b/surfsense_obsidian/src/api-client.ts @@ -0,0 +1,296 @@ +import { requestUrl, type RequestUrlParam, type RequestUrlResponse } from "obsidian"; +import type { + ConnectResponse, + DeleteAck, + HealthResponse, + ManifestResponse, + NotePayload, + RenameAck, + RenameItem, + SearchSpace, + SyncAck, +} from "./types"; + +/** + * SurfSense backend client used by the Obsidian plugin. + * + * Mobile-safety contract (must hold for every transitive import): + * - Use Obsidian `requestUrl` only — no `fetch`, no `axios`, no + * `node:http`, no `node:https`. CORS is bypassed and mobile works. + * - No top-level `node:*` imports anywhere reachable from this file. + * - Hashing happens elsewhere via Web Crypto, not `node:crypto`. + * + * Auth + wire contract: + * - Every request carries `Authorization: Bearer ` only. No + * custom headers — the backend identifies the caller from the JWT + * and feature-detects the API via the `capabilities` array on + * `/health` and `/connect`. + * - 401 surfaces as `AuthError` so the orchestrator can show the + * "token expired, paste a fresh one" UX. + * - HealthResponse / ConnectResponse use index signatures so any + * additive backend field (e.g. new capabilities) parses without + * breaking the decoder. This mirrors `ConfigDict(extra='ignore')` + * on the server side. 
+ */ + +export class AuthError extends Error { + constructor(message: string) { + super(message); + this.name = "AuthError"; + } +} + +export class TransientError extends Error { + readonly status: number; + constructor(status: number, message: string) { + super(message); + this.name = "TransientError"; + this.status = status; + } +} + +export class PermanentError extends Error { + readonly status: number; + constructor(status: number, message: string) { + super(message); + this.name = "PermanentError"; + this.status = status; + } +} + +/** 404 `VAULT_NOT_REGISTERED` — `/connect` hasn't committed yet; retry after reconnect. */ +export class VaultNotRegisteredError extends TransientError { + constructor(message: string) { + super(404, message); + this.name = "VaultNotRegisteredError"; + } +} + +export interface ApiClientOptions { + getServerUrl: () => string; + getToken: () => string; + onAuthError?: () => void; +} + +const AUTH_BLOCK_MS = 60_000; + +export class SurfSenseApiClient { + private readonly opts: ApiClientOptions; + private authBlockedUntil = 0; + + constructor(opts: ApiClientOptions) { + this.opts = opts; + } + + updateOptions(partial: Partial): void { + Object.assign(this.opts, partial); + } + + resetAuthBlock(): void { + this.authBlockedUntil = 0; + } + + async health(): Promise { + return await this.request("GET", "/api/v1/obsidian/health"); + } + + async listSearchSpaces(): Promise { + const resp = await this.request( + "GET", + "/api/v1/searchspaces/" + ); + if (Array.isArray(resp)) return resp; + if (resp && Array.isArray((resp as { items?: SearchSpace[] }).items)) { + return (resp as { items: SearchSpace[] }).items; + } + return []; + } + + async verifyToken(): Promise<{ ok: true; health: HealthResponse }> { + // /health is gated by current_active_user, so a successful response + // transitively proves the token works. Cheaper than fetching a list. 
+ const health = await this.health(); + return { ok: true, health }; + } + + async connect(input: { + searchSpaceId: number; + vaultId: string; + vaultName: string; + vaultFingerprint: string; + }): Promise { + return await this.request( + "POST", + "/api/v1/obsidian/connect", + { + vault_id: input.vaultId, + vault_name: input.vaultName, + search_space_id: input.searchSpaceId, + vault_fingerprint: input.vaultFingerprint, + } + ); + } + + /** POST /sync — `failed[]` are paths whose `status === "error"` for retry. */ + async syncBatch(input: { + vaultId: string; + notes: NotePayload[]; + }): Promise<{ indexed: number; failed: string[] }> { + const resp = await this.request( + "POST", + "/api/v1/obsidian/sync", + { vault_id: input.vaultId, notes: input.notes } + ); + const failed = resp.items + .filter((it) => it.status === "error") + .map((it) => it.path); + return { indexed: resp.indexed, failed }; + } + + /** POST /rename — `"missing"` counts as success; only `"error"` is retried. */ + async renameBatch(input: { + vaultId: string; + renames: Pick[]; + }): Promise<{ + renamed: number; + failed: Array<{ oldPath: string; newPath: string }>; + }> { + const resp = await this.request( + "POST", + "/api/v1/obsidian/rename", + { + vault_id: input.vaultId, + renames: input.renames.map((r) => ({ + old_path: r.oldPath, + new_path: r.newPath, + })), + } + ); + const failed = resp.items + .filter((it) => it.status === "error") + .map((it) => ({ oldPath: it.old_path, newPath: it.new_path })); + return { renamed: resp.renamed, failed }; + } + + /** DELETE /notes — `"missing"` counts as success; only `"error"` is retried. 
*/ + async deleteBatch(input: { + vaultId: string; + paths: string[]; + }): Promise<{ deleted: number; failed: string[] }> { + const resp = await this.request( + "DELETE", + "/api/v1/obsidian/notes", + { vault_id: input.vaultId, paths: input.paths } + ); + const failed = resp.items + .filter((it) => it.status === "error") + .map((it) => it.path); + return { deleted: resp.deleted, failed }; + } + + async getManifest(vaultId: string): Promise { + return await this.request( + "GET", + `/api/v1/obsidian/manifest?vault_id=${encodeURIComponent(vaultId)}` + ); + } + + private async request( + method: RequestUrlParam["method"], + path: string, + body?: unknown + ): Promise { + const baseUrl = this.opts.getServerUrl().replace(/\/+$/, ""); + const token = this.opts.getToken(); + if (!token) { + throw new AuthError("Missing API token. Open plugin settings to paste one."); + } + if (Date.now() < this.authBlockedUntil) { + throw new AuthError("Token rejected. Paste a fresh one in settings."); + } + const headers: Record = { + Authorization: `Bearer ${token}`, + Accept: "application/json", + }; + if (body !== undefined) headers["Content-Type"] = "application/json"; + + let resp: RequestUrlResponse; + try { + resp = await requestUrl({ + url: `${baseUrl}${path}`, + method, + headers, + body: body === undefined ? 
undefined : JSON.stringify(body), + throw: false, + }); + } catch (err) { + throw new TransientError(0, `Network error: ${(err as Error).message}`); + } + + if (resp.status >= 200 && resp.status < 300) { + return parseJson(resp); + } + + const detail = extractDetail(resp); + + if (resp.status === 401) { + this.authBlockedUntil = Date.now() + AUTH_BLOCK_MS; + this.opts.onAuthError?.(); + throw new AuthError(detail || "Unauthorized"); + } + + if (resp.status >= 500 || resp.status === 429) { + throw new TransientError(resp.status, detail || `HTTP ${resp.status}`); + } + + if (resp.status === 404 && extractCode(resp) === "VAULT_NOT_REGISTERED") { + throw new VaultNotRegisteredError(detail || "Vault not registered yet"); + } + + throw new PermanentError(resp.status, detail || `HTTP ${resp.status}`); + } +} + +function parseJson(resp: RequestUrlResponse): T { + // Plugin endpoints always return JSON; non-JSON 2xx is usually a + // captive portal or CDN page — surface as transient so we back off. + const text = resp.text ?? ""; + try { + return JSON.parse(text) as T; + } catch { + throw new TransientError( + resp.status, + `Invalid JSON from server (got: ${text.slice(0, 80)})` + ); + } +} + +function safeJson(resp: RequestUrlResponse): Record { + try { + return resp.text ? (JSON.parse(resp.text) as Record) : {}; + } catch { + return {}; + } +} + +function extractDetail(resp: RequestUrlResponse): string { + const json = safeJson(resp); + if (typeof json.detail === "string") return json.detail; + if (typeof json.message === "string") return json.message; + const detailObj = json.detail; + if (detailObj && typeof detailObj === "object") { + const obj = detailObj as Record; + if (typeof obj.message === "string") return obj.message; + } + return resp.text?.slice(0, 200) ?? 
""; +} + +function extractCode(resp: RequestUrlResponse): string | undefined { + const json = safeJson(resp); + const detailObj = json.detail; + if (detailObj && typeof detailObj === "object") { + const code = (detailObj as Record).code; + if (typeof code === "string") return code; + } + return undefined; +} diff --git a/surfsense_obsidian/src/attachments-confirm-modal.ts b/surfsense_obsidian/src/attachments-confirm-modal.ts new file mode 100644 index 000000000..1a79fd2bd --- /dev/null +++ b/surfsense_obsidian/src/attachments-confirm-modal.ts @@ -0,0 +1,61 @@ +import { type App, Modal, Setting } from "obsidian"; + +/** + * Confirmation modal shown before enabling attachment sync. + * Attachment files can be large and increase sync latency/cost. + */ +export class AttachmentsConfirmModal extends Modal { + private resolver: ((confirmed: boolean) => void) | null = null; + + constructor(app: App) { + super(app); + } + + onOpen(): void { + this.setTitle("Enable attachment sync?"); + this.contentEl.empty(); + + new Setting(this.contentEl).setDesc( + "Syncing attachments (images & PDFs) can make indexing slower, especially on large vaults." + ); + new Setting(this.contentEl).setDesc( + "Syncing attachments can make indexing slower on large vaults. 
You can disable this anytime.", + ); + + new Setting(this.contentEl) + .addButton((btn) => + btn + .setButtonText("Cancel") + .onClick(() => this.resolveAndClose(false)), + ) + .addButton((btn) => + btn + .setButtonText("Enable") + .setCta() + .onClick(() => this.resolveAndClose(true)), + ); + } + + onClose(): void { + this.contentEl.empty(); + if (this.resolver) { + this.resolver(false); + this.resolver = null; + } + } + + waitForConfirmation(): Promise { + this.open(); + return new Promise((resolve) => { + this.resolver = resolve; + }); + } + + private resolveAndClose(confirmed: boolean): void { + if (this.resolver) { + this.resolver(confirmed); + this.resolver = null; + } + this.close(); + } +} diff --git a/surfsense_obsidian/src/excludes.ts b/surfsense_obsidian/src/excludes.ts new file mode 100644 index 000000000..1f47170b1 --- /dev/null +++ b/surfsense_obsidian/src/excludes.ts @@ -0,0 +1,94 @@ +/** + * Tiny glob matcher for exclude patterns. + * + * Supports `*` (any chars except `/`), `**` (any chars including `/`), and + * literal segments. Patterns without a slash are matched against any path + * segment (so `templates` excludes `templates/foo.md` and `notes/templates/x.md`). + * + * Intentionally not a full minimatch — Obsidian users overwhelmingly type + * folder names ("templates", ".trash") and the obvious wildcards. Avoiding + * the dependency keeps the bundle small and the mobile attack surface tiny. + */ + +const cache = new Map(); + +function compile(pattern: string): RegExp { + const cached = cache.get(pattern); + if (cached) return cached; + + let body = ""; + let i = 0; + while (i < pattern.length) { + const ch = pattern[i] ?? 
""; + if (ch === "*") { + if (pattern[i + 1] === "*") { + body += ".*"; + i += 2; + if (pattern[i] === "/") i += 1; + continue; + } + body += "[^/]*"; + i += 1; + continue; + } + if (".+^${}()|[]\\".includes(ch)) { + body += "\\" + ch; + i += 1; + continue; + } + body += ch; + i += 1; + } + + const anchored = pattern.includes("/") + ? `^${body}(/.*)?$` + : `(^|/)${body}(/.*)?$`; + const re = new RegExp(anchored); + cache.set(pattern, re); + return re; +} + +export function isExcluded(path: string, patterns: string[]): boolean { + if (!patterns.length) return false; + for (const raw of patterns) { + const trimmed = raw.trim(); + if (!trimmed || trimmed.startsWith("#")) continue; + if (compile(trimmed).test(path)) return true; + } + return false; +} + +export function parseExcludePatterns(raw: string): string[] { + return raw + .split(/\r?\n/) + .map((line) => line.trim()) + .filter((line) => line.length > 0 && !line.startsWith("#")); +} + +/** Normalize a folder path: strip leading/trailing slashes; "" or "/" means vault root. */ +export function normalizeFolder(folder: string): string { + return folder.replace(/^\/+|\/+$/g, ""); +} + +/** True if `path` lives inside `folder` (or `folder` is the vault root). */ +export function isInFolder(path: string, folder: string): boolean { + const f = normalizeFolder(folder); + if (f === "") return true; + return path === f || path.startsWith(`${f}/`); +} + +/** Exclude wins over include. Empty includeFolders means "include everything". 
*/ +export function isFolderFiltered( + path: string, + includeFolders: string[], + excludeFolders: string[], +): boolean { + for (const f of excludeFolders) { + if (isInFolder(path, f)) return true; + } + if (includeFolders.length === 0) return false; + for (const f of includeFolders) { + if (isInFolder(path, f)) return false; + } + return true; +} diff --git a/surfsense_obsidian/src/folder-suggest-modal.ts b/surfsense_obsidian/src/folder-suggest-modal.ts new file mode 100644 index 000000000..a037a620f --- /dev/null +++ b/surfsense_obsidian/src/folder-suggest-modal.ts @@ -0,0 +1,32 @@ +import { type App, FuzzySuggestModal, type TFolder } from "obsidian"; + +/** Folder picker built on Obsidian's stock {@link FuzzySuggestModal}. */ +export class FolderSuggestModal extends FuzzySuggestModal { + private readonly onPick: (path: string) => void; + private readonly excluded: Set; + + constructor(app: App, onPick: (path: string) => void, excluded: string[] = []) { + super(app); + this.onPick = onPick; + this.excluded = new Set(excluded.map((p) => p.replace(/^\/+|\/+$/g, ""))); + this.setPlaceholder("Type to filter folders…"); + } + + getItems(): TFolder[] { + return this.app.vault + .getAllFolders(true) + .filter((f) => !this.excluded.has(this.toPath(f))); + } + + getItemText(folder: TFolder): string { + return this.toPath(folder) || "/"; + } + + onChooseItem(folder: TFolder): void { + this.onPick(this.toPath(folder)); + } + + private toPath(folder: TFolder): string { + return folder.isRoot() ? 
"" : folder.path; + } +} diff --git a/surfsense_obsidian/src/main.ts b/surfsense_obsidian/src/main.ts new file mode 100644 index 000000000..1dea47b95 --- /dev/null +++ b/surfsense_obsidian/src/main.ts @@ -0,0 +1,292 @@ +import { Notice, Platform, Plugin } from "obsidian"; +import { SurfSenseApiClient } from "./api-client"; +import { PersistentQueue } from "./queue"; +import { SurfSenseSettingTab } from "./settings"; +import { StatusBar } from "./status-bar"; +import { StatusModal } from "./status-modal"; +import { SyncEngine } from "./sync-engine"; +import { + DEFAULT_SETTINGS, + type QueueItem, + type StatusState, + type SurfsensePluginSettings, +} from "./types"; +import { generateVaultUuid } from "./vault-identity"; + +/** SurfSense plugin entry point. */ +export default class SurfSensePlugin extends Plugin { + settings!: SurfsensePluginSettings; + api!: SurfSenseApiClient; + queue!: PersistentQueue; + engine!: SyncEngine; + private statusBar: StatusBar | null = null; + lastStatus: StatusState = { kind: "needs-setup", queueDepth: 0 }; + serverCapabilities: string[] = []; + private settingTab: SurfSenseSettingTab | null = null; + private statusListeners = new Set<() => void>(); + private reconcileTimerId: number | null = null; + private lastAuthToastAt = 0; + + async onload() { + await this.loadSettings(); + this.seedIdentity(); + await this.saveSettings(); + + this.api = new SurfSenseApiClient({ + getServerUrl: () => this.settings.serverUrl, + getToken: () => this.settings.apiToken, + onAuthError: () => this.notifyAuthError(), + }); + + this.queue = new PersistentQueue(this.settings.queue ?? 
[], { + persist: async (items) => { + this.settings.queue = items; + await this.saveData(this.settings); + }, + }); + + this.engine = new SyncEngine({ + app: this.app, + apiClient: this.api, + queue: this.queue, + getSettings: () => this.settings, + saveSettings: async (mut) => { + mut(this.settings); + await this.saveSettings(); + this.notifyStatusChange(); + }, + setStatus: (s) => { + this.lastStatus = s; + this.statusBar?.update(s); + this.notifyStatusChange(); + }, + onCapabilities: (caps) => { + this.serverCapabilities = [...caps]; + this.notifyStatusChange(); + }, + onReconcileBackoffChanged: () => { + this.restartReconcileTimer(); + }, + }); + + this.queue.setFlushHandler(() => { + if (!this.shouldAutoSync()) return; + void this.engine.flushQueue(); + }); + + this.settingTab = new SurfSenseSettingTab(this.app, this); + this.addSettingTab(this.settingTab); + + const statusHost = this.addStatusBarItem(); + this.statusBar = new StatusBar(statusHost, () => this.openStatusModal()); + this.statusBar.update(this.lastStatus); + + this.registerEvent( + this.app.vault.on("create", (file) => this.engine.onCreate(file)), + ); + this.registerEvent( + this.app.vault.on("modify", (file) => this.engine.onModify(file)), + ); + this.registerEvent( + this.app.vault.on("delete", (file) => this.engine.onDelete(file)), + ); + this.registerEvent( + this.app.vault.on("rename", (file, oldPath) => + this.engine.onRename(file, oldPath), + ), + ); + this.registerEvent( + this.app.metadataCache.on("changed", (file, data, cache) => + this.engine.onMetadataChanged(file, data, cache), + ), + ); + + this.addCommand({ + id: "resync-vault", + name: "Re-sync entire vault", + callback: async () => { + try { + await this.engine.maybeReconcile(true); + new Notice("Surfsense: re-sync started."); + } catch (err) { + new Notice(`Surfsense: re-sync failed — ${(err as Error).message}`); + } + }, + }); + + this.addCommand({ + id: "sync-current-note", + name: "Sync current note", + checkCallback: 
(checking) => { + const file = this.app.workspace.getActiveFile(); + if (!file || file.extension.toLowerCase() !== "md") return false; + if (checking) return true; + this.queue.enqueueUpsert(file.path); + void this.engine.flushQueue(); + return true; + }, + }); + + this.addCommand({ + id: "open-status", + name: "Open sync status", + callback: () => this.openStatusModal(), + }); + + this.addCommand({ + id: "open-settings", + name: "Open settings", + callback: () => { + // `app.setting` isn't in the d.ts; fall back silently if it moves. + type SettingHost = { + open?: () => void; + openTabById?: (id: string) => void; + }; + const setting = (this.app as unknown as { setting?: SettingHost }).setting; + if (setting?.open) setting.open(); + if (setting?.openTabById) setting.openTabById(this.manifest.id); + }, + }); + + const onNetChange = () => { + void this.engine.recoverConnectivityStatus(); + if (this.shouldAutoSync()) void this.engine.flushQueue(); + }; + this.registerDomEvent(window, "online", onNetChange); + const conn = (navigator as unknown as { connection?: NetworkConnection }).connection; + if (conn && typeof conn.addEventListener === "function") { + conn.addEventListener("change", onNetChange); + this.register(() => conn.removeEventListener?.("change", onNetChange)); + } + + // Wait for layout so the metadataCache is warm before reconcile. + this.app.workspace.onLayoutReady(() => { + void this.engine.start(); + this.restartReconcileTimer(); + }); + } + + onunload() { + this.queue?.cancelFlush(); + this.queue?.requestStop(); + } + + /** + * Obsidian fires this when another device rewrites our data.json. + * If the synced vault_id differs from ours, adopt it and + * re-handshake so the server routes us to the right row. 
+ */ + async onExternalSettingsChange(): Promise { + const previousVaultId = this.settings.vaultId; + const previousConnectorId = this.settings.connectorId; + await this.loadSettings(); + const changed = + this.settings.vaultId !== previousVaultId || + this.settings.connectorId !== previousConnectorId; + if (!changed) return; + this.engine?.refreshStatus(); + this.notifyStatusChange(); + if (this.settings.searchSpaceId !== null) { + void this.engine.ensureConnected(); + } + } + + get queueDepth(): number { + return this.queue?.size ?? 0; + } + + openStatusModal(): void { + new StatusModal(this.app, this).open(); + } + + restartReconcileTimer(): void { + if (this.reconcileTimerId !== null) { + window.clearInterval(this.reconcileTimerId); + this.reconcileTimerId = null; + } + const minutes = this.settings.syncIntervalMinutes ?? 10; + if (minutes <= 0) return; + const baseMs = minutes * 60 * 1000; + // Idle vaults back off (×2 → ×4 → ×8); resets on the first edit or non-empty reconcile. + const effectiveMs = this.engine?.getReconcileBackoffMs(baseMs) ?? baseMs; + const id = window.setInterval( + () => { + if (!this.shouldAutoSync()) return; + void this.engine.maybeReconcile(); + }, + effectiveMs, + ); + this.reconcileTimerId = id; + this.registerInterval(id); + } + + /** Gate for background network activity; per-edit flush + periodic reconcile both consult this. */ + shouldAutoSync(): boolean { + if (!this.settings.wifiOnly) return true; + if (!Platform.isMobileApp) return true; + // navigator.connection is supported on Android Capacitor; undefined on iOS. + // When unavailable, behave permissively so iOS users aren't blocked outright. 
+ const conn = (navigator as unknown as { connection?: NetworkConnection }).connection; + if (!conn || typeof conn.type !== "string") return true; + return conn.type === "wifi" || conn.type === "ethernet"; + } + + onStatusChange(listener: () => void): void { + this.statusListeners.add(listener); + } + + offStatusChange(listener: () => void): void { + this.statusListeners.delete(listener); + } + + private notifyStatusChange(): void { + for (const fn of this.statusListeners) fn(); + } + + private notifyAuthError(): void { + this.engine?.reportAuthError(); + const now = Date.now(); + if (now - this.lastAuthToastAt < 10_000) return; + this.lastAuthToastAt = now; + new Notice("Surfsense: API token expired or invalid. Paste a fresh token in settings.", 8000); + } + + async loadSettings() { + const data = (await this.loadData()) as Partial | null; + this.settings = { + ...DEFAULT_SETTINGS, + ...(data ?? {}), + queue: (data?.queue ?? []).map((i: QueueItem) => ({ ...i })), + tombstones: { ...(data?.tombstones ?? {}) }, + includeFolders: [...(data?.includeFolders ?? [])], + excludeFolders: [...(data?.excludeFolders ?? [])], + excludePatterns: data?.excludePatterns?.length + ? [...data.excludePatterns] + : [...DEFAULT_SETTINGS.excludePatterns], + }; + } + + async saveSettings() { + await this.saveData(this.settings); + this.engine?.refreshStatus(); + } + + /** + * Mint a tentative vault_id locally on first run. The server's + * fingerprint dedup (see /obsidian/connect) may overwrite it on the + * first /connect when another device of the same vault has already + * registered; we always trust the server's response. + */ + private seedIdentity(): void { + if (!this.settings.vaultId) { + this.settings.vaultId = generateVaultUuid(); + } + } +} + +/** Subset of the Network Information API used to detect WiFi vs cellular on Android. 
*/ +interface NetworkConnection { + type?: string; + addEventListener?: (event: string, handler: () => void) => void; + removeEventListener?: (event: string, handler: () => void) => void; +} diff --git a/surfsense_obsidian/src/payload.ts b/surfsense_obsidian/src/payload.ts new file mode 100644 index 000000000..3294d62df --- /dev/null +++ b/surfsense_obsidian/src/payload.ts @@ -0,0 +1,163 @@ +import { + type App, + type CachedMetadata, + type FrontMatterCache, + type HeadingCache, + type ReferenceCache, + type TFile, +} from "obsidian"; +import type { HeadingRef, NotePayload } from "./types"; + +/** + * Build a NotePayload from an Obsidian TFile. + * + * Mobile-safety contract: + * - No top-level `node:fs` / `node:path` / `node:crypto` imports. + * File IO uses `vault.cachedRead` (works on the mobile WASM adapter). + * Hashing uses Web Crypto `subtle.digest`. + * - Caller MUST first wait for `metadataCache.changed` before calling + * this for a `.md` file, otherwise `frontmatter`/`tags`/`headings` + * can lag the actual file contents. + */ +export async function buildNotePayload( + app: App, + file: TFile, + vaultId: string, +): Promise { + const content = await app.vault.cachedRead(file); + const cache: CachedMetadata | null = app.metadataCache.getFileCache(file); + + const frontmatter = normalizeFrontmatter(cache?.frontmatter); + const tags = collectTags(cache); + const headings = collectHeadings(cache?.headings ?? 
[]); + const aliases = collectAliases(frontmatter); + const { embeds, internalLinks } = collectLinks(cache); + const { resolved, unresolved } = resolveLinkTargets( + app, + file.path, + internalLinks, + ); + const contentHash = await computeContentHash(content); + + return { + vault_id: vaultId, + path: file.path, + name: file.basename, + extension: file.extension, + content, + frontmatter, + tags, + headings, + resolved_links: resolved, + unresolved_links: unresolved, + embeds, + aliases, + content_hash: contentHash, + size: file.stat.size, + mtime: file.stat.mtime, + ctime: file.stat.ctime, + }; +} + +export async function computeContentHash(content: string): Promise { + const bytes = new TextEncoder().encode(content); + const digest = await crypto.subtle.digest("SHA-256", bytes); + return bufferToHex(digest); +} + +function bufferToHex(buf: ArrayBuffer): string { + const view = new Uint8Array(buf); + let hex = ""; + for (let i = 0; i < view.length; i++) { + hex += (view[i] ?? 0).toString(16).padStart(2, "0"); + } + return hex; +} + +function normalizeFrontmatter( + fm: FrontMatterCache | undefined, +): Record { + if (!fm) return {}; + // FrontMatterCache extends a plain object; strip the `position` key + // the cache adds so the wire payload stays clean. + const rest: Record = { ...(fm as Record) }; + delete rest.position; + return rest; +} + +function collectTags(cache: CachedMetadata | null): string[] { + const out = new Set(); + for (const t of cache?.tags ?? []) { + const tag = t.tag.startsWith("#") ? t.tag.slice(1) : t.tag; + if (tag) out.add(tag); + } + const fmTags: unknown = + cache?.frontmatter?.tags ?? 
cache?.frontmatter?.tag; + if (Array.isArray(fmTags)) { + for (const t of fmTags) { + if (typeof t === "string" && t) out.add(t.replace(/^#/, "")); + } + } else if (typeof fmTags === "string" && fmTags) { + for (const t of fmTags.split(/[\s,]+/)) { + if (t) out.add(t.replace(/^#/, "")); + } + } + return [...out]; +} + +function collectHeadings(items: HeadingCache[]): HeadingRef[] { + return items.map((h) => ({ heading: h.heading, level: h.level })); +} + +function collectAliases(frontmatter: Record): string[] { + const raw = frontmatter.aliases ?? frontmatter.alias; + if (Array.isArray(raw)) { + return raw.filter((x): x is string => typeof x === "string" && x.length > 0); + } + if (typeof raw === "string" && raw) return [raw]; + return []; +} + +function collectLinks(cache: CachedMetadata | null): { + embeds: string[]; + internalLinks: ReferenceCache[]; +} { + const linkRefs: ReferenceCache[] = [ + ...((cache?.links) ?? []), + ...((cache?.embeds as ReferenceCache[] | undefined) ?? []), + ]; + const embeds = ((cache?.embeds as ReferenceCache[] | undefined) ?? 
[]).map( + (e) => e.link, + ); + return { embeds, internalLinks: linkRefs }; +} + +function resolveLinkTargets( + app: App, + sourcePath: string, + links: ReferenceCache[], +): { resolved: string[]; unresolved: string[] } { + const resolved = new Set(); + const unresolved = new Set(); + for (const link of links) { + const target = app.metadataCache.getFirstLinkpathDest( + stripSubpath(link.link), + sourcePath, + ); + if (target) { + resolved.add(target.path); + } else { + unresolved.add(link.link); + } + } + return { resolved: [...resolved], unresolved: [...unresolved] }; +} + +function stripSubpath(link: string): string { + const hashIdx = link.indexOf("#"); + const pipeIdx = link.indexOf("|"); + let end = link.length; + if (hashIdx !== -1) end = Math.min(end, hashIdx); + if (pipeIdx !== -1) end = Math.min(end, pipeIdx); + return link.slice(0, end); +} diff --git a/surfsense_obsidian/src/queue.ts b/surfsense_obsidian/src/queue.ts new file mode 100644 index 000000000..0f7082456 --- /dev/null +++ b/surfsense_obsidian/src/queue.ts @@ -0,0 +1,228 @@ +import { type Debouncer, debounce } from "obsidian"; +import type { QueueItem } from "./types"; + +/** + * Persistent upload queue. + * + * Mobile-safety contract: + * - Persistence is delegated to a save callback (which the plugin wires + * to `plugin.saveData()`); never `node:fs`. Items also live in the + * plugin's settings JSON so a crash mid-flight loses nothing. + * - No top-level `node:*` imports. + * + * Behavioural contract: + * - Per-file debounce: enqueueing the same path coalesces, the latest + * `enqueuedAt` wins so we don't ship a stale snapshot. + * - `delete` for a path drops any pending `upsert` for that path + * (otherwise we'd resurrect a note the user just deleted). + * - `rename` is a first-class op so the backend can update + * `unique_identifier_hash` instead of "delete + create" (which would + * blow away document versions, citations, and the document_id used + * in chat history). 
+ * - Drain takes a worker, returns once the worker either succeeds for + * every batch or hits a stop signal (transient error, mid-drain + * stop request). + */ + +export interface QueueWorker { + processBatch(batch: QueueItem[]): Promise; +} + +export interface BatchResult { + /** Items that succeeded; they will be ack'd off the queue. */ + acked: QueueItem[]; + /** Items that should be retried; their `attempt` is bumped. */ + retry: QueueItem[]; + /** Items that failed permanently (4xx). They get dropped. */ + dropped: QueueItem[]; + /** If true, the drain loop stops (e.g. transient/network error). */ + stop: boolean; + /** Optional retry-after for transient errors (ms). */ + backoffMs?: number; +} + +export interface PersistentQueueOptions { + debounceMs?: number; + batchSize?: number; + maxAttempts?: number; + persist: (items: QueueItem[]) => Promise | void; + now?: () => number; +} + +const DEFAULTS = { + debounceMs: 2000, + batchSize: 15, + maxAttempts: 8, +}; + +export class PersistentQueue { + private items: QueueItem[]; + private readonly opts: Required< + Omit + > & { + persist: PersistentQueueOptions["persist"]; + now: () => number; + }; + private draining = false; + private stopRequested = false; + private debouncedFlush: Debouncer<[], void> | null = null; + + constructor(initial: QueueItem[], opts: PersistentQueueOptions) { + this.items = [...initial]; + this.opts = { + debounceMs: opts.debounceMs ?? DEFAULTS.debounceMs, + batchSize: opts.batchSize ?? DEFAULTS.batchSize, + maxAttempts: opts.maxAttempts ?? DEFAULTS.maxAttempts, + persist: opts.persist, + now: opts.now ?? (() => Date.now()), + }; + } + + get size(): number { + return this.items.length; + } + + snapshot(): QueueItem[] { + return this.items.map((i) => ({ ...i })); + } + + setFlushHandler(handler: () => void): void { + // resetTimer: true → each enqueue postpones the flush. 
+ this.debouncedFlush = debounce(handler, this.opts.debounceMs, true); + } + + enqueueUpsert(path: string): void { + const now = this.opts.now(); + this.items = this.items.filter( + (i) => !(i.op === "upsert" && i.path === path), + ); + this.items.push({ op: "upsert", path, enqueuedAt: now, attempt: 0 }); + void this.persist(); + this.scheduleFlush(); + } + + enqueueDelete(path: string): void { + const now = this.opts.now(); + // A delete supersedes any pending upsert for the same path. + this.items = this.items.filter( + (i) => + !( + (i.op === "upsert" && i.path === path) || + (i.op === "delete" && i.path === path) + ), + ); + this.items.push({ op: "delete", path, enqueuedAt: now, attempt: 0 }); + void this.persist(); + this.scheduleFlush(); + } + + enqueueRename(oldPath: string, newPath: string): void { + const now = this.opts.now(); + this.items = this.items.filter( + (i) => + !( + (i.op === "upsert" && (i.path === oldPath || i.path === newPath)) || + (i.op === "rename" && i.oldPath === oldPath && i.newPath === newPath) + ), + ); + this.items.push({ + op: "rename", + oldPath, + newPath, + enqueuedAt: now, + attempt: 0, + }); + // Pair with an upsert — content may have changed alongside the rename. 
+ this.items.push({ op: "upsert", path: newPath, enqueuedAt: now, attempt: 0 }); + void this.persist(); + this.scheduleFlush(); + } + + requestStop(): void { + this.stopRequested = true; + } + + cancelFlush(): void { + this.debouncedFlush?.cancel(); + } + + private scheduleFlush(): void { + this.debouncedFlush?.(); + } + + async drain(worker: QueueWorker): Promise { + if (this.draining) return { batches: 0, acked: 0, dropped: 0, stopped: false }; + this.draining = true; + this.stopRequested = false; + const summary: DrainSummary = { + batches: 0, + acked: 0, + dropped: 0, + stopped: false, + }; + try { + while (this.items.length > 0 && !this.stopRequested) { + const batch = this.takeBatch(); + summary.batches += 1; + + const result = await worker.processBatch(batch); + summary.acked += result.acked.length; + summary.dropped += result.dropped.length; + + const ackKeys = new Set(result.acked.map(itemKey)); + const dropKeys = new Set(result.dropped.map(itemKey)); + const retryKeys = new Set(result.retry.map(itemKey)); + + // Items the worker didn't classify get retried — never silently dropped. 
+ const unhandled = batch.filter( + (b) => + !ackKeys.has(itemKey(b)) && + !dropKeys.has(itemKey(b)) && + !retryKeys.has(itemKey(b)), + ); + const retry = [...result.retry, ...unhandled].map((i) => ({ + ...i, + attempt: i.attempt + 1, + })); + const survivors = retry.filter((i) => i.attempt <= this.opts.maxAttempts); + summary.dropped += retry.length - survivors.length; + + this.items = [...survivors, ...this.items]; + await this.persist(); + + if (result.stop) { + summary.stopped = true; + if (result.backoffMs) summary.backoffMs = result.backoffMs; + break; + } + } + if (this.stopRequested) summary.stopped = true; + return summary; + } finally { + this.draining = false; + } + } + + private takeBatch(): QueueItem[] { + const head = this.items.slice(0, this.opts.batchSize); + this.items = this.items.slice(this.opts.batchSize); + return head; + } + + private async persist(): Promise { + await this.opts.persist(this.snapshot()); + } +} + +export interface DrainSummary { + batches: number; + acked: number; + dropped: number; + stopped: boolean; + backoffMs?: number; +} + +export function itemKey(i: QueueItem): string { + if (i.op === "rename") return `rename:${i.oldPath}=>${i.newPath}`; + return `${i.op}:${i.path}`; +} diff --git a/surfsense_obsidian/src/settings.ts b/surfsense_obsidian/src/settings.ts new file mode 100644 index 000000000..6a01f2fd1 --- /dev/null +++ b/surfsense_obsidian/src/settings.ts @@ -0,0 +1,389 @@ +import { + type App, + type ButtonComponent, + Notice, + Platform, + PluginSettingTab, + Setting, + setIcon, +} from "obsidian"; +import { AuthError } from "./api-client"; +import { AttachmentsConfirmModal } from "./attachments-confirm-modal"; +import { normalizeFolder, parseExcludePatterns } from "./excludes"; +import { FolderSuggestModal } from "./folder-suggest-modal"; +import type SurfSensePlugin from "./main"; +import { STATUS_VISUALS } from "./status-visuals"; +import type { SearchSpace } from "./types"; + +/** Plugin settings tab. 
*/ + +export class SurfSenseSettingTab extends PluginSettingTab { + private readonly plugin: SurfSensePlugin; + private searchSpaces: SearchSpace[] = []; + private loadingSpaces = false; + private connectionIndicator: HTMLElement | null = null; + private readonly onStatusChange = (): void => this.updateConnectionIndicator(); + + constructor(app: App, plugin: SurfSensePlugin) { + super(app, plugin); + this.plugin = plugin; + } + + display(): void { + const { containerEl } = this; + containerEl.empty(); + this.plugin.onStatusChange(this.onStatusChange); + + const settings = this.plugin.settings; + + this.renderConnectionHeading(containerEl); + + new Setting(containerEl) + .setName("Server URL") + .setDesc( + "https://surfsense.com for SurfSense Cloud, or your self-hosted URL.", + ) + .addText((text) => + text + .setPlaceholder("https://surfsense.com") + .setValue(settings.serverUrl) + .onChange(async (value) => { + const next = value.trim(); + const previous = this.plugin.settings.serverUrl; + if (previous !== "" && next !== previous) { + this.plugin.settings.searchSpaceId = null; + this.plugin.settings.connectorId = null; + } + this.plugin.settings.serverUrl = next; + await this.plugin.saveSettings(); + }), + ); + + let verifyButton: ButtonComponent | null = null; + const updateVerifyDisabled = (): void => { + verifyButton?.setDisabled(this.plugin.settings.apiToken.trim().length === 0); + }; + + new Setting(containerEl) + .setName("API token") + .setDesc( + "Paste your Surfsense API token (expires after 24 hours; re-paste when you see an auth error).", + ) + .addText((text) => { + text.inputEl.type = "password"; + text.inputEl.autocomplete = "off"; + text.inputEl.spellcheck = false; + text + .setPlaceholder("Paste token") + .setValue(settings.apiToken) + .onChange(async (value) => { + const next = value.trim(); + const previous = this.plugin.settings.apiToken; + if (previous !== "" && next !== previous) { + this.plugin.settings.searchSpaceId = null; + 
this.plugin.settings.connectorId = null; + } + this.plugin.settings.apiToken = next; + updateVerifyDisabled(); + await this.plugin.saveSettings(); + this.plugin.api.resetAuthBlock(); + }); + }) + .addButton((btn) => { + verifyButton = btn; + updateVerifyDisabled(); + btn.setButtonText("Verify").setCta().onClick(async () => { + if (this.plugin.settings.apiToken.trim().length === 0) { + new Notice("Surfsense: paste an API token before verifying."); + return; + } + btn.setDisabled(true); + try { + await this.plugin.api.verifyToken(); + new Notice("Surfsense: token verified."); + this.plugin.engine.refreshStatus({ force: true }); + await this.refreshSearchSpaces(); + this.display(); + } catch (err) { + this.handleApiError(err); + } finally { + updateVerifyDisabled(); + } + }); + }); + + new Setting(containerEl) + .setName("Search space") + .setDesc( + "Which Surfsense search space this vault syncs into. Reload after changing your token.", + ) + .addDropdown((drop) => { + drop.addOption("", this.loadingSpaces ? "Loading…" : "Select a search space"); + for (const space of this.searchSpaces) { + drop.addOption(String(space.id), space.name); + } + if (settings.searchSpaceId !== null) { + drop.setValue(String(settings.searchSpaceId)); + } + drop.onChange(async (value) => { + this.plugin.settings.searchSpaceId = value ? 
Number(value) : null; + this.plugin.settings.connectorId = null; + await this.plugin.saveSettings(); + if (this.plugin.settings.searchSpaceId !== null) { + try { + await this.plugin.engine.ensureConnected(); + await this.plugin.engine.maybeReconcile(true); + new Notice("Surfsense: vault connected."); + this.display(); + } catch (err) { + this.handleApiError(err); + } + } + }); + }) + .addExtraButton((btn) => + btn + .setIcon("refresh-ccw") + .setTooltip("Reload search spaces") + .onClick(async () => { + await this.refreshSearchSpaces(); + this.display(); + }), + ); + + new Setting(containerEl).setName("Vault").setHeading(); + + new Setting(containerEl) + .setName("Sync interval") + .setDesc( + "How often to check for changes made outside Obsidian.", + ) + .addDropdown((drop) => { + const options: Array<[number, string]> = [ + [0, "Off"], + [5, "5 minutes"], + [10, "10 minutes"], + [15, "15 minutes"], + [30, "30 minutes"], + [60, "60 minutes"], + [120, "2 hours"], + [360, "6 hours"], + [720, "12 hours"], + [1440, "24 hours"], + ]; + for (const [value, label] of options) { + drop.addOption(String(value), label); + } + drop.setValue(String(settings.syncIntervalMinutes)); + drop.onChange(async (value) => { + this.plugin.settings.syncIntervalMinutes = Number(value); + await this.plugin.saveSettings(); + this.plugin.restartReconcileTimer(); + }); + }); + + this.renderFolderList( + containerEl, + "Include folders", + "Folders to sync (leave empty to sync entire vault).", + settings.includeFolders, + (next) => { + this.plugin.settings.includeFolders = next; + }, + ); + + this.renderFolderList( + containerEl, + "Exclude folders", + "Folders to exclude from sync (takes precedence over includes).", + settings.excludeFolders, + (next) => { + this.plugin.settings.excludeFolders = next; + }, + ); + + new Setting(containerEl) + .setName("Advanced exclude patterns") + .setDesc( + "Glob fallback for power users. One pattern per line, supports * and **. 
Lines starting with # are comments. Applied on top of the folder lists above.", + ) + .addTextArea((area) => { + area.inputEl.rows = 4; + area + .setPlaceholder(".trash\n_attachments\ntemplates/**") + .setValue(settings.excludePatterns.join("\n")) + .onChange(async (value) => { + this.plugin.settings.excludePatterns = parseExcludePatterns(value); + await this.plugin.saveSettings(); + }); + }); + + new Setting(containerEl) + .setName("Include attachments") + .setDesc( + "Also sync non-Markdown files such as images and PDFs. Other file types are skipped.", + ) + .addToggle((toggle) => + toggle + .setValue(settings.includeAttachments) + .onChange(async (value) => { + const isEnabling = + value && !this.plugin.settings.includeAttachments; + if (isEnabling) { + const confirmed = await new AttachmentsConfirmModal( + this.app, + ).waitForConfirmation(); + if (!confirmed) { + this.display(); + return; + } + } + this.plugin.settings.includeAttachments = value; + await this.plugin.saveSettings(); + }), + ); + + if (Platform.isAndroidApp) { + new Setting(containerEl) + .setName("Sync only on WiFi") + .setDesc("Pause automatic syncing on cellular.") + .addToggle((toggle) => + toggle + .setValue(settings.wifiOnly) + .onChange(async (value) => { + this.plugin.settings.wifiOnly = value; + await this.plugin.saveSettings(); + }), + ); + } + + new Setting(containerEl) + .setName("Force sync") + .setDesc("Manually re-index the entire vault now.") + .addButton((btn) => + btn.setButtonText("Update").onClick(async () => { + btn.setDisabled(true); + try { + await this.plugin.engine.maybeReconcile(true); + new Notice("Surfsense: re-sync requested."); + } catch (err) { + this.handleApiError(err); + } finally { + btn.setDisabled(false); + } + }), + ); + + new Setting(containerEl) + .addButton((btn) => + btn + .setButtonText("View sync status") + .setCta() + .onClick(() => this.plugin.openStatusModal()), + ) + .addButton((btn) => + btn.setButtonText("Open releases").onClick(() => { + 
window.open( + "https://github.com/MODSetter/SurfSense/releases?q=obsidian", + "_blank", + ); + }), + ); + } + + hide(): void { + this.plugin.offStatusChange(this.onStatusChange); + this.connectionIndicator = null; + } + + private renderConnectionHeading(containerEl: HTMLElement): void { + const heading = new Setting(containerEl).setName("Connection").setHeading(); + heading.nameEl.addClass("surfsense-connection-heading"); + this.connectionIndicator = heading.nameEl.createSpan({ + cls: "surfsense-connection-indicator", + }); + this.updateConnectionIndicator(); + } + + private updateConnectionIndicator(): void { + const indicator = this.connectionIndicator; + if (!indicator) return; + const visual = STATUS_VISUALS[this.plugin.lastStatus.kind]; + indicator.empty(); + indicator.removeClass("surfsense-connection-indicator--err"); + if (visual.isError) { + indicator.addClass("surfsense-connection-indicator--err"); + } + setIcon(indicator, visual.icon); + indicator.setAttr("aria-label", visual.label); + indicator.setAttr("title", visual.label); + } + + private async refreshSearchSpaces(): Promise { + this.loadingSpaces = true; + try { + this.searchSpaces = await this.plugin.api.listSearchSpaces(); + } catch (err) { + this.handleApiError(err); + this.searchSpaces = []; + } finally { + this.loadingSpaces = false; + } + } + + private renderFolderList( + containerEl: HTMLElement, + title: string, + desc: string, + current: string[], + write: (next: string[]) => void, + ): void { + const setting = new Setting(containerEl).setName(title).setDesc(desc); + + const persist = async (next: string[]): Promise => { + const dedup = Array.from(new Set(next.map(normalizeFolder))); + write(dedup); + await this.plugin.saveSettings(); + this.display(); + }; + + setting.addButton((btn) => + btn + .setButtonText("Add folder") + .setCta() + .onClick(() => { + new FolderSuggestModal( + this.app, + (picked) => { + void persist([...current, picked]); + }, + current, + ).open(); + }), + ); + + 
for (const folder of current) { + new Setting(containerEl).setName(folder || "/").addExtraButton((btn) => + btn + .setIcon("cross") + .setTooltip("Remove") + .onClick(() => { + void persist(current.filter((f) => f !== folder)); + }), + ); + } + } + + private handleApiError(err: unknown): void { + if (err instanceof AuthError) { + if (err.message.startsWith("Missing API token")) { + new Notice("Surfsense: paste an API token before verifying."); + } + return; + } + this.plugin.engine.reportError(err); + new Notice( + `SurfSense: request failed — ${(err as Error).message ?? "unknown error"}`, + ); + } +} diff --git a/surfsense_obsidian/src/status-bar.ts b/surfsense_obsidian/src/status-bar.ts new file mode 100644 index 000000000..30abea50c --- /dev/null +++ b/surfsense_obsidian/src/status-bar.ts @@ -0,0 +1,46 @@ +import { setIcon } from "obsidian"; +import { STATUS_VISUALS } from "./status-visuals"; +import type { StatusState } from "./types"; + +/** + * Tiny status-bar adornment. + * + * Plain DOM (no HTML strings, no CSS-in-JS) so it stays cheap on mobile + * and Obsidian's lint doesn't complain about innerHTML. 
+ */ + +export class StatusBar { + private readonly el: HTMLElement; + private readonly icon: HTMLElement; + private readonly text: HTMLElement; + + constructor(host: HTMLElement, onClick?: () => void) { + this.el = host; + this.el.addClass("surfsense-status"); + this.icon = this.el.createSpan({ cls: "surfsense-status__icon" }); + this.text = this.el.createSpan({ cls: "surfsense-status__text" }); + if (onClick) { + this.el.addClass("surfsense-status--clickable"); + this.el.addEventListener("click", onClick); + } + this.update({ kind: "idle", queueDepth: 0 }); + } + + update(state: StatusState): void { + const visual = STATUS_VISUALS[state.kind]; + this.el.removeClass("surfsense-status--err"); + if (visual.isError) this.el.addClass("surfsense-status--err"); + setIcon(this.icon, visual.icon); + + let label = `SurfSense: ${visual.label}`; + if (state.queueDepth > 0 && state.kind !== "idle") { + label += ` (${state.queueDepth})`; + } + this.text.setText(label); + this.el.setAttr( + "aria-label", + state.detail ? `${label} — ${state.detail}` : label, + ); + this.el.setAttr("title", state.detail ?? label); + } +} diff --git a/surfsense_obsidian/src/status-modal.ts b/surfsense_obsidian/src/status-modal.ts new file mode 100644 index 000000000..e05b3a5bc --- /dev/null +++ b/surfsense_obsidian/src/status-modal.ts @@ -0,0 +1,77 @@ +import { type App, Modal, Notice, Setting } from "obsidian"; +import type SurfSensePlugin from "./main"; +import { STATUS_VISUALS } from "./status-visuals"; + +/** Live status panel reachable from the status bar / command palette. 
*/ +export class StatusModal extends Modal { + private readonly plugin: SurfSensePlugin; + private readonly onChange = (): void => this.render(); + + constructor(app: App, plugin: SurfSensePlugin) { + super(app); + this.plugin = plugin; + } + + onOpen(): void { + this.setTitle("Surfsense status"); + this.plugin.onStatusChange(this.onChange); + this.render(); + } + + onClose(): void { + this.plugin.offStatusChange(this.onChange); + this.contentEl.empty(); + } + + private render(): void { + const { contentEl, plugin } = this; + contentEl.empty(); + const s = plugin.settings; + + const rows: Array<[string, string]> = [ + ["Status", STATUS_VISUALS[plugin.lastStatus.kind].label], + [ + "Last sync", + s.lastSyncAt ? new Date(s.lastSyncAt).toLocaleString() : "—", + ], + [ + "Last reconcile", + s.lastReconcileAt + ? new Date(s.lastReconcileAt).toLocaleString() + : "—", + ], + ["Files synced", String(s.filesSynced ?? 0)], + ["Queue depth", String(plugin.queueDepth)], + [ + "Capabilities", + plugin.serverCapabilities.length + ? 
plugin.serverCapabilities.join(", ") + : "(not yet handshaken)", + ], + ]; + for (const [label, value] of rows) { + new Setting(contentEl).setName(label).setDesc(value); + } + + new Setting(contentEl) + .addButton((btn) => + btn + .setButtonText("Re-sync entire vault") + .setCta() + .onClick(async () => { + btn.setDisabled(true); + try { + await plugin.engine.maybeReconcile(true); + new Notice("Surfsense: re-sync requested."); + } catch (err) { + new Notice( + `Surfsense: re-sync failed — ${(err as Error).message}`, + ); + } finally { + btn.setDisabled(false); + } + }), + ) + .addButton((btn) => btn.setButtonText("Close").onClick(() => this.close())); + } +} diff --git a/surfsense_obsidian/src/status-visuals.ts b/surfsense_obsidian/src/status-visuals.ts new file mode 100644 index 000000000..96a3c8f34 --- /dev/null +++ b/surfsense_obsidian/src/status-visuals.ts @@ -0,0 +1,18 @@ +import type { StatusKind } from "./types"; + +/** Shared by the status bar and the settings "Connection" heading. 
*/ +export interface StatusVisual { + icon: string; + label: string; + isError: boolean; +} + +export const STATUS_VISUALS: Record = { + idle: { icon: "check-circle", label: "Synced", isError: false }, + syncing: { icon: "refresh-ccw", label: "Syncing", isError: false }, + queued: { icon: "clock", label: "Queued", isError: false }, + "needs-setup": { icon: "cloud-off", label: "Setup required", isError: false }, + offline: { icon: "wifi-off", label: "Offline", isError: false }, + "auth-error": { icon: "alert-circle", label: "Reauthenticate", isError: true }, + error: { icon: "alert-circle", label: "Error", isError: true }, +}; diff --git a/surfsense_obsidian/src/sync-engine.ts b/surfsense_obsidian/src/sync-engine.ts new file mode 100644 index 000000000..80594dd9e --- /dev/null +++ b/surfsense_obsidian/src/sync-engine.ts @@ -0,0 +1,751 @@ +import { + type App, + type CachedMetadata, + type Debouncer, + Notice, + type TAbstractFile, + TFile, + debounce, +} from "obsidian"; +import { + AuthError, + PermanentError, + type SurfSenseApiClient, + TransientError, + VaultNotRegisteredError, +} from "./api-client"; +import { isExcluded, isFolderFiltered } from "./excludes"; +import { buildNotePayload } from "./payload"; +import { type BatchResult, PersistentQueue } from "./queue"; +import type { + HealthResponse, + ManifestEntry, + NotePayload, + QueueItem, + StatusKind, + StatusState, +} from "./types"; +import { computeVaultFingerprint } from "./vault-identity"; + +/** + * Reconciles vault state with the server. + * Start order: connect (or /health) → drain queue → reconcile → subscribe events. 
+ */ + +export interface SyncEngineDeps { + app: App; + apiClient: SurfSenseApiClient; + queue: PersistentQueue; + getSettings: () => SyncEngineSettings; + saveSettings: (mut: (s: SyncEngineSettings) => void) => Promise; + setStatus: (s: StatusState) => void; + onCapabilities: (caps: string[]) => void; + /** Fired when the adaptive backoff multiplier may have changed; main.ts uses it to reschedule. */ + onReconcileBackoffChanged?: () => void; +} + +export interface SyncEngineSettings { + vaultId: string; + apiToken: string; + connectorId: number | null; + searchSpaceId: number | null; + includeFolders: string[]; + excludeFolders: string[]; + excludePatterns: string[]; + includeAttachments: boolean; + lastReconcileAt: number | null; + lastSyncAt: number | null; + filesSynced: number; + tombstones: Record; +} + +export const RECONCILE_MIN_INTERVAL_MS = 5 * 60 * 1000; +const TOMBSTONE_TTL_MS = 24 * 60 * 60 * 1000; // 1 day +const PENDING_DEBOUNCE_MS = 1500; + +export class SyncEngine { + private readonly deps: SyncEngineDeps; + private capabilities: string[] = []; + private pendingMdEdits = new Map>(); + /** Consecutive reconciles that found no work; powers the adaptive interval. */ + private idleReconcileStreak = 0; + /** 2^streak is capped at this value (e.g. 8 → max ×8 backoff). */ + private readonly maxBackoffMultiplier = 8; + private lastAppliedKind: StatusKind = "needs-setup"; + + constructor(deps: SyncEngineDeps) { + this.deps = deps; + } + + /** Returns the next-tick interval given the user's base, scaled by the idle streak. */ + getReconcileBackoffMs(baseMs: number): number { + const multiplier = Math.min(2 ** this.idleReconcileStreak, this.maxBackoffMultiplier); + return baseMs * multiplier; + } + + getCapabilities(): readonly string[] { + return this.capabilities; + } + + supports(capability: string): boolean { + return this.capabilities.includes(capability); + } + + /** Run the onload sequence described in this file's docstring. 
*/ + async start(): Promise { + this.setStatus("syncing", "Connecting to SurfSense…"); + + const settings = this.deps.getSettings(); + if (!settings.searchSpaceId) { + // No target yet — /health still surfaces auth/network errors. + try { + const health = await this.deps.apiClient.health(); + this.applyHealth(health); + } catch (err) { + this.handleStartupError(err); + return; + } + this.setStatus("idle"); + return; + } + + // Re-announce so the backend sees the latest vault_name + last_connect_at. + // flushQueue gates on connectorId, so a failed connect leaves the queue intact. + await this.ensureConnected(); + + await this.flushQueue(); + await this.maybeReconcile(); + this.setStatus(this.queueStatusKind(), undefined); + } + + /** + * (Re)register the vault. Adopts server's `vault_id` in case fingerprint + * dedup routed us to an existing row from another device. + */ + async ensureConnected(): Promise { + const settings = this.deps.getSettings(); + if (!settings.searchSpaceId) { + this.setStatus("idle"); + return false; + } + this.setStatus("syncing", "Connecting to SurfSense"); + try { + const fingerprint = await computeVaultFingerprint(this.deps.app); + const resp = await this.deps.apiClient.connect({ + searchSpaceId: settings.searchSpaceId, + vaultId: settings.vaultId, + vaultName: this.deps.app.vault.getName(), + vaultFingerprint: fingerprint, + }); + this.applyHealth(resp); + await this.deps.saveSettings((s) => { + s.vaultId = resp.vault_id; + s.connectorId = resp.connector_id; + }); + this.setStatus(this.queueStatusKind(), this.statusDetail()); + return true; + } catch (err) { + this.handleStartupError(err); + return false; + } + } + + applyHealth(h: HealthResponse): void { + this.capabilities = Array.isArray(h.capabilities) ? 
[...h.capabilities] : []; + this.deps.onCapabilities(this.capabilities); + } + + // ---- vault event handlers -------------------------------------------- + + onCreate(file: TAbstractFile): void { + if (!this.shouldTrack(file)) return; + const settings = this.deps.getSettings(); + if (this.isExcluded(file.path, settings)) return; + this.resetIdleStreak(); + if (this.isMarkdown(file)) { + this.scheduleMdUpsert(file.path); + return; + } + this.deps.queue.enqueueUpsert(file.path); + } + + onModify(file: TAbstractFile): void { + if (!this.shouldTrack(file)) return; + const settings = this.deps.getSettings(); + if (this.isExcluded(file.path, settings)) return; + this.resetIdleStreak(); + if (this.isMarkdown(file)) { + // Wait for metadataCache.changed so the payload sees fresh metadata. + this.scheduleMdUpsert(file.path); + return; + } + this.deps.queue.enqueueUpsert(file.path); + } + + onDelete(file: TAbstractFile): void { + if (!this.shouldTrack(file)) return; + this.resetIdleStreak(); + this.deps.queue.enqueueDelete(file.path); + void this.deps.saveSettings((s) => { + s.tombstones[file.path] = Date.now(); + }); + } + + onRename(file: TAbstractFile, oldPath: string): void { + if (!this.shouldTrack(file)) return; + this.resetIdleStreak(); + const settings = this.deps.getSettings(); + if (this.isExcluded(file.path, settings)) { + this.deps.queue.enqueueDelete(oldPath); + void this.deps.saveSettings((s) => { + s.tombstones[oldPath] = Date.now(); + }); + return; + } + this.deps.queue.enqueueRename(oldPath, file.path); + } + + onMetadataChanged(file: TFile, _data: string, _cache: CachedMetadata): void { + if (!this.shouldTrack(file)) return; + const settings = this.deps.getSettings(); + if (this.isExcluded(file.path, settings)) return; + if (!this.isMarkdown(file)) return; + // Metadata is fresh now — cancel the deferred upsert and enqueue immediately. 
+ const pending = this.pendingMdEdits.get(file.path); + if (pending) { + pending.cancel(); + this.pendingMdEdits.delete(file.path); + } + this.deps.queue.enqueueUpsert(file.path); + } + + private scheduleMdUpsert(path: string): void { + let pending = this.pendingMdEdits.get(path); + if (!pending) { + // resetTimer: true → each edit pushes the upsert out by another PENDING_DEBOUNCE_MS. + pending = debounce( + () => { + this.pendingMdEdits.delete(path); + this.deps.queue.enqueueUpsert(path); + }, + PENDING_DEBOUNCE_MS, + true, + ); + this.pendingMdEdits.set(path, pending); + } + pending(); + } + + // ---- queue draining --------------------------------------------------- + + async flushQueue(): Promise { + if (this.deps.queue.size === 0) { + await this.recoverStatusIfNeeded(); + return; + } + // Shared gate for every flush trigger so the first /sync can't race /connect. + if (!this.deps.getSettings().connectorId) { + const connected = await this.ensureConnected(); + if (!connected) return; + if (!this.deps.getSettings().connectorId) return; + } + this.setStatus("syncing", `Syncing ${this.deps.queue.size} item(s)…`); + const summary = await this.deps.queue.drain({ + processBatch: (batch) => this.processBatch(batch), + }); + if (summary.acked > 0) { + await this.deps.saveSettings((s) => { + s.lastSyncAt = Date.now(); + s.filesSynced = (s.filesSynced ?? 0) + summary.acked; + }); + } + this.setStatus(this.queueStatusKind(), this.statusDetail()); + } + + /** + * Lightweight status recovery path used after network-change signals. + * Clears stale offline/auth/error only when connectivity/auth is explicitly re-validated. 
+ */ + async recoverConnectivityStatus(): Promise { + const settings = this.deps.getSettings(); + if (!settings.apiToken) { + this.refreshStatus({ force: true }); + return; + } + if (!settings.searchSpaceId) { + try { + const health = await this.deps.apiClient.health(); + this.applyHealth(health); + this.refreshStatus({ force: true }); + } catch (err) { + this.handleStartupError(err); + } + return; + } + const connected = await this.ensureConnected(); + if (!connected) return; + this.refreshStatus({ force: true }); + } + + private async processBatch(batch: QueueItem[]): Promise { + const settings = this.deps.getSettings(); + const upserts = batch.filter((b): b is QueueItem & { op: "upsert" } => b.op === "upsert"); + const renames = batch.filter((b): b is QueueItem & { op: "rename" } => b.op === "rename"); + const deletes = batch.filter((b): b is QueueItem & { op: "delete" } => b.op === "delete"); + + const acked: QueueItem[] = []; + const retry: QueueItem[] = []; + const dropped: QueueItem[] = []; + + // Renames first so paths line up before content upserts. 
+ if (renames.length > 0) { + try { + const resp = await this.deps.apiClient.renameBatch({ + vaultId: settings.vaultId, + renames: renames.map((r) => ({ oldPath: r.oldPath, newPath: r.newPath })), + }); + const failed = new Set( + resp.failed.map((f) => `${f.oldPath}\u0000${f.newPath}`), + ); + for (const r of renames) { + if (failed.has(`${r.oldPath}\u0000${r.newPath}`)) retry.push(r); + else acked.push(r); + } + } catch (err) { + if (await this.handleVaultNotRegistered(err)) { + retry.push(...renames); + } else { + const verdict = this.classify(err); + if (verdict === "stop") return { acked, retry: [...retry, ...renames], dropped, stop: true }; + if (verdict === "retry") retry.push(...renames); + else dropped.push(...renames); + } + } + } + + if (deletes.length > 0) { + try { + const resp = await this.deps.apiClient.deleteBatch({ + vaultId: settings.vaultId, + paths: deletes.map((d) => d.path), + }); + const failed = new Set(resp.failed); + for (const d of deletes) { + if (failed.has(d.path)) retry.push(d); + else acked.push(d); + } + } catch (err) { + if (await this.handleVaultNotRegistered(err)) { + retry.push(...deletes); + } else { + const verdict = this.classify(err); + if (verdict === "stop") return { acked, retry: [...retry, ...deletes], dropped, stop: true }; + if (verdict === "retry") retry.push(...deletes); + else dropped.push(...deletes); + } + } + } + + if (upserts.length > 0) { + const payloads: NotePayload[] = []; + for (const item of upserts) { + const file = this.deps.app.vault.getFileByPath(item.path); + if (!file) { + // Vanished — ack now; the delete event will follow if needed. + acked.push(item); + continue; + } + try { + const payload = this.isMarkdown(file) + ? 
await buildNotePayload(this.deps.app, file, settings.vaultId) + : await this.buildBinaryPayload(file, settings.vaultId); + payloads.push(payload); + } catch (err) { + console.error("SurfSense: failed to build payload", item.path, err); + retry.push(item); + } + } + + if (payloads.length > 0) { + try { + const resp = await this.deps.apiClient.syncBatch({ + vaultId: settings.vaultId, + notes: payloads, + }); + // Per-note failures retry; queue maxAttempts drops poison pills. + const failed = new Set(resp.failed); + for (const item of upserts) { + if (retry.find((r) => r === item)) continue; + if (failed.has(item.path)) retry.push(item); + else acked.push(item); + } + } catch (err) { + if (await this.handleVaultNotRegistered(err)) { + for (const item of upserts) { + if (retry.find((r) => r === item)) continue; + retry.push(item); + } + } else { + const verdict = this.classify(err); + if (verdict === "stop") + return { acked, retry: [...retry, ...upserts], dropped, stop: true }; + if (verdict === "retry") retry.push(...upserts); + else dropped.push(...upserts); + } + } + } + } + + return { acked, retry, dropped, stop: false }; + } + + private async buildBinaryPayload(file: TFile, vaultId: string): Promise { + // Attachments skip buildNotePayload (no markdown metadata) but still + // need raw bytes + hash + stat so the backend can ETL-extract text + // and manifest diff still works. 
+ const buf = await this.deps.app.vault.readBinary(file); + const digest = await crypto.subtle.digest("SHA-256", buf); + const hash = bufferToHex(digest); + const binaryBase64 = arrayBufferToBase64(buf); + return { + vault_id: vaultId, + path: file.path, + name: file.basename, + extension: file.extension, + content: "", + frontmatter: {}, + tags: [], + headings: [], + resolved_links: [], + unresolved_links: [], + embeds: [], + aliases: [], + content_hash: hash, + size: file.stat.size, + mtime: file.stat.mtime, + ctime: file.stat.ctime, + is_binary: true, + binary_base64: binaryBase64, + mime_type: mimeTypeFor(file.extension), + }; + } + + // ---- reconcile -------------------------------------------------------- + + async maybeReconcile(force = false): Promise { + const settings = this.deps.getSettings(); + if (!settings.connectorId) return; + if (!force && settings.lastReconcileAt) { + if (Date.now() - settings.lastReconcileAt < RECONCILE_MIN_INTERVAL_MS) return; + } + + // Re-handshake first: if the vault grew enough to match another + // device's fingerprint, the server merges and routes us to the + // survivor row, which the /manifest call below then uses. + const connected = await this.ensureConnected(); + if (!connected) return; + const refreshed = this.deps.getSettings(); + if (!refreshed.connectorId) return; + + this.setStatus("syncing", "Reconciling vault with server…"); + try { + const manifest = await this.deps.apiClient.getManifest(refreshed.vaultId); + const remote = manifest.items ?? {}; + const enqueued = this.diffAndQueue(refreshed, remote); + await this.deps.saveSettings((s) => { + s.lastReconcileAt = Date.now(); + s.tombstones = pruneTombstones(s.tombstones); + }); + this.updateIdleStreak(enqueued); + await this.flushQueue(); + this.refreshStatus({ force: true }); + } catch (err) { + this.classifyAndStatus(err, "Reconcile failed"); + } + } + + /** + * Diff local vault vs server manifest and enqueue work. 
Skips disk reads + * on idle reconciles by short-circuiting on `mtime + size`; false positives + * collapse to a no-op upsert via the server's `content_hash` check. + * Returns the enqueued count to drive adaptive backoff. + */ + private diffAndQueue( + settings: SyncEngineSettings, + remote: Record, + ): number { + const localFiles = this.deps.app.vault.getFiles().filter((f) => { + if (!this.shouldTrack(f)) return false; + if (this.isExcluded(f.path, settings)) return false; + return true; + }); + const localPaths = new Set(localFiles.map((f) => f.path)); + let enqueued = 0; + + for (const file of localFiles) { + const remoteEntry = remote[file.path]; + if (!remoteEntry) { + this.deps.queue.enqueueUpsert(file.path); + enqueued++; + continue; + } + const remoteMtimeMs = toMillis(remoteEntry.mtime); + const mtimeMatches = file.stat.mtime <= remoteMtimeMs + 1000; + // Older server rows lack `size` — treat as unknown and re-upsert. + const sizeMatches = + typeof remoteEntry.size === "number" && file.stat.size === remoteEntry.size; + if (mtimeMatches && sizeMatches) continue; + this.deps.queue.enqueueUpsert(file.path); + enqueued++; + } + + // Remote-only → delete, unless a fresh tombstone is already in the queue. + for (const path of Object.keys(remote)) { + if (localPaths.has(path)) continue; + const tombstone = settings.tombstones[path]; + if (tombstone && Date.now() - tombstone < TOMBSTONE_TTL_MS) continue; + this.deps.queue.enqueueDelete(path); + enqueued++; + } + + return enqueued; + } + + /** Bump (idle) or reset (active) the streak; notify only when the capped multiplier changes. 
*/ + private updateIdleStreak(enqueued: number): void { + const previousStreak = this.idleReconcileStreak; + if (enqueued === 0) this.idleReconcileStreak++; + else this.idleReconcileStreak = 0; + const cap = Math.log2(this.maxBackoffMultiplier); + const cappedPrev = Math.min(previousStreak, cap); + const cappedNow = Math.min(this.idleReconcileStreak, cap); + if (cappedPrev !== cappedNow) this.deps.onReconcileBackoffChanged?.(); + } + + /** Vault edit — drop back to base interval immediately. */ + private resetIdleStreak(): void { + if (this.idleReconcileStreak === 0) return; + this.idleReconcileStreak = 0; + this.deps.onReconcileBackoffChanged?.(); + } + + // ---- status helpers --------------------------------------------------- + + /** + * Conservative by default: real errors are preserved while setup is + * complete, so unrelated edits don't optimistically clear the indicator. + * Pass `force: true` after an explicit verify/reconcile confirmation. + */ + refreshStatus(opts: { force?: boolean } = {}): void { + if (!opts.force) { + const last = this.lastAppliedKind; + if (last === "syncing") return; + const isError = + last === "auth-error" || last === "offline" || last === "error"; + const s = this.deps.getSettings(); + const setupComplete = !!(s.apiToken && s.searchSpaceId && s.connectorId); + if (isError && setupComplete) return; + } + this.setStatus(this.queueStatusKind(), this.statusDetail()); + } + + reportAuthError(message?: string): void { + this.setStatus("auth-error", message ?? "API token expired or invalid"); + } + + reportError(err: unknown): void { + if (err instanceof AuthError) { + this.reportAuthError(err.message); + return; + } + if (err instanceof TransientError) { + this.setStatus("offline", err.message); + return; + } + this.setStatus("error", (err as Error).message ?? 
"Unknown error"); + } + + private setStatus(kind: StatusKind, detail?: string): void { + const s = this.deps.getSettings(); + if (!s.apiToken) { + kind = "needs-setup"; + detail = this.setupHint(s); + } else if (kind !== "auth-error" && kind !== "offline" && kind !== "error") { + if (!s.searchSpaceId || !s.connectorId) { + kind = "needs-setup"; + detail = this.setupHint(s); + } + } + this.lastAppliedKind = kind; + this.deps.setStatus({ kind, detail, queueDepth: this.deps.queue.size }); + } + + private setupHint(s: SyncEngineSettings): string { + if (!s.apiToken) return "Paste your API token in settings."; + if (!s.searchSpaceId) return "Pick a search space in settings."; + return "Connecting…"; + } + + private queueStatusKind(): StatusKind { + if (this.deps.queue.size > 0) return "queued"; + return "idle"; + } + + private statusDetail(): string | undefined { + const settings = this.deps.getSettings(); + if (settings.lastSyncAt) { + return `Last sync ${formatRelative(settings.lastSyncAt)}`; + } + return undefined; + } + + private handleStartupError(err: unknown): void { + if (err instanceof AuthError) { + this.setStatus("auth-error", err.message); + return; + } + if (err instanceof TransientError) { + this.setStatus("offline", err.message); + return; + } + this.setStatus("error", (err as Error).message ?? "Unknown error"); + } + + /** Re-connect on VAULT_NOT_REGISTERED so the next drain sees the new row. 
*/ + private async handleVaultNotRegistered(err: unknown): Promise { + if (!(err instanceof VaultNotRegisteredError)) return false; + console.warn("SurfSense: vault not registered, re-connecting before retry", err); + await this.ensureConnected(); + return true; + } + + private classify(err: unknown): "ack" | "retry" | "drop" | "stop" { + if (err instanceof AuthError) { + this.setStatus("auth-error", err.message); + return "stop"; + } + if (err instanceof TransientError) { + this.setStatus("offline", err.message); + return "stop"; + } + if (err instanceof PermanentError) { + console.warn("SurfSense: permanent error, dropping batch", err); + new Notice(`Surfsense: ${err.message}`); + return "drop"; + } + console.error("SurfSense: unknown error", err); + return "retry"; + } + + private classifyAndStatus(err: unknown, prefix: string): void { + const verdict = this.classify(err); + if (verdict === "stop") return; + this.setStatus(this.queueStatusKind(), `${prefix}: ${(err as Error).message}`); + } + + private async recoverStatusIfNeeded(): Promise { + if (!this.isRecoverableErrorState()) return; + await this.recoverConnectivityStatus(); + } + + private isRecoverableErrorState(): boolean { + return ( + this.lastAppliedKind === "offline" || + this.lastAppliedKind === "auth-error" || + this.lastAppliedKind === "error" + ); + } + + // ---- predicates ------------------------------------------------------- + + private shouldTrack(file: TAbstractFile): boolean { + if (!isTFile(file)) return false; + if (this.isMarkdown(file)) return true; + const settings = this.deps.getSettings(); + if (!settings.includeAttachments) return false; + return ALLOWED_ATTACHMENT_EXTENSIONS.has(file.extension.toLowerCase()); + } + + private isExcluded(path: string, settings: SyncEngineSettings): boolean { + if (isFolderFiltered(path, settings.includeFolders, settings.excludeFolders)) { + return true; + } + return isExcluded(path, settings.excludePatterns); + } + + private isMarkdown(file: 
TAbstractFile): boolean { + return isTFile(file) && file.extension.toLowerCase() === "md"; + } +} + +function isTFile(f: TAbstractFile): f is TFile { + return f instanceof TFile; +} + +function bufferToHex(buf: ArrayBuffer): string { + const view = new Uint8Array(buf); + let hex = ""; + for (let i = 0; i < view.length; i++) hex += (view[i] ?? 0).toString(16).padStart(2, "0"); + return hex; +} + +function arrayBufferToBase64(buf: ArrayBuffer): string { + const bytes = new Uint8Array(buf); + const chunkSize = 0x8000; + let binary = ""; + for (let i = 0; i < bytes.length; i += chunkSize) { + const chunk = bytes.subarray(i, i + chunkSize); + binary += String.fromCharCode(...Array.from(chunk)); + } + return btoa(binary); +} + +/** Source of truth for the attachment whitelist. Mirrors ATTACHMENT_MIME_TYPES on the backend. */ +export const MIME_BY_EXTENSION = { + pdf: "application/pdf", + png: "image/png", + jpg: "image/jpeg", + jpeg: "image/jpeg", + gif: "image/gif", + webp: "image/webp", + svg: "image/svg+xml", + txt: "text/plain", +} as const satisfies Record; + +export const ALLOWED_ATTACHMENT_EXTENSIONS: ReadonlySet = new Set( + Object.keys(MIME_BY_EXTENSION), +); + +function mimeTypeFor(extension: string): string { + const ext = extension.toLowerCase() as keyof typeof MIME_BY_EXTENSION; + const mime = MIME_BY_EXTENSION[ext]; + if (!mime) { + throw new Error(`Unsupported attachment extension: .${extension}`); + } + return mime; +} + +function formatRelative(ts: number): string { + const diff = Date.now() - ts; + if (diff < 60_000) return "just now"; + if (diff < 3600_000) return `${Math.round(diff / 60_000)}m ago`; + if (diff < 86_400_000) return `${Math.round(diff / 3600_000)}h ago`; + return `${Math.round(diff / 86_400_000)}d ago`; +} + +/** Manifest mtimes arrive as ISO strings, vault stats as epoch ms — normalise. 
*/ +function toMillis(value: number | string | Date): number { + if (typeof value === "number") return value; + if (value instanceof Date) return value.getTime(); + const parsed = Date.parse(value); + return Number.isFinite(parsed) ? parsed : 0; +} + +function pruneTombstones(tombstones: Record): Record { + const out: Record = {}; + const cutoff = Date.now() - TOMBSTONE_TTL_MS; + for (const [k, v] of Object.entries(tombstones)) { + if (v >= cutoff) out[k] = v; + } + return out; +} diff --git a/surfsense_obsidian/src/types.ts b/surfsense_obsidian/src/types.ts new file mode 100644 index 000000000..192d34dc8 --- /dev/null +++ b/surfsense_obsidian/src/types.ts @@ -0,0 +1,202 @@ +/** Shared types for the SurfSense Obsidian plugin. Leaf module — no src/ imports. */ + +export interface SurfsensePluginSettings { + serverUrl: string; + apiToken: string; + searchSpaceId: number | null; + connectorId: number | null; + /** UUID for the vault — lives here so Obsidian Sync replicates it across devices. */ + vaultId: string; + /** 0 disables periodic reconcile (Force sync still works). */ + syncIntervalMinutes: number; + /** Mobile-only: pause auto-sync when on cellular. iOS can't detect network type, so the toggle is a no-op there. 
*/ + wifiOnly: boolean; + includeFolders: string[]; + excludeFolders: string[]; + excludePatterns: string[]; + includeAttachments: boolean; + lastSyncAt: number | null; + lastReconcileAt: number | null; + filesSynced: number; + queue: QueueItem[]; + tombstones: Record; +} + +export const DEFAULT_SETTINGS: SurfsensePluginSettings = { + serverUrl: "https://surfsense.com", + apiToken: "", + searchSpaceId: null, + connectorId: null, + vaultId: "", + syncIntervalMinutes: 10, + wifiOnly: false, + includeFolders: [], + excludeFolders: [], + excludePatterns: [".trash", "_attachments", "templates"], + includeAttachments: false, + lastSyncAt: null, + lastReconcileAt: null, + filesSynced: 0, + queue: [], + tombstones: {}, +}; + +export type QueueOp = "upsert" | "delete" | "rename"; + +export interface UpsertItem { + op: "upsert"; + path: string; + enqueuedAt: number; + attempt: number; +} + +export interface DeleteItem { + op: "delete"; + path: string; + enqueuedAt: number; + attempt: number; +} + +export interface RenameItem { + op: "rename"; + oldPath: string; + newPath: string; + enqueuedAt: number; + attempt: number; +} + +export type QueueItem = UpsertItem | DeleteItem | RenameItem; + +interface NotePayloadBase { + vault_id: string; + path: string; + name: string; + extension: string; + content: string; + frontmatter: Record; + tags: string[]; + headings: HeadingRef[]; + resolved_links: string[]; + unresolved_links: string[]; + embeds: string[]; + aliases: string[]; + content_hash: string; + /** Byte size of the local file; pairs with mtime for the reconcile short-circuit. */ + size: number; + mtime: number; + ctime: number; +} + +export interface MarkdownNotePayload extends NotePayloadBase { + is_binary?: false; +} + +export interface BinaryNotePayload extends NotePayloadBase { + /** Non-markdown attachment marker; enables backend ETL path. */ + is_binary: true; + /** Base64-encoded file bytes for binary attachments. 
*/ + binary_base64: string; + /** Canonical MIME type for the extension; required by the backend. */ + mime_type: string; +} + +export type NotePayload = MarkdownNotePayload | BinaryNotePayload; + +export interface HeadingRef { + heading: string; + level: number; +} + +export interface SearchSpace { + id: number; + name: string; + description?: string; + [key: string]: unknown; +} + +export interface ConnectResponse { + connector_id: number; + vault_id: string; + search_space_id: number; + capabilities: string[]; + server_time_utc: string; + [key: string]: unknown; +} + +export interface HealthResponse { + capabilities: string[]; + server_time_utc: string; + [key: string]: unknown; +} + +export interface ManifestEntry { + hash: string; + mtime: number; + /** Optional: byte size of stored content. Enables mtime+size short-circuit; falls back to upsert when missing. */ + size?: number; + [key: string]: unknown; +} + +export interface ManifestResponse { + vault_id: string; + items: Record; + [key: string]: unknown; +} + +/** Per-item ack shapes — mirror `app/schemas/obsidian_plugin.py` 1:1. 
*/ +export interface SyncAckItem { + path: string; + status: "ok" | "queued" | "error"; + document_id?: number; + error?: string; +} + +export interface SyncAck { + vault_id: string; + indexed: number; + failed: number; + items: SyncAckItem[]; +} + +export interface RenameAckItem { + old_path: string; + new_path: string; + status: "ok" | "error" | "missing"; + document_id?: number; + error?: string; +} + +export interface RenameAck { + vault_id: string; + renamed: number; + missing: number; + items: RenameAckItem[]; +} + +export interface DeleteAckItem { + path: string; + status: "ok" | "error" | "missing"; + error?: string; +} + +export interface DeleteAck { + vault_id: string; + deleted: number; + missing: number; + items: DeleteAckItem[]; +} + +export type StatusKind = + | "idle" + | "syncing" + | "queued" + | "needs-setup" + | "offline" + | "auth-error" + | "error"; + +export interface StatusState { + kind: StatusKind; + detail?: string; + queueDepth: number; +} diff --git a/surfsense_obsidian/src/vault-identity.ts b/surfsense_obsidian/src/vault-identity.ts new file mode 100644 index 000000000..86ae8b3b5 --- /dev/null +++ b/surfsense_obsidian/src/vault-identity.ts @@ -0,0 +1,43 @@ +import type { App } from "obsidian"; + +/** + * Deterministic SHA-256 over the vault name + sorted markdown paths. + * + * Two devices observing the same vault content compute the same value, + * regardless of how it was synced (iCloud, Syncthing, Obsidian Sync, …). + * The server uses this as the cross-device dedup key on /connect. 
+ */ +export async function computeVaultFingerprint(app: App): Promise { + const vaultName = app.vault.getName(); + const paths = app.vault + .getMarkdownFiles() + .map((f) => f.path) + .sort(); + const payload = `${vaultName}\n${paths.join("\n")}`; + const bytes = new TextEncoder().encode(payload); + const digest = await crypto.subtle.digest("SHA-256", bytes); + return bufferToHex(digest); +} + +function bufferToHex(buf: ArrayBuffer): string { + const view = new Uint8Array(buf); + let hex = ""; + for (let i = 0; i < view.length; i++) { + hex += (view[i] ?? 0).toString(16).padStart(2, "0"); + } + return hex; +} + +export function generateVaultUuid(): string { + const c = globalThis.crypto; + if (c?.randomUUID) return c.randomUUID(); + const buf = new Uint8Array(16); + c.getRandomValues(buf); + buf[6] = ((buf[6] ?? 0) & 0x0f) | 0x40; + buf[8] = ((buf[8] ?? 0) & 0x3f) | 0x80; + const hex = Array.from(buf, (b) => b.toString(16).padStart(2, "0")).join(""); + return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice( + 16, + 20, + )}-${hex.slice(20)}`; +} diff --git a/surfsense_obsidian/styles.css b/surfsense_obsidian/styles.css new file mode 100644 index 000000000..4aa831e6c --- /dev/null +++ b/surfsense_obsidian/styles.css @@ -0,0 +1,48 @@ +/* + * SurfSense Obsidian plugin styles. Status-bar widget only — the settings + * tab uses Obsidian's stock Setting rows, no custom CSS needed. 
+ */ + +.surfsense-status { + gap: 6px; +} + +.surfsense-status--clickable { + cursor: pointer; +} + +.surfsense-status__icon { + display: inline-flex; + width: 14px; + height: 14px; +} + +.surfsense-status__icon svg { + width: 14px; + height: 14px; +} + +.surfsense-status--err .surfsense-status__icon { + color: var(--color-red); +} + +.surfsense-connection-indicator { + display: inline-flex; + width: 14px; + height: 14px; +} + +.surfsense-connection-heading { + display: inline-flex; + align-items: center; + gap: 8px; +} + +.surfsense-connection-indicator svg { + width: 14px; + height: 14px; +} + +.surfsense-connection-indicator--err { + color: var(--color-red); +} diff --git a/surfsense_obsidian/tsconfig.json b/surfsense_obsidian/tsconfig.json new file mode 100644 index 000000000..222535dee --- /dev/null +++ b/surfsense_obsidian/tsconfig.json @@ -0,0 +1,30 @@ +{ + "compilerOptions": { + "baseUrl": "src", + "inlineSourceMap": true, + "inlineSources": true, + "module": "ESNext", + "target": "ES6", + "allowJs": true, + "noImplicitAny": true, + "noImplicitThis": true, + "noImplicitReturns": true, + "moduleResolution": "node", + "importHelpers": true, + "noUncheckedIndexedAccess": true, + "isolatedModules": true, + "strictNullChecks": true, + "strictBindCallApply": true, + "allowSyntheticDefaultImports": true, + "useUnknownInCatchVariables": true, + "lib": [ + "DOM", + "ES5", + "ES6", + "ES7" + ] + }, + "include": [ + "src/**/*.ts" + ] +} diff --git a/surfsense_obsidian/version-bump.mjs b/surfsense_obsidian/version-bump.mjs new file mode 100644 index 000000000..55d631fb6 --- /dev/null +++ b/surfsense_obsidian/version-bump.mjs @@ -0,0 +1,17 @@ +import { readFileSync, writeFileSync } from "fs"; + +const targetVersion = process.env.npm_package_version; + +// read minAppVersion from manifest.json and bump version to target version +const manifest = JSON.parse(readFileSync("manifest.json", "utf8")); +const { minAppVersion } = manifest; +manifest.version = targetVersion; 
+writeFileSync("manifest.json", JSON.stringify(manifest, null, "\t")); + +// update versions.json with target version and minAppVersion from manifest.json +// but only if the target version is not already in versions.json +const versions = JSON.parse(readFileSync('versions.json', 'utf8')); +if (!Object.values(versions).includes(minAppVersion)) { + versions[targetVersion] = minAppVersion; + writeFileSync('versions.json', JSON.stringify(versions, null, '\t')); +} diff --git a/surfsense_obsidian/versions.json b/surfsense_obsidian/versions.json new file mode 100644 index 000000000..9a3c3429d --- /dev/null +++ b/surfsense_obsidian/versions.json @@ -0,0 +1,3 @@ +{ + "0.1.0": "1.5.4" +} diff --git a/surfsense_web/.env.example b/surfsense_web/.env.example index 417181ccc..b121daf0b 100644 --- a/surfsense_web/.env.example +++ b/surfsense_web/.env.example @@ -1,4 +1,8 @@ NEXT_PUBLIC_FASTAPI_BACKEND_URL=http://localhost:8000 + +# Server-only. Internal backend URL used by Next.js server code. +FASTAPI_BACKEND_INTERNAL_URL=https://your-internal-backend.example.com + NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE=LOCAL or GOOGLE NEXT_PUBLIC_ETL_SERVICE=UNSTRUCTURED or LLAMACLOUD or DOCLING NEXT_PUBLIC_ZERO_CACHE_URL=http://localhost:4848 diff --git a/surfsense_web/app/(home)/announcements/layout.tsx b/surfsense_web/app/(home)/announcements/layout.tsx new file mode 100644 index 000000000..072db2c3f --- /dev/null +++ b/surfsense_web/app/(home)/announcements/layout.tsx @@ -0,0 +1,25 @@ +import type { Metadata } from "next"; +import type { ReactNode } from "react"; + +export const metadata: Metadata = { + title: "Announcements | SurfSense", + description: "Latest product updates, feature releases, and news from SurfSense.", + alternates: { + canonical: "https://surfsense.com/announcements", + }, + openGraph: { + title: "Announcements | SurfSense", + description: "Latest product updates, feature releases, and news from SurfSense.", + url: "https://surfsense.com/announcements", + type: 
"website", + }, + twitter: { + card: "summary_large_image", + title: "Announcements | SurfSense", + description: "Latest product updates, feature releases, and news from SurfSense.", + }, +}; + +export default function AnnouncementsLayout({ children }: { children: ReactNode }) { + return <>{children}; +} diff --git a/surfsense_web/app/api/v1/[...path]/route.ts b/surfsense_web/app/api/v1/[...path]/route.ts new file mode 100644 index 000000000..418bf1a33 --- /dev/null +++ b/surfsense_web/app/api/v1/[...path]/route.ts @@ -0,0 +1,70 @@ +import type { NextRequest } from "next/server"; + +export const dynamic = "force-dynamic"; + +const HOP_BY_HOP_HEADERS = new Set([ + "connection", + "keep-alive", + "proxy-authenticate", + "proxy-authorization", + "te", + "trailer", + "transfer-encoding", + "upgrade", +]); + +function getBackendBaseUrl() { + const base = process.env.FASTAPI_BACKEND_INTERNAL_URL || "http://localhost:8000"; + return base.endsWith("/") ? base.slice(0, -1) : base; +} + +function toUpstreamHeaders(headers: Headers) { + const nextHeaders = new Headers(headers); + nextHeaders.delete("host"); + nextHeaders.delete("content-length"); + return nextHeaders; +} + +function toClientHeaders(headers: Headers) { + const nextHeaders = new Headers(headers); + for (const header of HOP_BY_HOP_HEADERS) { + nextHeaders.delete(header); + } + return nextHeaders; +} + +async function proxy(request: NextRequest, context: { params: Promise<{ path?: string[] }> }) { + const params = await context.params; + const path = params.path?.join("/") || ""; + const upstreamUrl = new URL(`${getBackendBaseUrl()}/api/v1/${path}`); + upstreamUrl.search = request.nextUrl.search; + + const hasBody = request.method !== "GET" && request.method !== "HEAD"; + + const response = await fetch(upstreamUrl, { + method: request.method, + headers: toUpstreamHeaders(request.headers), + body: hasBody ? 
request.body : undefined, + // `duplex: "half"` is required by the Fetch spec when streaming a + // ReadableStream as the request body. Avoids buffering uploads in heap. + // @ts-expect-error - `duplex` is not yet in lib.dom RequestInit types. + duplex: hasBody ? "half" : undefined, + redirect: "manual", + }); + + return new Response(response.body, { + status: response.status, + statusText: response.statusText, + headers: toClientHeaders(response.headers), + }); +} + +export { + proxy as GET, + proxy as POST, + proxy as PUT, + proxy as PATCH, + proxy as DELETE, + proxy as OPTIONS, + proxy as HEAD, +}; diff --git a/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx index 10abe13b1..645b59010 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx @@ -680,7 +680,7 @@ export default function NewChatPage() { try { const backendUrl = process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL || "http://localhost:8000"; - const selection = await getAgentFilesystemSelection(); + const selection = await getAgentFilesystemSelection(searchSpaceId); if ( selection.filesystem_mode === "desktop_local_folder" && (!selection.local_filesystem_mounts || selection.local_filesystem_mounts.length === 0) @@ -1106,7 +1106,7 @@ export default function NewChatPage() { try { const backendUrl = process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL || "http://localhost:8000"; - const selection = await getAgentFilesystemSelection(); + const selection = await getAgentFilesystemSelection(searchSpaceId); const response = await fetch(`${backendUrl}/api/v1/threads/${resumeThreadId}/resume`, { method: "POST", headers: { @@ -1427,9 +1427,7 @@ export default function NewChatPage() { id: userMsgId, role: "user", content: isEdit - ? (editExtras?.userMessageContent ?? 
[ - { type: "text", text: newUserQuery ?? "" }, - ]) + ? (editExtras?.userMessageContent ?? [{ type: "text", text: newUserQuery ?? "" }]) : originalUserMessageContent || [{ type: "text", text: userQueryToDisplay || "" }], createdAt: new Date(), metadata: isEdit ? undefined : originalUserMessageMetadata, @@ -1448,7 +1446,7 @@ export default function NewChatPage() { ]); try { - const selection = await getAgentFilesystemSelection(); + const selection = await getAgentFilesystemSelection(searchSpaceId); const requestBody: Record = { search_space_id: searchSpaceId, user_query: newUserQuery, @@ -1557,9 +1555,7 @@ export default function NewChatPage() { try { // Persist user message (for both edit and reload modes, since backend deleted it) const userContentToPersist = isEdit - ? (editExtras?.userMessageContent ?? [ - { type: "text", text: newUserQuery ?? "" }, - ]) + ? (editExtras?.userMessageContent ?? [{ type: "text", text: newUserQuery ?? "" }]) : originalUserMessageContent || [{ type: "text", text: userQueryToDisplay || "" }]; const savedUserMessage = await appendMessage(threadId, { diff --git a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/ApiKeyContent.tsx b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/ApiKeyContent.tsx index 3600d30db..c34d9c0ca 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/ApiKeyContent.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/ApiKeyContent.tsx @@ -3,7 +3,7 @@ import { Check, Copy, Info } from "lucide-react"; import { useTranslations } from "next-intl"; import { useCallback, useRef, useState } from "react"; -import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert"; +import { Alert, AlertDescription } from "@/components/ui/alert"; import { Button } from "@/components/ui/button"; import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip"; import { useApiKey } from 
"@/hooks/use-api-key"; diff --git a/surfsense_web/atoms/documents/folder.atoms.ts b/surfsense_web/atoms/documents/folder.atoms.ts index fe7d556eb..bbdc58e4e 100644 --- a/surfsense_web/atoms/documents/folder.atoms.ts +++ b/surfsense_web/atoms/documents/folder.atoms.ts @@ -12,6 +12,15 @@ export const expandedFolderIdsAtom = atomWithStorage>( {} ); +/** + * Expanded folder keys for Local filesystem tree, keyed by search space ID. + * Persisted so local tree expansion survives remounts/reloads. + */ +export const localExpandedFolderKeysAtom = atomWithStorage>( + "surfsense:localExpandedFolderKeys", + {} +); + /** * Folder currently being renamed (inline edit mode). * null means no folder is being renamed. diff --git a/surfsense_web/components/assistant-ui/assistant-message.tsx b/surfsense_web/components/assistant-ui/assistant-message.tsx index ef7e217ec..6b9c2c87e 100644 --- a/surfsense_web/components/assistant-ui/assistant-message.tsx +++ b/surfsense_web/components/assistant-ui/assistant-message.tsx @@ -15,7 +15,7 @@ import { DownloadIcon, ExternalLink, Globe, - MessageSquare, + MessageCircleReply, MoreHorizontalIcon, RefreshCwIcon, } from "lucide-react"; @@ -657,7 +657,7 @@ export const AssistantMessage: FC = () => { : "text-muted-foreground hover:text-foreground hover:bg-muted" )} > - + {hasComments ? ( {commentCount} {commentCount === 1 ? 
"comment" : "comments"} diff --git a/surfsense_web/components/assistant-ui/connector-popup/connect-forms/components/obsidian-connect-form.tsx b/surfsense_web/components/assistant-ui/connector-popup/connect-forms/components/obsidian-connect-form.tsx index 08c1dd30c..ecbb09fae 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connect-forms/components/obsidian-connect-form.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connect-forms/components/obsidian-connect-form.tsx @@ -1,311 +1,187 @@ "use client"; -import { zodResolver } from "@hookform/resolvers/zod"; -import { Info } from "lucide-react"; -import type { FC } from "react"; -import { useRef, useState } from "react"; -import { useForm } from "react-hook-form"; -import * as z from "zod"; +import { Check, Copy, Info } from "lucide-react"; +import { type FC, useCallback, useRef, useState } from "react"; import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert"; -import { - Form, - FormControl, - FormDescription, - FormField, - FormItem, - FormLabel, - FormMessage, -} from "@/components/ui/form"; -import { Input } from "@/components/ui/input"; -import { Label } from "@/components/ui/label"; -import { - Select, - SelectContent, - SelectItem, - SelectTrigger, - SelectValue, -} from "@/components/ui/select"; -import { Switch } from "@/components/ui/switch"; +import { Button } from "@/components/ui/button"; import { EnumConnectorName } from "@/contracts/enums/connector"; +import { useApiKey } from "@/hooks/use-api-key"; +import { copyToClipboard as copyToClipboardUtil } from "@/lib/utils"; import { getConnectorBenefits } from "../connector-benefits"; import type { ConnectFormProps } from "../index"; -const obsidianConnectorFormSchema = z.object({ - name: z.string().min(3, { - message: "Connector name must be at least 3 characters.", - }), - vault_path: z.string().min(1, { - message: "Vault path is required.", - }), - vault_name: z.string().min(1, { - message: "Vault name 
is required.", - }), - exclude_folders: z.string().optional(), - include_attachments: z.boolean(), -}); +const PLUGIN_RELEASES_URL = + "https://github.com/MODSetter/SurfSense/releases?q=obsidian&expanded=true"; -type ObsidianConnectorFormValues = z.infer; +const BACKEND_URL = process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL ?? "https://surfsense.com"; -export const ObsidianConnectForm: FC = ({ onSubmit, isSubmitting }) => { - const isSubmittingRef = useRef(false); - const [periodicEnabled, setPeriodicEnabled] = useState(true); - const [frequencyMinutes, setFrequencyMinutes] = useState("60"); - const form = useForm({ - resolver: zodResolver(obsidianConnectorFormSchema), - defaultValues: { - name: "Obsidian Vault", - vault_path: "", - vault_name: "", - exclude_folders: ".obsidian,.trash", - include_attachments: false, - }, - }); +/** + * Obsidian connect form for the plugin-only architecture. + * + * The legacy `vault_path` form was removed because it only worked on + * self-hosted with a server-side bind mount and broke for everyone else. + * The plugin pushes data over HTTPS so this UI is purely instructional — + * there is no backend create call here. The connector row is created + * server-side the first time the plugin calls `POST /obsidian/connect`. + * + * The footer "Connect" button in `ConnectorConnectView` triggers this + * form's submit; we just close the dialog (`onBack()`) since there's + * nothing to validate or persist from this side. 
+ */ +export const ObsidianConnectForm: FC = ({ onBack }) => { + const { apiKey, isLoading, copied, copyToClipboard } = useApiKey(); + const [copiedUrl, setCopiedUrl] = useState(false); + const urlCopyTimerRef = useRef | undefined>(undefined); - const handleSubmit = async (values: ObsidianConnectorFormValues) => { - // Prevent multiple submissions - if (isSubmittingRef.current || isSubmitting) { - return; - } + const copyServerUrl = useCallback(async () => { + const ok = await copyToClipboardUtil(BACKEND_URL); + if (!ok) return; + setCopiedUrl(true); + if (urlCopyTimerRef.current) clearTimeout(urlCopyTimerRef.current); + urlCopyTimerRef.current = setTimeout(() => setCopiedUrl(false), 2000); + }, []); - isSubmittingRef.current = true; - try { - // Parse exclude_folders into an array - const excludeFolders = values.exclude_folders - ? values.exclude_folders - .split(",") - .map((f) => f.trim()) - .filter(Boolean) - : [".obsidian", ".trash"]; - - await onSubmit({ - name: values.name, - connector_type: EnumConnectorName.OBSIDIAN_CONNECTOR, - config: { - vault_path: values.vault_path, - vault_name: values.vault_name, - exclude_folders: excludeFolders, - include_attachments: values.include_attachments, - }, - is_indexable: true, - is_active: true, - last_indexed_at: null, - periodic_indexing_enabled: periodicEnabled, - indexing_frequency_minutes: periodicEnabled ? Number.parseInt(frequencyMinutes, 10) : null, - next_scheduled_at: null, - periodicEnabled, - frequencyMinutes, - }); - } finally { - isSubmittingRef.current = false; - } + const handleSubmit = (event: React.FormEvent) => { + event.preventDefault(); + onBack(); }; return (
- + {/* Form is intentionally empty so the footer Connect button is a no-op + that just closes the dialog (see component-level docstring). */} +
+ + - Self-Hosted Only + Plugin-based sync - This connector requires direct file system access and only works with self-hosted - SurfSense installations. + SurfSense now syncs Obsidian via an official plugin that runs inside Obsidian itself. + Works on desktop and mobile, in cloud and self-hosted deployments. -
- - - ( - - Connector Name - - - - - A friendly name to identify this connector. - - - - )} - /> - - ( - - Vault Path - - - - - The absolute path to your Obsidian vault on the server. This must be accessible - from the SurfSense backend. - - - - )} - /> - - ( - - Vault Name - - - - - A display name for your vault. This will be used in search results. - - - - )} - /> - - ( - - Exclude Folders - - - - - Comma-separated list of folder names to exclude from indexing. - - - - )} - /> - - ( - -
- Include Attachments - - Index attachment folders and embedded files (images, PDFs, etc.) - -
- - - -
- )} - /> - - {/* Indexing Configuration */} -
-

Indexing Configuration

- - {/* Periodic Sync Config */} -
-
-
-

Enable Periodic Sync

-

- Automatically re-index at regular intervals -

-
- -
- - {periodicEnabled && ( -
-
- - -
-
- )} +
+
+ {/* Step 1 — Install plugin */} +
+
+
+ 1
-
- - -
+

Install the plugin

+ +

+ Grab the latest SurfSense plugin release. Once it's in the community store, you'll + also be able to install it from{" "} + Settings → Community plugins inside Obsidian. +

+ + + + + +
+ + {/* Step 2 — Copy API key */} +
+
+
+ 2 +
+

Copy your API key

+
+

+ Paste this into the plugin's API token setting. + The token expires after 24 hours. Long-lived personal access tokens are coming in a + future release. +

+ + {isLoading ? ( +
+ ) : apiKey ? ( +
+
+

+ {apiKey} +

+
+ +
+ ) : ( +

+ No API key available — try refreshing the page. +

+ )} +
+ +
+ + {/* Step 3 — Server URL */} +
+
+
+ 3 +
+

Point the plugin at this server

+
+

+ For SurfSense Cloud, use the default{" "} + surfsense.com. If you are self-hosting, set the + plugin's Server URL to your frontend domain. +

+
+ +
+ + {/* Step 4 — Pick search space */} +
+
+
+ 4 +
+

Pick this search space

+
+

+ In the plugin's Search space setting, choose the + search space you want this vault to sync into. The connector will appear here + automatically once the plugin makes its first sync. +

+
+
+ - {/* What you get section */} {getConnectorBenefits(EnumConnectorName.OBSIDIAN_CONNECTOR) && ( -
-

+
+

What you get with Obsidian integration:

-
    +
      {getConnectorBenefits(EnumConnectorName.OBSIDIAN_CONNECTOR)?.map((benefit) => (
    • {benefit}
    • ))} diff --git a/surfsense_web/components/assistant-ui/connector-popup/connect-forms/connector-benefits.ts b/surfsense_web/components/assistant-ui/connector-popup/connect-forms/connector-benefits.ts index 0dc093100..f4883fa36 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connect-forms/connector-benefits.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/connect-forms/connector-benefits.ts @@ -104,11 +104,11 @@ export function getConnectorBenefits(connectorType: string): string[] | null { "No manual indexing required - meetings are added automatically", ], OBSIDIAN_CONNECTOR: [ - "Search through all your Obsidian notes and knowledge base", - "Access note content with YAML frontmatter metadata preserved", - "Wiki-style links ([[note]]) and #tags are indexed", - "Connect your personal knowledge base directly to your search space", - "Incremental sync - only changed files are re-indexed", + "Search through all of your Obsidian notes", + "Realtime sync as you create, edit, rename, or delete notes", + "YAML frontmatter, [[wiki links]], and #tags are preserved and indexed", + "Open any chat citation straight back in Obsidian via deep links", + "Each device is identifiable, so you can revoke a vault from one machine", "Full support for your vault's folder structure", ], }; diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/obsidian-config.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/obsidian-config.tsx index 3da1d6e7e..094eb3aa0 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/obsidian-config.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/obsidian-config.tsx @@ -1,167 +1,162 @@ "use client"; -import type { FC } from "react"; -import { useState } from "react"; -import { Input } from "@/components/ui/input"; -import { Label } from "@/components/ui/label"; 
-import { Switch } from "@/components/ui/switch"; +import { AlertTriangle, Info } from "lucide-react"; +import { type FC, useEffect, useMemo, useState } from "react"; +import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert"; +import { connectorsApiService, type ObsidianStats } from "@/lib/apis/connectors-api.service"; import type { ConnectorConfigProps } from "../index"; -export interface ObsidianConfigProps extends ConnectorConfigProps { - onNameChange?: (name: string) => void; +const OBSIDIAN_SETUP_DOCS_URL = "/docs/connectors/obsidian"; + +function formatTimestamp(value: unknown): string { + if (typeof value !== "string" || !value) return "—"; + const d = new Date(value); + if (Number.isNaN(d.getTime())) return value; + return d.toLocaleString(); } -export const ObsidianConfig: FC = ({ - connector, - onConfigChange, - onNameChange, -}) => { - const [vaultPath, setVaultPath] = useState( - (connector.config?.vault_path as string) || "" - ); - const [vaultName, setVaultName] = useState( - (connector.config?.vault_name as string) || "" - ); - const [excludeFolders, setExcludeFolders] = useState(() => { - const folders = connector.config?.exclude_folders; - if (Array.isArray(folders)) { - return folders.join(", "); - } - return (folders as string) || ".obsidian, .trash"; - }); - const [includeAttachments, setIncludeAttachments] = useState( - (connector.config?.include_attachments as boolean) || false - ); - const [name, setName] = useState(connector.name || ""); +/** + * Obsidian connector config view. + * + * Read-only on purpose: the plugin owns vault identity, so the connector's + * display name is auto-derived from `payload.vault_name` server-side on + * every `/connect` (see `obsidian_plugin_routes.obsidian_connect`). The + * web UI doesn't expose a Name input or a Save button for Obsidian (the + * latter is suppressed in `connector-edit-view.tsx`). + * + * Renders one of three modes depending on the connector's `config`: + * + * 1. 
**Plugin connector** (`config.source === "plugin"`) — read-only stats + * panel showing what the plugin most recently reported. + * 2. **Legacy server-path connector** (`config.legacy === true`, set by the + * migration) — migration warning + docs link + explicit disconnect data-loss + * warning so users move to the plugin flow safely. + * 3. **Unknown** — fallback for rows that escaped migration; suggests a + * clean re-install. + */ +export const ObsidianConfig: FC = ({ connector }) => { + const config = (connector.config ?? {}) as Record; + const isLegacy = config.legacy === true; + const isPlugin = config.source === "plugin"; - const handleVaultPathChange = (value: string) => { - setVaultPath(value); - if (onConfigChange) { - onConfigChange({ - ...connector.config, - vault_path: value, - }); - } - }; - - const handleVaultNameChange = (value: string) => { - setVaultName(value); - if (onConfigChange) { - onConfigChange({ - ...connector.config, - vault_name: value, - }); - } - }; - - const handleExcludeFoldersChange = (value: string) => { - setExcludeFolders(value); - const foldersArray = value - .split(",") - .map((f) => f.trim()) - .filter(Boolean); - if (onConfigChange) { - onConfigChange({ - ...connector.config, - exclude_folders: foldersArray, - }); - } - }; - - const handleIncludeAttachmentsChange = (value: boolean) => { - setIncludeAttachments(value); - if (onConfigChange) { - onConfigChange({ - ...connector.config, - include_attachments: value, - }); - } - }; - - const handleNameChange = (value: string) => { - setName(value); - if (onNameChange) { - onNameChange(value); - } - }; + if (isLegacy) return ; + if (isPlugin) return ; + return ; +}; +const LegacyBanner: FC = () => { return (
      - {/* Connector Name */} -
      -
      - - handleNameChange(e.target.value)} - placeholder="My Obsidian Vault" - className="border-slate-400/20 focus-visible:border-slate-400/40" - /> -

      - A friendly name to identify this connector. -

      -
      -
      + + + + Sync stopped, install the plugin to migrate + + + This Obsidian connector used the legacy server-path scanner, which has been removed. The + notes already indexed remain searchable, but they no longer reflect changes made in your + vault. + + - {/* Configuration */} -
      -
      -

      - Vault Configuration -

      -
      - -
      -
      - - handleVaultPathChange(e.target.value)} - placeholder="/path/to/your/obsidian/vault" - className="border-slate-400/20 focus-visible:border-slate-400/40 font-mono" - /> -

      - The absolute path to your Obsidian vault on the server. -

      -
      - -
      - - handleVaultNameChange(e.target.value)} - placeholder="My Knowledge Base" - className="border-slate-400/20 focus-visible:border-slate-400/40" - /> -

      - A display name for your vault in search results. -

      -
      - -
      - - handleExcludeFoldersChange(e.target.value)} - placeholder=".obsidian, .trash, templates" - className="border-slate-400/20 focus-visible:border-slate-400/40 font-mono" - /> -

      - Comma-separated list of folder names to exclude from indexing. -

      -
      - -
      -
      - -

      - Index attachment folders and embedded files -

      -
      - -
      -
      +
      +

      Migration required

      +

      + Follow the{" "} + + Obsidian setup guide + {" "} + to reconnect this vault through the plugin. +

      +

      + Heads up: Disconnect also deletes every document this connector previously indexed. +

      ); }; + +const PluginStats: FC<{ config: Record }> = ({ config }) => { + const vaultId = typeof config.vault_id === "string" ? config.vault_id : null; + const [stats, setStats] = useState(null); + const [statsError, setStatsError] = useState(false); + + useEffect(() => { + if (!vaultId) return; + let cancelled = false; + setStats(null); + setStatsError(false); + connectorsApiService + .getObsidianStats(vaultId) + .then((result) => { + if (!cancelled) setStats(result); + }) + .catch((err) => { + if (!cancelled) { + console.error("Failed to fetch Obsidian stats", err); + setStatsError(true); + } + }); + return () => { + cancelled = true; + }; + }, [vaultId]); + + const tileRows = useMemo(() => { + const placeholder = statsError ? "—" : stats ? null : "…"; + return [ + { label: "Vault name", value: (config.vault_name as string) || "—" }, + { + label: "Last sync", + value: placeholder ?? formatTimestamp(stats?.last_sync_at ?? null), + }, + { + label: "Files synced", + value: + placeholder ?? + (typeof stats?.files_synced === "number" ? stats.files_synced.toLocaleString() : "—"), + }, + ]; + }, [config.vault_name, stats, statsError]); + + return ( +
      + + + Plugin connected + + Your notes stay synced automatically. To stop syncing, disable or uninstall the plugin in + Obsidian, or delete this connector. + + + +
      +

      Vault Status

      +
      + {tileRows.map((stat) => ( +
      +
      + {stat.label} +
      +
      {stat.value}
      +
      + ))} +
      +
      +
      + ); +}; + +const UnknownConnectorState: FC = () => ( + + + Unrecognized config + + This connector has neither plugin metadata nor a legacy marker. It may predate migration — you + can safely delete it and re-install the SurfSense Obsidian plugin to resume syncing. + + +); diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-connect-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-connect-view.tsx index 8a0ef5ae1..5b82a8e88 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-connect-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-connect-view.tsx @@ -111,7 +111,9 @@ export const ConnectorConnectView: FC = ({ : getConnectorTypeDisplay(connectorType)}

- Enter your connection details + {connectorType === "OBSIDIAN_CONNECTOR" + ? "Follow the plugin setup steps below" + : "Enter your connection details"}

@@ -149,7 +151,9 @@ export const ConnectorConnectView: FC = ({ {connectorType === "MCP_CONNECTOR" ? "Connect" - : `Connect ${getConnectorTypeDisplay(connectorType)}`} + : connectorType === "OBSIDIAN_CONNECTOR" + ? "Done" + : `Connect ${getConnectorTypeDisplay(connectorType)}`} {isSubmitting && } diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx index 48f42c3b4..e7895c2e9 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx @@ -1,12 +1,13 @@ "use client"; import { useAtomValue } from "jotai"; -import { ArrowLeft, Info, RefreshCw, Trash2 } from "lucide-react"; +import { ArrowLeft, Info, RefreshCw } from "lucide-react"; import { type FC, useCallback, useEffect, useMemo, useRef, useState } from "react"; import { toast } from "sonner"; import { activeSearchSpaceIdAtom } from "@/atoms/search-spaces/search-space-query.atoms"; import { Button } from "@/components/ui/button"; import { Spinner } from "@/components/ui/spinner"; +import { EnumConnectorName } from "@/contracts/enums/connector"; import { getConnectorIcon } from "@/contracts/enums/connectorIcons"; import type { SearchSourceConnector } from "@/contracts/types/connector.types"; import { authenticatedFetch } from "@/lib/auth-utils"; @@ -18,7 +19,15 @@ import { VisionLLMConfig } from "../../components/vision-llm-config"; import { getReauthEndpoint, LIVE_CONNECTOR_TYPES } from "../../constants/connector-constants"; import { getConnectorDisplayName } from "../../tabs/all-connectors-tab"; import { MCPServiceConfig } from "../components/mcp-service-config"; -import { type ConnectorConfigProps, getConnectorConfigComponent } from "../index"; +import { getConnectorConfigComponent } from 
"../index"; + +const VISION_LLM_CONNECTOR_TYPES = new Set([ + EnumConnectorName.GOOGLE_DRIVE_CONNECTOR, + EnumConnectorName.COMPOSIO_GOOGLE_DRIVE_CONNECTOR, + EnumConnectorName.DROPBOX_CONNECTOR, + EnumConnectorName.ONEDRIVE_CONNECTOR, + EnumConnectorName.OBSIDIAN_CONNECTOR, +]); interface ConnectorEditViewProps { connector: SearchSourceConnector; @@ -75,6 +84,9 @@ export const ConnectorEditView: FC = ({ const isAuthExpired = connector.config?.auth_expired === true; const reauthEndpoint = getReauthEndpoint(connector); const [reauthing, setReauthing] = useState(false); + const supportsVisionLlm = VISION_LLM_CONNECTOR_TYPES.has(connector.connector_type); + const showsAiToggles = + connector.is_indexable || connector.connector_type === EnumConnectorName.OBSIDIAN_CONNECTOR; const handleReauth = useCallback(async () => { const spaceId = searchSpaceId ?? searchSpaceIdAtom; @@ -264,25 +276,23 @@ export const ConnectorEditView: FC = ({ /> )} - {/* Summary and sync settings - hidden for live connectors */} - {connector.is_indexable && !isLive && ( + {/* Summary + vision toggles (Obsidian is plugin-push, non-indexable by design) */} + {showsAiToggles && !isLive && ( <> {/* AI Summary toggle */} - {/* Vision LLM toggle - only for file-based connectors */} - {(connector.connector_type === "GOOGLE_DRIVE_CONNECTOR" || - connector.connector_type === "COMPOSIO_GOOGLE_DRIVE_CONNECTOR" || - connector.connector_type === "DROPBOX_CONNECTOR" || - connector.connector_type === "ONEDRIVE_CONNECTOR") && ( + {/* Vision LLM toggle for file/attachment connectors */} + {supportsVisionLlm && ( )} - {/* Date range selector - not shown for file-based connectors (Drive, Dropbox, OneDrive), Webcrawler, GitHub, or Local Folder */} - {connector.connector_type !== "GOOGLE_DRIVE_CONNECTOR" && + {/* Date-range and periodic sync stay indexable-only */} + {connector.is_indexable && + connector.connector_type !== "GOOGLE_DRIVE_CONNECTOR" && connector.connector_type !== "COMPOSIO_GOOGLE_DRIVE_CONNECTOR" && 
connector.connector_type !== "DROPBOX_CONNECTOR" && connector.connector_type !== "ONEDRIVE_CONNECTOR" && @@ -302,37 +312,40 @@ export const ConnectorEditView: FC = ({ /> )} - {(() => { - const isGoogleDrive = connector.connector_type === "GOOGLE_DRIVE_CONNECTOR"; - const isComposioGoogleDrive = - connector.connector_type === "COMPOSIO_GOOGLE_DRIVE_CONNECTOR"; - const requiresFolderSelection = isGoogleDrive || isComposioGoogleDrive; - const selectedFolders = - (connector.config?.selected_folders as - | Array<{ id: string; name: string }> - | undefined) || []; - const selectedFiles = - (connector.config?.selected_files as - | Array<{ id: string; name: string }> - | undefined) || []; - const hasItemsSelected = selectedFolders.length > 0 || selectedFiles.length > 0; - const isDisabled = requiresFolderSelection && !hasItemsSelected; + {connector.is_indexable && + (() => { + const isGoogleDrive = + connector.connector_type === "GOOGLE_DRIVE_CONNECTOR"; + const isComposioGoogleDrive = + connector.connector_type === "COMPOSIO_GOOGLE_DRIVE_CONNECTOR"; + const requiresFolderSelection = isGoogleDrive || isComposioGoogleDrive; + const selectedFolders = + (connector.config?.selected_folders as + | Array<{ id: string; name: string }> + | undefined) || []; + const selectedFiles = + (connector.config?.selected_files as + | Array<{ id: string; name: string }> + | undefined) || []; + const hasItemsSelected = + selectedFolders.length > 0 || selectedFiles.length > 0; + const isDisabled = requiresFolderSelection && !hasItemsSelected; - return ( - - ); - })()} + return ( + + ); + })()} )} @@ -403,7 +416,6 @@ export const ConnectorEditView: FC = ({ disabled={isSaving || isDisconnecting} className="text-xs sm:text-sm flex-1 sm:flex-initial h-12 sm:h-auto py-3 sm:py-2" > - Disconnect )} diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/indexing-configuration-view.tsx 
b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/indexing-configuration-view.tsx index e8dffb3c3..982b0be11 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/indexing-configuration-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/indexing-configuration-view.tsx @@ -4,6 +4,7 @@ import { ArrowLeft, Check, Info } from "lucide-react"; import { type FC, useCallback, useEffect, useMemo, useRef, useState } from "react"; import { Button } from "@/components/ui/button"; import { Spinner } from "@/components/ui/spinner"; +import { EnumConnectorName } from "@/contracts/enums/connector"; import type { SearchSourceConnector } from "@/contracts/types/connector.types"; import { getConnectorTypeDisplay } from "@/lib/connectors/utils"; import { cn } from "@/lib/utils"; @@ -18,6 +19,14 @@ import { import { getConnectorDisplayName } from "../../tabs/all-connectors-tab"; import { getConnectorConfigComponent } from "../index"; +const VISION_LLM_CONNECTOR_TYPES = new Set([ + "GOOGLE_DRIVE_CONNECTOR", + "COMPOSIO_GOOGLE_DRIVE_CONNECTOR", + "DROPBOX_CONNECTOR", + "ONEDRIVE_CONNECTOR", + "OBSIDIAN_CONNECTOR", +]); + interface IndexingConfigurationViewProps { config: IndexingConfigState; connector?: SearchSourceConnector; @@ -68,6 +77,9 @@ export const IndexingConfigurationView: FC = ({ () => (connector ? getConnectorConfigComponent(connector.connector_type) : null), [connector] ); + const showsAiToggles = + (connector?.is_indexable ?? 
false) || + connector?.connector_type === EnumConnectorName.OBSIDIAN_CONNECTOR; const [isScrolled, setIsScrolled] = useState(false); const [hasMoreContent, setHasMoreContent] = useState(false); const scrollContainerRef = useRef(null); @@ -164,25 +176,23 @@ export const IndexingConfigurationView: FC = ({ )} - {/* Summary and sync settings - hidden for live connectors */} - {connector?.is_indexable && !isLive && ( + {/* Summary + vision toggles (Obsidian is plugin-push, non-indexable by design) */} + {showsAiToggles && !isLive && ( <> {/* AI Summary toggle */} - {/* Vision LLM toggle - only for file-based connectors */} - {(config.connectorType === "GOOGLE_DRIVE_CONNECTOR" || - config.connectorType === "COMPOSIO_GOOGLE_DRIVE_CONNECTOR" || - config.connectorType === "DROPBOX_CONNECTOR" || - config.connectorType === "ONEDRIVE_CONNECTOR") && ( + {/* Vision LLM toggle for file/attachment connectors */} + {VISION_LLM_CONNECTOR_TYPES.has(config.connectorType) && ( )} - {/* Date range selector - not shown for file-based connectors (Drive, Dropbox, OneDrive), Webcrawler, GitHub, or Local Folder */} - {config.connectorType !== "GOOGLE_DRIVE_CONNECTOR" && + {/* Date-range and periodic sync stay indexable-only */} + {connector?.is_indexable && + config.connectorType !== "GOOGLE_DRIVE_CONNECTOR" && config.connectorType !== "COMPOSIO_GOOGLE_DRIVE_CONNECTOR" && config.connectorType !== "DROPBOX_CONNECTOR" && config.connectorType !== "ONEDRIVE_CONNECTOR" && @@ -202,7 +212,8 @@ export const IndexingConfigurationView: FC = ({ /> )} - {config.connectorType !== "GOOGLE_DRIVE_CONNECTOR" && + {connector?.is_indexable && + config.connectorType !== "GOOGLE_DRIVE_CONNECTOR" && config.connectorType !== "COMPOSIO_GOOGLE_DRIVE_CONNECTOR" && config.connectorType !== "DROPBOX_CONNECTOR" && config.connectorType !== "ONEDRIVE_CONNECTOR" && ( diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts 
b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 2ee811c19..ae2c413cf 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -200,7 +200,7 @@ export const OTHER_CONNECTORS = [ { id: "obsidian-connector", title: "Obsidian", - description: "Index your Obsidian vault (Local folder scan on Desktop)", + description: "Sync your Obsidian vault on desktop or mobile", connectorType: EnumConnectorName.OBSIDIAN_CONNECTOR, }, ] as const; diff --git a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts index a9223fee5..ed9bf70a8 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts @@ -1,5 +1,5 @@ import { format } from "date-fns"; -import { useAtom, useAtomValue, useSetAtom } from "jotai"; +import { useAtom, useAtomValue } from "jotai"; import { useCallback, useEffect, useRef, useState } from "react"; import { toast } from "sonner"; import { connectorDialogOpenAtom } from "@/atoms/connector-dialog/connector-dialog.atoms"; @@ -10,17 +10,11 @@ import { updateConnectorMutationAtom, } from "@/atoms/connectors/connector-mutation.atoms"; import { connectorsAtom } from "@/atoms/connectors/connector-query.atoms"; -import { - folderWatchDialogOpenAtom, - folderWatchInitialFolderAtom, -} from "@/atoms/folder-sync/folder-sync.atoms"; import { activeSearchSpaceIdAtom } from "@/atoms/search-spaces/search-space-query.atoms"; import { EnumConnectorName } from "@/contracts/enums/connector"; import type { SearchSourceConnector } from "@/contracts/types/connector.types"; import { searchSourceConnector } from "@/contracts/types/connector.types"; -import { 
usePlatform } from "@/hooks/use-platform"; import { authenticatedFetch } from "@/lib/auth-utils"; -import { isSelfHosted } from "@/lib/env-config"; import { trackConnectorConnected, trackConnectorDeleted, @@ -71,10 +65,6 @@ export const useConnectorDialog = () => { const { mutateAsync: updateConnector } = useAtomValue(updateConnectorMutationAtom); const { mutateAsync: deleteConnector } = useAtomValue(deleteConnectorMutationAtom); const { mutateAsync: createConnector } = useAtomValue(createConnectorMutationAtom); - const setFolderWatchOpen = useSetAtom(folderWatchDialogOpenAtom); - const setFolderWatchInitialFolder = useSetAtom(folderWatchInitialFolderAtom); - const { isDesktop } = usePlatform(); - const selfHosted = isSelfHosted(); // Use global atom for dialog open state so it can be controlled from anywhere const [isOpen, setIsOpen] = useAtom(connectorDialogOpenAtom); @@ -439,6 +429,7 @@ export const useConnectorDialog = () => { indexing_frequency_minutes: null, next_scheduled_at: null, enable_summary: false, + enable_vision_llm: false, }, queryParams: { search_space_id: searchSpaceId, @@ -487,31 +478,16 @@ export const useConnectorDialog = () => { } }, [searchSpaceId, createConnector, refetchAllConnectors, setIsOpen]); - // Handle connecting non-OAuth connectors (like Tavily API) + // Handle connecting non-OAuth connectors (like Tavily API, Obsidian plugin, etc.) 
const handleConnectNonOAuth = useCallback( (connectorType: string) => { if (!searchSpaceId) return; trackConnectorSetupStarted(Number(searchSpaceId), connectorType, "non_oauth_click"); - // Handle Obsidian specifically on Desktop & Cloud - if (connectorType === EnumConnectorName.OBSIDIAN_CONNECTOR && !selfHosted && isDesktop) { - setIsOpen(false); - setFolderWatchInitialFolder(null); - setFolderWatchOpen(true); - return; - } - setConnectingConnectorType(connectorType); }, - [ - searchSpaceId, - selfHosted, - isDesktop, - setIsOpen, - setFolderWatchOpen, - setFolderWatchInitialFolder, - ] + [searchSpaceId] ); // Handle submitting connect form @@ -555,6 +531,7 @@ export const useConnectorDialog = () => { is_active: true, next_scheduled_at: connectorData.next_scheduled_at as string | null, enable_summary: false, + enable_vision_llm: false, }, queryParams: { search_space_id: searchSpaceId, diff --git a/surfsense_web/components/assistant-ui/markdown-text.tsx b/surfsense_web/components/assistant-ui/markdown-text.tsx index 140ddcae7..f8abed486 100644 --- a/surfsense_web/components/assistant-ui/markdown-text.tsx +++ b/surfsense_web/components/assistant-ui/markdown-text.tsx @@ -229,6 +229,44 @@ function extractDomain(url: string): string { // Canonical local-file virtual paths are mount-prefixed: // const LOCAL_FILE_PATH_REGEX = /^\/[a-z0-9_-]+\/[^\s`]+(?:\/[^\s`]+)*$/; +type AgentFilesystemMount = { + mount: string; + rootPath: string; +}; + +function normalizeLocalVirtualPathForEditor( + candidatePath: string, + mounts: AgentFilesystemMount[] +): string { + const normalizedCandidate = candidatePath.trim().replace(/\\/g, "/").replace(/\/+/g, "/"); + if (!normalizedCandidate) { + return candidatePath; + } + const defaultMount = mounts[0]?.mount; + if (!defaultMount) { + return normalizedCandidate.startsWith("/") + ? 
normalizedCandidate + : `/${normalizedCandidate.replace(/^\/+/, "")}`; + } + + const mountNames = new Set(mounts.map((entry) => entry.mount)); + if (normalizedCandidate.startsWith("/")) { + const relative = normalizedCandidate.replace(/^\/+/, ""); + const [firstSegment] = relative.split("/", 1); + if (mountNames.has(firstSegment)) { + return `/${relative}`; + } + return `/${defaultMount}/${relative}`; + } + + const relative = normalizedCandidate.replace(/^\/+/, ""); + const [firstSegment] = relative.split("/", 1); + if (mountNames.has(firstSegment)) { + return `/${relative}`; + } + return `/${defaultMount}/${relative}`; +} + function isVirtualFilePathToken(value: string): boolean { if (!LOCAL_FILE_PATH_REGEX.test(value) || value.startsWith("//")) { return false; @@ -421,8 +459,15 @@ const defaultComponents = memoizeMarkdownComponents({ !codeString.includes("\n"); if (!isCodeBlock) { const inlineValue = String(children ?? "").trim(); + const normalizedInlinePath = inlineValue.replace(/\/+$/, ""); + const leafSegment = normalizedInlinePath.split("/").filter(Boolean).at(-1) ?? 
""; + const isLikelyFolder = + inlineValue.endsWith("/") || !leafSegment || !leafSegment.includes("."); const isLocalPath = - !!electronAPI && isVirtualFilePathToken(inlineValue) && !inlineValue.startsWith("//"); + !!electronAPI && + isVirtualFilePathToken(inlineValue) && + !inlineValue.startsWith("//") && + !isLikelyFolder; const displayLocalPath = inlineValue.replace(/^\/+/, ""); const searchSpaceIdParam = params?.search_space_id; const parsedSearchSpaceId = Array.isArray(searchSpaceIdParam) @@ -438,14 +483,31 @@ const defaultComponents = memoizeMarkdownComponents({ onClick={(event) => { event.preventDefault(); event.stopPropagation(); - openEditorPanel({ - kind: "local_file", - localFilePath: inlineValue, - title: inlineValue.split("/").pop() || inlineValue, - searchSpaceId: Number.isFinite(parsedSearchSpaceId) + void (async () => { + let resolvedLocalPath = inlineValue; + const resolvedSearchSpaceId = Number.isFinite(parsedSearchSpaceId) ? parsedSearchSpaceId - : undefined, - }); + : undefined; + if (electronAPI?.getAgentFilesystemMounts) { + try { + const mounts = (await electronAPI.getAgentFilesystemMounts( + resolvedSearchSpaceId + )) as AgentFilesystemMount[]; + resolvedLocalPath = normalizeLocalVirtualPathForEditor( + inlineValue, + mounts + ); + } catch { + // Fall back to the raw inline path if mount lookup fails. 
+ } + } + openEditorPanel({ + kind: "local_file", + localFilePath: resolvedLocalPath, + title: resolvedLocalPath.split("/").pop() || resolvedLocalPath, + searchSpaceId: resolvedSearchSpaceId, + }); + })(); }} title="Open in editor panel" > diff --git a/surfsense_web/components/chat-comments/comment-item/comment-item.tsx b/surfsense_web/components/chat-comments/comment-item/comment-item.tsx index 03c6c5675..a8da34855 100644 --- a/surfsense_web/components/chat-comments/comment-item/comment-item.tsx +++ b/surfsense_web/components/chat-comments/comment-item/comment-item.tsx @@ -1,7 +1,7 @@ "use client"; import { useAtomValue, useSetAtom } from "jotai"; -import { MessageSquare } from "lucide-react"; +import { MessageCircleReply } from "lucide-react"; import { useEffect, useRef, useState } from "react"; import { clearTargetCommentIdAtom, targetCommentIdAtom } from "@/atoms/chat/current-thread.atom"; import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar"; @@ -216,7 +216,7 @@ export function CommentItem({ className="mt-1 h-7 w-fit px-2 text-xs text-muted-foreground hover:text-foreground" onClick={() => onReply(comment.id)} > - + Reply )} diff --git a/surfsense_web/components/chat-comments/comment-sheet/comment-sheet.tsx b/surfsense_web/components/chat-comments/comment-sheet/comment-sheet.tsx index d483ab261..8db45f764 100644 --- a/surfsense_web/components/chat-comments/comment-sheet/comment-sheet.tsx +++ b/surfsense_web/components/chat-comments/comment-sheet/comment-sheet.tsx @@ -1,6 +1,6 @@ "use client"; -import { MessageSquare } from "lucide-react"; +import { MessageCircleReply } from "lucide-react"; import { Drawer, DrawerContent, @@ -30,7 +30,7 @@ export function CommentSheet({ - + Comments {commentCount > 0 && ( @@ -56,7 +56,7 @@ export function CommentSheet({ > - + Comments {commentCount > 0 && ( diff --git a/surfsense_web/components/chat-comments/comment-thread/comment-thread.tsx 
b/surfsense_web/components/chat-comments/comment-thread/comment-thread.tsx index e47531129..7929716bb 100644 --- a/surfsense_web/components/chat-comments/comment-thread/comment-thread.tsx +++ b/surfsense_web/components/chat-comments/comment-thread/comment-thread.tsx @@ -1,6 +1,6 @@ "use client"; -import { ChevronDown, ChevronRight, MessageSquare } from "lucide-react"; +import { ChevronDown, ChevronRight, MessageCircleReply } from "lucide-react"; import { useState } from "react"; import { Button } from "@/components/ui/button"; import { CommentComposer } from "../comment-composer/comment-composer"; @@ -143,7 +143,7 @@ export function CommentThread({
) : ( )} @@ -155,7 +155,7 @@ export function CommentThread({ {!hasReplies && !isReplyComposerOpen && (
diff --git a/surfsense_web/components/documents/DocumentsFilters.tsx b/surfsense_web/components/documents/DocumentsFilters.tsx index f03684631..57e6479cb 100644 --- a/surfsense_web/components/documents/DocumentsFilters.tsx +++ b/surfsense_web/components/documents/DocumentsFilters.tsx @@ -84,7 +84,7 @@ export function DocumentsFilters({ { e.preventDefault(); onCreateFolder(); @@ -104,11 +104,11 @@ export function DocumentsFilters({ value="ai-sort" disabled={aiSortBusy} className={cn( - "h-9 w-9 shrink-0 border-sidebar-border bg-sidebar", + "h-9 w-9 shrink-0 border bg-muted/50 transition-colors", "disabled:pointer-events-none disabled:opacity-50", aiSortEnabled - ? "bg-accent text-accent-foreground" - : "text-muted-foreground hover:text-foreground hover:border-sidebar-border" + ? "bg-accent text-accent-foreground hover:bg-accent" + : "text-muted-foreground hover:bg-muted/80 hover:text-foreground" )} onClick={(e) => { e.preventDefault(); @@ -142,11 +142,11 @@ export function DocumentsFilters({ {activeTypes.length > 0 && ( - + {activeTypes.length} )} @@ -226,13 +226,13 @@ export function DocumentsFilters({ {/* Search Input */}
-
+
onSearch(e.target.value)} placeholder="Search docs" @@ -242,7 +242,7 @@ export function DocumentsFilters({ {Boolean(searchValue) && (
-
+

{displayTitle}

@@ -353,6 +426,12 @@ export function EditorPanelContent({ ) : ( <> + {!isLocalFileMode && editorDoc?.document_type && documentId && ( + + )}
) : (
-
+

{displayTitle}

@@ -429,6 +499,12 @@ export function EditorPanelContent({ ) : ( <> + {!isLocalFileMode && editorDoc?.document_type && documentId && ( + + )} )} - {!isLocalFileMode && editorDoc?.document_type && documentId && ( - - )} )}
@@ -508,7 +578,7 @@ export function EditorPanelContent({ diff --git a/surfsense_web/components/editor/source-code-editor.tsx b/surfsense_web/components/editor/source-code-editor.tsx index 9a763db27..9102dffe9 100644 --- a/surfsense_web/components/editor/source-code-editor.tsx +++ b/surfsense_web/components/editor/source-code-editor.tsx @@ -114,10 +114,10 @@ export function SourceCodeEditor({ automaticLayout: true, minimap: { enabled: false }, lineNumbers: "on", - lineNumbersMinChars: 3, - lineDecorationsWidth: 12, + lineNumbersMinChars: 4, + lineDecorationsWidth: 20, glyphMargin: false, - folding: true, + folding: false, overviewRulerLanes: 0, hideCursorInOverviewRuler: true, scrollBeyondLastLine: false, @@ -142,7 +142,17 @@ export function SourceCodeEditor({ fontSize, fontFamily: "ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, Liberation Mono, monospace", - renderWhitespace: "selection", + renderWhitespace: "none", + renderValidationDecorations: "off", + colorDecorators: false, + codeLens: false, + hover: { enabled: false }, + stickyScroll: { enabled: false }, + unicodeHighlight: { + ambiguousCharacters: false, + invisibleCharacters: false, + nonBasicASCII: false, + }, smoothScrolling: true, readOnly, }} diff --git a/surfsense_web/components/layout/ui/sidebar/DesktopLocalTabContent.tsx b/surfsense_web/components/layout/ui/sidebar/DesktopLocalTabContent.tsx new file mode 100644 index 000000000..dd7520d24 --- /dev/null +++ b/surfsense_web/components/layout/ui/sidebar/DesktopLocalTabContent.tsx @@ -0,0 +1,205 @@ +"use client"; + +import { Folder, FolderPlus, Search, X } from "lucide-react"; +import { useAtom } from "jotai"; +import { useCallback, useMemo, useRef, useState } from "react"; +import { localExpandedFolderKeysAtom } from "@/atoms/documents/folder.atoms"; +import { Input } from "@/components/ui/input"; +import { Separator } from "@/components/ui/separator"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuLabel, + 
DropdownMenuSeparator, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; +import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; +import { useDebouncedValue } from "@/hooks/use-debounced-value"; +import { LocalFilesystemBrowser } from "./LocalFilesystemBrowser"; + +const getFolderDisplayName = (rootPath: string): string => + rootPath.split(/[\\/]/).at(-1) || rootPath; + +interface DesktopLocalTabContentProps { + localRootPaths: string[]; + canAddMoreLocalRoots: boolean; + maxLocalFilesystemRoots: number; + searchSpaceId: number; + onPickFilesystemRoot: () => Promise | void; + onRemoveFilesystemRoot: (rootPath: string) => Promise | void; + onClearFilesystemRoots: () => Promise | void; + onOpenLocalFile: (localFilePath: string) => void; + electronAvailable: boolean; +} + +export function DesktopLocalTabContent({ + localRootPaths, + canAddMoreLocalRoots, + maxLocalFilesystemRoots, + searchSpaceId, + onPickFilesystemRoot, + onRemoveFilesystemRoot, + onClearFilesystemRoots, + onOpenLocalFile, + electronAvailable, +}: DesktopLocalTabContentProps) { + const [localSearch, setLocalSearch] = useState(""); + const debouncedLocalSearch = useDebouncedValue(localSearch, 250); + const localSearchInputRef = useRef(null); + const [expandedFolderKeyMap, setExpandedFolderKeyMap] = useAtom(localExpandedFolderKeysAtom); + const expandedFolderKeys = useMemo( + () => new Set(expandedFolderKeyMap[searchSpaceId] ?? []), + [expandedFolderKeyMap, searchSpaceId] + ); + const handleExpandedFolderKeysChange = useCallback( + (nextExpandedKeys: Set) => { + setExpandedFolderKeyMap((prev) => ({ + ...prev, + [searchSpaceId]: Array.from(nextExpandedKeys), + })); + }, + [searchSpaceId, setExpandedFolderKeyMap] + ); + + return ( +
+
+
+ {localRootPaths.length > 0 ? ( + + + + + + + Selected folders + + + {localRootPaths.map((rootPath) => ( + event.preventDefault()} + className="group h-8 gap-1.5 px-1.5 text-sm text-foreground" + > + + + {getFolderDisplayName(rootPath)} + + + + ))} + + { + void onClearFilesystemRoots(); + }} + > + Clear all folders + + + + ) : ( +
+ + No local folders selected +
+ )} + + {electronAvailable ? ( + + + + + + + + {canAddMoreLocalRoots + ? "Add folder" + : `You can add up to ${maxLocalFilesystemRoots} folders`} + + + ) : null} +
+
+
+
+
+
+ setLocalSearch(e.target.value)} + placeholder="Search local files" + type="text" + aria-label="Search local files" + /> + {Boolean(localSearch) && ( + + )} +
+
+ +
+ ); +} diff --git a/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx b/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx index e88478259..0a147f7b7 100644 --- a/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx +++ b/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx @@ -6,19 +6,17 @@ import { ChevronLeft, ChevronRight, FileText, - Folder, FolderClock, - FolderPlus, Laptop, Lock, Paperclip, - Search, Server, Trash2, Unplug, Upload, X, } from "lucide-react"; +import dynamic from "next/dynamic"; import Link from "next/link"; import { useParams } from "next/navigation"; import { useTranslations } from "next-intl"; @@ -49,7 +47,6 @@ import { EXPORT_FILE_EXTENSIONS } from "@/components/shared/ExportMenuItems"; import { DEFAULT_EXCLUDE_PATTERNS, FolderWatchDialog, - type SelectedFolder, } from "@/components/sources/FolderWatchDialog"; import { AlertDialog, @@ -64,16 +61,7 @@ import { import { Avatar, AvatarFallback, AvatarGroup } from "@/components/ui/avatar"; import { Button } from "@/components/ui/button"; import { Drawer, DrawerContent, DrawerHandle, DrawerTitle } from "@/components/ui/drawer"; -import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuItem, - DropdownMenuLabel, - DropdownMenuSeparator, - DropdownMenuTrigger, -} from "@/components/ui/dropdown-menu"; -import { Input } from "@/components/ui/input"; -import { Separator } from "@/components/ui/separator"; +import { Skeleton } from "@/components/ui/skeleton"; import { Spinner } from "@/components/ui/spinner"; import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"; import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; @@ -83,7 +71,7 @@ import { getConnectorIcon } from "@/contracts/enums/connectorIcons"; import type { DocumentTypeEnum } from "@/contracts/types/document.types"; import { useDebouncedValue } from "@/hooks/use-debounced-value"; import { useMediaQuery } from 
"@/hooks/use-media-query"; -import { useElectronAPI } from "@/hooks/use-platform"; +import { useElectronAPI, usePlatform } from "@/hooks/use-platform"; import { anonymousChatApiService } from "@/lib/apis/anonymous-chat-api.service"; import { documentsApiService } from "@/lib/apis/documents-api.service"; import { foldersApiService } from "@/lib/apis/folders-api.service"; @@ -92,12 +80,42 @@ import { authenticatedFetch } from "@/lib/auth-utils"; import { uploadFolderScan } from "@/lib/folder-sync-upload"; import { getSupportedExtensionsSet } from "@/lib/supported-extensions"; import { queries } from "@/zero/queries/index"; -import { LocalFilesystemBrowser } from "./LocalFilesystemBrowser"; import { SidebarSlideOutPanel } from "./SidebarSlideOutPanel"; +const DesktopLocalTabContent = dynamic( + () => import("./DesktopLocalTabContent").then((mod) => mod.DesktopLocalTabContent), + { ssr: false } +); + const NON_DELETABLE_DOCUMENT_TYPES: readonly string[] = ["SURFSENSE_DOCS"]; const LOCAL_FILESYSTEM_TRUST_KEY = "surfsense.local-filesystem-trust.v1"; -const MAX_LOCAL_FILESYSTEM_ROOTS = 5; +const MAX_LOCAL_FILESYSTEM_ROOTS = 10; + +function CloudDocumentsSkeleton() { + const rows = [ + { id: "row-1", widthClass: "w-44" }, + { id: "row-2", widthClass: "w-32" }, + { id: "row-3", widthClass: "w-32" }, + { id: "row-4", widthClass: "w-44" }, + { id: "row-5", widthClass: "w-32" }, + { id: "row-6", widthClass: "w-32" }, + { id: "row-7", widthClass: "w-44" }, + { id: "row-8", widthClass: "w-32" }, + ]; + + return ( +
+
+ {rows.map((row) => ( +
+ + +
+ ))} +
+
+ ); +} type FilesystemSettings = { mode: "cloud" | "desktop_local_folder"; @@ -115,9 +133,6 @@ interface WatchedFolderEntry { active: boolean; } -const getFolderDisplayName = (rootPath: string): string => - rootPath.split(/[\\/]/).at(-1) || rootPath; - const SHOWCASE_CONNECTORS = [ { type: "GOOGLE_DRIVE_CONNECTOR", label: "Google Drive" }, { type: "GOOGLE_GMAIL_CONNECTOR", label: "Gmail" }, @@ -143,25 +158,40 @@ interface DocumentsSidebarProps { export function DocumentsSidebar(props: DocumentsSidebarProps) { const isAnonymous = useIsAnonymous(); + const { isDesktop } = usePlatform(); if (isAnonymous) { return ; } - return ; + return isDesktop ? ( + + ) : ( + + ); } -function AuthenticatedDocumentsSidebar({ +function AuthenticatedDesktopDocumentsSidebar(props: DocumentsSidebarProps) { + return ; +} + +function AuthenticatedWebDocumentsSidebar(props: DocumentsSidebarProps) { + return ; +} + +function AuthenticatedDocumentsSidebarBase({ open, onOpenChange, isDocked = false, onDockedChange, embedded = false, headerAction, -}: DocumentsSidebarProps) { + desktopFeaturesEnabled, +}: DocumentsSidebarProps & { desktopFeaturesEnabled: boolean }) { const t = useTranslations("documents"); const tSidebar = useTranslations("sidebar"); const params = useParams(); const isMobile = !useMediaQuery("(min-width: 640px)"); - const electronAPI = useElectronAPI(); + const platformElectronAPI = useElectronAPI(); + const electronAPI = desktopFeaturesEnabled ? 
platformElectronAPI : null; const searchSpaceId = Number(params.search_space_id); const setConnectorDialogOpen = useSetAtom(connectorDialogOpenAtom); const setRightPanelCollapsed = useSetAtom(rightPanelCollapsedAtom); @@ -171,9 +201,6 @@ function AuthenticatedDocumentsSidebar({ const [search, setSearch] = useState(""); const debouncedSearch = useDebouncedValue(search, 250); - const [localSearch, setLocalSearch] = useState(""); - const debouncedLocalSearch = useDebouncedValue(localSearch, 250); - const localSearchInputRef = useRef(null); const [activeTypes, setActiveTypes] = useState([]); const [filesystemSettings, setFilesystemSettings] = useState(null); const [localTrustDialogOpen, setLocalTrustDialogOpen] = useState(false); @@ -181,13 +208,14 @@ function AuthenticatedDocumentsSidebar({ const [watchedFolderIds, setWatchedFolderIds] = useState>(new Set()); const [folderWatchOpen, setFolderWatchOpen] = useAtom(folderWatchDialogOpenAtom); const [watchInitialFolder, setWatchInitialFolder] = useAtom(folderWatchInitialFolderAtom); - const isElectron = typeof window !== "undefined" && !!window.electronAPI; + const isElectron = + desktopFeaturesEnabled && typeof window !== "undefined" && !!window.electronAPI; useEffect(() => { if (!electronAPI?.getAgentFilesystemSettings) return; let mounted = true; electronAPI - .getAgentFilesystemSettings() + .getAgentFilesystemSettings(searchSpaceId) .then((settings: FilesystemSettings) => { if (!mounted) return; setFilesystemSettings(settings); @@ -203,7 +231,7 @@ function AuthenticatedDocumentsSidebar({ return () => { mounted = false; }; - }, [electronAPI]); + }, [electronAPI, searchSpaceId]); const hasLocalFilesystemTrust = useCallback(() => { try { @@ -219,17 +247,20 @@ function AuthenticatedDocumentsSidebar({ const applyLocalRootPath = useCallback( async (path: string) => { if (!electronAPI?.setAgentFilesystemSettings) return; - const nextLocalRootPaths = [...localRootPaths, path] + const nextLocalRootPaths = [path, 
...localRootPaths] .filter((rootPath, index, allPaths) => allPaths.indexOf(rootPath) === index) .slice(0, MAX_LOCAL_FILESYSTEM_ROOTS); if (nextLocalRootPaths.length === localRootPaths.length) return; - const updated = await electronAPI.setAgentFilesystemSettings({ - mode: "desktop_local_folder", - localRootPaths: nextLocalRootPaths, - }); + const updated = await electronAPI.setAgentFilesystemSettings( + { + mode: "desktop_local_folder", + localRootPaths: nextLocalRootPaths, + }, + searchSpaceId + ); setFilesystemSettings(updated); }, - [electronAPI, localRootPaths] + [electronAPI, localRootPaths, searchSpaceId] ); const runPickLocalRoot = useCallback(async () => { @@ -255,33 +286,42 @@ function AuthenticatedDocumentsSidebar({ const handleRemoveFilesystemRoot = useCallback( async (rootPathToRemove: string) => { if (!electronAPI?.setAgentFilesystemSettings) return; - const updated = await electronAPI.setAgentFilesystemSettings({ - mode: "desktop_local_folder", - localRootPaths: localRootPaths.filter((rootPath) => rootPath !== rootPathToRemove), - }); + const updated = await electronAPI.setAgentFilesystemSettings( + { + mode: "desktop_local_folder", + localRootPaths: localRootPaths.filter((rootPath) => rootPath !== rootPathToRemove), + }, + searchSpaceId + ); setFilesystemSettings(updated); }, - [electronAPI, localRootPaths] + [electronAPI, localRootPaths, searchSpaceId] ); const handleClearFilesystemRoots = useCallback(async () => { if (!electronAPI?.setAgentFilesystemSettings) return; - const updated = await electronAPI.setAgentFilesystemSettings({ - mode: "desktop_local_folder", - localRootPaths: [], - }); + const updated = await electronAPI.setAgentFilesystemSettings( + { + mode: "desktop_local_folder", + localRootPaths: [], + }, + searchSpaceId + ); setFilesystemSettings(updated); - }, [electronAPI]); + }, [electronAPI, searchSpaceId]); const handleFilesystemTabChange = useCallback( async (tab: "cloud" | "local") => { if (!electronAPI?.setAgentFilesystemSettings) 
return; - const updated = await electronAPI.setAgentFilesystemSettings({ - mode: tab === "cloud" ? "cloud" : "desktop_local_folder", - }); + const updated = await electronAPI.setAgentFilesystemSettings( + { + mode: tab === "cloud" ? "cloud" : "desktop_local_folder", + }, + searchSpaceId + ); setFilesystemSettings(updated); }, - [electronAPI] + [electronAPI, searchSpaceId] ); // AI File Sort state @@ -407,8 +447,8 @@ function AuthenticatedDocumentsSidebar({ ); // Zero queries for tree data - const [zeroFolders] = useQuery(queries.folders.bySpace({ searchSpaceId })); - const [zeroAllDocs] = useQuery(queries.documents.bySpace({ searchSpaceId })); + const [zeroFolders, zeroFoldersResult] = useQuery(queries.folders.bySpace({ searchSpaceId })); + const [zeroAllDocs, zeroAllDocsResult] = useQuery(queries.documents.bySpace({ searchSpaceId })); const [agentCreatedDocs, setAgentCreatedDocs] = useAtom(agentCreatedDocumentsAtom); const treeFolders: FolderDisplay[] = useMemo( @@ -994,6 +1034,9 @@ function AuthenticatedDocumentsSidebar({ const showFilesystemTabs = !isMobile && !!electronAPI && !!filesystemSettings; const currentFilesystemTab = filesystemSettings?.mode === "desktop_local_folder" ? "local" : "cloud"; + const showCloudSkeleton = + currentFilesystemTab === "cloud" && + (zeroFoldersResult.type !== "complete" || zeroAllDocsResult.type !== "complete"); const cloudContent = ( <> @@ -1106,173 +1149,73 @@ function AuthenticatedDocumentsSidebar({
)} - { - openEditorPanel({ - documentId: doc.id, - searchSpaceId, - title: doc.title, - }); - }} - onEditDocument={(doc) => { - openEditorPanel({ - documentId: doc.id, - searchSpaceId, - title: doc.title, - }); - }} - onDeleteDocument={(doc) => handleDeleteDocument(doc.id)} - onMoveDocument={handleMoveDocument} - onExportDocument={handleExportDocument} - onVersionHistory={(doc) => setVersionDocId(doc.id)} - activeTypes={activeTypes} - onDropIntoFolder={handleDropIntoFolder} - onReorderFolder={handleReorderFolder} - watchedFolderIds={watchedFolderIds} - onRescanFolder={handleRescanFolder} - onStopWatchingFolder={handleStopWatching} - onExportFolder={handleExportFolder} - /> + {showCloudSkeleton ? ( + + ) : ( + { + openEditorPanel({ + documentId: doc.id, + searchSpaceId, + title: doc.title, + }); + }} + onEditDocument={(doc) => { + openEditorPanel({ + documentId: doc.id, + searchSpaceId, + title: doc.title, + }); + }} + onDeleteDocument={(doc) => handleDeleteDocument(doc.id)} + onMoveDocument={handleMoveDocument} + onExportDocument={handleExportDocument} + onVersionHistory={(doc) => setVersionDocId(doc.id)} + activeTypes={activeTypes} + onDropIntoFolder={handleDropIntoFolder} + onReorderFolder={handleReorderFolder} + watchedFolderIds={watchedFolderIds} + onRescanFolder={handleRescanFolder} + onStopWatchingFolder={handleStopWatching} + onExportFolder={handleExportFolder} + /> + )}
); const localContent = ( -
-
-
- {localRootPaths.length > 0 ? ( - - - - - - - Selected folders - - - {localRootPaths.map((rootPath) => ( - { - void handleRemoveFilesystemRoot(rootPath); - }} - className="group h-8 gap-1.5 px-1.5 text-sm text-foreground" - > - - - {getFolderDisplayName(rootPath)} - - - - ))} - - { - void handleClearFilesystemRoots(); - }} - > - Clear all folders - - - - ) : ( -
- - No local folders selected -
- )} - - -
-
-
-
-
-
- setLocalSearch(e.target.value)} - placeholder="Search local files" - type="text" - aria-label="Search local files" - /> - {Boolean(localSearch) && ( - - )} -
-
- { - openEditorPanel({ - kind: "local_file", - localFilePath, - title: localFilePath.split("/").pop() || localFilePath, - searchSpaceId, - }); - }} - /> -
+ { + openEditorPanel({ + kind: "local_file", + localFilePath, + title: localFilePath.split("/").pop() || localFilePath, + searchSpaceId, + }); + }} + electronAvailable={!!electronAPI} + /> ); const documentsContent = ( @@ -1305,16 +1248,16 @@ function AuthenticatedDocumentsSidebar({ className="h-5 gap-1 px-1.5 text-[11px] select-none focus-visible:ring-0 focus-visible:ring-offset-0 data-[state=active]:bg-muted-foreground/25 data-[state=active]:text-foreground data-[state=active]:shadow-none" title="Cloud" > - - Cloud + + Cloud - - Local + + Local @@ -1366,7 +1309,7 @@ function AuthenticatedDocumentsSidebar({ {cloudContent} - {localContent} + {currentFilesystemTab === "local" ? localContent : null} ) : ( @@ -1904,10 +1847,13 @@ function AnonymousDocumentsSidebar({ type="button" onClick={handleAnonUploadClick} disabled={isUploading} - className="flex w-full items-center justify-center gap-2 rounded-lg border-2 border-dashed border-primary/30 px-4 py-6 text-sm text-primary transition-colors hover:border-primary/60 hover:bg-primary/5 cursor-pointer disabled:opacity-50 disabled:pointer-events-none" + className="relative flex w-full items-center justify-center rounded-lg border-2 border-dashed border-primary/30 px-4 py-6 text-sm text-primary transition-colors hover:border-primary/60 hover:bg-primary/5 cursor-pointer disabled:opacity-50 disabled:pointer-events-none" > - - {isUploading ? "Uploading..." : "Upload a document"} + + + Upload a document + + {isUploading && }

Text, code, CSV, and HTML files only. Create an account for PDFs, images, and 30+ diff --git a/surfsense_web/components/layout/ui/sidebar/InboxSidebar.tsx b/surfsense_web/components/layout/ui/sidebar/InboxSidebar.tsx index 65946487e..fa05559d7 100644 --- a/surfsense_web/components/layout/ui/sidebar/InboxSidebar.tsx +++ b/surfsense_web/components/layout/ui/sidebar/InboxSidebar.tsx @@ -14,7 +14,7 @@ import { Inbox, LayoutGrid, ListFilter, - MessageSquare, + MessageCircleReply, Search, X, } from "lucide-react"; @@ -847,7 +847,7 @@ export function InboxSidebarContent({ - + {t("comments") || "Comments"} {formatInboxCount(comments.unreadCount)} @@ -1032,7 +1032,7 @@ export function InboxSidebarContent({ ) : (

{activeTab === "comments" ? ( - + ) : ( )} diff --git a/surfsense_web/components/layout/ui/sidebar/LocalFilesystemBrowser.tsx b/surfsense_web/components/layout/ui/sidebar/LocalFilesystemBrowser.tsx index 93227054b..19c47d605 100644 --- a/surfsense_web/components/layout/ui/sidebar/LocalFilesystemBrowser.tsx +++ b/surfsense_web/components/layout/ui/sidebar/LocalFilesystemBrowser.tsx @@ -1,17 +1,20 @@ "use client"; -import { ChevronDown, ChevronRight, FileText, Folder } from "lucide-react"; -import { useCallback, useEffect, useMemo, useState } from "react"; +import { ChevronDown, ChevronRight, FileText, Folder, FolderOpen } from "lucide-react"; +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { DEFAULT_EXCLUDE_PATTERNS } from "@/components/sources/FolderWatchDialog"; +import { Skeleton } from "@/components/ui/skeleton"; import { Spinner } from "@/components/ui/spinner"; import { useElectronAPI } from "@/hooks/use-platform"; -import { getSupportedExtensionsSet } from "@/lib/supported-extensions"; interface LocalFilesystemBrowserProps { rootPaths: string[]; searchSpaceId: number; + active?: boolean; searchQuery?: string; onOpenFile: (fullPath: string) => void; + expandedFolderKeys?: Set; + onExpandedFolderKeysChange?: (nextExpandedKeys: Set) => void; } interface LocalFolderFileEntry { @@ -39,6 +42,53 @@ type LocalRootMount = { rootPath: string; }; +type MountLoadStatus = "idle" | "loading" | "complete" | "error"; + +const LOCAL_OPENABLE_EXTENSIONS = [ + ".md", + ".markdown", + ".txt", + ".json", + ".yaml", + ".yml", + ".csv", + ".tsv", + ".xml", + ".html", + ".htm", + ".css", + ".scss", + ".sass", + ".sql", + ".toml", + ".ini", + ".conf", + ".log", + ".py", + ".js", + ".jsx", + ".mjs", + ".cjs", + ".ts", + ".tsx", + ".java", + ".kt", + ".kts", + ".go", + ".rs", + ".rb", + ".php", + ".swift", + ".r", + ".lua", + ".sh", + ".bash", + ".zsh", + ".fish", + ".env", + ".mk", +]; + const getFolderDisplayName = (rootPath: string): string => 
rootPath.split(/[\\/]/).at(-1) || rootPath; @@ -69,24 +119,82 @@ function toMountedVirtualPath(mount: string, relativePath: string): string { return `/${mount}${toVirtualPath(relativePath)}`; } +function getNormalizedExtension(pathValue: string): string { + const fileName = getFileName(pathValue).toLowerCase(); + if (!fileName) return ""; + if (fileName === "dockerfile" || fileName === "makefile") { + return `.${fileName}`; + } + const dotIndex = fileName.lastIndexOf("."); + if (dotIndex <= 0) return ""; + return fileName.slice(dotIndex); +} + export function LocalFilesystemBrowser({ rootPaths, searchSpaceId, + active = true, searchQuery, onOpenFile, + expandedFolderKeys, + onExpandedFolderKeysChange, }: LocalFilesystemBrowserProps) { const electronAPI = useElectronAPI(); const [rootStateMap, setRootStateMap] = useState>({}); - const [expandedFolderKeys, setExpandedFolderKeys] = useState>(new Set()); + const [internalExpandedFolderKeys, setInternalExpandedFolderKeys] = useState>( + new Set() + ); const [mountByRootKey, setMountByRootKey] = useState>(new Map()); - const supportedExtensions = useMemo(() => Array.from(getSupportedExtensionsSet()), []); + const [mountStatus, setMountStatus] = useState("idle"); + const [mountRefreshInFlight, setMountRefreshInFlight] = useState(false); + const [reloadNonceByRoot, setReloadNonceByRoot] = useState>({}); + const lastLoadedSignatureByRootRef = useRef>(new Map()); + const hasLoadedMountsOnceRef = useRef(false); + const hasResolvedAtLeastOneRootRef = useRef(false); + const openableExtensions = useMemo(() => new Set(LOCAL_OPENABLE_EXTENSIONS), []); const isWindowsPlatform = electronAPI?.versions.platform === "win32"; + const effectiveExpandedFolderKeys = expandedFolderKeys ?? 
internalExpandedFolderKeys; useEffect(() => { - if (!electronAPI?.listFolderFiles) return; + if (!active) return; + if (!electronAPI?.listAgentFilesystemFiles) { + for (const rootPath of rootPaths) { + setRootStateMap((prev) => ({ + ...prev, + [rootPath]: { + loading: false, + error: "Desktop app update required for local mode browsing.", + files: [], + }, + })); + } + return; + } + const rootEntries = rootPaths.map((rootPath) => ({ + rootPath, + rootKey: normalizeRootPathForLookup(rootPath, isWindowsPlatform), + })); + const activeRootKeys = new Set(rootEntries.map((entry) => entry.rootKey)); + for (const key of Array.from(lastLoadedSignatureByRootRef.current.keys())) { + if (!activeRootKeys.has(key)) { + lastLoadedSignatureByRootRef.current.delete(key); + } + } + const rootsToReload = rootEntries.filter(({ rootKey }) => { + const nonce = reloadNonceByRoot[rootKey] ?? 0; + const signature = `${searchSpaceId}:${rootKey}:${nonce}`; + return lastLoadedSignatureByRootRef.current.get(rootKey) !== signature; + }); + if (rootsToReload.length === 0) { + return; + } + for (const { rootKey } of rootsToReload) { + const nonce = reloadNonceByRoot[rootKey] ?? 
0; + lastLoadedSignatureByRootRef.current.set(rootKey, `${searchSpaceId}:${rootKey}:${nonce}`); + } let cancelled = false; - for (const rootPath of rootPaths) { + for (const { rootPath } of rootsToReload) { setRootStateMap((prev) => ({ ...prev, [rootPath]: { @@ -98,16 +206,12 @@ export function LocalFilesystemBrowser({ } void Promise.all( - rootPaths.map(async (rootPath) => { + rootsToReload.map(async ({ rootPath }) => { try { - const files = (await electronAPI.listFolderFiles({ - path: rootPath, - name: getFolderDisplayName(rootPath), - excludePatterns: DEFAULT_EXCLUDE_PATTERNS, - fileExtensions: supportedExtensions, - rootFolderId: null, + const files = (await electronAPI.listAgentFilesystemFiles({ + rootPath, searchSpaceId, - active: true, + excludePatterns: DEFAULT_EXCLUDE_PATTERNS, })) as LocalFolderFileEntry[]; if (cancelled) return; setRootStateMap((prev) => ({ @@ -135,32 +239,114 @@ export function LocalFilesystemBrowser({ return () => { cancelled = true; }; - }, [electronAPI, rootPaths, searchSpaceId, supportedExtensions]); + }, [active, electronAPI, isWindowsPlatform, reloadNonceByRoot, rootPaths, searchSpaceId]); + + useEffect(() => { + if (active) return; + lastLoadedSignatureByRootRef.current.clear(); + }, [active]); + + useEffect(() => { + if (!electronAPI?.startAgentFilesystemTreeWatch) return; + if (!electronAPI?.stopAgentFilesystemTreeWatch) return; + if (!electronAPI?.onAgentFilesystemTreeDirty) return; + if (!active) return; + if (rootPaths.length === 0) { + void electronAPI.stopAgentFilesystemTreeWatch(searchSpaceId); + return; + } + + const unsubscribe = electronAPI.onAgentFilesystemTreeDirty( + (event: { + searchSpaceId: number | null; + reason: "watcher_event" | "safety_poll"; + rootPath: string; + changedPath: string | null; + timestamp: number; + }) => { + if ((event.searchSpaceId ?? null) !== (searchSpaceId ?? 
null)) { + return; + } + const eventRootKey = normalizeRootPathForLookup(event.rootPath, isWindowsPlatform); + const knownRootKeys = new Set( + rootPaths.map((rootPath) => normalizeRootPathForLookup(rootPath, isWindowsPlatform)) + ); + if (!knownRootKeys.has(eventRootKey)) { + setReloadNonceByRoot((prev) => { + const next = { ...prev }; + for (const rootKey of knownRootKeys) { + next[rootKey] = (prev[rootKey] ?? 0) + 1; + } + return next; + }); + return; + } + setReloadNonceByRoot((prev) => ({ + ...prev, + [eventRootKey]: (prev[eventRootKey] ?? 0) + 1, + })); + } + ); + void electronAPI.startAgentFilesystemTreeWatch({ + searchSpaceId, + rootPaths, + excludePatterns: DEFAULT_EXCLUDE_PATTERNS, + }); + + return () => { + unsubscribe(); + void electronAPI.stopAgentFilesystemTreeWatch(searchSpaceId); + }; + }, [active, electronAPI, isWindowsPlatform, rootPaths, searchSpaceId]); useEffect(() => { if (!electronAPI?.getAgentFilesystemMounts) { + setMountStatus("error"); setMountByRootKey(new Map()); return; } + if (rootPaths.length === 0) { + setMountByRootKey(new Map()); + setMountStatus("complete"); + setMountRefreshInFlight(false); + hasLoadedMountsOnceRef.current = true; + return; + } let cancelled = false; + const isInitialMountLoad = !hasLoadedMountsOnceRef.current; + if (isInitialMountLoad) { + setMountStatus("loading"); + } else { + setMountRefreshInFlight(true); + } void electronAPI - .getAgentFilesystemMounts() + .getAgentFilesystemMounts(searchSpaceId) .then((mounts: LocalRootMount[]) => { if (cancelled) return; const next = new Map(); for (const entry of mounts) { - next.set(normalizeRootPathForLookup(entry.rootPath, isWindowsPlatform), entry.mount); + const normalizedRootKey = normalizeRootPathForLookup(entry.rootPath, isWindowsPlatform); + next.set(normalizedRootKey, entry.mount); } setMountByRootKey(next); + setMountStatus("complete"); + hasLoadedMountsOnceRef.current = true; }) .catch(() => { if (cancelled) return; - setMountByRootKey(new Map()); + if 
(isInitialMountLoad) { + setMountByRootKey(new Map()); + setMountStatus("error"); + } + }) + .finally(() => { + if (cancelled) return; + setMountRefreshInFlight(false); }); return () => { cancelled = true; }; - }, [electronAPI, isWindowsPlatform, rootPaths]); + }, [electronAPI, isWindowsPlatform, rootPaths, searchSpaceId]); const treeByRoot = useMemo(() => { const query = searchQuery?.trim().toLowerCase() ?? ""; @@ -193,21 +379,30 @@ export function LocalFilesystemBrowser({ }); }, [rootPaths, rootStateMap, searchQuery]); - const toggleFolder = useCallback((folderKey: string) => { - setExpandedFolderKeys((prev) => { - const next = new Set(prev); - if (next.has(folderKey)) { - next.delete(folderKey); - } else { - next.add(folderKey); + const toggleFolder = useCallback( + (folderKey: string) => { + const update = (prev: Set) => { + const next = new Set(prev); + if (next.has(folderKey)) { + next.delete(folderKey); + } else { + next.add(folderKey); + } + return next; + }; + if (onExpandedFolderKeysChange) { + onExpandedFolderKeysChange(update(effectiveExpandedFolderKeys)); + return; } - return next; - }); - }, []); + setInternalExpandedFolderKeys(update); + }, + [effectiveExpandedFolderKeys, onExpandedFolderKeysChange] + ); const renderFolder = useCallback( (folder: LocalFolderNode, depth: number, mount: string) => { - const isExpanded = expandedFolderKeys.has(folder.key); + const isExpanded = effectiveExpandedFolderKeys.has(folder.key); + const FolderIcon = isExpanded ? 
FolderOpen : Folder; const childFolders = Array.from(folder.folders.values()).sort((a, b) => a.name.localeCompare(b.name) ); @@ -226,32 +421,47 @@ export function LocalFilesystemBrowser({ ) : ( )} - + {folder.name} {isExpanded && ( <> {childFolders.map((childFolder) => renderFolder(childFolder, depth + 1, mount))} - {files.map((file) => ( - - ))} + {files.map((file) => { + const extension = getNormalizedExtension(file.relativePath); + const isOpenable = openableExtensions.has(extension); + return ( + + ); + })} )}
); }, - [expandedFolderKeys, onOpenFile, toggleFolder] + [effectiveExpandedFolderKeys, onOpenFile, openableExtensions, toggleFolder] ); if (rootPaths.length === 0) { @@ -265,6 +475,43 @@ export function LocalFilesystemBrowser({ ); } + const allRootsLoaded = rootPaths.every((rootPath) => { + const state = rootStateMap[rootPath]; + return !!state && !state.loading; + }); + const mountsSettled = mountStatus === "complete" || mountStatus === "error"; + if (allRootsLoaded && mountsSettled && rootPaths.length > 0) { + hasResolvedAtLeastOneRootRef.current = true; + } + const showInitialLoading = + !hasResolvedAtLeastOneRootRef.current && (!allRootsLoaded || !mountsSettled); + + if (showInitialLoading) { + const rows = [ + { id: "local-row-1", widthClass: "w-44" }, + { id: "local-row-2", widthClass: "w-32" }, + { id: "local-row-3", widthClass: "w-32" }, + { id: "local-row-4", widthClass: "w-44" }, + { id: "local-row-5", widthClass: "w-32" }, + { id: "local-row-6", widthClass: "w-32" }, + { id: "local-row-7", widthClass: "w-44" }, + { id: "local-row-8", widthClass: "w-32" }, + ]; + + return ( +
+
+ {rows.map((row) => ( +
+ + +
+ ))} +
+
+ ); + } + return (
{treeByRoot.map(({ rootPath, rootNode, matchCount, totalCount }) => { @@ -273,12 +520,11 @@ export function LocalFilesystemBrowser({ const mount = mountByRootKey.get(rootKey); if (!state || state.loading) { return ( -
- - Loading {getFolderDisplayName(rootPath)}... +
+
+ + Loading {getFolderDisplayName(rootPath)}... +
); } @@ -297,11 +543,24 @@ export function LocalFilesystemBrowser({ return (
{mount ? renderFolder(rootNode, 0, mount) : null} - {!mount && ( + {!mount && (mountRefreshInFlight || mountStatus === "loading") && ( +
+
+ + Loading {getFolderDisplayName(rootPath)}... +
+
+ )} + {!mount && mountStatus === "complete" && !mountRefreshInFlight && (
Unable to resolve mounted root for this folder.
)} + {!mount && mountStatus === "error" && ( +
+ Failed to resolve local folder mounts. +
+ )} {isEmpty && (
No supported files found in this folder. diff --git a/surfsense_web/components/layout/ui/tabs/DocumentTabContent.tsx b/surfsense_web/components/layout/ui/tabs/DocumentTabContent.tsx index 59eccd093..77668a93d 100644 --- a/surfsense_web/components/layout/ui/tabs/DocumentTabContent.tsx +++ b/surfsense_web/components/layout/ui/tabs/DocumentTabContent.tsx @@ -1,6 +1,6 @@ "use client"; -import { Download, FileQuestionMark, FileText, Loader2, Pencil, RefreshCw } from "lucide-react"; +import { Download, FileQuestionMark, FileText, Pencil, RefreshCw } from "lucide-react"; import { useRouter } from "next/navigation"; import { useCallback, useEffect, useRef, useState } from "react"; import { toast } from "sonner"; @@ -8,6 +8,7 @@ import { PlateEditor } from "@/components/editor/plate-editor"; import { MarkdownViewer } from "@/components/markdown-viewer"; import { Alert, AlertDescription } from "@/components/ui/alert"; import { Button } from "@/components/ui/button"; +import { Spinner } from "@/components/ui/spinner"; import { authenticatedFetch, getBearerToken, redirectToLogin } from "@/lib/auth-utils"; const LARGE_DOCUMENT_THRESHOLD = 2 * 1024 * 1024; // 2MB @@ -278,7 +279,7 @@ export function DocumentTabContent({ documentId, searchSpaceId, title }: Documen diff --git a/surfsense_web/components/new-chat/model-selector.tsx b/surfsense_web/components/new-chat/model-selector.tsx index 385a16aec..3f5a5fa8c 100644 --- a/surfsense_web/components/new-chat/model-selector.tsx +++ b/surfsense_web/components/new-chat/model-selector.tsx @@ -8,7 +8,7 @@ import { ChevronLeft, ChevronRight, ChevronUp, - Edit3, + Pencil, ImageIcon, Layers, Plus, @@ -320,6 +320,30 @@ export function ModelSelector({ [isMobile] ); + const scrollProviderSidebar = useCallback( + (direction: "backward" | "forward") => { + const el = providerSidebarRef.current; + if (!el) return; + const delta = isMobile + ? 
Math.max(56, Math.floor(el.clientWidth * 0.5)) + : Math.max(44, Math.floor(el.clientHeight * 0.4)); + + if (isMobile) { + el.scrollBy({ + left: direction === "backward" ? -delta : delta, + behavior: "smooth", + }); + return; + } + + el.scrollBy({ + top: direction === "backward" ? -delta : delta, + behavior: "smooth", + }); + }, + [isMobile] + ); + // Cmd/Ctrl+M shortcut (desktop only) useEffect(() => { if (isMobile) return; @@ -716,17 +740,40 @@ export function ModelSelector({ return (
- {!isMobile && sidebarScrollPos !== "top" && ( -
- + {!isMobile && ( +
+
)} - {isMobile && sidebarScrollPos !== "top" && ( -
+ {isMobile && ( +
)} @@ -802,13 +849,34 @@ export function ModelSelector({ ); })}
- {!isMobile && sidebarScrollPos !== "bottom" && ( -
- + {!isMobile && ( +
+
)} - {isMobile && sidebarScrollPos !== "bottom" && ( -
+ {isMobile && ( +
)} @@ -923,7 +991,7 @@ export function ModelSelector({ className="size-7 rounded-md hover:bg-muted opacity-0 group-hover:opacity-100 transition-opacity" onClick={(e) => handleEditItem(e, item)} > - + )} {isSelected && } diff --git a/surfsense_web/components/public-chat-snapshots/public-chat-snapshot-row.tsx b/surfsense_web/components/public-chat-snapshots/public-chat-snapshot-row.tsx index 55bcc52a9..ce3a83791 100644 --- a/surfsense_web/components/public-chat-snapshots/public-chat-snapshot-row.tsx +++ b/surfsense_web/components/public-chat-snapshots/public-chat-snapshot-row.tsx @@ -79,8 +79,11 @@ export function PublicChatSnapshotRow({ variant="ghost" size="icon" className={cn( - "absolute right-0 h-6 w-6 shrink-0 hover:bg-transparent", - dropdownOpen ? "opacity-100" : "sm:opacity-0 sm:group-hover:opacity-100" + "absolute right-0 h-6 w-6 shrink-0", + "hover:bg-accent", + dropdownOpen + ? "opacity-100 bg-accent hover:bg-accent" + : "sm:opacity-0 sm:group-hover:opacity-100" )} > diff --git a/surfsense_web/components/report-panel/pdf-viewer.tsx b/surfsense_web/components/report-panel/pdf-viewer.tsx index c4980dd7e..77d0f83a6 100644 --- a/surfsense_web/components/report-panel/pdf-viewer.tsx +++ b/surfsense_web/components/report-panel/pdf-viewer.tsx @@ -3,7 +3,7 @@ import { ZoomInIcon, ZoomOutIcon } from "lucide-react"; import type { PDFDocumentProxy, RenderTask } from "pdfjs-dist"; import * as pdfjsLib from "pdfjs-dist"; -import { useCallback, useEffect, useRef, useState } from "react"; +import { type ReactNode, useCallback, useEffect, useRef, useState } from "react"; import { Button } from "@/components/ui/button"; import { Spinner } from "@/components/ui/spinner"; import { getAuthHeaders } from "@/lib/auth-utils"; @@ -16,6 +16,8 @@ pdfjsLib.GlobalWorkerOptions.workerSrc = new URL( interface PdfViewerProps { pdfUrl: string; isPublic?: boolean; + /** Extra actions rendered on the right side of the zoom toolbar (e.g. 
download, version switcher) */ + toolbarActions?: ReactNode; } interface PageDimensions { @@ -30,7 +32,7 @@ const PAGE_GAP = 12; const SCROLL_DEBOUNCE_MS = 30; const BUFFER_PAGES = 1; -export function PdfViewer({ pdfUrl, isPublic = false }: PdfViewerProps) { +export function PdfViewer({ pdfUrl, isPublic = false, toolbarActions }: PdfViewerProps) { const [numPages, setNumPages] = useState(0); const [scale, setScale] = useState(1); const [loading, setLoading] = useState(true); @@ -286,29 +288,33 @@ export function PdfViewer({ pdfUrl, isPublic = false }: PdfViewerProps) {
{numPages > 0 && (
- - - {Math.round(scale * 100)}% - - + )} diff --git a/surfsense_web/components/report-panel/report-panel.tsx b/surfsense_web/components/report-panel/report-panel.tsx index c7a8509ed..ede63d902 100644 --- a/surfsense_web/components/report-panel/report-panel.tsx +++ b/surfsense_web/components/report-panel/report-panel.tsx @@ -1,7 +1,7 @@ "use client"; import { useAtomValue, useSetAtom } from "jotai"; -import { Check, ChevronDownIcon, Copy, Pencil, XIcon } from "lucide-react"; +import { Check, ChevronDownIcon, Copy, Download, Pencil, XIcon } from "lucide-react"; import dynamic from "next/dynamic"; import { useCallback, useEffect, useRef, useState } from "react"; import { toast } from "sonner"; @@ -309,6 +309,7 @@ export function ReportPanelContent({ const isResume = reportContent?.content_type === "typst"; const showReportEditingTier = !isResume; const hasUnsavedChanges = editedMarkdown !== null; + const showDesktopHeader = !!onClose; const handleCancelEditing = useCallback(() => { setEditedMarkdown(null); @@ -316,153 +317,177 @@ export function ReportPanelContent({ setIsEditing(false); }, []); + const exportButton = !isEditing && ( + <> + {isResume ? ( + + ) : ( + + + + + + + + + )} + + ); + + const versionSwitcher = !isEditing && versions.length > 1 && ( + + + + + + {versions.map((v, i) => ( + setActiveReportId(v.id)} + className={v.id === activeReportId ? "bg-accent font-medium" : ""} + > + Version {i + 1} + + ))} + + + ); + + const copyButton = !isEditing && showReportEditingTier && ( + + ); + + const editingActions = showReportEditingTier && + !isReadOnly && + (isEditing ? ( + <> + + + + ) : ( + + )); + return ( <> - {/* Action bar — always visible; buttons are disabled while loading */} -
-
- {/* Export — plain button for resume (typst), dropdown for others */} - {reportContent?.content_type === "typst" ? ( - - ) : ( - - - - - - - - - )} - - {/* Version switcher — only shown when multiple versions exist */} - {versions.length > 1 && ( - - - - - - {versions.map((v, i) => ( - setActiveReportId(v.id)} - className={v.id === activeReportId ? "bg-accent font-medium" : ""} - > - Version {i + 1} - - ))} - - - )} -
- {onClose && ( - - )} -
- - {showReportEditingTier && ( -
-
-

- {reportContent?.title || title} -

-
-
- {!isEditing && ( - )} - {!isReadOnly && - (isEditing ? ( - <> - - - - ) : ( - - ))}
-
+ + {!isResume && ( +
+
+

+ {reportContent?.title || title} +

+
+
+ {versionSwitcher} + {exportButton} + {copyButton} + {editingActions} +
+
+ )} + + ) : ( + !isResume && ( +
+
+

{reportContent?.title || title}

+
+
+ {versionSwitcher} + {exportButton} + {copyButton} + {editingActions} +
+
+ ) )} {/* Report content — skeleton/error/viewer/editor shown only in this area */} @@ -480,6 +505,12 @@ export function ReportPanelContent({ + {versionSwitcher} + {exportButton} + + } /> ) : reportContent.content ? ( isReadOnly ? ( diff --git a/surfsense_web/components/settings/agent-model-manager.tsx b/surfsense_web/components/settings/agent-model-manager.tsx index f7a2fb824..988befdd0 100644 --- a/surfsense_web/components/settings/agent-model-manager.tsx +++ b/surfsense_web/components/settings/agent-model-manager.tsx @@ -4,10 +4,9 @@ import { useAtomValue } from "jotai"; import { AlertCircle, Dot, - Edit3, FileText, Info, - MessageSquareQuote, + Pencil, RefreshCw, Trash2, } from "lucide-react"; @@ -288,7 +287,7 @@ export function AgentModelManager({ searchSpaceId }: AgentModelManagerProps) { onClick={() => openEditDialog(config)} className="h-7 w-7 rounded-lg text-muted-foreground hover:text-foreground" > - + Edit @@ -323,7 +322,6 @@ export function AgentModelManager({ searchSpaceId }: AgentModelManagerProps) { variant="secondary" className="text-[10px] px-1.5 py-0.5 border-0 text-muted-foreground bg-muted" > - Citations )} diff --git a/surfsense_web/components/settings/image-model-manager.tsx b/surfsense_web/components/settings/image-model-manager.tsx index fb28e5b1c..f5f128f80 100644 --- a/surfsense_web/components/settings/image-model-manager.tsx +++ b/surfsense_web/components/settings/image-model-manager.tsx @@ -1,7 +1,7 @@ "use client"; import { useAtomValue } from "jotai"; -import { AlertCircle, Dot, Edit3, Info, RefreshCw, Trash2 } from "lucide-react"; +import { AlertCircle, Dot, Info, Pencil, RefreshCw, Trash2 } from "lucide-react"; import { useMemo, useState } from "react"; import { deleteImageGenConfigMutationAtom } from "@/atoms/image-gen-config/image-gen-config-mutation.atoms"; import { @@ -116,8 +116,8 @@ export function ImageModelManager({ searchSpaceId }: ImageModelManagerProps) { return (
- {/* Header */} -
+ {/* Header actions */} +
Edit diff --git a/surfsense_web/components/settings/roles-manager.tsx b/surfsense_web/components/settings/roles-manager.tsx index 7f59ecd66..e7dadc20f 100644 --- a/surfsense_web/components/settings/roles-manager.tsx +++ b/surfsense_web/components/settings/roles-manager.tsx @@ -4,21 +4,25 @@ import { useQuery } from "@tanstack/react-query"; import { useAtomValue } from "jotai"; import { Bot, - ChevronDown, - Edit2, + ChevronRight, + ScanEye, + Pencil, FileText, - Globe, + Earth, + Image, Logs, type LucideIcon, - MessageCircle, + MessageCircleReply, MessageSquare, Mic, MoreHorizontal, - Plug, + Unplug, Settings, Shield, + SlidersHorizontal, Trash2, Users, + Video, } from "lucide-react"; import { useCallback, useEffect, useMemo, useState } from "react"; import { toast } from "sonner"; @@ -88,7 +92,7 @@ const CATEGORY_CONFIG: Record< }, comments: { label: "Comments", - icon: MessageCircle, + icon: MessageCircleReply, description: "Add annotations to documents", order: 3, }, @@ -98,6 +102,24 @@ const CATEGORY_CONFIG: Record< description: "Configure AI model settings", order: 4, }, + image_generations: { + label: "Image Models", + icon: Image, + description: "Configure image generation model settings", + order: 4.1, + }, + vision_configs: { + label: "Vision Models", + icon: ScanEye, + description: "Configure vision model settings", + order: 4.2, + }, + video_presentations: { + label: "Video Presentations", + icon: Video, + description: "Generate and manage video presentations", + order: 4.3, + }, podcasts: { label: "Podcasts", icon: Mic, @@ -105,8 +127,8 @@ const CATEGORY_CONFIG: Record< order: 5, }, connectors: { - label: "Integrations", - icon: Plug, + label: "Connectors", + icon: Unplug, description: "Connect external data sources", order: 6, }, @@ -136,10 +158,16 @@ const CATEGORY_CONFIG: Record< }, public_sharing: { label: "Public Chat Sharing", - icon: Globe, + icon: Earth, description: "Share chats publicly via links", order: 11, }, + general: { + label: 
"General", + icon: SlidersHorizontal, + description: "General search space permissions", + order: 12, + }, }; const ACTION_LABELS: Record = { @@ -434,12 +462,11 @@ function RolesContent({ return (
-
- +
{!role.is_system_role && ( -
+
e.stopPropagation()}>
)} - +
{isExpanded && ( @@ -659,52 +682,30 @@ function PermissionsEditor({ return (
-
- +
e.stopPropagation()} onCheckedChange={() => onToggleCategory(category)} aria-label={`Select all ${config.label} permissions`} /> - +
@@ -726,7 +727,7 @@ function PermissionsEditor({ > diff --git a/surfsense_web/components/settings/vision-model-manager.tsx b/surfsense_web/components/settings/vision-model-manager.tsx index 81528c86a..8abfa4774 100644 --- a/surfsense_web/components/settings/vision-model-manager.tsx +++ b/surfsense_web/components/settings/vision-model-manager.tsx @@ -1,7 +1,7 @@ "use client"; import { useAtomValue } from "jotai"; -import { AlertCircle, Dot, Edit3, Info, RefreshCw, Trash2 } from "lucide-react"; +import { AlertCircle, Dot, Info, Pencil, RefreshCw, Trash2 } from "lucide-react"; import { useMemo, useState } from "react"; import { membersAtom, myAccessAtom } from "@/atoms/members/members-query.atoms"; import { deleteVisionLLMConfigMutationAtom } from "@/atoms/vision-llm-config/vision-llm-config-mutation.atoms"; @@ -121,7 +121,7 @@ export function VisionModelManager({ searchSpaceId }: VisionModelManagerProps) { return (
-
+
Edit diff --git a/surfsense_web/components/sources/DocumentUploadTab.tsx b/surfsense_web/components/sources/DocumentUploadTab.tsx index 42fa72847..3b22c0872 100644 --- a/surfsense_web/components/sources/DocumentUploadTab.tsx +++ b/surfsense_web/components/sources/DocumentUploadTab.tsx @@ -764,22 +764,16 @@ export function DocumentUploadTab({
)} diff --git a/surfsense_web/components/tool-ui/generate-report.tsx b/surfsense_web/components/tool-ui/generate-report.tsx index 32f97b6a4..912028596 100644 --- a/surfsense_web/components/tool-ui/generate-report.tsx +++ b/surfsense_web/components/tool-ui/generate-report.tsx @@ -137,10 +137,9 @@ function ReportCard({ const autoOpenedRef = useRef(false); const [metadata, setMetadata] = useState<{ title: string; - wordCount: number | null; versionLabel: string | null; content: string | null; - }>({ title, wordCount: wordCount ?? null, versionLabel: null, content: null }); + }>({ title, versionLabel: null, content: null }); const [isLoading, setIsLoading] = useState(true); const [error, setError] = useState(null); @@ -169,10 +168,8 @@ function ReportCard({ } } const resolvedTitle = parsed.data.title || title; - const resolvedWordCount = parsed.data.report_metadata?.word_count ?? wordCount ?? null; setMetadata({ title: resolvedTitle, - wordCount: resolvedWordCount, versionLabel, content: parsed.data.content ?? null, }); @@ -182,7 +179,7 @@ function ReportCard({ openPanel({ reportId, title: resolvedTitle, - wordCount: resolvedWordCount ?? undefined, + wordCount: parsed.data.report_metadata?.word_count ?? wordCount ?? undefined, shareToken, }); } @@ -210,7 +207,6 @@ function ReportCard({ openPanel({ reportId, title: metadata.title, - wordCount: metadata.wordCount ?? 
undefined, shareToken, }); }; @@ -233,10 +229,8 @@ function ReportCard({ ) : ( <> - {metadata.wordCount != null && `${metadata.wordCount.toLocaleString()} words`} - {metadata.wordCount != null && metadata.versionLabel && ( - - )} + Markdown + {metadata.versionLabel && } {metadata.versionLabel} )} diff --git a/surfsense_web/components/tool-ui/generate-resume.tsx b/surfsense_web/components/tool-ui/generate-resume.tsx index 1290a70ea..4e9d06fbb 100644 --- a/surfsense_web/components/tool-ui/generate-resume.tsx +++ b/surfsense_web/components/tool-ui/generate-resume.tsx @@ -2,6 +2,7 @@ import type { ToolCallMessagePartProps } from "@assistant-ui/react"; import { useAtomValue, useSetAtom } from "jotai"; +import { Dot } from "lucide-react"; import { useParams, usePathname } from "next/navigation"; import * as pdfjsLib from "pdfjs-dist"; import { useCallback, useEffect, useRef, useState } from "react"; @@ -9,6 +10,7 @@ import { z } from "zod"; import { openReportPanelAtom, reportPanelAtom } from "@/atoms/chat/report-panel.atom"; import { TextShimmerLoader } from "@/components/prompt-kit/loader"; import { useMediaQuery } from "@/hooks/use-media-query"; +import { baseApiService } from "@/lib/apis/base-api.service"; import { getAuthHeaders } from "@/lib/auth-utils"; pdfjsLib.GlobalWorkerOptions.workerSrc = new URL( @@ -32,6 +34,18 @@ const GenerateResumeResultSchema = z.object({ error: z.string().nullish(), }); +const ResumeVersionsResponseSchema = z.object({ + id: z.number(), + versions: z + .array( + z.object({ + id: z.number(), + created_at: z.string().nullish(), + }) + ) + .nullish(), +}); + type GenerateResumeArgs = z.infer; type GenerateResumeResult = z.infer; @@ -201,6 +215,7 @@ function ResumeCard({ const autoOpenedRef = useRef(false); const [pdfUrl, setPdfUrl] = useState(null); const [thumbState, setThumbState] = useState<"loading" | "ready" | "error">("loading"); + const [versionLabel, setVersionLabel] = useState(null); useEffect(() => { const previewPath = 
shareToken @@ -219,6 +234,35 @@ function ResumeCard({ } }, [reportId, title, shareToken, autoOpen, isDesktop, openPanel]); + useEffect(() => { + let cancelled = false; + const fetchVersions = async () => { + try { + const url = shareToken + ? `/api/v1/public/${shareToken}/reports/${reportId}/content` + : `/api/v1/reports/${reportId}/content`; + const rawData = await baseApiService.get(url); + if (cancelled) return; + const parsed = ResumeVersionsResponseSchema.safeParse(rawData); + if (parsed.success) { + const versions = parsed.data.versions; + if (versions && versions.length > 1) { + const idx = versions.findIndex((v) => v.id === reportId); + if (idx >= 0) { + setVersionLabel(`version ${idx + 1}`); + } + } + } + } catch { + // silently ignore — version label is non-critical + } + }; + fetchVersions(); + return () => { + cancelled = true; + }; + }, [reportId, shareToken]); + const onThumbLoad = useCallback(() => setThumbState("ready"), []); const onThumbError = useCallback(() => setThumbState("error"), []); @@ -243,8 +287,12 @@ function ResumeCard({ className="w-full text-left transition-colors hover:bg-muted/50 focus:outline-none focus-visible:outline-none cursor-pointer select-none" >
-

{title}

-

PDF

+

{title}

+

+ PDF + {versionLabel && } + {versionLabel} +

diff --git a/surfsense_web/components/ui/tooltip.tsx b/surfsense_web/components/ui/tooltip.tsx index bcf1c72f8..c1469156d 100644 --- a/surfsense_web/components/ui/tooltip.tsx +++ b/surfsense_web/components/ui/tooltip.tsx @@ -6,20 +6,19 @@ import { useEffect, useState } from "react"; import { cn } from "@/lib/utils"; -const MOBILE_BREAKPOINT = 768; - -function useIsTouchDevice() { - const [isTouch, setIsTouch] = useState(false); +function useCanHover() { + const [canHover, setCanHover] = useState(false); useEffect(() => { - const mql = window.matchMedia(`(max-width: ${MOBILE_BREAKPOINT - 1}px)`); - const update = () => setIsTouch(mql.matches); + // Hover-capable pointers are a better cross-platform signal than viewport width. + const mql = window.matchMedia("(hover: hover) and (pointer: fine)"); + const update = () => setCanHover(mql.matches); update(); mql.addEventListener("change", update); return () => mql.removeEventListener("change", update); }, []); - return isTouch; + return canHover; } function TooltipProvider({ @@ -42,14 +41,14 @@ function Tooltip({ onOpenChange, ...props }: React.ComponentProps) { - const isMobile = useIsTouchDevice(); + const canHover = useCanHover(); return ( diff --git a/surfsense_web/content/docs/connectors/index.mdx b/surfsense_web/content/docs/connectors/index.mdx index e3d06aa3c..ef8d214ef 100644 --- a/surfsense_web/content/docs/connectors/index.mdx +++ b/surfsense_web/content/docs/connectors/index.mdx @@ -105,7 +105,7 @@ Connect SurfSense to your favorite tools and services. Browse the available inte /> - This connector requires direct file system access and only works with self-hosted SurfSense installations. - +SurfSense integrates with Obsidian through the SurfSense Obsidian plugin. ## How it works -The Obsidian connector scans your local Obsidian vault directory and indexes all Markdown files. It preserves your note structure and extracts metadata from YAML frontmatter. 
+The plugin runs inside your Obsidian app and pushes note updates to SurfSense over HTTPS. +This works for cloud and self-hosted deployments, including desktop and mobile clients. -- For follow-up indexing runs, the connector uses content hashing to skip unchanged files for faster sync. -- Indexing should be configured to run periodically, so updates should appear in your search results within minutes. - ---- - -## What Gets Indexed +## What gets indexed | Content Type | Description | |--------------|-------------| -| Markdown Files | All `.md` files in your vault | -| Frontmatter | YAML metadata (title, tags, aliases, dates) | -| Wiki Links | Links between notes (`[[note]]`) | -| Inline Tags | Tags throughout your notes (`#tag`) | -| Note Content | Full content with intelligent chunking | +| Markdown files | Note content (`.md`) | +| Frontmatter | YAML metadata like title, tags, aliases, dates | +| Wiki links | Linked notes (`[[note]]`) | +| Tags | Inline and frontmatter tags | +| Vault metadata | Vault and path metadata used for deep links and sync state | + +## Quick start + +1. Open **Connectors** in SurfSense and choose **Obsidian**. +2. Install the plugin (recommended via BRAT) using the steps below. +3. In Obsidian, open **Settings → SurfSense**. +4. Paste your SurfSense API token from the user settings section. +5. Paste your Server URL in the plugin setting: either your SurfSense main domain (if `/api/v1` rewrites are enabled) or your direct backend URL. +6. Choose the Search Space in the plugin, then the first sync should run automatically. +7. Confirm the connector appears as **Obsidian - <vault>** in SurfSense. + +## Install via BRAT (recommended) + +1. In Obsidian, open **Settings → Community plugins** and install **[BRAT](obsidian://show-plugin?id=obsidian42-brat)**. +2. Open BRAT settings and click **Add beta plugin** button. +3. Paste the repository: `https://github.com/MODSetter/SurfSense/`. +4. 
Select the latest plugin version, then click "Add plugin". +5. Open **Settings → SurfSense** to finish setup. + +## Migrating from the legacy connector + +If you previously used the legacy Obsidian connector architecture, migrate to the plugin flow: + +1. Delete the old legacy Obsidian connector from SurfSense. +2. Install and configure the SurfSense Obsidian plugin using the quick start above. +3. Run the first plugin sync and verify the new **Obsidian - <vault>** connector is active. - Binary files and attachments are not indexed by default. Enable "Include Attachments" to index embedded files. + Deleting the legacy connector also deletes all documents that were indexed by that connector. Always finish and verify plugin sync before deleting the old connector. ---- - -## Quick Start (Local Installation) - -1. Navigate to **Connectors** → **Add Connector** → **Obsidian** -2. Enter your vault path: `/Users/yourname/Documents/MyVault` -3. Enter a vault name (e.g., `Personal Notes`) -4. Click **Connect Obsidian** - - - Find your vault path: In Obsidian, right-click any note → "Reveal in Finder" (macOS) or "Show in Explorer" (Windows). - - - -Enable periodic sync to automatically re-index notes when content changes. Available frequencies: Every 5 minutes, 15 minutes, hourly, every 6 hours, daily, or weekly. - - ---- - -## Docker Setup - -For Docker deployments, you need to mount your Obsidian vault as a volume. - -### Step 1: Update docker-compose.yml - -Add your vault as a volume mount to the SurfSense backend service: - -```yaml -services: - surfsense: - # ... other config - volumes: - - /path/to/your/obsidian/vault:/app/obsidian_vaults/my-vault:ro -``` - - - The `:ro` flag mounts the vault as read-only, which is recommended for security. 
- - -### Step 2: Configure the Connector - -Use the **container path** (not your local path) when setting up the connector: - -| Your Local Path | Container Path (use this) | -|-----------------|---------------------------| -| `/Users/john/Documents/MyVault` | `/app/obsidian_vaults/my-vault` | -| `C:\Users\john\Documents\MyVault` | `/app/obsidian_vaults/my-vault` | - -### Example: Multiple Vaults - -```yaml -volumes: - - /Users/john/Documents/PersonalNotes:/app/obsidian_vaults/personal:ro - - /Users/john/Documents/WorkNotes:/app/obsidian_vaults/work:ro -``` - -Then create separate connectors for each vault using `/app/obsidian_vaults/personal` and `/app/obsidian_vaults/work`. - ---- - -## Connector Configuration - -| Field | Description | Required | -|-------|-------------|----------| -| **Connector Name** | A friendly name to identify this connector | Yes | -| **Vault Path** | Absolute path to your vault (container path for Docker) | Yes | -| **Vault Name** | Display name for your vault in search results | Yes | -| **Exclude Folders** | Comma-separated folder names to skip | No | -| **Include Attachments** | Index embedded files (images, PDFs) | No | - ---- - -## Recommended Exclusions - -Common folders to exclude from indexing: - -| Folder | Reason | -|--------|--------| -| `.obsidian` | Obsidian config files (always exclude) | -| `.trash` | Obsidian's trash folder | -| `templates` | Template files you don't want searchable | -| `daily-notes` | If you want to exclude daily notes | -| `attachments` | If not using "Include Attachments" | - -Default exclusions: `.obsidian,.trash` - ---- - ## Troubleshooting -**Vault not found / Permission denied** -- Verify the path exists and is accessible -- For Docker: ensure the volume is mounted correctly in `docker-compose.yml` -- Check file permissions: SurfSense needs read access to the vault directory +**Plugin connects but no files appear** +- Verify the plugin is pointed to the correct Search Space. 
+- Trigger a manual sync from the plugin settings. +- Confirm your API token is valid and not expired. -**No notes indexed** -- Ensure your vault contains `.md` files -- Check that notes aren't in excluded folders -- Verify the path points to the vault root (contains `.obsidian` folder) +**Self-hosted URL issues** +- Use a public or LAN backend URL that your Obsidian device can reach. +- If your instance is behind TLS, ensure the URL/certificate is valid for the device running Obsidian. -**Changes not appearing** -- Wait for the next sync cycle, or manually trigger re-indexing -- For Docker: restart the container if you modified volume mounts +**Unauthorized / 401 errors** +- Regenerate and paste a fresh API token from SurfSense. +- Ensure the token belongs to the same account and workspace you are syncing into. -**Docker: "path not found" error** -- Use the container path (`/app/obsidian_vaults/...`), not your local path -- Verify the volume mount in `docker-compose.yml` matches +**Cannot reach server URL** +- Check that the backend URL is reachable from the Obsidian device. +- For self-hosted setups, verify firewall and reverse proxy rules. +- Avoid using localhost unless SurfSense and Obsidian run on the same machine. diff --git a/surfsense_web/lib/agent-filesystem.ts b/surfsense_web/lib/agent-filesystem.ts index 91c366d43..da5fc1b1d 100644 --- a/surfsense_web/lib/agent-filesystem.ts +++ b/surfsense_web/lib/agent-filesystem.ts @@ -22,15 +22,17 @@ export function getClientPlatform(): ClientPlatform { return window.electronAPI ? 
"desktop" : "web"; } -export async function getAgentFilesystemSelection(): Promise { +export async function getAgentFilesystemSelection( + searchSpaceId?: number | null +): Promise { const platform = getClientPlatform(); if (platform !== "desktop" || !window.electronAPI?.getAgentFilesystemSettings) { return { ...DEFAULT_SELECTION, client_platform: platform }; } try { - const settings = await window.electronAPI.getAgentFilesystemSettings(); + const settings = await window.electronAPI.getAgentFilesystemSettings(searchSpaceId); if (settings.mode === "desktop_local_folder") { - const mounts = await window.electronAPI.getAgentFilesystemMounts?.(); + const mounts = await window.electronAPI.getAgentFilesystemMounts?.(searchSpaceId); const localFilesystemMounts = mounts?.map((entry) => ({ mount_id: entry.mount, diff --git a/surfsense_web/lib/apis/connectors-api.service.ts b/surfsense_web/lib/apis/connectors-api.service.ts index f4137c787..a35e731a4 100644 --- a/surfsense_web/lib/apis/connectors-api.service.ts +++ b/surfsense_web/lib/apis/connectors-api.service.ts @@ -427,6 +427,19 @@ class ConnectorsApiService { body: { tool_name: toolName }, }); }; + + /** Live stats for the Obsidian connector tile. 
*/ + getObsidianStats = async (vaultId: string): Promise<ObsidianStats> => { + return baseApiService.get<ObsidianStats>( + `/api/v1/obsidian/stats?vault_id=${encodeURIComponent(vaultId)}` + ); + }; +} + +export interface ObsidianStats { + vault_id: string; + files_synced: number; + last_sync_at: string | null; +} + export type { SlackChannel, DiscordChannel }; diff --git a/surfsense_web/types/window.d.ts b/surfsense_web/types/window.d.ts index ea55743db..f25d43f5e 100644 --- a/surfsense_web/types/window.d.ts +++ b/surfsense_web/types/window.d.ts @@ -54,6 +54,28 @@ interface AgentFilesystemMount { rootPath: string; } +interface AgentFilesystemListOptions { + rootPath: string; + searchSpaceId?: number | null; + excludePatterns?: string[] | null; + fileExtensions?: string[] | null; +} + +interface AgentFilesystemTreeWatchOptions { + searchSpaceId?: number | null; + rootPaths: string[]; + excludePatterns?: string[] | null; + fileExtensions?: string[] | null; +} + +interface AgentFilesystemTreeDirtyEvent { + searchSpaceId: number | null; + reason: "watcher_event" | "safety_poll"; + rootPath: string; + changedPath: string | null; + timestamp: number; +} + interface LocalTextFileResult { ok: boolean; path: string; @@ -103,8 +125,15 @@ interface ElectronAPI { // Browse files/folders via native dialogs browseFiles: () => Promise; readLocalFiles: (paths: string[]) => Promise; - readAgentLocalFileText: (virtualPath: string) => Promise; - writeAgentLocalFileText: (virtualPath: string, content: string) => Promise; + readAgentLocalFileText: ( + virtualPath: string, + searchSpaceId?: number | null + ) => Promise; + writeAgentLocalFileText: ( + virtualPath: string, + content: string, + searchSpaceId?: number | null + ) => Promise; // Auth token sync across windows getAuthTokens: () => Promise<{ bearer: string; refresh: string } | null>; setAuthTokens: (bearer: string, refresh: string) => Promise; @@ -145,12 +174,23 @@ interface ElectronAPI { platform: string; }>; // Agent filesystem mode - 
getAgentFilesystemSettings: () => Promise; - getAgentFilesystemMounts: () => Promise; - setAgentFilesystemSettings: (settings: { - mode?: AgentFilesystemMode; - localRootPaths?: string[] | null; - }) => Promise; + getAgentFilesystemSettings: (searchSpaceId?: number | null) => Promise; + getAgentFilesystemMounts: (searchSpaceId?: number | null) => Promise; + listAgentFilesystemFiles: (options: AgentFilesystemListOptions) => Promise; + startAgentFilesystemTreeWatch: ( + options: AgentFilesystemTreeWatchOptions + ) => Promise<{ ok: true }>; + stopAgentFilesystemTreeWatch: (searchSpaceId?: number | null) => Promise<{ ok: true }>; + onAgentFilesystemTreeDirty: ( + callback: (data: AgentFilesystemTreeDirtyEvent) => void + ) => () => void; + setAgentFilesystemSettings: ( + settings: { + mode?: AgentFilesystemMode; + localRootPaths?: string[] | null; + }, + searchSpaceId?: number | null + ) => Promise; pickAgentFilesystemRoot: () => Promise; } diff --git a/versions.json b/versions.json new file mode 100644 index 000000000..9a3c3429d --- /dev/null +++ b/versions.json @@ -0,0 +1,3 @@ +{ + "0.1.0": "1.5.4" +}