From d6e605fd50d6841082602eb08f9449cb6d7abdfc Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Mon, 20 Apr 2026 20:59:17 +0200 Subject: [PATCH 001/113] feat(notion-mcp): add OAuth + PKCE service layer and MCP adapter Implements Notion MCP integration core: - OAuth 2.0 discovery (RFC 9470 + 8414), dynamic client registration, PKCE token exchange, and refresh with rotation - NotionMCPAdapter connecting to mcp.notion.com/mcp with fallback to direct API on known serialization errors - Response parser translating MCP text responses into dicts matching NotionHistoryConnector output format - has_mcp_notion_connector() helper for connector gating --- .../app/services/notion_mcp/__init__.py | 27 ++ .../app/services/notion_mcp/adapter.py | 253 +++++++++++++++ .../app/services/notion_mcp/oauth.py | 298 ++++++++++++++++++ .../services/notion_mcp/response_parser.py | 212 +++++++++++++ 4 files changed, 790 insertions(+) create mode 100644 surfsense_backend/app/services/notion_mcp/__init__.py create mode 100644 surfsense_backend/app/services/notion_mcp/adapter.py create mode 100644 surfsense_backend/app/services/notion_mcp/oauth.py create mode 100644 surfsense_backend/app/services/notion_mcp/response_parser.py diff --git a/surfsense_backend/app/services/notion_mcp/__init__.py b/surfsense_backend/app/services/notion_mcp/__init__.py new file mode 100644 index 000000000..6a57500b6 --- /dev/null +++ b/surfsense_backend/app/services/notion_mcp/__init__.py @@ -0,0 +1,27 @@ +"""Notion MCP integration. + +Routes Notion operations through Notion's hosted MCP server +at https://mcp.notion.com/mcp instead of direct API calls. 
+""" + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.db import SearchSourceConnector, SearchSourceConnectorType + + +async def has_mcp_notion_connector( + session: AsyncSession, + search_space_id: int, +) -> bool: + """Check whether the search space has at least one MCP-mode Notion connector.""" + result = await session.execute( + select(SearchSourceConnector.id, SearchSourceConnector.config).filter( + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, + ) + ) + for _, config in result.all(): + if isinstance(config, dict) and config.get("mcp_mode"): + return True + return False diff --git a/surfsense_backend/app/services/notion_mcp/adapter.py b/surfsense_backend/app/services/notion_mcp/adapter.py new file mode 100644 index 000000000..76eac6305 --- /dev/null +++ b/surfsense_backend/app/services/notion_mcp/adapter.py @@ -0,0 +1,253 @@ +"""Notion MCP Adapter. + +Connects to Notion's hosted MCP server at ``https://mcp.notion.com/mcp`` +and exposes the same method signatures as ``NotionHistoryConnector``'s +write operations so that tool factories can swap with a one-line change. + +Includes an optional fallback to ``NotionHistoryConnector`` when the MCP +server returns known serialization errors (GitHub issues #215, #216). 
+""" + +import logging +from datetime import UTC, datetime +from typing import Any + +from mcp import ClientSession +from mcp.client.streamable_http import streamablehttp_client +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.config import config +from app.db import SearchSourceConnector +from app.schemas.notion_auth_credentials import NotionAuthCredentialsBase +from app.utils.oauth_security import TokenEncryption + +from .response_parser import ( + extract_text_from_mcp_response, + is_mcp_serialization_error, + parse_create_page_response, + parse_delete_page_response, + parse_fetch_page_response, + parse_health_check_response, + parse_update_page_response, +) + +logger = logging.getLogger(__name__) + +NOTION_MCP_URL = "https://mcp.notion.com/mcp" + + +class NotionMCPAdapter: + """Routes Notion operations through the hosted MCP server. + + Drop-in replacement for ``NotionHistoryConnector`` write methods. + Returns the same dict structure so KB sync works unchanged. 
+ """ + + def __init__(self, session: AsyncSession, connector_id: int): + self._session = session + self._connector_id = connector_id + self._access_token: str | None = None + + async def _get_valid_token(self) -> str: + """Get a valid MCP access token, refreshing if expired.""" + result = await self._session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == self._connector_id + ) + ) + connector = result.scalars().first() + if not connector: + raise ValueError(f"Connector {self._connector_id} not found") + + cfg = connector.config or {} + + if not cfg.get("mcp_mode"): + raise ValueError( + f"Connector {self._connector_id} is not an MCP connector" + ) + + access_token = cfg.get("access_token") + if not access_token: + raise ValueError("No access token in MCP connector config") + + is_encrypted = cfg.get("_token_encrypted", False) + if is_encrypted and config.SECRET_KEY: + token_encryption = TokenEncryption(config.SECRET_KEY) + access_token = token_encryption.decrypt_token(access_token) + + expires_at_str = cfg.get("expires_at") + if expires_at_str: + expires_at = datetime.fromisoformat(expires_at_str) + if expires_at.tzinfo is None: + expires_at = expires_at.replace(tzinfo=UTC) + if expires_at <= datetime.now(UTC): + from app.routes.notion_mcp_connector_route import refresh_notion_mcp_token + + connector = await refresh_notion_mcp_token(self._session, connector) + cfg = connector.config or {} + access_token = cfg.get("access_token", "") + if is_encrypted and config.SECRET_KEY: + token_encryption = TokenEncryption(config.SECRET_KEY) + access_token = token_encryption.decrypt_token(access_token) + + self._access_token = access_token + return access_token + + async def _call_mcp_tool( + self, tool_name: str, arguments: dict[str, Any] + ) -> str: + """Connect to Notion MCP server and call a tool. 
Returns raw text.""" + token = await self._get_valid_token() + headers = {"Authorization": f"Bearer {token}"} + + async with ( + streamablehttp_client(NOTION_MCP_URL, headers=headers) as (read, write, _), + ClientSession(read, write) as session, + ): + await session.initialize() + response = await session.call_tool(tool_name, arguments=arguments) + return extract_text_from_mcp_response(response) + + async def _call_with_fallback( + self, + tool_name: str, + arguments: dict[str, Any], + parser, + fallback_method: str | None = None, + fallback_kwargs: dict[str, Any] | None = None, + ) -> dict[str, Any]: + """Call MCP tool, parse response, and fall back on serialization errors.""" + try: + raw_text = await self._call_mcp_tool(tool_name, arguments) + result = parser(raw_text) + + if result.get("mcp_serialization_error") and fallback_method: + logger.warning( + "MCP tool '%s' hit serialization bug, falling back to direct API", + tool_name, + ) + return await self._fallback(fallback_method, fallback_kwargs or {}) + + return result + + except Exception as e: + error_str = str(e) + if is_mcp_serialization_error(error_str) and fallback_method: + logger.warning( + "MCP tool '%s' raised serialization error, falling back: %s", + tool_name, + error_str, + ) + return await self._fallback(fallback_method, fallback_kwargs or {}) + + logger.error("MCP tool '%s' failed: %s", tool_name, e, exc_info=True) + return {"status": "error", "message": f"MCP call failed: {e!s}"} + + async def _fallback( + self, method_name: str, kwargs: dict[str, Any] + ) -> dict[str, Any]: + """Fall back to NotionHistoryConnector for the given method. + + Uses the already-refreshed MCP access token directly with the + Notion SDK, bypassing the connector's config-based token loading. 
+ """ + from app.connectors.notion_history import NotionHistoryConnector + from app.schemas.notion_auth_credentials import NotionAuthCredentialsBase + + token = self._access_token + if not token: + token = await self._get_valid_token() + + connector = NotionHistoryConnector( + session=self._session, + connector_id=self._connector_id, + ) + connector._credentials = NotionAuthCredentialsBase(access_token=token) + connector._using_legacy_token = True + + method = getattr(connector, method_name) + return await method(**kwargs) + + # ------------------------------------------------------------------ + # Public API — same signatures as NotionHistoryConnector + # ------------------------------------------------------------------ + + async def create_page( + self, + title: str, + content: str, + parent_page_id: str | None = None, + ) -> dict[str, Any]: + arguments: dict[str, Any] = { + "pages": [ + { + "title": title, + "content": content, + } + ] + } + if parent_page_id: + arguments["pages"][0]["parent_page_url"] = parent_page_id + + return await self._call_with_fallback( + tool_name="notion-create-pages", + arguments=arguments, + parser=parse_create_page_response, + fallback_method="create_page", + fallback_kwargs={ + "title": title, + "content": content, + "parent_page_id": parent_page_id, + }, + ) + + async def update_page( + self, + page_id: str, + content: str | None = None, + ) -> dict[str, Any]: + arguments: dict[str, Any] = { + "page_id": page_id, + "command": "replace_content", + } + if content: + arguments["new_str"] = content + + return await self._call_with_fallback( + tool_name="notion-update-page", + arguments=arguments, + parser=parse_update_page_response, + fallback_method="update_page", + fallback_kwargs={"page_id": page_id, "content": content}, + ) + + async def delete_page(self, page_id: str) -> dict[str, Any]: + arguments: dict[str, Any] = { + "page_id": page_id, + "command": "update_properties", + "archived": True, + } + + return await 
self._call_with_fallback( + tool_name="notion-update-page", + arguments=arguments, + parser=parse_delete_page_response, + fallback_method="delete_page", + fallback_kwargs={"page_id": page_id}, + ) + + async def fetch_page(self, page_url_or_id: str) -> dict[str, Any]: + """Fetch page content via ``notion-fetch``.""" + raw_text = await self._call_mcp_tool( + "notion-fetch", {"url": page_url_or_id} + ) + return parse_fetch_page_response(raw_text) + + async def health_check(self) -> dict[str, Any]: + """Check MCP connection via ``notion-get-self``.""" + try: + raw_text = await self._call_mcp_tool("notion-get-self", {}) + return parse_health_check_response(raw_text) + except Exception as e: + return {"status": "error", "message": str(e)} diff --git a/surfsense_backend/app/services/notion_mcp/oauth.py b/surfsense_backend/app/services/notion_mcp/oauth.py new file mode 100644 index 000000000..cfa6ad3e0 --- /dev/null +++ b/surfsense_backend/app/services/notion_mcp/oauth.py @@ -0,0 +1,298 @@ +"""OAuth 2.0 + PKCE utilities for Notion's remote MCP server. + +Implements the flow described in the official guide: +https://developers.notion.com/guides/mcp/build-mcp-client + +Steps: + 1. Discover OAuth metadata (RFC 9470 → RFC 8414) + 2. Dynamic client registration (RFC 7591) + 3. Build authorization URL with PKCE code_challenge + 4. Exchange authorization code + code_verifier for tokens + 5. Refresh access tokens (with refresh-token rotation) + +All functions are stateless — callers (route handlers) manage storage. 
+""" + +import logging +from dataclasses import dataclass +from datetime import UTC, datetime, timedelta +from typing import Any + +import httpx + +logger = logging.getLogger(__name__) + +NOTION_MCP_SERVER_URL = "https://mcp.notion.com/mcp" +_HTTP_TIMEOUT = 30.0 + + +@dataclass(frozen=True) +class OAuthMetadata: + issuer: str + authorization_endpoint: str + token_endpoint: str + registration_endpoint: str | None + code_challenge_methods_supported: list[str] + + +@dataclass(frozen=True) +class ClientCredentials: + client_id: str + client_secret: str | None = None + client_id_issued_at: int | None = None + client_secret_expires_at: int | None = None + + +@dataclass(frozen=True) +class TokenSet: + access_token: str + refresh_token: str | None + token_type: str + expires_in: int | None + expires_at: datetime | None + scope: str | None + + +# --------------------------------------------------------------------------- +# Step 1 — OAuth discovery +# --------------------------------------------------------------------------- + + +async def discover_oauth_metadata( + mcp_server_url: str = NOTION_MCP_SERVER_URL, +) -> OAuthMetadata: + """Discover OAuth endpoints via RFC 9470 + RFC 8414. + + 1. Fetch protected-resource metadata to find the authorization server. + 2. Fetch authorization-server metadata to get OAuth endpoints. 
+ """ + from urllib.parse import urlparse + + parsed = urlparse(mcp_server_url) + origin = f"{parsed.scheme}://{parsed.netloc}" + path = parsed.path.rstrip("/") + + async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: + # RFC 9470 — Protected Resource Metadata + # URL format: {origin}/.well-known/oauth-protected-resource{path} + pr_url = f"{origin}/.well-known/oauth-protected-resource{path}" + pr_resp = await client.get(pr_url) + pr_resp.raise_for_status() + pr_data = pr_resp.json() + + auth_servers = pr_data.get("authorization_servers", []) + if not auth_servers: + raise ValueError("No authorization_servers in protected resource metadata") + auth_server_url = auth_servers[0] + + # RFC 8414 — Authorization Server Metadata + as_url = f"{auth_server_url}/.well-known/oauth-authorization-server" + as_resp = await client.get(as_url) + as_resp.raise_for_status() + as_data = as_resp.json() + + if not as_data.get("authorization_endpoint") or not as_data.get("token_endpoint"): + raise ValueError("Missing required OAuth endpoints in server metadata") + + return OAuthMetadata( + issuer=as_data.get("issuer", auth_server_url), + authorization_endpoint=as_data["authorization_endpoint"], + token_endpoint=as_data["token_endpoint"], + registration_endpoint=as_data.get("registration_endpoint"), + code_challenge_methods_supported=as_data.get( + "code_challenge_methods_supported", [] + ), + ) + + +# --------------------------------------------------------------------------- +# Step 2 — Dynamic client registration (RFC 7591) +# --------------------------------------------------------------------------- + + +async def register_client( + metadata: OAuthMetadata, + redirect_uri: str, + client_name: str = "SurfSense", +) -> ClientCredentials: + """Dynamically register an OAuth client with the Notion MCP server.""" + if not metadata.registration_endpoint: + raise ValueError("Server does not support dynamic client registration") + + payload = { + "client_name": client_name, + 
"redirect_uris": [redirect_uri], + "grant_types": ["authorization_code", "refresh_token"], + "response_types": ["code"], + "token_endpoint_auth_method": "none", + } + + async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: + resp = await client.post( + metadata.registration_endpoint, + json=payload, + headers={"Content-Type": "application/json", "Accept": "application/json"}, + ) + if not resp.is_success: + logger.error( + "Dynamic client registration failed (%s): %s", + resp.status_code, + resp.text, + ) + resp.raise_for_status() + data = resp.json() + + return ClientCredentials( + client_id=data["client_id"], + client_secret=data.get("client_secret"), + client_id_issued_at=data.get("client_id_issued_at"), + client_secret_expires_at=data.get("client_secret_expires_at"), + ) + + +# --------------------------------------------------------------------------- +# Step 3 — Build authorization URL +# --------------------------------------------------------------------------- + + +def build_authorization_url( + metadata: OAuthMetadata, + client_id: str, + redirect_uri: str, + code_challenge: str, + state: str, +) -> str: + """Build the OAuth authorization URL with PKCE parameters.""" + from urllib.parse import urlencode + + params = { + "response_type": "code", + "client_id": client_id, + "redirect_uri": redirect_uri, + "code_challenge": code_challenge, + "code_challenge_method": "S256", + "state": state, + "prompt": "consent", + } + return f"{metadata.authorization_endpoint}?{urlencode(params)}" + + +# --------------------------------------------------------------------------- +# Step 4 — Exchange authorization code for tokens +# --------------------------------------------------------------------------- + + +async def exchange_code_for_tokens( + code: str, + code_verifier: str, + metadata: OAuthMetadata, + client_id: str, + redirect_uri: str, + client_secret: str | None = None, +) -> TokenSet: + """Exchange an authorization code + PKCE verifier for tokens.""" + 
form_data: dict[str, Any] = { + "grant_type": "authorization_code", + "code": code, + "client_id": client_id, + "redirect_uri": redirect_uri, + "code_verifier": code_verifier, + } + if client_secret: + form_data["client_secret"] = client_secret + + async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: + resp = await client.post( + metadata.token_endpoint, + data=form_data, + headers={ + "Content-Type": "application/x-www-form-urlencoded", + "Accept": "application/json", + }, + ) + if not resp.is_success: + body = resp.text + raise ValueError(f"Token exchange failed ({resp.status_code}): {body}") + tokens = resp.json() + + if not tokens.get("access_token"): + raise ValueError("No access_token in token response") + + expires_at = None + if tokens.get("expires_in"): + expires_at = datetime.now(UTC) + timedelta(seconds=int(tokens["expires_in"])) + + return TokenSet( + access_token=tokens["access_token"], + refresh_token=tokens.get("refresh_token"), + token_type=tokens.get("token_type", "Bearer"), + expires_in=tokens.get("expires_in"), + expires_at=expires_at, + scope=tokens.get("scope"), + ) + + +# --------------------------------------------------------------------------- +# Step 5 — Refresh access token +# --------------------------------------------------------------------------- + + +async def refresh_access_token( + refresh_token: str, + metadata: OAuthMetadata, + client_id: str, + client_secret: str | None = None, +) -> TokenSet: + """Refresh an access token. + + Notion MCP uses refresh-token rotation: each refresh returns a new + refresh_token and invalidates the old one. Callers MUST persist the + new refresh_token atomically with the new access_token. 
+ """ + form_data: dict[str, Any] = { + "grant_type": "refresh_token", + "refresh_token": refresh_token, + "client_id": client_id, + } + if client_secret: + form_data["client_secret"] = client_secret + + async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: + resp = await client.post( + metadata.token_endpoint, + data=form_data, + headers={ + "Content-Type": "application/x-www-form-urlencoded", + "Accept": "application/json", + }, + ) + + if not resp.is_success: + body = resp.text + try: + error_data = resp.json() + error_code = error_data.get("error", "") + if error_code == "invalid_grant": + raise ValueError("REAUTH_REQUIRED") + except ValueError: + if "REAUTH_REQUIRED" in str(resp.text) or resp.status_code == 401: + raise + raise ValueError(f"Token refresh failed ({resp.status_code}): {body}") + + tokens = resp.json() + + if not tokens.get("access_token"): + raise ValueError("No access_token in refresh response") + + expires_at = None + if tokens.get("expires_in"): + expires_at = datetime.now(UTC) + timedelta(seconds=int(tokens["expires_in"])) + + return TokenSet( + access_token=tokens["access_token"], + refresh_token=tokens.get("refresh_token"), + token_type=tokens.get("token_type", "Bearer"), + expires_in=tokens.get("expires_in"), + expires_at=expires_at, + scope=tokens.get("scope"), + ) diff --git a/surfsense_backend/app/services/notion_mcp/response_parser.py b/surfsense_backend/app/services/notion_mcp/response_parser.py new file mode 100644 index 000000000..34d5ef332 --- /dev/null +++ b/surfsense_backend/app/services/notion_mcp/response_parser.py @@ -0,0 +1,212 @@ +"""Parse Notion MCP tool responses into structured dicts. + +The Notion MCP server returns responses as MCP TextContent where the +``text`` field contains JSON-stringified Notion API response data. 
+See: https://deepwiki.com/makenotion/notion-mcp-server/4.3-request-and-response-handling + +This module extracts that JSON and normalises it into the same dict +format that ``NotionHistoryConnector`` methods return, so downstream +code (KB sync, tool factories) works unchanged. +""" + +import json +import logging +from typing import Any + +logger = logging.getLogger(__name__) + +MCP_SERIALIZATION_ERROR_MARKERS = [ + "Expected array, received string", + "Expected object, received string", + "should be defined, instead was `undefined`", +] + + +def is_mcp_serialization_error(text: str) -> bool: + """Return True if the MCP error text matches a known serialization bug.""" + return any(marker in text for marker in MCP_SERIALIZATION_ERROR_MARKERS) + + +def extract_text_from_mcp_response(response) -> str: + """Pull the concatenated text out of an MCP ``CallToolResult``. + + Args: + response: The ``CallToolResult`` returned by ``session.call_tool()``. + + Returns: + Concatenated text content from the response. 
+ """ + parts: list[str] = [] + for content in response.content: + if hasattr(content, "text"): + parts.append(content.text) + elif hasattr(content, "data"): + parts.append(str(content.data)) + else: + parts.append(str(content)) + return "\n".join(parts) if parts else "" + + +def _try_parse_json(text: str) -> dict[str, Any] | None: + """Attempt to parse *text* as JSON, returning None on failure.""" + try: + parsed = json.loads(text) + if isinstance(parsed, dict): + return parsed + except (json.JSONDecodeError, TypeError): + pass + return None + + +def _extract_page_title(page_data: dict[str, Any]) -> str: + """Best-effort extraction of the page title from a Notion page object.""" + props = page_data.get("properties", {}) + for prop in props.values(): + if prop.get("type") == "title": + title_parts = prop.get("title", []) + if title_parts: + return " ".join(t.get("plain_text", "") for t in title_parts) + return page_data.get("id", "Untitled") + + +def parse_create_page_response(raw_text: str) -> dict[str, Any]: + """Parse a ``notion-create-pages`` MCP response. 
+ + Returns a dict compatible with ``NotionHistoryConnector.create_page()``: + ``{status, page_id, url, title, message}`` + """ + data = _try_parse_json(raw_text) + + if data is None: + if is_mcp_serialization_error(raw_text): + return { + "status": "mcp_error", + "message": raw_text, + "mcp_serialization_error": True, + } + return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} + + if data.get("status") == "error" or "error" in data: + return { + "status": "error", + "message": data.get("message", data.get("error", str(data))), + } + + page_id = data.get("id", "") + url = data.get("url", "") + title = _extract_page_title(data) + + return { + "status": "success", + "page_id": page_id, + "url": url, + "title": title, + "message": f"Created Notion page '{title}'", + } + + +def parse_update_page_response(raw_text: str) -> dict[str, Any]: + """Parse a ``notion-update-page`` MCP response. + + Returns a dict compatible with ``NotionHistoryConnector.update_page()``: + ``{status, page_id, url, title, message}`` + """ + data = _try_parse_json(raw_text) + + if data is None: + if is_mcp_serialization_error(raw_text): + return { + "status": "mcp_error", + "message": raw_text, + "mcp_serialization_error": True, + } + return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} + + if data.get("status") == "error" or "error" in data: + return { + "status": "error", + "message": data.get("message", data.get("error", str(data))), + } + + page_id = data.get("id", "") + url = data.get("url", "") + title = _extract_page_title(data) + + return { + "status": "success", + "page_id": page_id, + "url": url, + "title": title, + "message": f"Updated Notion page '{title}' (content appended)", + } + + +def parse_delete_page_response(raw_text: str) -> dict[str, Any]: + """Parse an archive (delete) MCP response. + + The Notion API responds to ``pages.update(archived=True)`` with + the archived page object. 
+ + Returns a dict compatible with ``NotionHistoryConnector.delete_page()``: + ``{status, page_id, message}`` + """ + data = _try_parse_json(raw_text) + + if data is None: + if is_mcp_serialization_error(raw_text): + return { + "status": "mcp_error", + "message": raw_text, + "mcp_serialization_error": True, + } + return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} + + if data.get("status") == "error" or "error" in data: + return { + "status": "error", + "message": data.get("message", data.get("error", str(data))), + } + + page_id = data.get("id", "") + title = _extract_page_title(data) + + return { + "status": "success", + "page_id": page_id, + "message": f"Deleted Notion page '{title}'", + } + + +def parse_fetch_page_response(raw_text: str) -> dict[str, Any]: + """Parse a ``notion-fetch`` MCP response. + + Returns the raw parsed dict (Notion page/block data) or an error dict. + """ + data = _try_parse_json(raw_text) + + if data is None: + return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} + + if data.get("status") == "error" or "error" in data: + return { + "status": "error", + "message": data.get("message", data.get("error", str(data))), + } + + return {"status": "success", "data": data} + + +def parse_health_check_response(raw_text: str) -> dict[str, Any]: + """Parse a ``notion-get-self`` MCP response for health checking.""" + data = _try_parse_json(raw_text) + + if data is None: + return {"status": "error", "message": raw_text[:500]} + + if data.get("status") == "error" or "error" in data: + return { + "status": "error", + "message": data.get("message", data.get("error", str(data))), + } + + return {"status": "success", "data": data} From 41d547934dc41ac8d965f7a9ce4767fd4f9d249a Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Mon, 20 Apr 2026 21:02:05 +0200 Subject: [PATCH 002/113] feat(notion-mcp): add MCP connector OAuth routes --- surfsense_backend/app/routes/__init__.py | 2 + 
.../app/routes/notion_mcp_connector_route.py | 486 ++++++++++++++++++ 2 files changed, 488 insertions(+) create mode 100644 surfsense_backend/app/routes/notion_mcp_connector_route.py diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py index ad40666cd..faec7fe09 100644 --- a/surfsense_backend/app/routes/__init__.py +++ b/surfsense_backend/app/routes/__init__.py @@ -37,6 +37,7 @@ from .new_llm_config_routes import router as new_llm_config_router from .notes_routes import router as notes_router from .notifications_routes import router as notifications_router from .notion_add_connector_route import router as notion_add_connector_router +from .notion_mcp_connector_route import router as notion_mcp_connector_router from .onedrive_add_connector_route import router as onedrive_add_connector_router from .podcasts_routes import router as podcasts_router from .prompts_routes import router as prompts_router @@ -81,6 +82,7 @@ router.include_router(airtable_add_connector_router) router.include_router(linear_add_connector_router) router.include_router(luma_add_connector_router) router.include_router(notion_add_connector_router) +router.include_router(notion_mcp_connector_router) router.include_router(slack_add_connector_router) router.include_router(teams_add_connector_router) router.include_router(onedrive_add_connector_router) diff --git a/surfsense_backend/app/routes/notion_mcp_connector_route.py b/surfsense_backend/app/routes/notion_mcp_connector_route.py new file mode 100644 index 000000000..b9305cd74 --- /dev/null +++ b/surfsense_backend/app/routes/notion_mcp_connector_route.py @@ -0,0 +1,486 @@ +"""Notion MCP Connector OAuth Routes. + +Handles OAuth 2.0 + PKCE authentication for Notion's hosted MCP server. 
+Based on: https://developers.notion.com/guides/mcp/build-mcp-client + +This creates connectors with the same ``NOTION_CONNECTOR`` type as the +existing direct-API connector, but with ``mcp_mode: True`` in the config +so the adapter layer knows to route through MCP. +""" + +import logging +from uuid import UUID + +from fastapi import APIRouter, Depends, HTTPException, Request +from fastapi.responses import RedirectResponse +from sqlalchemy import select +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm.attributes import flag_modified + +from app.config import config +from app.db import ( + SearchSourceConnector, + SearchSourceConnectorType, + User, + get_async_session, +) +from app.services.notion_mcp.oauth import ( + ClientCredentials, + OAuthMetadata, + build_authorization_url, + discover_oauth_metadata, + exchange_code_for_tokens, + refresh_access_token, + register_client, +) +from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + extract_identifier_from_credentials, + generate_unique_connector_name, +) +from app.utils.oauth_security import OAuthStateManager, TokenEncryption, generate_pkce_pair + +logger = logging.getLogger(__name__) + +router = APIRouter() + +_state_manager: OAuthStateManager | None = None +_token_encryption: TokenEncryption | None = None +_oauth_metadata: OAuthMetadata | None = None + + +def _get_state_manager() -> OAuthStateManager: + global _state_manager + if _state_manager is None: + if not config.SECRET_KEY: + raise ValueError("SECRET_KEY must be set for OAuth security") + _state_manager = OAuthStateManager(config.SECRET_KEY) + return _state_manager + + +def _get_token_encryption() -> TokenEncryption: + global _token_encryption + if _token_encryption is None: + if not config.SECRET_KEY: + raise ValueError("SECRET_KEY must be set for token encryption") + _token_encryption = TokenEncryption(config.SECRET_KEY) + return 
_token_encryption + + +async def _get_oauth_metadata() -> OAuthMetadata: + global _oauth_metadata + if _oauth_metadata is None: + _oauth_metadata = await discover_oauth_metadata() + return _oauth_metadata + + +async def _fetch_workspace_info(access_token: str) -> dict: + """Fetch workspace metadata using the Notion API with the fresh token. + + The ``/v1/users/me`` endpoint returns bot info including workspace_name. + This populates connector config fields so naming and metadata services + work correctly. + """ + try: + import httpx + + async with httpx.AsyncClient(timeout=15.0) as client: + resp = await client.get( + "https://api.notion.com/v1/users/me", + headers={ + "Authorization": f"Bearer {access_token}", + "Notion-Version": "2022-06-28", + }, + ) + if resp.is_success: + data = resp.json() + bot_info = data.get("bot", {}) + return { + "bot_id": data.get("id"), + "workspace_name": bot_info.get("workspace_name", "Notion Workspace"), + "workspace_icon": data.get("avatar_url") or "📄", + } + except Exception as e: + logger.warning("Failed to fetch workspace info: %s", e) + return {} + + +NOTION_MCP_REDIRECT_URI = None + + +def _get_redirect_uri() -> str: + global NOTION_MCP_REDIRECT_URI + if NOTION_MCP_REDIRECT_URI is None: + backend = config.BACKEND_URL or "http://localhost:8000" + NOTION_MCP_REDIRECT_URI = f"{backend}/api/v1/auth/notion-mcp/connector/callback" + return NOTION_MCP_REDIRECT_URI + + +# --------------------------------------------------------------------------- +# Route: initiate OAuth +# --------------------------------------------------------------------------- + + +@router.get("/auth/notion-mcp/connector/add") +async def connect_notion_mcp( + space_id: int, + user: User = Depends(current_active_user), +): + """Initiate Notion MCP OAuth + PKCE flow.""" + if not config.SECRET_KEY: + raise HTTPException(status_code=500, detail="SECRET_KEY not configured.") + + try: + metadata = await _get_oauth_metadata() + + redirect_uri = _get_redirect_uri() + 
credentials = await register_client(metadata, redirect_uri) + + code_verifier, code_challenge = generate_pkce_pair() + + state_manager = _get_state_manager() + state_encoded = state_manager.generate_secure_state( + space_id, + user.id, + code_verifier=code_verifier, + mcp_client_id=credentials.client_id, + mcp_client_secret=credentials.client_secret or "", + ) + + auth_url = build_authorization_url( + metadata=metadata, + client_id=credentials.client_id, + redirect_uri=redirect_uri, + code_challenge=code_challenge, + state=state_encoded, + ) + + logger.info("Generated Notion MCP OAuth URL for user %s, space %s", user.id, space_id) + return {"auth_url": auth_url} + + except Exception as e: + logger.error("Failed to initiate Notion MCP OAuth: %s", e, exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to initiate Notion MCP OAuth: {e!s}" + ) from e + + +# --------------------------------------------------------------------------- +# Route: re-authenticate existing connector +# --------------------------------------------------------------------------- + + +@router.get("/auth/notion-mcp/connector/reauth") +async def reauth_notion_mcp( + space_id: int, + connector_id: int, + return_url: str | None = None, + user: User = Depends(current_active_user), + session: AsyncSession = Depends(get_async_session), +): + """Initiate re-authentication for an existing Notion MCP connector.""" + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == connector_id, + SearchSourceConnector.user_id == user.id, + SearchSourceConnector.search_space_id == space_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, + ) + ) + connector = result.scalars().first() + if not connector: + raise HTTPException(status_code=404, detail="Connector not found or access denied") + + if not config.SECRET_KEY: + raise HTTPException(status_code=500, detail="SECRET_KEY not configured.") + + try: + metadata = 
await _get_oauth_metadata() + redirect_uri = _get_redirect_uri() + credentials = await register_client(metadata, redirect_uri) + + code_verifier, code_challenge = generate_pkce_pair() + + extra: dict = { + "connector_id": connector_id, + "code_verifier": code_verifier, + "mcp_client_id": credentials.client_id, + "mcp_client_secret": credentials.client_secret or "", + } + if return_url and return_url.startswith("/"): + extra["return_url"] = return_url + + state_manager = _get_state_manager() + state_encoded = state_manager.generate_secure_state(space_id, user.id, **extra) + + auth_url = build_authorization_url( + metadata=metadata, + client_id=credentials.client_id, + redirect_uri=redirect_uri, + code_challenge=code_challenge, + state=state_encoded, + ) + + logger.info("Initiating Notion MCP re-auth for user %s, connector %s", user.id, connector_id) + return {"auth_url": auth_url} + + except HTTPException: + raise + except Exception as e: + logger.error("Failed to initiate Notion MCP re-auth: %s", e, exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to initiate Notion MCP re-auth: {e!s}" + ) from e + + +# --------------------------------------------------------------------------- +# Route: OAuth callback +# --------------------------------------------------------------------------- + + +@router.get("/auth/notion-mcp/connector/callback") +async def notion_mcp_callback( + request: Request, + code: str | None = None, + error: str | None = None, + state: str | None = None, + session: AsyncSession = Depends(get_async_session), +): + """Handle the OAuth callback from Notion's MCP authorization server.""" + if error: + logger.warning("Notion MCP OAuth error: %s", error) + space_id = None + if state: + try: + data = _get_state_manager().validate_state(state) + space_id = data.get("space_id") + except Exception: + pass + if space_id: + return RedirectResponse( + 
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?error=notion_mcp_oauth_denied" + ) + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=notion_mcp_oauth_denied" + ) + + if not code: + raise HTTPException(status_code=400, detail="Missing authorization code") + if not state: + raise HTTPException(status_code=400, detail="Missing state parameter") + + state_manager = _get_state_manager() + try: + data = state_manager.validate_state(state) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=400, detail=f"Invalid state: {e!s}") from e + + user_id = UUID(data["user_id"]) + space_id = data["space_id"] + code_verifier = data.get("code_verifier") + mcp_client_id = data.get("mcp_client_id") + mcp_client_secret = data.get("mcp_client_secret") or None + + if not code_verifier or not mcp_client_id: + raise HTTPException(status_code=400, detail="Missing PKCE or client data in state") + + try: + metadata = await _get_oauth_metadata() + redirect_uri = _get_redirect_uri() + + token_set = await exchange_code_for_tokens( + code=code, + code_verifier=code_verifier, + metadata=metadata, + client_id=mcp_client_id, + redirect_uri=redirect_uri, + client_secret=mcp_client_secret, + ) + except Exception as e: + logger.error("Notion MCP token exchange failed: %s", e, exc_info=True) + raise HTTPException(status_code=400, detail=f"Token exchange failed: {e!s}") from e + + token_encryption = _get_token_encryption() + + workspace_info = await _fetch_workspace_info(token_set.access_token) + + connector_config = { + "access_token": token_encryption.encrypt_token(token_set.access_token), + "refresh_token": token_encryption.encrypt_token(token_set.refresh_token) + if token_set.refresh_token + else None, + "expires_in": token_set.expires_in, + "expires_at": token_set.expires_at.isoformat() if token_set.expires_at else None, + "workspace_id": workspace_info.get("workspace_id"), + "workspace_name": 
workspace_info.get("workspace_name", "Notion Workspace"), + "workspace_icon": workspace_info.get("workspace_icon", "📄"), + "bot_id": workspace_info.get("bot_id"), + "mcp_mode": True, + "mcp_client_id": mcp_client_id, + "mcp_client_secret": token_encryption.encrypt_token(mcp_client_secret) + if mcp_client_secret + else None, + "_token_encrypted": True, + } + + reauth_connector_id = data.get("connector_id") + reauth_return_url = data.get("return_url") + + # --- Re-auth path --- + if reauth_connector_id: + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == reauth_connector_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.search_space_id == space_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, + ) + ) + db_connector = result.scalars().first() + if not db_connector: + raise HTTPException(status_code=404, detail="Connector not found during re-auth") + + db_connector.config = connector_config + flag_modified(db_connector, "config") + await session.commit() + await session.refresh(db_connector) + + logger.info("Re-authenticated Notion MCP connector %s for user %s", db_connector.id, user_id) + if reauth_return_url and reauth_return_url.startswith("/"): + return RedirectResponse(url=f"{config.NEXT_FRONTEND_URL}{reauth_return_url}") + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?success=true&connector=notion-connector&connectorId={db_connector.id}" + ) + + # --- New connector path --- + connector_identifier = extract_identifier_from_credentials( + SearchSourceConnectorType.NOTION_CONNECTOR, connector_config + ) + + is_duplicate = await check_duplicate_connector( + session, + SearchSourceConnectorType.NOTION_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + if is_duplicate: + logger.warning("Duplicate Notion MCP connector for user %s", user_id) + return RedirectResponse( + 
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?error=duplicate_account&connector=notion-connector" + ) + + connector_name = await generate_unique_connector_name( + session, + SearchSourceConnectorType.NOTION_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + + new_connector = SearchSourceConnector( + name=connector_name, + connector_type=SearchSourceConnectorType.NOTION_CONNECTOR, + is_indexable=True, + config=connector_config, + search_space_id=space_id, + user_id=user_id, + ) + session.add(new_connector) + + try: + await session.commit() + logger.info("Created Notion MCP connector for user %s in space %s", user_id, space_id) + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?success=true&connector=notion-connector&connectorId={new_connector.id}" + ) + except IntegrityError as e: + await session.rollback() + raise HTTPException(status_code=409, detail=f"Database integrity error: {e!s}") from e + except Exception as e: + await session.rollback() + raise HTTPException( + status_code=500, detail=f"Failed to create connector: {e!s}" + ) from e + + +# --------------------------------------------------------------------------- +# Token refresh helper (used by the adapter) +# --------------------------------------------------------------------------- + + +async def refresh_notion_mcp_token( + session: AsyncSession, + connector: SearchSourceConnector, +) -> SearchSourceConnector: + """Refresh the MCP access token for a connector. + + Handles refresh-token rotation: persists both new access_token + and new refresh_token atomically. + """ + token_encryption = _get_token_encryption() + + cfg = connector.config or {} + encrypted_refresh = cfg.get("refresh_token") + if not encrypted_refresh: + raise HTTPException(status_code=400, detail="No refresh token available. 
Please re-authenticate.") + + try: + refresh_token = token_encryption.decrypt_token(encrypted_refresh) + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to decrypt refresh token: {e!s}") from e + + mcp_client_id = cfg.get("mcp_client_id") + mcp_client_secret_encrypted = cfg.get("mcp_client_secret") + mcp_client_secret = ( + token_encryption.decrypt_token(mcp_client_secret_encrypted) + if mcp_client_secret_encrypted + else None + ) + + if not mcp_client_id: + raise HTTPException(status_code=400, detail="Missing MCP client_id. Please re-authenticate.") + + metadata = await _get_oauth_metadata() + + try: + token_set = await refresh_access_token( + refresh_token=refresh_token, + metadata=metadata, + client_id=mcp_client_id, + client_secret=mcp_client_secret, + ) + except ValueError as e: + if "REAUTH_REQUIRED" in str(e): + connector.config = {**connector.config, "auth_expired": True} + flag_modified(connector, "config") + await session.commit() + await session.refresh(connector) + raise HTTPException( + status_code=401, detail="Notion MCP authentication expired. Please re-authenticate." 
+ ) from e + raise HTTPException(status_code=400, detail=f"Token refresh failed: {e!s}") from e + + updated_config = { + **connector.config, + "access_token": token_encryption.encrypt_token(token_set.access_token), + "refresh_token": token_encryption.encrypt_token(token_set.refresh_token) + if token_set.refresh_token + else connector.config.get("refresh_token"), + "expires_in": token_set.expires_in, + "expires_at": token_set.expires_at.isoformat() if token_set.expires_at else None, + "_token_encrypted": True, + } + updated_config.pop("auth_expired", None) + + connector.config = updated_config + flag_modified(connector, "config") + await session.commit() + await session.refresh(connector) + + logger.info("Refreshed Notion MCP token for connector %s", connector.id) + return connector From 8d438f52f56ec7db3c64c29b0de5437926ba1a93 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Mon, 20 Apr 2026 21:02:10 +0200 Subject: [PATCH 003/113] feat(notion-mcp): add MCP agent tool factories and registry wiring --- .../new_chat/tools/notion_mcp/__init__.py | 5 + .../new_chat/tools/notion_mcp/create_page.py | 205 ++++++++++++++++++ .../new_chat/tools/notion_mcp/delete_page.py | 173 +++++++++++++++ .../new_chat/tools/notion_mcp/update_page.py | 179 +++++++++++++++ .../app/agents/new_chat/tools/registry.py | 39 ++++ 5 files changed, 601 insertions(+) create mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py new file mode 100644 index 000000000..1e1515bfb --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py @@ -0,0 
+1,5 @@ +"""MCP-backed Notion tool factories. + +Drop-in replacements for ``tools/notion/`` that route through +Notion's hosted MCP server instead of direct API calls. +""" diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py new file mode 100644 index 000000000..a73363a65 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py @@ -0,0 +1,205 @@ +import logging +from typing import Any + +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession + +from app.agents.new_chat.tools.hitl import request_approval +from app.services.notion import NotionToolMetadataService + +logger = logging.getLogger(__name__) + + +def _find_mcp_connector(connectors): + """Return the first connector with mcp_mode enabled, or None.""" + for c in connectors: + if (c.config or {}).get("mcp_mode"): + return c + return None + + +def create_create_notion_page_mcp_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + user_id: str | None = None, + connector_id: int | None = None, +): + @tool + async def create_notion_page( + title: str, + content: str | None = None, + ) -> dict[str, Any]: + """Create a new page in Notion with the given title and content. + + Use this tool when the user asks you to create, save, or publish + something to Notion. The page will be created in the user's + configured Notion workspace. The user MUST specify a topic before you + call this tool. If the request does not contain a topic (e.g. "create a + notion page"), ask what the page should be about. Never call this tool + without a clear topic from the user. + + Args: + title: The title of the Notion page. + content: Optional markdown content for the page body (supports headings, lists, paragraphs). + Generate this yourself based on the user's topic. 
+ + Returns: + Dictionary with: + - status: "success", "rejected", or "error" + - page_id: Created page ID (if success) + - url: URL to the created page (if success) + - title: Page title (if success) + - message: Result message + + IMPORTANT: If status is "rejected", the user explicitly declined the action. + Respond with a brief acknowledgment (e.g., "Understood, I didn't create the page.") + and move on. Do NOT troubleshoot or suggest alternatives. + + Examples: + - "Create a Notion page about our Q2 roadmap" + - "Save a summary of today's discussion to Notion" + """ + logger.info("create_notion_page (MCP) called: title='%s'", title) + + if db_session is None or search_space_id is None or user_id is None: + logger.error("Notion MCP tool not properly configured - missing required parameters") + return { + "status": "error", + "message": "Notion tool not properly configured. Please contact support.", + } + + try: + metadata_service = NotionToolMetadataService(db_session) + context = await metadata_service.get_creation_context(search_space_id, user_id) + + if "error" in context: + logger.error("Failed to fetch creation context: %s", context["error"]) + return {"status": "error", "message": context["error"]} + + accounts = context.get("accounts", []) + if accounts and all(a.get("auth_expired") for a in accounts): + return { + "status": "auth_error", + "message": "All connected Notion accounts need re-authentication. Please re-authenticate in your connector settings.", + "connector_type": "notion", + } + + result = request_approval( + action_type="notion_page_creation", + tool_name="create_notion_page", + params={ + "title": title, + "content": content, + "parent_page_id": None, + "connector_id": connector_id, + }, + context=context, + ) + + if result.rejected: + logger.info("Notion page creation rejected by user") + return { + "status": "rejected", + "message": "User declined. 
Do not retry or suggest alternatives.", + } + + final_title = result.params.get("title", title) + final_content = result.params.get("content", content) + final_parent_page_id = result.params.get("parent_page_id") + final_connector_id = result.params.get("connector_id", connector_id) + + if not final_title or not final_title.strip(): + return { + "status": "error", + "message": "Page title cannot be empty. Please provide a valid title.", + } + + from sqlalchemy.future import select + + from app.db import SearchSourceConnector, SearchSourceConnectorType + + actual_connector_id = final_connector_id + if actual_connector_id is None: + query_result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, + ) + ) + connectors = query_result.scalars().all() + connector = _find_mcp_connector(connectors) + + if not connector: + return { + "status": "error", + "message": "No Notion MCP connector found. 
Please connect Notion (MCP) in your workspace settings.", + } + actual_connector_id = connector.id + else: + query_result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == actual_connector_id, + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, + ) + ) + connector = query_result.scalars().first() + if not connector: + return { + "status": "error", + "message": "Selected Notion account is invalid or has been disconnected.", + } + + from app.services.notion_mcp.adapter import NotionMCPAdapter + + adapter = NotionMCPAdapter(session=db_session, connector_id=actual_connector_id) + result = await adapter.create_page( + title=final_title, + content=final_content, + parent_page_id=final_parent_page_id, + ) + logger.info("create_page (MCP) result: %s - %s", result.get("status"), result.get("message", "")) + + if result.get("status") == "success": + kb_message_suffix = "" + try: + from app.services.notion import NotionKBSyncService + + kb_service = NotionKBSyncService(db_session) + kb_result = await kb_service.sync_after_create( + page_id=result.get("page_id"), + page_title=result.get("title", final_title), + page_url=result.get("url"), + content=final_content, + connector_id=actual_connector_id, + search_space_id=search_space_id, + user_id=user_id, + ) + if kb_result["status"] == "success": + kb_message_suffix = " Your knowledge base has also been updated." + else: + kb_message_suffix = " This page will be added to your knowledge base in the next scheduled sync." + except Exception as kb_err: + logger.warning("KB sync after create failed: %s", kb_err) + kb_message_suffix = " This page will be added to your knowledge base in the next scheduled sync." 
+ + result["message"] = result.get("message", "") + kb_message_suffix + + return result + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + + logger.error("Error creating Notion page (MCP): %s", e, exc_info=True) + if isinstance(e, ValueError): + message = str(e) + else: + message = "Something went wrong while creating the page. Please try again." + return {"status": "error", "message": message} + + return create_notion_page diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py new file mode 100644 index 000000000..c0cf7642b --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py @@ -0,0 +1,173 @@ +import logging +from typing import Any + +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession + +from app.agents.new_chat.tools.hitl import request_approval +from app.services.notion.tool_metadata_service import NotionToolMetadataService + +logger = logging.getLogger(__name__) + + +def create_delete_notion_page_mcp_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + user_id: str | None = None, + connector_id: int | None = None, +): + @tool + async def delete_notion_page( + page_title: str, + delete_from_kb: bool = False, + ) -> dict[str, Any]: + """Delete (archive) a Notion page. + + Use this tool when the user asks you to delete, remove, or archive + a Notion page. Note that Notion doesn't permanently delete pages, + it archives them (they can be restored from trash). + + Args: + page_title: The title of the Notion page to delete. + delete_from_kb: Whether to also remove the page from the knowledge base. + Default is False. 
+ + Returns: + Dictionary with: + - status: "success", "rejected", "not_found", or "error" + - page_id: Deleted page ID (if success) + - message: Success or error message + - deleted_from_kb: Whether the page was also removed from knowledge base (if success) + + Examples: + - "Delete the 'Meeting Notes' Notion page" + - "Remove the 'Old Project Plan' Notion page" + """ + logger.info( + "delete_notion_page (MCP) called: page_title='%s', delete_from_kb=%s", + page_title, + delete_from_kb, + ) + + if db_session is None or search_space_id is None or user_id is None: + logger.error("Notion MCP tool not properly configured - missing required parameters") + return { + "status": "error", + "message": "Notion tool not properly configured. Please contact support.", + } + + try: + metadata_service = NotionToolMetadataService(db_session) + context = await metadata_service.get_delete_context(search_space_id, user_id, page_title) + + if "error" in context: + error_msg = context["error"] + if "not found" in error_msg.lower(): + return {"status": "not_found", "message": error_msg} + return {"status": "error", "message": error_msg} + + account = context.get("account", {}) + if account.get("auth_expired"): + return { + "status": "auth_error", + "message": "The Notion account for this page needs re-authentication. Please re-authenticate in your connector settings.", + } + + page_id = context.get("page_id") + connector_id_from_context = account.get("id") + document_id = context.get("document_id") + + result = request_approval( + action_type="notion_page_deletion", + tool_name="delete_notion_page", + params={ + "page_id": page_id, + "connector_id": connector_id_from_context, + "delete_from_kb": delete_from_kb, + }, + context=context, + ) + + if result.rejected: + logger.info("Notion page deletion rejected by user") + return { + "status": "rejected", + "message": "User declined. 
Do not retry or suggest alternatives.", + } + + final_page_id = result.params.get("page_id", page_id) + final_connector_id = result.params.get("connector_id", connector_id_from_context) + final_delete_from_kb = result.params.get("delete_from_kb", delete_from_kb) + + from sqlalchemy.future import select + + from app.db import SearchSourceConnector, SearchSourceConnectorType + + if final_connector_id: + query_result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == final_connector_id, + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, + ) + ) + connector = query_result.scalars().first() + if not connector: + return { + "status": "error", + "message": "Selected Notion account is invalid or has been disconnected.", + } + actual_connector_id = connector.id + else: + return {"status": "error", "message": "No connector found for this page."} + + from app.services.notion_mcp.adapter import NotionMCPAdapter + + adapter = NotionMCPAdapter(session=db_session, connector_id=actual_connector_id) + result = await adapter.delete_page(page_id=final_page_id) + logger.info("delete_page (MCP) result: %s - %s", result.get("status"), result.get("message", "")) + + deleted_from_kb = False + if result.get("status") == "success" and final_delete_from_kb and document_id: + try: + from sqlalchemy.future import select + + from app.db import Document + + doc_result = await db_session.execute( + select(Document).filter(Document.id == document_id) + ) + document = doc_result.scalars().first() + + if document: + await db_session.delete(document) + await db_session.commit() + deleted_from_kb = True + logger.info("Deleted document %s from knowledge base", document_id) + except Exception as e: + logger.error("Failed to delete document from KB: %s", e) + await db_session.rollback() + result["warning"] = f"Page 
deleted from Notion, but failed to remove from knowledge base: {e!s}" + + if result.get("status") == "success": + result["deleted_from_kb"] = deleted_from_kb + if deleted_from_kb: + result["message"] = f"{result.get('message', '')} (also removed from knowledge base)" + + return result + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + + logger.error("Error deleting Notion page (MCP): %s", e, exc_info=True) + if isinstance(e, ValueError): + message = str(e) + else: + message = "Something went wrong while deleting the page. Please try again." + return {"status": "error", "message": message} + + return delete_notion_page diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py new file mode 100644 index 000000000..28599cbae --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py @@ -0,0 +1,179 @@ +import logging +from typing import Any + +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession + +from app.agents.new_chat.tools.hitl import request_approval +from app.services.notion import NotionToolMetadataService + +logger = logging.getLogger(__name__) + + +def create_update_notion_page_mcp_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + user_id: str | None = None, + connector_id: int | None = None, +): + @tool + async def update_notion_page( + page_title: str, + content: str | None = None, + ) -> dict[str, Any]: + """Update an existing Notion page by appending new content. + + Use this tool when the user asks you to add content to, modify, or update + a Notion page. The new content will be appended to the existing page content. + The user MUST specify what to add before you call this tool. If the + request is vague, ask what content they want added. 
+ + Args: + page_title: The title of the Notion page to update. + content: Optional markdown content to append to the page body (supports headings, lists, paragraphs). + Generate this yourself based on the user's request. + + Returns: + Dictionary with: + - status: "success", "rejected", "not_found", or "error" + - page_id: Updated page ID (if success) + - url: URL to the updated page (if success) + - title: Current page title (if success) + - message: Result message + + IMPORTANT: + - If status is "rejected", the user explicitly declined the action. + Respond with a brief acknowledgment (e.g., "Understood, I didn't update the page.") + and move on. Do NOT ask for alternatives or troubleshoot. + - If status is "not_found", inform the user conversationally using the exact message provided. + + Examples: + - "Add today's meeting notes to the 'Meeting Notes' Notion page" + - "Update the 'Project Plan' page with a status update on phase 1" + """ + logger.info( + "update_notion_page (MCP) called: page_title='%s', content_length=%d", + page_title, + len(content) if content else 0, + ) + + if db_session is None or search_space_id is None or user_id is None: + logger.error("Notion MCP tool not properly configured - missing required parameters") + return { + "status": "error", + "message": "Notion tool not properly configured. Please contact support.", + } + + if not content or not content.strip(): + return { + "status": "error", + "message": "Content is required to update the page. 
Please provide the actual content you want to add.", + } + + try: + metadata_service = NotionToolMetadataService(db_session) + context = await metadata_service.get_update_context(search_space_id, user_id, page_title) + + if "error" in context: + error_msg = context["error"] + if "not found" in error_msg.lower(): + return {"status": "not_found", "message": error_msg} + return {"status": "error", "message": error_msg} + + account = context.get("account", {}) + if account.get("auth_expired"): + return { + "status": "auth_error", + "message": "The Notion account for this page needs re-authentication. Please re-authenticate in your connector settings.", + } + + page_id = context.get("page_id") + document_id = context.get("document_id") + connector_id_from_context = account.get("id") + + result = request_approval( + action_type="notion_page_update", + tool_name="update_notion_page", + params={ + "page_id": page_id, + "content": content, + "connector_id": connector_id_from_context, + }, + context=context, + ) + + if result.rejected: + logger.info("Notion page update rejected by user") + return { + "status": "rejected", + "message": "User declined. 
Do not retry or suggest alternatives.", + } + + final_page_id = result.params.get("page_id", page_id) + final_content = result.params.get("content", content) + final_connector_id = result.params.get("connector_id", connector_id_from_context) + + from sqlalchemy.future import select + + from app.db import SearchSourceConnector, SearchSourceConnectorType + + if final_connector_id: + query_result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == final_connector_id, + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, + ) + ) + connector = query_result.scalars().first() + if not connector: + return { + "status": "error", + "message": "Selected Notion account is invalid or has been disconnected.", + } + actual_connector_id = connector.id + else: + return {"status": "error", "message": "No connector found for this page."} + + from app.services.notion_mcp.adapter import NotionMCPAdapter + + adapter = NotionMCPAdapter(session=db_session, connector_id=actual_connector_id) + result = await adapter.update_page(page_id=final_page_id, content=final_content) + logger.info("update_page (MCP) result: %s - %s", result.get("status"), result.get("message", "")) + + if result.get("status") == "success" and document_id is not None: + from app.services.notion import NotionKBSyncService + + kb_service = NotionKBSyncService(db_session) + kb_result = await kb_service.sync_after_update( + document_id=document_id, + appended_content=final_content, + user_id=user_id, + search_space_id=search_space_id, + appended_block_ids=result.get("appended_block_ids"), + ) + + if kb_result["status"] == "success": + result["message"] = f"{result['message']}. Your knowledge base has also been updated." + elif kb_result["status"] == "not_indexed": + result["message"] = f"{result['message']}. 
This page will be added to your knowledge base in the next scheduled sync." + else: + result["message"] = f"{result['message']}. Your knowledge base will be updated in the next scheduled sync." + + return result + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + + logger.error("Error updating Notion page (MCP): %s", e, exc_info=True) + if isinstance(e, ValueError): + message = str(e) + else: + message = "Something went wrong while updating the page. Please try again." + return {"status": "error", "message": message} + + return update_notion_page diff --git a/surfsense_backend/app/agents/new_chat/tools/registry.py b/surfsense_backend/app/agents/new_chat/tools/registry.py index 265aabbbf..e7378653b 100644 --- a/surfsense_backend/app/agents/new_chat/tools/registry.py +++ b/surfsense_backend/app/agents/new_chat/tools/registry.py @@ -86,6 +86,11 @@ from .notion import ( create_delete_notion_page_tool, create_update_notion_page_tool, ) +from .notion_mcp import ( + create_page as notion_mcp_create_page_mod, + delete_page as notion_mcp_delete_page_mod, + update_page as notion_mcp_update_page_mod, +) from .onedrive import ( create_create_onedrive_file_tool, create_delete_onedrive_file_tool, @@ -316,6 +321,40 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ requires=["db_session", "search_space_id", "user_id"], ), # ========================================================================= + # NOTION MCP TOOLS - MCP-backed variants (disabled until swap) + # These route through Notion's hosted MCP server instead of direct API. 
+ # ========================================================================= + ToolDefinition( + name="create_notion_page_mcp", + description="Create a new page in Notion via MCP server", + factory=lambda deps: notion_mcp_create_page_mod.create_create_notion_page_mcp_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + ), + ToolDefinition( + name="update_notion_page_mcp", + description="Append new content to an existing Notion page via MCP server", + factory=lambda deps: notion_mcp_update_page_mod.create_update_notion_page_mcp_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + ), + ToolDefinition( + name="delete_notion_page_mcp", + description="Delete an existing Notion page via MCP server", + factory=lambda deps: notion_mcp_delete_page_mod.create_delete_notion_page_mcp_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + ), + # ========================================================================= # GOOGLE DRIVE TOOLS - create files, delete files # Auto-disabled when no Google Drive connector is configured (see chat_deepagent.py) # ========================================================================= From 5a8ec704fe45c51c1211f19b5333040a69fff854 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Mon, 20 Apr 2026 21:02:21 +0200 Subject: [PATCH 004/113] feat(notion-mcp): add MCP gating in agent, indexer, and health check --- .../app/agents/new_chat/chat_deepagent.py | 29 ++++++++++++++----- .../services/notion/tool_metadata_service.py | 21 +++++++++++++- .../connector_indexers/notion_indexer.py | 12 ++++++++ 3 files changed, 54 insertions(+), 8 deletions(-) diff --git 
a/surfsense_backend/app/agents/new_chat/chat_deepagent.py b/surfsense_backend/app/agents/new_chat/chat_deepagent.py index ab47b49ce..6709715bd 100644 --- a/surfsense_backend/app/agents/new_chat/chat_deepagent.py +++ b/surfsense_backend/app/agents/new_chat/chat_deepagent.py @@ -285,18 +285,33 @@ async def create_surfsense_deep_agent( "llm": llm, } - # Disable Notion action tools if no Notion connector is configured + # Disable Notion action tools if no Notion connector is configured. + # When an MCP-mode connector exists, use MCP tools; otherwise use direct-API tools. modified_disabled_tools = list(disabled_tools) if disabled_tools else [] has_notion_connector = ( available_connectors is not None and "NOTION_CONNECTOR" in available_connectors ) + _notion_direct_tools = [ + "create_notion_page", + "update_notion_page", + "delete_notion_page", + ] + _notion_mcp_tools = [ + "create_notion_page_mcp", + "update_notion_page_mcp", + "delete_notion_page_mcp", + ] if not has_notion_connector: - notion_tools = [ - "create_notion_page", - "update_notion_page", - "delete_notion_page", - ] - modified_disabled_tools.extend(notion_tools) + modified_disabled_tools.extend(_notion_direct_tools) + modified_disabled_tools.extend(_notion_mcp_tools) + else: + from app.services.notion_mcp import has_mcp_notion_connector + + _use_mcp = await has_mcp_notion_connector(db_session, search_space_id) + if _use_mcp: + modified_disabled_tools.extend(_notion_direct_tools) + else: + modified_disabled_tools.extend(_notion_mcp_tools) # Disable Linear action tools if no Linear connector is configured has_linear_connector = ( diff --git a/surfsense_backend/app/services/notion/tool_metadata_service.py b/surfsense_backend/app/services/notion/tool_metadata_service.py index 097ef3461..8a58d5e62 100644 --- a/surfsense_backend/app/services/notion/tool_metadata_service.py +++ b/surfsense_backend/app/services/notion/tool_metadata_service.py @@ -227,11 +227,30 @@ class NotionToolMetadataService: async def 
_check_account_health(self, connector_id: int) -> bool: """Check if a Notion connector's token is still valid. - Uses a lightweight ``users.me()`` call to verify the token. + For regular connectors: uses ``users.me()`` via the Notion SDK. + For MCP-mode connectors: uses ``notion-get-self`` via the MCP adapter. Returns True if the token is expired/invalid, False if healthy. """ try: + result = await self._db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == connector_id + ) + ) + db_connector = result.scalars().first() + if not db_connector: + return True + + if (db_connector.config or {}).get("mcp_mode"): + from app.services.notion_mcp.adapter import NotionMCPAdapter + + adapter = NotionMCPAdapter( + session=self._db_session, connector_id=connector_id + ) + health = await adapter.health_check() + return health.get("status") != "success" + connector = NotionHistoryConnector( session=self._db_session, connector_id=connector_id ) diff --git a/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py b/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py index 77aac795a..6a3a99b5c 100644 --- a/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py +++ b/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py @@ -129,6 +129,18 @@ async def index_notion_pages( f"Connector with ID {connector_id} not found or is not a Notion connector", ) + if (connector.config or {}).get("mcp_mode"): + msg = ( + f"Connector {connector_id} is an MCP-mode connector. " + "Background indexing is not supported for MCP connectors — " + "use a regular Notion connector for indexing." 
+ ) + logger.info(msg) + await task_logger.log_task_completion( + log_entry, msg, {"skipped": True, "reason": "mcp_mode"} + ) + return 0, 0, None + if not connector.config.get("access_token") and not connector.config.get( "NOTION_INTEGRATION_TOKEN" ): From 30944c0fec25055bd470b5488e0f08d31104e2f1 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Mon, 20 Apr 2026 21:02:34 +0200 Subject: [PATCH 005/113] feat(notion-mcp): wire frontend to MCP OAuth endpoints --- .../connector-configs/views/connector-edit-view.tsx | 2 +- .../connector-popup/constants/connector-constants.ts | 2 +- .../connector-popup/views/connector-accounts-list-view.tsx | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx index e19600ab2..274fc0fc7 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx @@ -21,7 +21,7 @@ import { getConnectorConfigComponent } from "../index"; const REAUTH_ENDPOINTS: Partial> = { [EnumConnectorName.LINEAR_CONNECTOR]: "/api/v1/auth/linear/connector/reauth", - [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion/connector/reauth", + [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion-mcp/connector/reauth", [EnumConnectorName.GOOGLE_DRIVE_CONNECTOR]: "/api/v1/auth/google/drive/connector/reauth", [EnumConnectorName.GOOGLE_GMAIL_CONNECTOR]: "/api/v1/auth/google/gmail/connector/reauth", [EnumConnectorName.GOOGLE_CALENDAR_CONNECTOR]: "/api/v1/auth/google/calendar/connector/reauth", diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 
da6885ffe..0e517b38e 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -38,7 +38,7 @@ export const OAUTH_CONNECTORS = [ title: "Notion", description: "Search your Notion pages", connectorType: EnumConnectorName.NOTION_CONNECTOR, - authEndpoint: "/api/v1/auth/notion/connector/add/", + authEndpoint: "/api/v1/auth/notion-mcp/connector/add", }, { id: "linear-connector", diff --git a/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx index b4c049c5c..6cdd535db 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx @@ -18,7 +18,7 @@ import { getConnectorDisplayName } from "../tabs/all-connectors-tab"; const REAUTH_ENDPOINTS: Partial> = { [EnumConnectorName.LINEAR_CONNECTOR]: "/api/v1/auth/linear/connector/reauth", - [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion/connector/reauth", + [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion-mcp/connector/reauth", [EnumConnectorName.GOOGLE_DRIVE_CONNECTOR]: "/api/v1/auth/google/drive/connector/reauth", [EnumConnectorName.GOOGLE_GMAIL_CONNECTOR]: "/api/v1/auth/google/gmail/connector/reauth", [EnumConnectorName.GOOGLE_CALENDAR_CONNECTOR]: "/api/v1/auth/google/calendar/connector/reauth", From 7f0a5cd06ae18d8593bbb557f29eb09a1097e252 Mon Sep 17 00:00:00 2001 From: Matt Van Horn <455140+mvanhorn@users.noreply.github.com> Date: Tue, 21 Apr 2026 01:43:20 -0700 Subject: [PATCH 006/113] fix(hitl-edit-panel): move duplicate-tag check into functional setTags Fixes #1248 handleAddTag had tags in its useCallback dependency array only so the closure-level duplicate check could read it, which forced 
the callback to re-create on every tag mutation and compared new additions against a potentially-stale closure value. Collapse the duplicate check into the functional setTags updater so the check always runs against the latest state, and drop tags from the dependency array - the callback is stable for the component's lifetime and downstream memoization won't get invalidated on every keystroke. --- .../hitl-edit-panel/hitl-edit-panel.tsx | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/surfsense_web/components/hitl-edit-panel/hitl-edit-panel.tsx b/surfsense_web/components/hitl-edit-panel/hitl-edit-panel.tsx index bd36431e9..b33392f38 100644 --- a/surfsense_web/components/hitl-edit-panel/hitl-edit-panel.tsx +++ b/surfsense_web/components/hitl-edit-panel/hitl-edit-panel.tsx @@ -65,16 +65,15 @@ function EmailsTagField({ setTags((prev) => (typeof newTags === "function" ? newTags(prev) : newTags)); }, []); - const handleAddTag = useCallback( - (text: string) => { - const trimmed = text.trim(); - if (!trimmed) return; - if (tags.some((tag) => tag.text === trimmed)) return; + const handleAddTag = useCallback((text: string) => { + const trimmed = text.trim(); + if (!trimmed) return; + setTags((prev) => { + if (prev.some((tag) => tag.text === trimmed)) return prev; const newTag: TagType = { id: Date.now().toString(), text: trimmed }; - setTags((prev) => [...prev, newTag]); - }, - [tags] - ); + return [...prev, newTag]; + }); + }, []); return ( Date: Tue, 21 Apr 2026 21:03:38 +0530 Subject: [PATCH 007/113] feat: add internal backend URL configuration --- docker/.env.example | 1 + docker/docker-compose.yml | 1 + surfsense_web/app/api/zero/query/route.ts | 5 ++++- 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/docker/.env.example b/docker/.env.example index a975ed8e7..95de0cf85 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -71,6 +71,7 @@ EMBEDDING_MODEL=sentence-transformers/all-MiniLM-L6-v2 # 
BACKEND_URL=https://api.yourdomain.com # NEXT_PUBLIC_FASTAPI_BACKEND_URL=https://api.yourdomain.com # NEXT_PUBLIC_ZERO_CACHE_URL=https://zero.yourdomain.com +# FASTAPI_BACKEND_INTERNAL_URL=http://backend:8000 # ------------------------------------------------------------------------------ # Zero-cache (real-time sync) diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 549190947..93d725979 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -198,6 +198,7 @@ services: NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE: ${AUTH_TYPE:-LOCAL} NEXT_PUBLIC_ETL_SERVICE: ${ETL_SERVICE:-DOCLING} NEXT_PUBLIC_DEPLOYMENT_MODE: ${DEPLOYMENT_MODE:-self-hosted} + FASTAPI_BACKEND_INTERNAL_URL: ${FASTAPI_BACKEND_INTERNAL_URL:-http://backend:8000} labels: - "com.centurylinklabs.watchtower.enable=true" depends_on: diff --git a/surfsense_web/app/api/zero/query/route.ts b/surfsense_web/app/api/zero/query/route.ts index 3d8ff0d33..a91edcd6f 100644 --- a/surfsense_web/app/api/zero/query/route.ts +++ b/surfsense_web/app/api/zero/query/route.ts @@ -5,7 +5,10 @@ import type { Context } from "@/types/zero"; import { queries } from "@/zero/queries"; import { schema } from "@/zero/schema"; -const backendURL = process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL || "http://localhost:8000"; +const backendURL = + process.env.FASTAPI_BACKEND_INTERNAL_URL || + process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL || + "http://localhost:8000"; async function authenticateRequest( request: Request From 59f8696eacbb49c0a54a7af8d0fb33d12e0fdf4a Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Tue, 21 Apr 2026 23:17:26 +0530 Subject: [PATCH 008/113] feat: add extra_hosts configuration for Docker services --- docker/docker-compose.dev.yml | 4 ++++ docker/docker-compose.yml | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml index c7922e3ef..bbe758d4f 100644 --- 
a/docker/docker-compose.dev.yml +++ b/docker/docker-compose.dev.yml @@ -77,6 +77,8 @@ services: - shared_temp:/shared_tmp env_file: - ../surfsense_backend/.env + extra_hosts: + - "host.docker.internal:host-gateway" environment: - DATABASE_URL=${DATABASE_URL:-postgresql+asyncpg://${DB_USER:-postgres}:${DB_PASSWORD:-postgres}@${DB_HOST:-db}:${DB_PORT:-5432}/${DB_NAME:-surfsense}} - CELERY_BROKER_URL=${REDIS_URL:-redis://redis:6379/0} @@ -118,6 +120,8 @@ services: - shared_temp:/shared_tmp env_file: - ../surfsense_backend/.env + extra_hosts: + - "host.docker.internal:host-gateway" environment: - DATABASE_URL=${DATABASE_URL:-postgresql+asyncpg://${DB_USER:-postgres}:${DB_PASSWORD:-postgres}@${DB_HOST:-db}:${DB_PORT:-5432}/${DB_NAME:-surfsense}} - CELERY_BROKER_URL=${REDIS_URL:-redis://redis:6379/0} diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 93d725979..10cace249 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -60,6 +60,8 @@ services: - shared_temp:/shared_tmp env_file: - .env + extra_hosts: + - "host.docker.internal:host-gateway" environment: DATABASE_URL: ${DATABASE_URL:-postgresql+asyncpg://${DB_USER:-surfsense}:${DB_PASSWORD:-surfsense}@${DB_HOST:-db}:${DB_PORT:-5432}/${DB_NAME:-surfsense}} CELERY_BROKER_URL: ${REDIS_URL:-redis://redis:6379/0} @@ -100,6 +102,8 @@ services: - shared_temp:/shared_tmp env_file: - .env + extra_hosts: + - "host.docker.internal:host-gateway" environment: DATABASE_URL: ${DATABASE_URL:-postgresql+asyncpg://${DB_USER:-surfsense}:${DB_PASSWORD:-surfsense}@${DB_HOST:-db}:${DB_PORT:-5432}/${DB_NAME:-surfsense}} CELERY_BROKER_URL: ${REDIS_URL:-redis://redis:6379/0} From 7c23f31dc001f2272e4b82f9350705ac57f48a58 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Tue, 21 Apr 2026 23:25:43 +0530 Subject: [PATCH 009/113] feat: add Ollama setup guide to documentation --- surfsense_web/content/docs/how-to/meta.json | 2 +- 
surfsense_web/content/docs/how-to/ollama.mdx | 90 ++++++++++++++++++++ 2 files changed, 91 insertions(+), 1 deletion(-) create mode 100644 surfsense_web/content/docs/how-to/ollama.mdx diff --git a/surfsense_web/content/docs/how-to/meta.json b/surfsense_web/content/docs/how-to/meta.json index 477fcafc4..329b7172e 100644 --- a/surfsense_web/content/docs/how-to/meta.json +++ b/surfsense_web/content/docs/how-to/meta.json @@ -1,6 +1,6 @@ { "title": "How to", - "pages": ["zero-sync", "realtime-collaboration", "web-search"], + "pages": ["zero-sync", "realtime-collaboration", "web-search", "ollama"], "icon": "Compass", "defaultOpen": false } diff --git a/surfsense_web/content/docs/how-to/ollama.mdx b/surfsense_web/content/docs/how-to/ollama.mdx new file mode 100644 index 000000000..48b231705 --- /dev/null +++ b/surfsense_web/content/docs/how-to/ollama.mdx @@ -0,0 +1,90 @@ +--- +title: Connect Ollama +description: Simple setup guide for using Ollama with SurfSense across local, Docker, remote, and cloud setups +--- + +# Connect Ollama + +Use this page to choose the correct **API Base URL** when adding an Ollama provider in SurfSense. + +## 1) Pick your API Base URL + +| Ollama location | SurfSense location | API Base URL | +|---|---|---| +| Same machine | No Docker | `http://localhost:11434` | +| Host machine (macOS/Windows) | Docker Desktop | `http://host.docker.internal:11434` | +| Host machine (Linux) | Docker Compose | `http://host.docker.internal:11434` | +| Same Docker Compose stack | Docker Compose | `http://ollama:11434` | +| Another machine in your network | Any | `http://:11434` | +| Public Ollama endpoint / proxy / cloud | Any | `http(s)://` | + +If SurfSense runs in Docker, do not use `localhost` unless Ollama is in the same container. 
+ +## 2) Add Ollama in SurfSense + +Go to **Search Space Settings -> Agent Models -> Add Model** and set: + +- Provider: `OLLAMA` +- Model name: your model tag, for example `llama3.2` or `qwen3:8b` +- API Base URL: from the table above +- API key: + - local/self-hosted Ollama: any non-empty value + - Ollama cloud/proxied auth: real key or token required by that endpoint + +Save. SurfSense validates the connection immediately. + +## 3) Common setups + +### A) SurfSense in Docker Desktop, Ollama on your host + +Use: + +```text +http://host.docker.internal:11434 +``` + +### B) Ollama as a service in the same Compose + +Use API Base URL: + +```text +http://ollama:11434 +``` + +Minimal service example: + +```yaml +ollama: + image: ollama/ollama:latest + volumes: + - ollama_data:/root/.ollama + ports: + - "11434:11434" +``` + +### C) Ollama on another machine + +Ollama binds to `127.0.0.1` by default. Make it reachable on the network: + +- Set `OLLAMA_HOST=0.0.0.0:11434` on the machine/service running Ollama +- Open firewall port `11434` +- Use `http://:11434` in SurfSense's API Base URL + +## 4) Quick troubleshooting + +| Error | Cause | Fix | +|---|---|---| +| `Cannot connect to host localhost:11434` | Wrong URL from Dockerized backend | Use `host.docker.internal` or `ollama` | +| `Cannot connect to host :11434` | Ollama not exposed on network or firewall blocked | Set `OLLAMA_HOST=0.0.0.0:11434`, allow port 11434 | +| URL starts with `/%20http://...` | Leading space in URL | Re-enter API Base URL without spaces | +| `model not found` | Model not pulled on Ollama | Run `ollama pull ` | + +If needed, test from the backend container using the same host you put in **API Base URL**: + +```bash +docker compose exec backend curl -v /api/tags +``` + +## See also + +- [Docker Installation](/docs/docker-installation/docker-compose) \ No newline at end of file From 875c4c3cf45122937cf0967ada80c1843ef30301 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:28:30 
+0200 Subject: [PATCH 010/113] add connector exception hierarchy --- .../app/connectors/exceptions.py | 98 +++++++++++++++++++ 1 file changed, 98 insertions(+) create mode 100644 surfsense_backend/app/connectors/exceptions.py diff --git a/surfsense_backend/app/connectors/exceptions.py b/surfsense_backend/app/connectors/exceptions.py new file mode 100644 index 000000000..32a1e7bdc --- /dev/null +++ b/surfsense_backend/app/connectors/exceptions.py @@ -0,0 +1,98 @@ +"""Standard exception hierarchy for all connectors. + +ConnectorError +├── ConnectorAuthError (401/403 — non-retryable) +├── ConnectorRateLimitError (429 — retryable, carries ``retry_after``) +├── ConnectorTimeoutError (timeout/504 — retryable) +└── ConnectorAPIError (5xx or unexpected — retryable when >= 500) +""" + +from __future__ import annotations + +from typing import Any + + +class ConnectorError(Exception): + + def __init__( + self, + message: str, + *, + service: str = "", + status_code: int | None = None, + response_body: Any = None, + ) -> None: + super().__init__(message) + self.service = service + self.status_code = status_code + self.response_body = response_body + + @property + def retryable(self) -> bool: + return False + + +class ConnectorAuthError(ConnectorError): + """Token expired, revoked, insufficient scopes, or needs re-auth (401/403).""" + + @property + def retryable(self) -> bool: + return False + + +class ConnectorRateLimitError(ConnectorError): + """429 Too Many Requests.""" + + def __init__( + self, + message: str = "Rate limited", + *, + service: str = "", + retry_after: float | None = None, + status_code: int = 429, + response_body: Any = None, + ) -> None: + super().__init__( + message, + service=service, + status_code=status_code, + response_body=response_body, + ) + self.retry_after = retry_after + + @property + def retryable(self) -> bool: + return True + + +class ConnectorTimeoutError(ConnectorError): + """Request timeout or gateway timeout (504).""" + + def __init__( + 
self, + message: str = "Request timed out", + *, + service: str = "", + status_code: int | None = None, + response_body: Any = None, + ) -> None: + super().__init__( + message, + service=service, + status_code=status_code, + response_body=response_body, + ) + + @property + def retryable(self) -> bool: + return True + + +class ConnectorAPIError(ConnectorError): + """Generic API error (5xx or unexpected status codes).""" + + @property + def retryable(self) -> bool: + if self.status_code is not None: + return self.status_code >= 500 + return False From 45acf9de15a9edf2e28e5746a71bf07e59ed9532 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:28:36 +0200 Subject: [PATCH 011/113] add async retry utility with tenacity --- surfsense_backend/app/utils/async_retry.py | 129 +++++++++++++++++++++ 1 file changed, 129 insertions(+) create mode 100644 surfsense_backend/app/utils/async_retry.py diff --git a/surfsense_backend/app/utils/async_retry.py b/surfsense_backend/app/utils/async_retry.py new file mode 100644 index 000000000..c3bdd5386 --- /dev/null +++ b/surfsense_backend/app/utils/async_retry.py @@ -0,0 +1,129 @@ +"""Async retry decorators for connector API calls, built on tenacity.""" + +from __future__ import annotations + +import logging +from collections.abc import Callable +from typing import TypeVar + +import httpx +from tenacity import ( + before_sleep_log, + retry, + retry_if_exception, + stop_after_attempt, + stop_after_delay, + wait_exponential_jitter, +) + +from app.connectors.exceptions import ( + ConnectorAPIError, + ConnectorAuthError, + ConnectorError, + ConnectorRateLimitError, + ConnectorTimeoutError, +) + +logger = logging.getLogger(__name__) + +F = TypeVar("F", bound=Callable) + + +def _is_retryable(exc: BaseException) -> bool: + if isinstance(exc, ConnectorError): + return exc.retryable + if isinstance(exc, (httpx.TimeoutException, httpx.ConnectError)): + return True + return False + + +def build_retry( + *, + max_attempts: int = 4, + 
max_delay: float = 60.0, + initial_delay: float = 1.0, + total_timeout: float = 180.0, + service: str = "", +) -> Callable: + """Configurable tenacity ``@retry`` decorator with exponential backoff + jitter.""" + _logger = logging.getLogger(f"connector.retry.{service}") if service else logger + + return retry( + retry=retry_if_exception(_is_retryable), + stop=(stop_after_attempt(max_attempts) | stop_after_delay(total_timeout)), + wait=wait_exponential_jitter(initial=initial_delay, max=max_delay), + reraise=True, + before_sleep=before_sleep_log(_logger, logging.WARNING), + ) + + +def retry_on_transient( + *, + service: str = "", + max_attempts: int = 4, +) -> Callable: + """Shorthand: retry up to *max_attempts* on rate-limits, timeouts, and 5xx.""" + return build_retry(max_attempts=max_attempts, service=service) + + +def raise_for_status( + response: httpx.Response, + *, + service: str = "", +) -> None: + """Map non-2xx httpx responses to the appropriate ``ConnectorError``.""" + if response.is_success: + return + + status = response.status_code + + try: + body = response.json() + except Exception: + body = response.text[:500] if response.text else None + + if status == 429: + retry_after_raw = response.headers.get("Retry-After") + retry_after: float | None = None + if retry_after_raw: + try: + retry_after = float(retry_after_raw) + except (ValueError, TypeError): + pass + raise ConnectorRateLimitError( + f"{service} rate limited (429)", + service=service, + retry_after=retry_after, + response_body=body, + ) + + if status in (401, 403): + raise ConnectorAuthError( + f"{service} authentication failed ({status})", + service=service, + status_code=status, + response_body=body, + ) + + if status == 504: + raise ConnectorTimeoutError( + f"{service} gateway timeout (504)", + service=service, + status_code=status, + response_body=body, + ) + + if status >= 500: + raise ConnectorAPIError( + f"{service} server error ({status})", + service=service, + status_code=status, + 
response_body=body, + ) + + raise ConnectorAPIError( + f"{service} request failed ({status})", + service=service, + status_code=status, + response_body=body, + ) From 474c35fb2a760a2a49b2435cf321de816540dfde Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:28:41 +0200 Subject: [PATCH 012/113] add standardized tool response helper --- .../agents/new_chat/tools/tool_response.py | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 surfsense_backend/app/agents/new_chat/tools/tool_response.py diff --git a/surfsense_backend/app/agents/new_chat/tools/tool_response.py b/surfsense_backend/app/agents/new_chat/tools/tool_response.py new file mode 100644 index 000000000..5fb1864b7 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/tool_response.py @@ -0,0 +1,41 @@ +"""Standardised response dict factories for LangChain agent tools.""" + +from __future__ import annotations + +from typing import Any + + +class ToolResponse: + + @staticmethod + def success(message: str, **data: Any) -> dict[str, Any]: + return {"status": "success", "message": message, **data} + + @staticmethod + def error(error: str, **data: Any) -> dict[str, Any]: + return {"status": "error", "error": error, **data} + + @staticmethod + def auth_error(service: str, **data: Any) -> dict[str, Any]: + return { + "status": "auth_error", + "error": ( + f"{service} authentication has expired or been revoked. " + "Please re-connect the integration in Settings → Connectors." 
+ ), + **data, + } + + @staticmethod + def rejected(message: str = "Action was declined by the user.") -> dict[str, Any]: + return {"status": "rejected", "message": message} + + @staticmethod + def not_found( + resource: str, identifier: str, **data: Any + ) -> dict[str, Any]: + return { + "status": "not_found", + "error": f"{resource} '{identifier}' was not found.", + **data, + } From 6529889e7359b83b1ba171355a7ce46af74af446 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:28:45 +0200 Subject: [PATCH 013/113] add declarative connector gating to tool registry --- .../app/agents/new_chat/tools/registry.py | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/surfsense_backend/app/agents/new_chat/tools/registry.py b/surfsense_backend/app/agents/new_chat/tools/registry.py index e7378653b..f9b9287de 100644 --- a/surfsense_backend/app/agents/new_chat/tools/registry.py +++ b/surfsense_backend/app/agents/new_chat/tools/registry.py @@ -119,6 +119,8 @@ class ToolDefinition: factory: Callable that creates the tool. Receives a dict of dependencies. requires: List of dependency names this tool needs (e.g., "search_space_id", "db_session") enabled_by_default: Whether the tool is enabled when no explicit config is provided + required_connector: Searchable type string (e.g. ``"LINEAR_CONNECTOR"``) + that must be in ``available_connectors`` for the tool to be enabled. 
""" @@ -128,6 +130,7 @@ class ToolDefinition: requires: list[str] = field(default_factory=list) enabled_by_default: bool = True hidden: bool = False + required_connector: str | None = None # ============================================================================= @@ -265,6 +268,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="LINEAR_CONNECTOR", ), ToolDefinition( name="update_linear_issue", @@ -275,6 +279,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="LINEAR_CONNECTOR", ), ToolDefinition( name="delete_linear_issue", @@ -285,6 +290,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="LINEAR_CONNECTOR", ), # ========================================================================= # NOTION TOOLS - create, update, delete pages @@ -299,6 +305,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="NOTION_CONNECTOR", ), ToolDefinition( name="update_notion_page", @@ -309,6 +316,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="NOTION_CONNECTOR", ), ToolDefinition( name="delete_notion_page", @@ -319,6 +327,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="NOTION_CONNECTOR", ), # ========================================================================= # NOTION MCP TOOLS - MCP-backed variants (disabled until swap) @@ -333,6 +342,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + 
required_connector="NOTION_CONNECTOR", ), ToolDefinition( name="update_notion_page_mcp", @@ -343,6 +353,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="NOTION_CONNECTOR", ), ToolDefinition( name="delete_notion_page_mcp", @@ -353,6 +364,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="NOTION_CONNECTOR", ), # ========================================================================= # GOOGLE DRIVE TOOLS - create files, delete files @@ -367,6 +379,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_DRIVE_FILE", ), ToolDefinition( name="delete_google_drive_file", @@ -377,6 +390,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_DRIVE_FILE", ), # ========================================================================= # DROPBOX TOOLS - create and trash files @@ -391,6 +405,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="DROPBOX_FILE", ), ToolDefinition( name="delete_dropbox_file", @@ -401,6 +416,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="DROPBOX_FILE", ), # ========================================================================= # ONEDRIVE TOOLS - create and trash files @@ -415,6 +431,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="ONEDRIVE_FILE", ), ToolDefinition( name="delete_onedrive_file", @@ -425,6 +442,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ 
user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="ONEDRIVE_FILE", ), # ========================================================================= # GOOGLE CALENDAR TOOLS - create, update, delete events @@ -439,6 +457,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_CALENDAR_CONNECTOR", ), ToolDefinition( name="update_calendar_event", @@ -449,6 +468,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_CALENDAR_CONNECTOR", ), ToolDefinition( name="delete_calendar_event", @@ -459,6 +479,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_CALENDAR_CONNECTOR", ), # ========================================================================= # GMAIL TOOLS - create drafts, update drafts, send emails, trash emails @@ -473,6 +494,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_GMAIL_CONNECTOR", ), ToolDefinition( name="send_gmail_email", @@ -483,6 +505,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_GMAIL_CONNECTOR", ), ToolDefinition( name="trash_gmail_email", @@ -493,6 +516,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_GMAIL_CONNECTOR", ), ToolDefinition( name="update_gmail_draft", @@ -503,6 +527,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_GMAIL_CONNECTOR", ), # 
========================================================================= # JIRA TOOLS - create, update, delete issues @@ -517,6 +542,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="JIRA_CONNECTOR", ), ToolDefinition( name="update_jira_issue", @@ -527,6 +553,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="JIRA_CONNECTOR", ), ToolDefinition( name="delete_jira_issue", @@ -537,6 +564,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="JIRA_CONNECTOR", ), # ========================================================================= # CONFLUENCE TOOLS - create, update, delete pages @@ -551,6 +579,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="CONFLUENCE_CONNECTOR", ), ToolDefinition( name="update_confluence_page", @@ -561,6 +590,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="CONFLUENCE_CONNECTOR", ), ToolDefinition( name="delete_confluence_page", @@ -571,6 +601,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="CONFLUENCE_CONNECTOR", ), ] @@ -588,6 +619,22 @@ def get_tool_by_name(name: str) -> ToolDefinition | None: return None +def get_connector_gated_tools( + available_connectors: list[str] | None, +) -> list[str]: + """Return tool names to disable""" + if available_connectors is None: + available = set() + else: + available = set(available_connectors) + + disabled: list[str] = [] + for tool_def in BUILTIN_TOOLS: + if tool_def.required_connector and 
tool_def.required_connector not in available: + disabled.append(tool_def.name) + return disabled + + def get_all_tool_names() -> list[str]: """Get names of all registered tools.""" return [tool_def.name for tool_def in BUILTIN_TOOLS] From a1804265b88b35ec88ec9c1abb5f046be35e45f2 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:28:53 +0200 Subject: [PATCH 014/113] replace manual connector checks with declarative gating --- .../app/agents/new_chat/chat_deepagent.py | 123 +++--------------- 1 file changed, 17 insertions(+), 106 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/chat_deepagent.py b/surfsense_backend/app/agents/new_chat/chat_deepagent.py index 6709715bd..480cae8c9 100644 --- a/surfsense_backend/app/agents/new_chat/chat_deepagent.py +++ b/surfsense_backend/app/agents/new_chat/chat_deepagent.py @@ -45,7 +45,7 @@ from app.agents.new_chat.system_prompt import ( build_configurable_system_prompt, build_surfsense_system_prompt, ) -from app.agents.new_chat.tools.registry import build_tools_async +from app.agents.new_chat.tools.registry import build_tools_async, get_connector_gated_tools from app.db import ChatVisibility from app.services.connector_service import ConnectorService from app.utils.perf import get_perf_logger @@ -285,120 +285,31 @@ async def create_surfsense_deep_agent( "llm": llm, } - # Disable Notion action tools if no Notion connector is configured. - # When an MCP-mode connector exists, use MCP tools; otherwise use direct-API tools. modified_disabled_tools = list(disabled_tools) if disabled_tools else [] + modified_disabled_tools.extend( + get_connector_gated_tools(available_connectors) + ) + + # TODO(phase-1): Remove Notion MCP gating after revert. 
has_notion_connector = ( available_connectors is not None and "NOTION_CONNECTOR" in available_connectors ) - _notion_direct_tools = [ - "create_notion_page", - "update_notion_page", - "delete_notion_page", - ] - _notion_mcp_tools = [ - "create_notion_page_mcp", - "update_notion_page_mcp", - "delete_notion_page_mcp", - ] - if not has_notion_connector: - modified_disabled_tools.extend(_notion_direct_tools) - modified_disabled_tools.extend(_notion_mcp_tools) - else: + if has_notion_connector: from app.services.notion_mcp import has_mcp_notion_connector _use_mcp = await has_mcp_notion_connector(db_session, search_space_id) if _use_mcp: - modified_disabled_tools.extend(_notion_direct_tools) + modified_disabled_tools.extend([ + "create_notion_page", + "update_notion_page", + "delete_notion_page", + ]) else: - modified_disabled_tools.extend(_notion_mcp_tools) - - # Disable Linear action tools if no Linear connector is configured - has_linear_connector = ( - available_connectors is not None and "LINEAR_CONNECTOR" in available_connectors - ) - if not has_linear_connector: - linear_tools = [ - "create_linear_issue", - "update_linear_issue", - "delete_linear_issue", - ] - modified_disabled_tools.extend(linear_tools) - - # Disable Google Drive action tools if no Google Drive connector is configured - has_google_drive_connector = ( - available_connectors is not None and "GOOGLE_DRIVE_FILE" in available_connectors - ) - if not has_google_drive_connector: - google_drive_tools = [ - "create_google_drive_file", - "delete_google_drive_file", - ] - modified_disabled_tools.extend(google_drive_tools) - - has_dropbox_connector = ( - available_connectors is not None and "DROPBOX_FILE" in available_connectors - ) - if not has_dropbox_connector: - modified_disabled_tools.extend(["create_dropbox_file", "delete_dropbox_file"]) - - has_onedrive_connector = ( - available_connectors is not None and "ONEDRIVE_FILE" in available_connectors - ) - if not has_onedrive_connector: - 
modified_disabled_tools.extend(["create_onedrive_file", "delete_onedrive_file"]) - - # Disable Google Calendar action tools if no Google Calendar connector is configured - has_google_calendar_connector = ( - available_connectors is not None - and "GOOGLE_CALENDAR_CONNECTOR" in available_connectors - ) - if not has_google_calendar_connector: - calendar_tools = [ - "create_calendar_event", - "update_calendar_event", - "delete_calendar_event", - ] - modified_disabled_tools.extend(calendar_tools) - - # Disable Gmail action tools if no Gmail connector is configured - has_gmail_connector = ( - available_connectors is not None - and "GOOGLE_GMAIL_CONNECTOR" in available_connectors - ) - if not has_gmail_connector: - gmail_tools = [ - "create_gmail_draft", - "update_gmail_draft", - "send_gmail_email", - "trash_gmail_email", - ] - modified_disabled_tools.extend(gmail_tools) - - # Disable Jira action tools if no Jira connector is configured - has_jira_connector = ( - available_connectors is not None and "JIRA_CONNECTOR" in available_connectors - ) - if not has_jira_connector: - jira_tools = [ - "create_jira_issue", - "update_jira_issue", - "delete_jira_issue", - ] - modified_disabled_tools.extend(jira_tools) - - # Disable Confluence action tools if no Confluence connector is configured - has_confluence_connector = ( - available_connectors is not None - and "CONFLUENCE_CONNECTOR" in available_connectors - ) - if not has_confluence_connector: - confluence_tools = [ - "create_confluence_page", - "update_confluence_page", - "delete_confluence_page", - ] - modified_disabled_tools.extend(confluence_tools) + modified_disabled_tools.extend([ + "create_notion_page_mcp", + "update_notion_page_mcp", + "delete_notion_page_mcp", + ]) # Remove direct KB search tool; we now pre-seed a scoped filesystem via middleware. 
if "search_knowledge_base" not in modified_disabled_tools: From 2dfe03b9b2771c7c2398209645c585397aaaf07e Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:29:03 +0200 Subject: [PATCH 015/113] add reusable OAuth connector route base class --- .../app/routes/oauth_connector_base.py | 620 ++++++++++++++++++ 1 file changed, 620 insertions(+) create mode 100644 surfsense_backend/app/routes/oauth_connector_base.py diff --git a/surfsense_backend/app/routes/oauth_connector_base.py b/surfsense_backend/app/routes/oauth_connector_base.py new file mode 100644 index 000000000..0483d2540 --- /dev/null +++ b/surfsense_backend/app/routes/oauth_connector_base.py @@ -0,0 +1,620 @@ +"""Reusable base for OAuth 2.0 connector routes. + +Subclasses override ``fetch_account_info``, ``build_connector_config``, +and ``get_connector_display_name`` to customise provider-specific behaviour. +Call ``build_router()`` to get a FastAPI ``APIRouter`` with ``/connector/add``, +``/connector/callback``, and ``/connector/reauth`` endpoints. 
+""" + +from __future__ import annotations + +import base64 +import logging +from datetime import UTC, datetime, timedelta +from typing import Any +from urllib.parse import urlencode +from uuid import UUID + +import httpx +from fastapi import APIRouter, Depends, HTTPException +from fastapi.responses import RedirectResponse +from sqlalchemy import select +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm.attributes import flag_modified + +from app.config import config +from app.db import ( + SearchSourceConnector, + SearchSourceConnectorType, + User, + get_async_session, +) +from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + generate_unique_connector_name, +) +from app.utils.oauth_security import OAuthStateManager, TokenEncryption + +logger = logging.getLogger(__name__) + + +class OAuthConnectorRoute: + + def __init__( + self, + *, + provider_name: str, + connector_type: SearchSourceConnectorType, + authorize_url: str, + token_url: str, + client_id_env: str, + client_secret_env: str, + redirect_uri_env: str, + scopes: list[str], + auth_prefix: str, + use_pkce: bool = False, + token_auth_method: str = "body", + is_indexable: bool = True, + extra_auth_params: dict[str, str] | None = None, + ) -> None: + self.provider_name = provider_name + self.connector_type = connector_type + self.authorize_url = authorize_url + self.token_url = token_url + self.client_id_env = client_id_env + self.client_secret_env = client_secret_env + self.redirect_uri_env = redirect_uri_env + self.scopes = scopes + self.auth_prefix = auth_prefix.rstrip("/") + self.use_pkce = use_pkce + self.token_auth_method = token_auth_method + self.is_indexable = is_indexable + self.extra_auth_params = extra_auth_params or {} + + self._state_manager: OAuthStateManager | None = None + self._token_encryption: TokenEncryption | None = None + + def _get_client_id(self) -> str: + value = 
getattr(config, self.client_id_env, None) + if not value: + raise HTTPException( + status_code=500, + detail=f"{self.provider_name.title()} OAuth not configured " + f"({self.client_id_env} missing).", + ) + return value + + def _get_client_secret(self) -> str: + value = getattr(config, self.client_secret_env, None) + if not value: + raise HTTPException( + status_code=500, + detail=f"{self.provider_name.title()} OAuth not configured " + f"({self.client_secret_env} missing).", + ) + return value + + def _get_redirect_uri(self) -> str: + value = getattr(config, self.redirect_uri_env, None) + if not value: + raise HTTPException( + status_code=500, + detail=f"{self.redirect_uri_env} not configured.", + ) + return value + + def _get_state_manager(self) -> OAuthStateManager: + if self._state_manager is None: + if not config.SECRET_KEY: + raise HTTPException( + status_code=500, + detail="SECRET_KEY not configured for OAuth security.", + ) + self._state_manager = OAuthStateManager(config.SECRET_KEY) + return self._state_manager + + def _get_token_encryption(self) -> TokenEncryption: + if self._token_encryption is None: + if not config.SECRET_KEY: + raise HTTPException( + status_code=500, + detail="SECRET_KEY not configured for token encryption.", + ) + self._token_encryption = TokenEncryption(config.SECRET_KEY) + return self._token_encryption + + def _frontend_redirect( + self, + space_id: int | None, + *, + success: bool = False, + connector_id: int | None = None, + error: str | None = None, + ) -> RedirectResponse: + if success and space_id: + connector_slug = f"{self.provider_name}-connector" + qs = f"success=true&connector={connector_slug}" + if connector_id: + qs += f"&connectorId={connector_id}" + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?{qs}" + ) + if error and space_id: + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?error={error}" + ) + if error: + 
return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard?error={error}" + ) + return RedirectResponse(url=f"{config.NEXT_FRONTEND_URL}/dashboard") + + async def fetch_account_info(self, access_token: str) -> dict[str, Any]: + """Override to fetch account/workspace info after token exchange. + + Return dict is merged into connector config; key ``"name"`` is used + for the display name and dedup. + """ + return {} + + def build_connector_config( + self, + token_json: dict[str, Any], + account_info: dict[str, Any], + encryption: TokenEncryption, + ) -> dict[str, Any]: + """Override for custom config shapes. Default: standard encrypted OAuth fields.""" + access_token = token_json.get("access_token", "") + refresh_token = token_json.get("refresh_token") + + expires_at = None + if token_json.get("expires_in"): + expires_at = datetime.now(UTC) + timedelta( + seconds=int(token_json["expires_in"]) + ) + + cfg: dict[str, Any] = { + "access_token": encryption.encrypt_token(access_token), + "refresh_token": ( + encryption.encrypt_token(refresh_token) if refresh_token else None + ), + "token_type": token_json.get("token_type", "Bearer"), + "expires_in": token_json.get("expires_in"), + "expires_at": expires_at.isoformat() if expires_at else None, + "scope": token_json.get("scope"), + "_token_encrypted": True, + } + cfg.update(account_info) + return cfg + + def get_connector_display_name(self, account_info: dict[str, Any]) -> str: + return str(account_info.get("name", self.provider_name.title())) + + async def on_token_refresh_failure( + self, + session: AsyncSession, + connector: SearchSourceConnector, + ) -> None: + try: + connector.config = {**connector.config, "auth_expired": True} + flag_modified(connector, "config") + await session.commit() + await session.refresh(connector) + except Exception: + logger.warning( + "Failed to persist auth_expired flag for connector %s", + connector.id, + exc_info=True, + ) + + async def _exchange_code( + self, code: str, 
extra_state: dict[str, Any] + ) -> dict[str, Any]: + client_id = self._get_client_id() + client_secret = self._get_client_secret() + redirect_uri = self._get_redirect_uri() + + headers: dict[str, str] = { + "Content-Type": "application/x-www-form-urlencoded", + } + body: dict[str, str] = { + "grant_type": "authorization_code", + "code": code, + "redirect_uri": redirect_uri, + } + + if self.token_auth_method == "basic": + creds = base64.b64encode(f"{client_id}:{client_secret}".encode()).decode() + headers["Authorization"] = f"Basic {creds}" + else: + body["client_id"] = client_id + body["client_secret"] = client_secret + + if self.use_pkce: + verifier = extra_state.get("code_verifier") + if verifier: + body["code_verifier"] = verifier + + async with httpx.AsyncClient() as client: + resp = await client.post( + self.token_url, data=body, headers=headers, timeout=30.0 + ) + + if resp.status_code != 200: + detail = resp.text + try: + detail = resp.json().get("error_description", detail) + except Exception: + pass + raise HTTPException( + status_code=400, detail=f"Token exchange failed: {detail}" + ) + + return resp.json() + + async def refresh_token( + self, session: AsyncSession, connector: SearchSourceConnector + ) -> SearchSourceConnector: + encryption = self._get_token_encryption() + is_encrypted = connector.config.get("_token_encrypted", False) + + refresh_tok = connector.config.get("refresh_token") + if is_encrypted and refresh_tok: + try: + refresh_tok = encryption.decrypt_token(refresh_tok) + except Exception as e: + logger.error("Failed to decrypt refresh token: %s", e) + raise HTTPException( + status_code=500, detail="Failed to decrypt stored refresh token" + ) from e + + if not refresh_tok: + await self.on_token_refresh_failure(session, connector) + raise HTTPException( + status_code=400, + detail="No refresh token available. 
Please re-authenticate.", + ) + + client_id = self._get_client_id() + client_secret = self._get_client_secret() + + headers: dict[str, str] = { + "Content-Type": "application/x-www-form-urlencoded", + } + body: dict[str, str] = { + "grant_type": "refresh_token", + "refresh_token": refresh_tok, + } + + if self.token_auth_method == "basic": + creds = base64.b64encode(f"{client_id}:{client_secret}".encode()).decode() + headers["Authorization"] = f"Basic {creds}" + else: + body["client_id"] = client_id + body["client_secret"] = client_secret + + async with httpx.AsyncClient() as client: + resp = await client.post( + self.token_url, data=body, headers=headers, timeout=30.0 + ) + + if resp.status_code != 200: + error_detail = resp.text + try: + ej = resp.json() + error_detail = ej.get("error_description", error_detail) + error_code = ej.get("error", "") + except Exception: + error_code = "" + combined = (error_detail + error_code).lower() + if any(kw in combined for kw in ("invalid_grant", "expired", "revoked")): + await self.on_token_refresh_failure(session, connector) + raise HTTPException( + status_code=401, + detail=f"{self.provider_name.title()} authentication failed. 
" + "Please re-authenticate.", + ) + raise HTTPException( + status_code=400, detail=f"Token refresh failed: {error_detail}" + ) + + token_json = resp.json() + new_access = token_json.get("access_token") + if not new_access: + raise HTTPException( + status_code=400, detail="No access token received from refresh" + ) + + expires_at = None + if token_json.get("expires_in"): + expires_at = datetime.now(UTC) + timedelta( + seconds=int(token_json["expires_in"]) + ) + + updated_config = dict(connector.config) + updated_config["access_token"] = encryption.encrypt_token(new_access) + new_refresh = token_json.get("refresh_token") + if new_refresh: + updated_config["refresh_token"] = encryption.encrypt_token(new_refresh) + updated_config["expires_in"] = token_json.get("expires_in") + updated_config["expires_at"] = expires_at.isoformat() if expires_at else None + updated_config["scope"] = token_json.get("scope", updated_config.get("scope")) + updated_config["_token_encrypted"] = True + updated_config.pop("auth_expired", None) + + connector.config = updated_config + flag_modified(connector, "config") + await session.commit() + await session.refresh(connector) + + logger.info( + "Refreshed %s token for connector %s", + self.provider_name, + connector.id, + ) + return connector + + def build_router(self) -> APIRouter: + router = APIRouter() + oauth = self + + @router.get(f"{oauth.auth_prefix}/connector/add") + async def connect( + space_id: int, + user: User = Depends(current_active_user), + ): + if not space_id: + raise HTTPException(status_code=400, detail="space_id is required") + + client_id = oauth._get_client_id() + state_mgr = oauth._get_state_manager() + + extra_state: dict[str, Any] = {} + auth_params: dict[str, str] = { + "client_id": client_id, + "response_type": "code", + "redirect_uri": oauth._get_redirect_uri(), + "scope": " ".join(oauth.scopes), + } + + if oauth.use_pkce: + from app.utils.oauth_security import generate_pkce_pair + + verifier, challenge = 
generate_pkce_pair() + extra_state["code_verifier"] = verifier + auth_params["code_challenge"] = challenge + auth_params["code_challenge_method"] = "S256" + + auth_params.update(oauth.extra_auth_params) + + state_encoded = state_mgr.generate_secure_state( + space_id, user.id, **extra_state + ) + auth_params["state"] = state_encoded + auth_url = f"{oauth.authorize_url}?{urlencode(auth_params)}" + + logger.info( + "Generated %s OAuth URL for user %s, space %s", + oauth.provider_name, + user.id, + space_id, + ) + return {"auth_url": auth_url} + + @router.get(f"{oauth.auth_prefix}/connector/reauth") + async def reauth( + space_id: int, + connector_id: int, + return_url: str | None = None, + user: User = Depends(current_active_user), + session: AsyncSession = Depends(get_async_session), + ): + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == connector_id, + SearchSourceConnector.user_id == user.id, + SearchSourceConnector.search_space_id == space_id, + SearchSourceConnector.connector_type == oauth.connector_type, + ) + ) + if not result.scalars().first(): + raise HTTPException( + status_code=404, + detail=f"{oauth.provider_name.title()} connector not found " + "or access denied", + ) + + client_id = oauth._get_client_id() + state_mgr = oauth._get_state_manager() + + extra: dict[str, Any] = {"connector_id": connector_id} + if return_url and return_url.startswith("/"): + extra["return_url"] = return_url + + auth_params: dict[str, str] = { + "client_id": client_id, + "response_type": "code", + "redirect_uri": oauth._get_redirect_uri(), + "scope": " ".join(oauth.scopes), + } + + if oauth.use_pkce: + from app.utils.oauth_security import generate_pkce_pair + + verifier, challenge = generate_pkce_pair() + extra["code_verifier"] = verifier + auth_params["code_challenge"] = challenge + auth_params["code_challenge_method"] = "S256" + + auth_params.update(oauth.extra_auth_params) + + state_encoded = 
state_mgr.generate_secure_state( + space_id, user.id, **extra + ) + auth_params["state"] = state_encoded + auth_url = f"{oauth.authorize_url}?{urlencode(auth_params)}" + + logger.info( + "Initiating %s re-auth for user %s, connector %s", + oauth.provider_name, + user.id, + connector_id, + ) + return {"auth_url": auth_url} + + @router.get(f"{oauth.auth_prefix}/connector/callback") + async def callback( + code: str | None = None, + error: str | None = None, + state: str | None = None, + session: AsyncSession = Depends(get_async_session), + ): + error_label = f"{oauth.provider_name}_oauth_denied" + + if error: + logger.warning("%s OAuth error: %s", oauth.provider_name, error) + space_id = None + if state: + try: + data = oauth._get_state_manager().validate_state(state) + space_id = data.get("space_id") + except Exception: + pass + return oauth._frontend_redirect(space_id, error=error_label) + + if not code: + raise HTTPException( + status_code=400, detail="Missing authorization code" + ) + if not state: + raise HTTPException( + status_code=400, detail="Missing state parameter" + ) + + state_mgr = oauth._get_state_manager() + try: + data = state_mgr.validate_state(state) + except Exception as e: + raise HTTPException( + status_code=400, detail=f"Invalid state parameter: {e!s}" + ) from e + + user_id = UUID(data["user_id"]) + space_id = data["space_id"] + + token_json = await oauth._exchange_code(code, data) + + access_token = token_json.get("access_token", "") + if not access_token: + raise HTTPException( + status_code=400, + detail=f"No access token received from {oauth.provider_name.title()}", + ) + + account_info = await oauth.fetch_account_info(access_token) + encryption = oauth._get_token_encryption() + connector_config = oauth.build_connector_config( + token_json, account_info, encryption + ) + + display_name = oauth.get_connector_display_name(account_info) + + # --- Re-auth path --- + reauth_connector_id = data.get("connector_id") + reauth_return_url = 
data.get("return_url") + + if reauth_connector_id: + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == reauth_connector_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.search_space_id == space_id, + SearchSourceConnector.connector_type == oauth.connector_type, + ) + ) + db_connector = result.scalars().first() + if not db_connector: + raise HTTPException( + status_code=404, + detail="Connector not found or access denied during re-auth", + ) + + db_connector.config = connector_config + flag_modified(db_connector, "config") + await session.commit() + await session.refresh(db_connector) + + logger.info( + "Re-authenticated %s connector %s for user %s", + oauth.provider_name, + db_connector.id, + user_id, + ) + if reauth_return_url and reauth_return_url.startswith("/"): + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}{reauth_return_url}" + ) + return oauth._frontend_redirect( + space_id, success=True, connector_id=db_connector.id + ) + + # --- New connector path --- + is_dup = await check_duplicate_connector( + session, + oauth.connector_type, + space_id, + user_id, + display_name, + ) + if is_dup: + logger.warning( + "Duplicate %s connector for user %s (%s)", + oauth.provider_name, + user_id, + display_name, + ) + return oauth._frontend_redirect( + space_id, + error=f"duplicate_account&connector={oauth.provider_name}-connector", + ) + + connector_name = await generate_unique_connector_name( + session, + oauth.connector_type, + space_id, + user_id, + display_name, + ) + + new_connector = SearchSourceConnector( + name=connector_name, + connector_type=oauth.connector_type, + is_indexable=oauth.is_indexable, + config=connector_config, + search_space_id=space_id, + user_id=user_id, + ) + session.add(new_connector) + + try: + await session.commit() + except IntegrityError as e: + await session.rollback() + raise HTTPException( + status_code=409, detail=f"Database integrity error: {e!s}" 
+ ) from e + + logger.info( + "Created %s connector %s for user %s in space %s", + oauth.provider_name, + new_connector.id, + user_id, + space_id, + ) + return oauth._frontend_redirect( + space_id, success=True, connector_id=new_connector.id + ) + + return router From ea3bda9ec39120ba223a6ab7b783f24ca7889b69 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:33:10 +0200 Subject: [PATCH 016/113] delete Notion MCP services, tools, and route --- .../new_chat/tools/notion_mcp/__init__.py | 5 - .../new_chat/tools/notion_mcp/create_page.py | 205 -------- .../new_chat/tools/notion_mcp/delete_page.py | 173 ------- .../new_chat/tools/notion_mcp/update_page.py | 179 ------- .../app/routes/notion_mcp_connector_route.py | 486 ------------------ .../app/services/notion_mcp/__init__.py | 27 - .../app/services/notion_mcp/adapter.py | 253 --------- .../app/services/notion_mcp/oauth.py | 298 ----------- .../services/notion_mcp/response_parser.py | 212 -------- 9 files changed, 1838 deletions(-) delete mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py delete mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py delete mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py delete mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py delete mode 100644 surfsense_backend/app/routes/notion_mcp_connector_route.py delete mode 100644 surfsense_backend/app/services/notion_mcp/__init__.py delete mode 100644 surfsense_backend/app/services/notion_mcp/adapter.py delete mode 100644 surfsense_backend/app/services/notion_mcp/oauth.py delete mode 100644 surfsense_backend/app/services/notion_mcp/response_parser.py diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py deleted file mode 100644 index 1e1515bfb..000000000 --- 
a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -"""MCP-backed Notion tool factories. - -Drop-in replacements for ``tools/notion/`` that route through -Notion's hosted MCP server instead of direct API calls. -""" diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py deleted file mode 100644 index a73363a65..000000000 --- a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py +++ /dev/null @@ -1,205 +0,0 @@ -import logging -from typing import Any - -from langchain_core.tools import tool -from sqlalchemy.ext.asyncio import AsyncSession - -from app.agents.new_chat.tools.hitl import request_approval -from app.services.notion import NotionToolMetadataService - -logger = logging.getLogger(__name__) - - -def _find_mcp_connector(connectors): - """Return the first connector with mcp_mode enabled, or None.""" - for c in connectors: - if (c.config or {}).get("mcp_mode"): - return c - return None - - -def create_create_notion_page_mcp_tool( - db_session: AsyncSession | None = None, - search_space_id: int | None = None, - user_id: str | None = None, - connector_id: int | None = None, -): - @tool - async def create_notion_page( - title: str, - content: str | None = None, - ) -> dict[str, Any]: - """Create a new page in Notion with the given title and content. - - Use this tool when the user asks you to create, save, or publish - something to Notion. The page will be created in the user's - configured Notion workspace. The user MUST specify a topic before you - call this tool. If the request does not contain a topic (e.g. "create a - notion page"), ask what the page should be about. Never call this tool - without a clear topic from the user. - - Args: - title: The title of the Notion page. - content: Optional markdown content for the page body (supports headings, lists, paragraphs). 
- Generate this yourself based on the user's topic. - - Returns: - Dictionary with: - - status: "success", "rejected", or "error" - - page_id: Created page ID (if success) - - url: URL to the created page (if success) - - title: Page title (if success) - - message: Result message - - IMPORTANT: If status is "rejected", the user explicitly declined the action. - Respond with a brief acknowledgment (e.g., "Understood, I didn't create the page.") - and move on. Do NOT troubleshoot or suggest alternatives. - - Examples: - - "Create a Notion page about our Q2 roadmap" - - "Save a summary of today's discussion to Notion" - """ - logger.info("create_notion_page (MCP) called: title='%s'", title) - - if db_session is None or search_space_id is None or user_id is None: - logger.error("Notion MCP tool not properly configured - missing required parameters") - return { - "status": "error", - "message": "Notion tool not properly configured. Please contact support.", - } - - try: - metadata_service = NotionToolMetadataService(db_session) - context = await metadata_service.get_creation_context(search_space_id, user_id) - - if "error" in context: - logger.error("Failed to fetch creation context: %s", context["error"]) - return {"status": "error", "message": context["error"]} - - accounts = context.get("accounts", []) - if accounts and all(a.get("auth_expired") for a in accounts): - return { - "status": "auth_error", - "message": "All connected Notion accounts need re-authentication. Please re-authenticate in your connector settings.", - "connector_type": "notion", - } - - result = request_approval( - action_type="notion_page_creation", - tool_name="create_notion_page", - params={ - "title": title, - "content": content, - "parent_page_id": None, - "connector_id": connector_id, - }, - context=context, - ) - - if result.rejected: - logger.info("Notion page creation rejected by user") - return { - "status": "rejected", - "message": "User declined. 
Do not retry or suggest alternatives.", - } - - final_title = result.params.get("title", title) - final_content = result.params.get("content", content) - final_parent_page_id = result.params.get("parent_page_id") - final_connector_id = result.params.get("connector_id", connector_id) - - if not final_title or not final_title.strip(): - return { - "status": "error", - "message": "Page title cannot be empty. Please provide a valid title.", - } - - from sqlalchemy.future import select - - from app.db import SearchSourceConnector, SearchSourceConnectorType - - actual_connector_id = final_connector_id - if actual_connector_id is None: - query_result = await db_session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.search_space_id == search_space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, - ) - ) - connectors = query_result.scalars().all() - connector = _find_mcp_connector(connectors) - - if not connector: - return { - "status": "error", - "message": "No Notion MCP connector found. 
Please connect Notion (MCP) in your workspace settings.", - } - actual_connector_id = connector.id - else: - query_result = await db_session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == actual_connector_id, - SearchSourceConnector.search_space_id == search_space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, - ) - ) - connector = query_result.scalars().first() - if not connector: - return { - "status": "error", - "message": "Selected Notion account is invalid or has been disconnected.", - } - - from app.services.notion_mcp.adapter import NotionMCPAdapter - - adapter = NotionMCPAdapter(session=db_session, connector_id=actual_connector_id) - result = await adapter.create_page( - title=final_title, - content=final_content, - parent_page_id=final_parent_page_id, - ) - logger.info("create_page (MCP) result: %s - %s", result.get("status"), result.get("message", "")) - - if result.get("status") == "success": - kb_message_suffix = "" - try: - from app.services.notion import NotionKBSyncService - - kb_service = NotionKBSyncService(db_session) - kb_result = await kb_service.sync_after_create( - page_id=result.get("page_id"), - page_title=result.get("title", final_title), - page_url=result.get("url"), - content=final_content, - connector_id=actual_connector_id, - search_space_id=search_space_id, - user_id=user_id, - ) - if kb_result["status"] == "success": - kb_message_suffix = " Your knowledge base has also been updated." - else: - kb_message_suffix = " This page will be added to your knowledge base in the next scheduled sync." - except Exception as kb_err: - logger.warning("KB sync after create failed: %s", kb_err) - kb_message_suffix = " This page will be added to your knowledge base in the next scheduled sync." 
- - result["message"] = result.get("message", "") + kb_message_suffix - - return result - - except Exception as e: - from langgraph.errors import GraphInterrupt - - if isinstance(e, GraphInterrupt): - raise - - logger.error("Error creating Notion page (MCP): %s", e, exc_info=True) - if isinstance(e, ValueError): - message = str(e) - else: - message = "Something went wrong while creating the page. Please try again." - return {"status": "error", "message": message} - - return create_notion_page diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py deleted file mode 100644 index c0cf7642b..000000000 --- a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py +++ /dev/null @@ -1,173 +0,0 @@ -import logging -from typing import Any - -from langchain_core.tools import tool -from sqlalchemy.ext.asyncio import AsyncSession - -from app.agents.new_chat.tools.hitl import request_approval -from app.services.notion.tool_metadata_service import NotionToolMetadataService - -logger = logging.getLogger(__name__) - - -def create_delete_notion_page_mcp_tool( - db_session: AsyncSession | None = None, - search_space_id: int | None = None, - user_id: str | None = None, - connector_id: int | None = None, -): - @tool - async def delete_notion_page( - page_title: str, - delete_from_kb: bool = False, - ) -> dict[str, Any]: - """Delete (archive) a Notion page. - - Use this tool when the user asks you to delete, remove, or archive - a Notion page. Note that Notion doesn't permanently delete pages, - it archives them (they can be restored from trash). - - Args: - page_title: The title of the Notion page to delete. - delete_from_kb: Whether to also remove the page from the knowledge base. - Default is False. 
- - Returns: - Dictionary with: - - status: "success", "rejected", "not_found", or "error" - - page_id: Deleted page ID (if success) - - message: Success or error message - - deleted_from_kb: Whether the page was also removed from knowledge base (if success) - - Examples: - - "Delete the 'Meeting Notes' Notion page" - - "Remove the 'Old Project Plan' Notion page" - """ - logger.info( - "delete_notion_page (MCP) called: page_title='%s', delete_from_kb=%s", - page_title, - delete_from_kb, - ) - - if db_session is None or search_space_id is None or user_id is None: - logger.error("Notion MCP tool not properly configured - missing required parameters") - return { - "status": "error", - "message": "Notion tool not properly configured. Please contact support.", - } - - try: - metadata_service = NotionToolMetadataService(db_session) - context = await metadata_service.get_delete_context(search_space_id, user_id, page_title) - - if "error" in context: - error_msg = context["error"] - if "not found" in error_msg.lower(): - return {"status": "not_found", "message": error_msg} - return {"status": "error", "message": error_msg} - - account = context.get("account", {}) - if account.get("auth_expired"): - return { - "status": "auth_error", - "message": "The Notion account for this page needs re-authentication. Please re-authenticate in your connector settings.", - } - - page_id = context.get("page_id") - connector_id_from_context = account.get("id") - document_id = context.get("document_id") - - result = request_approval( - action_type="notion_page_deletion", - tool_name="delete_notion_page", - params={ - "page_id": page_id, - "connector_id": connector_id_from_context, - "delete_from_kb": delete_from_kb, - }, - context=context, - ) - - if result.rejected: - logger.info("Notion page deletion rejected by user") - return { - "status": "rejected", - "message": "User declined. 
Do not retry or suggest alternatives.", - } - - final_page_id = result.params.get("page_id", page_id) - final_connector_id = result.params.get("connector_id", connector_id_from_context) - final_delete_from_kb = result.params.get("delete_from_kb", delete_from_kb) - - from sqlalchemy.future import select - - from app.db import SearchSourceConnector, SearchSourceConnectorType - - if final_connector_id: - query_result = await db_session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == final_connector_id, - SearchSourceConnector.search_space_id == search_space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, - ) - ) - connector = query_result.scalars().first() - if not connector: - return { - "status": "error", - "message": "Selected Notion account is invalid or has been disconnected.", - } - actual_connector_id = connector.id - else: - return {"status": "error", "message": "No connector found for this page."} - - from app.services.notion_mcp.adapter import NotionMCPAdapter - - adapter = NotionMCPAdapter(session=db_session, connector_id=actual_connector_id) - result = await adapter.delete_page(page_id=final_page_id) - logger.info("delete_page (MCP) result: %s - %s", result.get("status"), result.get("message", "")) - - deleted_from_kb = False - if result.get("status") == "success" and final_delete_from_kb and document_id: - try: - from sqlalchemy.future import select - - from app.db import Document - - doc_result = await db_session.execute( - select(Document).filter(Document.id == document_id) - ) - document = doc_result.scalars().first() - - if document: - await db_session.delete(document) - await db_session.commit() - deleted_from_kb = True - logger.info("Deleted document %s from knowledge base", document_id) - except Exception as e: - logger.error("Failed to delete document from KB: %s", e) - await db_session.rollback() - result["warning"] = f"Page 
deleted from Notion, but failed to remove from knowledge base: {e!s}" - - if result.get("status") == "success": - result["deleted_from_kb"] = deleted_from_kb - if deleted_from_kb: - result["message"] = f"{result.get('message', '')} (also removed from knowledge base)" - - return result - - except Exception as e: - from langgraph.errors import GraphInterrupt - - if isinstance(e, GraphInterrupt): - raise - - logger.error("Error deleting Notion page (MCP): %s", e, exc_info=True) - if isinstance(e, ValueError): - message = str(e) - else: - message = "Something went wrong while deleting the page. Please try again." - return {"status": "error", "message": message} - - return delete_notion_page diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py deleted file mode 100644 index 28599cbae..000000000 --- a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py +++ /dev/null @@ -1,179 +0,0 @@ -import logging -from typing import Any - -from langchain_core.tools import tool -from sqlalchemy.ext.asyncio import AsyncSession - -from app.agents.new_chat.tools.hitl import request_approval -from app.services.notion import NotionToolMetadataService - -logger = logging.getLogger(__name__) - - -def create_update_notion_page_mcp_tool( - db_session: AsyncSession | None = None, - search_space_id: int | None = None, - user_id: str | None = None, - connector_id: int | None = None, -): - @tool - async def update_notion_page( - page_title: str, - content: str | None = None, - ) -> dict[str, Any]: - """Update an existing Notion page by appending new content. - - Use this tool when the user asks you to add content to, modify, or update - a Notion page. The new content will be appended to the existing page content. - The user MUST specify what to add before you call this tool. If the - request is vague, ask what content they want added. 
- - Args: - page_title: The title of the Notion page to update. - content: Optional markdown content to append to the page body (supports headings, lists, paragraphs). - Generate this yourself based on the user's request. - - Returns: - Dictionary with: - - status: "success", "rejected", "not_found", or "error" - - page_id: Updated page ID (if success) - - url: URL to the updated page (if success) - - title: Current page title (if success) - - message: Result message - - IMPORTANT: - - If status is "rejected", the user explicitly declined the action. - Respond with a brief acknowledgment (e.g., "Understood, I didn't update the page.") - and move on. Do NOT ask for alternatives or troubleshoot. - - If status is "not_found", inform the user conversationally using the exact message provided. - - Examples: - - "Add today's meeting notes to the 'Meeting Notes' Notion page" - - "Update the 'Project Plan' page with a status update on phase 1" - """ - logger.info( - "update_notion_page (MCP) called: page_title='%s', content_length=%d", - page_title, - len(content) if content else 0, - ) - - if db_session is None or search_space_id is None or user_id is None: - logger.error("Notion MCP tool not properly configured - missing required parameters") - return { - "status": "error", - "message": "Notion tool not properly configured. Please contact support.", - } - - if not content or not content.strip(): - return { - "status": "error", - "message": "Content is required to update the page. 
Please provide the actual content you want to add.", - } - - try: - metadata_service = NotionToolMetadataService(db_session) - context = await metadata_service.get_update_context(search_space_id, user_id, page_title) - - if "error" in context: - error_msg = context["error"] - if "not found" in error_msg.lower(): - return {"status": "not_found", "message": error_msg} - return {"status": "error", "message": error_msg} - - account = context.get("account", {}) - if account.get("auth_expired"): - return { - "status": "auth_error", - "message": "The Notion account for this page needs re-authentication. Please re-authenticate in your connector settings.", - } - - page_id = context.get("page_id") - document_id = context.get("document_id") - connector_id_from_context = account.get("id") - - result = request_approval( - action_type="notion_page_update", - tool_name="update_notion_page", - params={ - "page_id": page_id, - "content": content, - "connector_id": connector_id_from_context, - }, - context=context, - ) - - if result.rejected: - logger.info("Notion page update rejected by user") - return { - "status": "rejected", - "message": "User declined. 
Do not retry or suggest alternatives.", - } - - final_page_id = result.params.get("page_id", page_id) - final_content = result.params.get("content", content) - final_connector_id = result.params.get("connector_id", connector_id_from_context) - - from sqlalchemy.future import select - - from app.db import SearchSourceConnector, SearchSourceConnectorType - - if final_connector_id: - query_result = await db_session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == final_connector_id, - SearchSourceConnector.search_space_id == search_space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, - ) - ) - connector = query_result.scalars().first() - if not connector: - return { - "status": "error", - "message": "Selected Notion account is invalid or has been disconnected.", - } - actual_connector_id = connector.id - else: - return {"status": "error", "message": "No connector found for this page."} - - from app.services.notion_mcp.adapter import NotionMCPAdapter - - adapter = NotionMCPAdapter(session=db_session, connector_id=actual_connector_id) - result = await adapter.update_page(page_id=final_page_id, content=final_content) - logger.info("update_page (MCP) result: %s - %s", result.get("status"), result.get("message", "")) - - if result.get("status") == "success" and document_id is not None: - from app.services.notion import NotionKBSyncService - - kb_service = NotionKBSyncService(db_session) - kb_result = await kb_service.sync_after_update( - document_id=document_id, - appended_content=final_content, - user_id=user_id, - search_space_id=search_space_id, - appended_block_ids=result.get("appended_block_ids"), - ) - - if kb_result["status"] == "success": - result["message"] = f"{result['message']}. Your knowledge base has also been updated." - elif kb_result["status"] == "not_indexed": - result["message"] = f"{result['message']}. 
This page will be added to your knowledge base in the next scheduled sync." - else: - result["message"] = f"{result['message']}. Your knowledge base will be updated in the next scheduled sync." - - return result - - except Exception as e: - from langgraph.errors import GraphInterrupt - - if isinstance(e, GraphInterrupt): - raise - - logger.error("Error updating Notion page (MCP): %s", e, exc_info=True) - if isinstance(e, ValueError): - message = str(e) - else: - message = "Something went wrong while updating the page. Please try again." - return {"status": "error", "message": message} - - return update_notion_page diff --git a/surfsense_backend/app/routes/notion_mcp_connector_route.py b/surfsense_backend/app/routes/notion_mcp_connector_route.py deleted file mode 100644 index b9305cd74..000000000 --- a/surfsense_backend/app/routes/notion_mcp_connector_route.py +++ /dev/null @@ -1,486 +0,0 @@ -"""Notion MCP Connector OAuth Routes. - -Handles OAuth 2.0 + PKCE authentication for Notion's hosted MCP server. -Based on: https://developers.notion.com/guides/mcp/build-mcp-client - -This creates connectors with the same ``NOTION_CONNECTOR`` type as the -existing direct-API connector, but with ``mcp_mode: True`` in the config -so the adapter layer knows to route through MCP. 
-""" - -import logging -from uuid import UUID - -from fastapi import APIRouter, Depends, HTTPException, Request -from fastapi.responses import RedirectResponse -from sqlalchemy import select -from sqlalchemy.exc import IntegrityError -from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.orm.attributes import flag_modified - -from app.config import config -from app.db import ( - SearchSourceConnector, - SearchSourceConnectorType, - User, - get_async_session, -) -from app.services.notion_mcp.oauth import ( - ClientCredentials, - OAuthMetadata, - build_authorization_url, - discover_oauth_metadata, - exchange_code_for_tokens, - refresh_access_token, - register_client, -) -from app.users import current_active_user -from app.utils.connector_naming import ( - check_duplicate_connector, - extract_identifier_from_credentials, - generate_unique_connector_name, -) -from app.utils.oauth_security import OAuthStateManager, TokenEncryption, generate_pkce_pair - -logger = logging.getLogger(__name__) - -router = APIRouter() - -_state_manager: OAuthStateManager | None = None -_token_encryption: TokenEncryption | None = None -_oauth_metadata: OAuthMetadata | None = None - - -def _get_state_manager() -> OAuthStateManager: - global _state_manager - if _state_manager is None: - if not config.SECRET_KEY: - raise ValueError("SECRET_KEY must be set for OAuth security") - _state_manager = OAuthStateManager(config.SECRET_KEY) - return _state_manager - - -def _get_token_encryption() -> TokenEncryption: - global _token_encryption - if _token_encryption is None: - if not config.SECRET_KEY: - raise ValueError("SECRET_KEY must be set for token encryption") - _token_encryption = TokenEncryption(config.SECRET_KEY) - return _token_encryption - - -async def _get_oauth_metadata() -> OAuthMetadata: - global _oauth_metadata - if _oauth_metadata is None: - _oauth_metadata = await discover_oauth_metadata() - return _oauth_metadata - - -async def _fetch_workspace_info(access_token: str) -> 
dict: - """Fetch workspace metadata using the Notion API with the fresh token. - - The ``/v1/users/me`` endpoint returns bot info including workspace_name. - This populates connector config fields so naming and metadata services - work correctly. - """ - try: - import httpx - - async with httpx.AsyncClient(timeout=15.0) as client: - resp = await client.get( - "https://api.notion.com/v1/users/me", - headers={ - "Authorization": f"Bearer {access_token}", - "Notion-Version": "2022-06-28", - }, - ) - if resp.is_success: - data = resp.json() - bot_info = data.get("bot", {}) - return { - "bot_id": data.get("id"), - "workspace_name": bot_info.get("workspace_name", "Notion Workspace"), - "workspace_icon": data.get("avatar_url") or "📄", - } - except Exception as e: - logger.warning("Failed to fetch workspace info: %s", e) - return {} - - -NOTION_MCP_REDIRECT_URI = None - - -def _get_redirect_uri() -> str: - global NOTION_MCP_REDIRECT_URI - if NOTION_MCP_REDIRECT_URI is None: - backend = config.BACKEND_URL or "http://localhost:8000" - NOTION_MCP_REDIRECT_URI = f"{backend}/api/v1/auth/notion-mcp/connector/callback" - return NOTION_MCP_REDIRECT_URI - - -# --------------------------------------------------------------------------- -# Route: initiate OAuth -# --------------------------------------------------------------------------- - - -@router.get("/auth/notion-mcp/connector/add") -async def connect_notion_mcp( - space_id: int, - user: User = Depends(current_active_user), -): - """Initiate Notion MCP OAuth + PKCE flow.""" - if not config.SECRET_KEY: - raise HTTPException(status_code=500, detail="SECRET_KEY not configured.") - - try: - metadata = await _get_oauth_metadata() - - redirect_uri = _get_redirect_uri() - credentials = await register_client(metadata, redirect_uri) - - code_verifier, code_challenge = generate_pkce_pair() - - state_manager = _get_state_manager() - state_encoded = state_manager.generate_secure_state( - space_id, - user.id, - code_verifier=code_verifier, 
- mcp_client_id=credentials.client_id, - mcp_client_secret=credentials.client_secret or "", - ) - - auth_url = build_authorization_url( - metadata=metadata, - client_id=credentials.client_id, - redirect_uri=redirect_uri, - code_challenge=code_challenge, - state=state_encoded, - ) - - logger.info("Generated Notion MCP OAuth URL for user %s, space %s", user.id, space_id) - return {"auth_url": auth_url} - - except Exception as e: - logger.error("Failed to initiate Notion MCP OAuth: %s", e, exc_info=True) - raise HTTPException( - status_code=500, detail=f"Failed to initiate Notion MCP OAuth: {e!s}" - ) from e - - -# --------------------------------------------------------------------------- -# Route: re-authenticate existing connector -# --------------------------------------------------------------------------- - - -@router.get("/auth/notion-mcp/connector/reauth") -async def reauth_notion_mcp( - space_id: int, - connector_id: int, - return_url: str | None = None, - user: User = Depends(current_active_user), - session: AsyncSession = Depends(get_async_session), -): - """Initiate re-authentication for an existing Notion MCP connector.""" - result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == connector_id, - SearchSourceConnector.user_id == user.id, - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, - ) - ) - connector = result.scalars().first() - if not connector: - raise HTTPException(status_code=404, detail="Connector not found or access denied") - - if not config.SECRET_KEY: - raise HTTPException(status_code=500, detail="SECRET_KEY not configured.") - - try: - metadata = await _get_oauth_metadata() - redirect_uri = _get_redirect_uri() - credentials = await register_client(metadata, redirect_uri) - - code_verifier, code_challenge = generate_pkce_pair() - - extra: dict = { - "connector_id": connector_id, - "code_verifier": 
code_verifier, - "mcp_client_id": credentials.client_id, - "mcp_client_secret": credentials.client_secret or "", - } - if return_url and return_url.startswith("/"): - extra["return_url"] = return_url - - state_manager = _get_state_manager() - state_encoded = state_manager.generate_secure_state(space_id, user.id, **extra) - - auth_url = build_authorization_url( - metadata=metadata, - client_id=credentials.client_id, - redirect_uri=redirect_uri, - code_challenge=code_challenge, - state=state_encoded, - ) - - logger.info("Initiating Notion MCP re-auth for user %s, connector %s", user.id, connector_id) - return {"auth_url": auth_url} - - except HTTPException: - raise - except Exception as e: - logger.error("Failed to initiate Notion MCP re-auth: %s", e, exc_info=True) - raise HTTPException( - status_code=500, detail=f"Failed to initiate Notion MCP re-auth: {e!s}" - ) from e - - -# --------------------------------------------------------------------------- -# Route: OAuth callback -# --------------------------------------------------------------------------- - - -@router.get("/auth/notion-mcp/connector/callback") -async def notion_mcp_callback( - request: Request, - code: str | None = None, - error: str | None = None, - state: str | None = None, - session: AsyncSession = Depends(get_async_session), -): - """Handle the OAuth callback from Notion's MCP authorization server.""" - if error: - logger.warning("Notion MCP OAuth error: %s", error) - space_id = None - if state: - try: - data = _get_state_manager().validate_state(state) - space_id = data.get("space_id") - except Exception: - pass - if space_id: - return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?error=notion_mcp_oauth_denied" - ) - return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=notion_mcp_oauth_denied" - ) - - if not code: - raise HTTPException(status_code=400, detail="Missing authorization code") - if not state: - raise 
HTTPException(status_code=400, detail="Missing state parameter") - - state_manager = _get_state_manager() - try: - data = state_manager.validate_state(state) - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=400, detail=f"Invalid state: {e!s}") from e - - user_id = UUID(data["user_id"]) - space_id = data["space_id"] - code_verifier = data.get("code_verifier") - mcp_client_id = data.get("mcp_client_id") - mcp_client_secret = data.get("mcp_client_secret") or None - - if not code_verifier or not mcp_client_id: - raise HTTPException(status_code=400, detail="Missing PKCE or client data in state") - - try: - metadata = await _get_oauth_metadata() - redirect_uri = _get_redirect_uri() - - token_set = await exchange_code_for_tokens( - code=code, - code_verifier=code_verifier, - metadata=metadata, - client_id=mcp_client_id, - redirect_uri=redirect_uri, - client_secret=mcp_client_secret, - ) - except Exception as e: - logger.error("Notion MCP token exchange failed: %s", e, exc_info=True) - raise HTTPException(status_code=400, detail=f"Token exchange failed: {e!s}") from e - - token_encryption = _get_token_encryption() - - workspace_info = await _fetch_workspace_info(token_set.access_token) - - connector_config = { - "access_token": token_encryption.encrypt_token(token_set.access_token), - "refresh_token": token_encryption.encrypt_token(token_set.refresh_token) - if token_set.refresh_token - else None, - "expires_in": token_set.expires_in, - "expires_at": token_set.expires_at.isoformat() if token_set.expires_at else None, - "workspace_id": workspace_info.get("workspace_id"), - "workspace_name": workspace_info.get("workspace_name", "Notion Workspace"), - "workspace_icon": workspace_info.get("workspace_icon", "📄"), - "bot_id": workspace_info.get("bot_id"), - "mcp_mode": True, - "mcp_client_id": mcp_client_id, - "mcp_client_secret": token_encryption.encrypt_token(mcp_client_secret) - if mcp_client_secret - else None, - "_token_encrypted": 
True, - } - - reauth_connector_id = data.get("connector_id") - reauth_return_url = data.get("return_url") - - # --- Re-auth path --- - if reauth_connector_id: - result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == reauth_connector_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, - ) - ) - db_connector = result.scalars().first() - if not db_connector: - raise HTTPException(status_code=404, detail="Connector not found during re-auth") - - db_connector.config = connector_config - flag_modified(db_connector, "config") - await session.commit() - await session.refresh(db_connector) - - logger.info("Re-authenticated Notion MCP connector %s for user %s", db_connector.id, user_id) - if reauth_return_url and reauth_return_url.startswith("/"): - return RedirectResponse(url=f"{config.NEXT_FRONTEND_URL}{reauth_return_url}") - return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?success=true&connector=notion-connector&connectorId={db_connector.id}" - ) - - # --- New connector path --- - connector_identifier = extract_identifier_from_credentials( - SearchSourceConnectorType.NOTION_CONNECTOR, connector_config - ) - - is_duplicate = await check_duplicate_connector( - session, - SearchSourceConnectorType.NOTION_CONNECTOR, - space_id, - user_id, - connector_identifier, - ) - if is_duplicate: - logger.warning("Duplicate Notion MCP connector for user %s", user_id) - return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?error=duplicate_account&connector=notion-connector" - ) - - connector_name = await generate_unique_connector_name( - session, - SearchSourceConnectorType.NOTION_CONNECTOR, - space_id, - user_id, - connector_identifier, - ) - - new_connector = SearchSourceConnector( - name=connector_name, - 
connector_type=SearchSourceConnectorType.NOTION_CONNECTOR, - is_indexable=True, - config=connector_config, - search_space_id=space_id, - user_id=user_id, - ) - session.add(new_connector) - - try: - await session.commit() - logger.info("Created Notion MCP connector for user %s in space %s", user_id, space_id) - return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?success=true&connector=notion-connector&connectorId={new_connector.id}" - ) - except IntegrityError as e: - await session.rollback() - raise HTTPException(status_code=409, detail=f"Database integrity error: {e!s}") from e - except Exception as e: - await session.rollback() - raise HTTPException( - status_code=500, detail=f"Failed to create connector: {e!s}" - ) from e - - -# --------------------------------------------------------------------------- -# Token refresh helper (used by the adapter) -# --------------------------------------------------------------------------- - - -async def refresh_notion_mcp_token( - session: AsyncSession, - connector: SearchSourceConnector, -) -> SearchSourceConnector: - """Refresh the MCP access token for a connector. - - Handles refresh-token rotation: persists both new access_token - and new refresh_token atomically. - """ - token_encryption = _get_token_encryption() - - cfg = connector.config or {} - encrypted_refresh = cfg.get("refresh_token") - if not encrypted_refresh: - raise HTTPException(status_code=400, detail="No refresh token available. 
Please re-authenticate.") - - try: - refresh_token = token_encryption.decrypt_token(encrypted_refresh) - except Exception as e: - raise HTTPException(status_code=500, detail=f"Failed to decrypt refresh token: {e!s}") from e - - mcp_client_id = cfg.get("mcp_client_id") - mcp_client_secret_encrypted = cfg.get("mcp_client_secret") - mcp_client_secret = ( - token_encryption.decrypt_token(mcp_client_secret_encrypted) - if mcp_client_secret_encrypted - else None - ) - - if not mcp_client_id: - raise HTTPException(status_code=400, detail="Missing MCP client_id. Please re-authenticate.") - - metadata = await _get_oauth_metadata() - - try: - token_set = await refresh_access_token( - refresh_token=refresh_token, - metadata=metadata, - client_id=mcp_client_id, - client_secret=mcp_client_secret, - ) - except ValueError as e: - if "REAUTH_REQUIRED" in str(e): - connector.config = {**connector.config, "auth_expired": True} - flag_modified(connector, "config") - await session.commit() - await session.refresh(connector) - raise HTTPException( - status_code=401, detail="Notion MCP authentication expired. Please re-authenticate." 
- ) from e - raise HTTPException(status_code=400, detail=f"Token refresh failed: {e!s}") from e - - updated_config = { - **connector.config, - "access_token": token_encryption.encrypt_token(token_set.access_token), - "refresh_token": token_encryption.encrypt_token(token_set.refresh_token) - if token_set.refresh_token - else connector.config.get("refresh_token"), - "expires_in": token_set.expires_in, - "expires_at": token_set.expires_at.isoformat() if token_set.expires_at else None, - "_token_encrypted": True, - } - updated_config.pop("auth_expired", None) - - connector.config = updated_config - flag_modified(connector, "config") - await session.commit() - await session.refresh(connector) - - logger.info("Refreshed Notion MCP token for connector %s", connector.id) - return connector diff --git a/surfsense_backend/app/services/notion_mcp/__init__.py b/surfsense_backend/app/services/notion_mcp/__init__.py deleted file mode 100644 index 6a57500b6..000000000 --- a/surfsense_backend/app/services/notion_mcp/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Notion MCP integration. - -Routes Notion operations through Notion's hosted MCP server -at https://mcp.notion.com/mcp instead of direct API calls. 
-""" - -from sqlalchemy import select -from sqlalchemy.ext.asyncio import AsyncSession - -from app.db import SearchSourceConnector, SearchSourceConnectorType - - -async def has_mcp_notion_connector( - session: AsyncSession, - search_space_id: int, -) -> bool: - """Check whether the search space has at least one MCP-mode Notion connector.""" - result = await session.execute( - select(SearchSourceConnector.id, SearchSourceConnector.config).filter( - SearchSourceConnector.search_space_id == search_space_id, - SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, - ) - ) - for _, config in result.all(): - if isinstance(config, dict) and config.get("mcp_mode"): - return True - return False diff --git a/surfsense_backend/app/services/notion_mcp/adapter.py b/surfsense_backend/app/services/notion_mcp/adapter.py deleted file mode 100644 index 76eac6305..000000000 --- a/surfsense_backend/app/services/notion_mcp/adapter.py +++ /dev/null @@ -1,253 +0,0 @@ -"""Notion MCP Adapter. - -Connects to Notion's hosted MCP server at ``https://mcp.notion.com/mcp`` -and exposes the same method signatures as ``NotionHistoryConnector``'s -write operations so that tool factories can swap with a one-line change. - -Includes an optional fallback to ``NotionHistoryConnector`` when the MCP -server returns known serialization errors (GitHub issues #215, #216). 
-""" - -import logging -from datetime import UTC, datetime -from typing import Any - -from mcp import ClientSession -from mcp.client.streamable_http import streamablehttp_client -from sqlalchemy import select -from sqlalchemy.ext.asyncio import AsyncSession - -from app.config import config -from app.db import SearchSourceConnector -from app.schemas.notion_auth_credentials import NotionAuthCredentialsBase -from app.utils.oauth_security import TokenEncryption - -from .response_parser import ( - extract_text_from_mcp_response, - is_mcp_serialization_error, - parse_create_page_response, - parse_delete_page_response, - parse_fetch_page_response, - parse_health_check_response, - parse_update_page_response, -) - -logger = logging.getLogger(__name__) - -NOTION_MCP_URL = "https://mcp.notion.com/mcp" - - -class NotionMCPAdapter: - """Routes Notion operations through the hosted MCP server. - - Drop-in replacement for ``NotionHistoryConnector`` write methods. - Returns the same dict structure so KB sync works unchanged. 
- """ - - def __init__(self, session: AsyncSession, connector_id: int): - self._session = session - self._connector_id = connector_id - self._access_token: str | None = None - - async def _get_valid_token(self) -> str: - """Get a valid MCP access token, refreshing if expired.""" - result = await self._session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == self._connector_id - ) - ) - connector = result.scalars().first() - if not connector: - raise ValueError(f"Connector {self._connector_id} not found") - - cfg = connector.config or {} - - if not cfg.get("mcp_mode"): - raise ValueError( - f"Connector {self._connector_id} is not an MCP connector" - ) - - access_token = cfg.get("access_token") - if not access_token: - raise ValueError("No access token in MCP connector config") - - is_encrypted = cfg.get("_token_encrypted", False) - if is_encrypted and config.SECRET_KEY: - token_encryption = TokenEncryption(config.SECRET_KEY) - access_token = token_encryption.decrypt_token(access_token) - - expires_at_str = cfg.get("expires_at") - if expires_at_str: - expires_at = datetime.fromisoformat(expires_at_str) - if expires_at.tzinfo is None: - expires_at = expires_at.replace(tzinfo=UTC) - if expires_at <= datetime.now(UTC): - from app.routes.notion_mcp_connector_route import refresh_notion_mcp_token - - connector = await refresh_notion_mcp_token(self._session, connector) - cfg = connector.config or {} - access_token = cfg.get("access_token", "") - if is_encrypted and config.SECRET_KEY: - token_encryption = TokenEncryption(config.SECRET_KEY) - access_token = token_encryption.decrypt_token(access_token) - - self._access_token = access_token - return access_token - - async def _call_mcp_tool( - self, tool_name: str, arguments: dict[str, Any] - ) -> str: - """Connect to Notion MCP server and call a tool. 
Returns raw text.""" - token = await self._get_valid_token() - headers = {"Authorization": f"Bearer {token}"} - - async with ( - streamablehttp_client(NOTION_MCP_URL, headers=headers) as (read, write, _), - ClientSession(read, write) as session, - ): - await session.initialize() - response = await session.call_tool(tool_name, arguments=arguments) - return extract_text_from_mcp_response(response) - - async def _call_with_fallback( - self, - tool_name: str, - arguments: dict[str, Any], - parser, - fallback_method: str | None = None, - fallback_kwargs: dict[str, Any] | None = None, - ) -> dict[str, Any]: - """Call MCP tool, parse response, and fall back on serialization errors.""" - try: - raw_text = await self._call_mcp_tool(tool_name, arguments) - result = parser(raw_text) - - if result.get("mcp_serialization_error") and fallback_method: - logger.warning( - "MCP tool '%s' hit serialization bug, falling back to direct API", - tool_name, - ) - return await self._fallback(fallback_method, fallback_kwargs or {}) - - return result - - except Exception as e: - error_str = str(e) - if is_mcp_serialization_error(error_str) and fallback_method: - logger.warning( - "MCP tool '%s' raised serialization error, falling back: %s", - tool_name, - error_str, - ) - return await self._fallback(fallback_method, fallback_kwargs or {}) - - logger.error("MCP tool '%s' failed: %s", tool_name, e, exc_info=True) - return {"status": "error", "message": f"MCP call failed: {e!s}"} - - async def _fallback( - self, method_name: str, kwargs: dict[str, Any] - ) -> dict[str, Any]: - """Fall back to NotionHistoryConnector for the given method. - - Uses the already-refreshed MCP access token directly with the - Notion SDK, bypassing the connector's config-based token loading. 
- """ - from app.connectors.notion_history import NotionHistoryConnector - from app.schemas.notion_auth_credentials import NotionAuthCredentialsBase - - token = self._access_token - if not token: - token = await self._get_valid_token() - - connector = NotionHistoryConnector( - session=self._session, - connector_id=self._connector_id, - ) - connector._credentials = NotionAuthCredentialsBase(access_token=token) - connector._using_legacy_token = True - - method = getattr(connector, method_name) - return await method(**kwargs) - - # ------------------------------------------------------------------ - # Public API — same signatures as NotionHistoryConnector - # ------------------------------------------------------------------ - - async def create_page( - self, - title: str, - content: str, - parent_page_id: str | None = None, - ) -> dict[str, Any]: - arguments: dict[str, Any] = { - "pages": [ - { - "title": title, - "content": content, - } - ] - } - if parent_page_id: - arguments["pages"][0]["parent_page_url"] = parent_page_id - - return await self._call_with_fallback( - tool_name="notion-create-pages", - arguments=arguments, - parser=parse_create_page_response, - fallback_method="create_page", - fallback_kwargs={ - "title": title, - "content": content, - "parent_page_id": parent_page_id, - }, - ) - - async def update_page( - self, - page_id: str, - content: str | None = None, - ) -> dict[str, Any]: - arguments: dict[str, Any] = { - "page_id": page_id, - "command": "replace_content", - } - if content: - arguments["new_str"] = content - - return await self._call_with_fallback( - tool_name="notion-update-page", - arguments=arguments, - parser=parse_update_page_response, - fallback_method="update_page", - fallback_kwargs={"page_id": page_id, "content": content}, - ) - - async def delete_page(self, page_id: str) -> dict[str, Any]: - arguments: dict[str, Any] = { - "page_id": page_id, - "command": "update_properties", - "archived": True, - } - - return await 
self._call_with_fallback( - tool_name="notion-update-page", - arguments=arguments, - parser=parse_delete_page_response, - fallback_method="delete_page", - fallback_kwargs={"page_id": page_id}, - ) - - async def fetch_page(self, page_url_or_id: str) -> dict[str, Any]: - """Fetch page content via ``notion-fetch``.""" - raw_text = await self._call_mcp_tool( - "notion-fetch", {"url": page_url_or_id} - ) - return parse_fetch_page_response(raw_text) - - async def health_check(self) -> dict[str, Any]: - """Check MCP connection via ``notion-get-self``.""" - try: - raw_text = await self._call_mcp_tool("notion-get-self", {}) - return parse_health_check_response(raw_text) - except Exception as e: - return {"status": "error", "message": str(e)} diff --git a/surfsense_backend/app/services/notion_mcp/oauth.py b/surfsense_backend/app/services/notion_mcp/oauth.py deleted file mode 100644 index cfa6ad3e0..000000000 --- a/surfsense_backend/app/services/notion_mcp/oauth.py +++ /dev/null @@ -1,298 +0,0 @@ -"""OAuth 2.0 + PKCE utilities for Notion's remote MCP server. - -Implements the flow described in the official guide: -https://developers.notion.com/guides/mcp/build-mcp-client - -Steps: - 1. Discover OAuth metadata (RFC 9470 → RFC 8414) - 2. Dynamic client registration (RFC 7591) - 3. Build authorization URL with PKCE code_challenge - 4. Exchange authorization code + code_verifier for tokens - 5. Refresh access tokens (with refresh-token rotation) - -All functions are stateless — callers (route handlers) manage storage. 
-""" - -import logging -from dataclasses import dataclass -from datetime import UTC, datetime, timedelta -from typing import Any - -import httpx - -logger = logging.getLogger(__name__) - -NOTION_MCP_SERVER_URL = "https://mcp.notion.com/mcp" -_HTTP_TIMEOUT = 30.0 - - -@dataclass(frozen=True) -class OAuthMetadata: - issuer: str - authorization_endpoint: str - token_endpoint: str - registration_endpoint: str | None - code_challenge_methods_supported: list[str] - - -@dataclass(frozen=True) -class ClientCredentials: - client_id: str - client_secret: str | None = None - client_id_issued_at: int | None = None - client_secret_expires_at: int | None = None - - -@dataclass(frozen=True) -class TokenSet: - access_token: str - refresh_token: str | None - token_type: str - expires_in: int | None - expires_at: datetime | None - scope: str | None - - -# --------------------------------------------------------------------------- -# Step 1 — OAuth discovery -# --------------------------------------------------------------------------- - - -async def discover_oauth_metadata( - mcp_server_url: str = NOTION_MCP_SERVER_URL, -) -> OAuthMetadata: - """Discover OAuth endpoints via RFC 9470 + RFC 8414. - - 1. Fetch protected-resource metadata to find the authorization server. - 2. Fetch authorization-server metadata to get OAuth endpoints. 
- """ - from urllib.parse import urlparse - - parsed = urlparse(mcp_server_url) - origin = f"{parsed.scheme}://{parsed.netloc}" - path = parsed.path.rstrip("/") - - async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: - # RFC 9470 — Protected Resource Metadata - # URL format: {origin}/.well-known/oauth-protected-resource{path} - pr_url = f"{origin}/.well-known/oauth-protected-resource{path}" - pr_resp = await client.get(pr_url) - pr_resp.raise_for_status() - pr_data = pr_resp.json() - - auth_servers = pr_data.get("authorization_servers", []) - if not auth_servers: - raise ValueError("No authorization_servers in protected resource metadata") - auth_server_url = auth_servers[0] - - # RFC 8414 — Authorization Server Metadata - as_url = f"{auth_server_url}/.well-known/oauth-authorization-server" - as_resp = await client.get(as_url) - as_resp.raise_for_status() - as_data = as_resp.json() - - if not as_data.get("authorization_endpoint") or not as_data.get("token_endpoint"): - raise ValueError("Missing required OAuth endpoints in server metadata") - - return OAuthMetadata( - issuer=as_data.get("issuer", auth_server_url), - authorization_endpoint=as_data["authorization_endpoint"], - token_endpoint=as_data["token_endpoint"], - registration_endpoint=as_data.get("registration_endpoint"), - code_challenge_methods_supported=as_data.get( - "code_challenge_methods_supported", [] - ), - ) - - -# --------------------------------------------------------------------------- -# Step 2 — Dynamic client registration (RFC 7591) -# --------------------------------------------------------------------------- - - -async def register_client( - metadata: OAuthMetadata, - redirect_uri: str, - client_name: str = "SurfSense", -) -> ClientCredentials: - """Dynamically register an OAuth client with the Notion MCP server.""" - if not metadata.registration_endpoint: - raise ValueError("Server does not support dynamic client registration") - - payload = { - "client_name": client_name, - 
"redirect_uris": [redirect_uri], - "grant_types": ["authorization_code", "refresh_token"], - "response_types": ["code"], - "token_endpoint_auth_method": "none", - } - - async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: - resp = await client.post( - metadata.registration_endpoint, - json=payload, - headers={"Content-Type": "application/json", "Accept": "application/json"}, - ) - if not resp.is_success: - logger.error( - "Dynamic client registration failed (%s): %s", - resp.status_code, - resp.text, - ) - resp.raise_for_status() - data = resp.json() - - return ClientCredentials( - client_id=data["client_id"], - client_secret=data.get("client_secret"), - client_id_issued_at=data.get("client_id_issued_at"), - client_secret_expires_at=data.get("client_secret_expires_at"), - ) - - -# --------------------------------------------------------------------------- -# Step 3 — Build authorization URL -# --------------------------------------------------------------------------- - - -def build_authorization_url( - metadata: OAuthMetadata, - client_id: str, - redirect_uri: str, - code_challenge: str, - state: str, -) -> str: - """Build the OAuth authorization URL with PKCE parameters.""" - from urllib.parse import urlencode - - params = { - "response_type": "code", - "client_id": client_id, - "redirect_uri": redirect_uri, - "code_challenge": code_challenge, - "code_challenge_method": "S256", - "state": state, - "prompt": "consent", - } - return f"{metadata.authorization_endpoint}?{urlencode(params)}" - - -# --------------------------------------------------------------------------- -# Step 4 — Exchange authorization code for tokens -# --------------------------------------------------------------------------- - - -async def exchange_code_for_tokens( - code: str, - code_verifier: str, - metadata: OAuthMetadata, - client_id: str, - redirect_uri: str, - client_secret: str | None = None, -) -> TokenSet: - """Exchange an authorization code + PKCE verifier for tokens.""" - 
form_data: dict[str, Any] = { - "grant_type": "authorization_code", - "code": code, - "client_id": client_id, - "redirect_uri": redirect_uri, - "code_verifier": code_verifier, - } - if client_secret: - form_data["client_secret"] = client_secret - - async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: - resp = await client.post( - metadata.token_endpoint, - data=form_data, - headers={ - "Content-Type": "application/x-www-form-urlencoded", - "Accept": "application/json", - }, - ) - if not resp.is_success: - body = resp.text - raise ValueError(f"Token exchange failed ({resp.status_code}): {body}") - tokens = resp.json() - - if not tokens.get("access_token"): - raise ValueError("No access_token in token response") - - expires_at = None - if tokens.get("expires_in"): - expires_at = datetime.now(UTC) + timedelta(seconds=int(tokens["expires_in"])) - - return TokenSet( - access_token=tokens["access_token"], - refresh_token=tokens.get("refresh_token"), - token_type=tokens.get("token_type", "Bearer"), - expires_in=tokens.get("expires_in"), - expires_at=expires_at, - scope=tokens.get("scope"), - ) - - -# --------------------------------------------------------------------------- -# Step 5 — Refresh access token -# --------------------------------------------------------------------------- - - -async def refresh_access_token( - refresh_token: str, - metadata: OAuthMetadata, - client_id: str, - client_secret: str | None = None, -) -> TokenSet: - """Refresh an access token. - - Notion MCP uses refresh-token rotation: each refresh returns a new - refresh_token and invalidates the old one. Callers MUST persist the - new refresh_token atomically with the new access_token. 
- """ - form_data: dict[str, Any] = { - "grant_type": "refresh_token", - "refresh_token": refresh_token, - "client_id": client_id, - } - if client_secret: - form_data["client_secret"] = client_secret - - async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: - resp = await client.post( - metadata.token_endpoint, - data=form_data, - headers={ - "Content-Type": "application/x-www-form-urlencoded", - "Accept": "application/json", - }, - ) - - if not resp.is_success: - body = resp.text - try: - error_data = resp.json() - error_code = error_data.get("error", "") - if error_code == "invalid_grant": - raise ValueError("REAUTH_REQUIRED") - except ValueError: - if "REAUTH_REQUIRED" in str(resp.text) or resp.status_code == 401: - raise - raise ValueError(f"Token refresh failed ({resp.status_code}): {body}") - - tokens = resp.json() - - if not tokens.get("access_token"): - raise ValueError("No access_token in refresh response") - - expires_at = None - if tokens.get("expires_in"): - expires_at = datetime.now(UTC) + timedelta(seconds=int(tokens["expires_in"])) - - return TokenSet( - access_token=tokens["access_token"], - refresh_token=tokens.get("refresh_token"), - token_type=tokens.get("token_type", "Bearer"), - expires_in=tokens.get("expires_in"), - expires_at=expires_at, - scope=tokens.get("scope"), - ) diff --git a/surfsense_backend/app/services/notion_mcp/response_parser.py b/surfsense_backend/app/services/notion_mcp/response_parser.py deleted file mode 100644 index 34d5ef332..000000000 --- a/surfsense_backend/app/services/notion_mcp/response_parser.py +++ /dev/null @@ -1,212 +0,0 @@ -"""Parse Notion MCP tool responses into structured dicts. - -The Notion MCP server returns responses as MCP TextContent where the -``text`` field contains JSON-stringified Notion API response data. 
-See: https://deepwiki.com/makenotion/notion-mcp-server/4.3-request-and-response-handling - -This module extracts that JSON and normalises it into the same dict -format that ``NotionHistoryConnector`` methods return, so downstream -code (KB sync, tool factories) works unchanged. -""" - -import json -import logging -from typing import Any - -logger = logging.getLogger(__name__) - -MCP_SERIALIZATION_ERROR_MARKERS = [ - "Expected array, received string", - "Expected object, received string", - "should be defined, instead was `undefined`", -] - - -def is_mcp_serialization_error(text: str) -> bool: - """Return True if the MCP error text matches a known serialization bug.""" - return any(marker in text for marker in MCP_SERIALIZATION_ERROR_MARKERS) - - -def extract_text_from_mcp_response(response) -> str: - """Pull the concatenated text out of an MCP ``CallToolResult``. - - Args: - response: The ``CallToolResult`` returned by ``session.call_tool()``. - - Returns: - Concatenated text content from the response. 
- """ - parts: list[str] = [] - for content in response.content: - if hasattr(content, "text"): - parts.append(content.text) - elif hasattr(content, "data"): - parts.append(str(content.data)) - else: - parts.append(str(content)) - return "\n".join(parts) if parts else "" - - -def _try_parse_json(text: str) -> dict[str, Any] | None: - """Attempt to parse *text* as JSON, returning None on failure.""" - try: - parsed = json.loads(text) - if isinstance(parsed, dict): - return parsed - except (json.JSONDecodeError, TypeError): - pass - return None - - -def _extract_page_title(page_data: dict[str, Any]) -> str: - """Best-effort extraction of the page title from a Notion page object.""" - props = page_data.get("properties", {}) - for prop in props.values(): - if prop.get("type") == "title": - title_parts = prop.get("title", []) - if title_parts: - return " ".join(t.get("plain_text", "") for t in title_parts) - return page_data.get("id", "Untitled") - - -def parse_create_page_response(raw_text: str) -> dict[str, Any]: - """Parse a ``notion-create-pages`` MCP response. 
- - Returns a dict compatible with ``NotionHistoryConnector.create_page()``: - ``{status, page_id, url, title, message}`` - """ - data = _try_parse_json(raw_text) - - if data is None: - if is_mcp_serialization_error(raw_text): - return { - "status": "mcp_error", - "message": raw_text, - "mcp_serialization_error": True, - } - return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} - - if data.get("status") == "error" or "error" in data: - return { - "status": "error", - "message": data.get("message", data.get("error", str(data))), - } - - page_id = data.get("id", "") - url = data.get("url", "") - title = _extract_page_title(data) - - return { - "status": "success", - "page_id": page_id, - "url": url, - "title": title, - "message": f"Created Notion page '{title}'", - } - - -def parse_update_page_response(raw_text: str) -> dict[str, Any]: - """Parse a ``notion-update-page`` MCP response. - - Returns a dict compatible with ``NotionHistoryConnector.update_page()``: - ``{status, page_id, url, title, message}`` - """ - data = _try_parse_json(raw_text) - - if data is None: - if is_mcp_serialization_error(raw_text): - return { - "status": "mcp_error", - "message": raw_text, - "mcp_serialization_error": True, - } - return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} - - if data.get("status") == "error" or "error" in data: - return { - "status": "error", - "message": data.get("message", data.get("error", str(data))), - } - - page_id = data.get("id", "") - url = data.get("url", "") - title = _extract_page_title(data) - - return { - "status": "success", - "page_id": page_id, - "url": url, - "title": title, - "message": f"Updated Notion page '{title}' (content appended)", - } - - -def parse_delete_page_response(raw_text: str) -> dict[str, Any]: - """Parse an archive (delete) MCP response. - - The Notion API responds to ``pages.update(archived=True)`` with - the archived page object. 
- - Returns a dict compatible with ``NotionHistoryConnector.delete_page()``: - ``{status, page_id, message}`` - """ - data = _try_parse_json(raw_text) - - if data is None: - if is_mcp_serialization_error(raw_text): - return { - "status": "mcp_error", - "message": raw_text, - "mcp_serialization_error": True, - } - return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} - - if data.get("status") == "error" or "error" in data: - return { - "status": "error", - "message": data.get("message", data.get("error", str(data))), - } - - page_id = data.get("id", "") - title = _extract_page_title(data) - - return { - "status": "success", - "page_id": page_id, - "message": f"Deleted Notion page '{title}'", - } - - -def parse_fetch_page_response(raw_text: str) -> dict[str, Any]: - """Parse a ``notion-fetch`` MCP response. - - Returns the raw parsed dict (Notion page/block data) or an error dict. - """ - data = _try_parse_json(raw_text) - - if data is None: - return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} - - if data.get("status") == "error" or "error" in data: - return { - "status": "error", - "message": data.get("message", data.get("error", str(data))), - } - - return {"status": "success", "data": data} - - -def parse_health_check_response(raw_text: str) -> dict[str, Any]: - """Parse a ``notion-get-self`` MCP response for health checking.""" - data = _try_parse_json(raw_text) - - if data is None: - return {"status": "error", "message": raw_text[:500]} - - if data.get("status") == "error" or "error" in data: - return { - "status": "error", - "message": data.get("message", data.get("error", str(data))), - } - - return {"status": "success", "data": data} From 291c1078c3ada5702c891d48e332c8af88e3d24c Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:33:18 +0200 Subject: [PATCH 017/113] remove Notion MCP router from routes --- surfsense_backend/app/routes/__init__.py | 2 -- 1 file changed, 2 deletions(-) 
diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py index faec7fe09..ad40666cd 100644 --- a/surfsense_backend/app/routes/__init__.py +++ b/surfsense_backend/app/routes/__init__.py @@ -37,7 +37,6 @@ from .new_llm_config_routes import router as new_llm_config_router from .notes_routes import router as notes_router from .notifications_routes import router as notifications_router from .notion_add_connector_route import router as notion_add_connector_router -from .notion_mcp_connector_route import router as notion_mcp_connector_router from .onedrive_add_connector_route import router as onedrive_add_connector_router from .podcasts_routes import router as podcasts_router from .prompts_routes import router as prompts_router @@ -82,7 +81,6 @@ router.include_router(airtable_add_connector_router) router.include_router(linear_add_connector_router) router.include_router(luma_add_connector_router) router.include_router(notion_add_connector_router) -router.include_router(notion_mcp_connector_router) router.include_router(slack_add_connector_router) router.include_router(teams_add_connector_router) router.include_router(onedrive_add_connector_router) From 48158740aec368179b5531a5fa014c26552b2999 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:33:22 +0200 Subject: [PATCH 018/113] remove Notion MCP tool definitions from registry --- .../app/agents/new_chat/tools/registry.py | 42 ------------------- 1 file changed, 42 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/tools/registry.py b/surfsense_backend/app/agents/new_chat/tools/registry.py index f9b9287de..6f7a5a03f 100644 --- a/surfsense_backend/app/agents/new_chat/tools/registry.py +++ b/surfsense_backend/app/agents/new_chat/tools/registry.py @@ -86,11 +86,6 @@ from .notion import ( create_delete_notion_page_tool, create_update_notion_page_tool, ) -from .notion_mcp import ( - create_page as notion_mcp_create_page_mod, - delete_page as 
notion_mcp_delete_page_mod, - update_page as notion_mcp_update_page_mod, -) from .onedrive import ( create_create_onedrive_file_tool, create_delete_onedrive_file_tool, @@ -330,43 +325,6 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ required_connector="NOTION_CONNECTOR", ), # ========================================================================= - # NOTION MCP TOOLS - MCP-backed variants (disabled until swap) - # These route through Notion's hosted MCP server instead of direct API. - # ========================================================================= - ToolDefinition( - name="create_notion_page_mcp", - description="Create a new page in Notion via MCP server", - factory=lambda deps: notion_mcp_create_page_mod.create_create_notion_page_mcp_tool( - db_session=deps["db_session"], - search_space_id=deps["search_space_id"], - user_id=deps["user_id"], - ), - requires=["db_session", "search_space_id", "user_id"], - required_connector="NOTION_CONNECTOR", - ), - ToolDefinition( - name="update_notion_page_mcp", - description="Append new content to an existing Notion page via MCP server", - factory=lambda deps: notion_mcp_update_page_mod.create_update_notion_page_mcp_tool( - db_session=deps["db_session"], - search_space_id=deps["search_space_id"], - user_id=deps["user_id"], - ), - requires=["db_session", "search_space_id", "user_id"], - required_connector="NOTION_CONNECTOR", - ), - ToolDefinition( - name="delete_notion_page_mcp", - description="Delete an existing Notion page via MCP server", - factory=lambda deps: notion_mcp_delete_page_mod.create_delete_notion_page_mcp_tool( - db_session=deps["db_session"], - search_space_id=deps["search_space_id"], - user_id=deps["user_id"], - ), - requires=["db_session", "search_space_id", "user_id"], - required_connector="NOTION_CONNECTOR", - ), - # ========================================================================= # GOOGLE DRIVE TOOLS - create files, delete files # Auto-disabled when no Google Drive connector is configured 
(see chat_deepagent.py) # ========================================================================= From 177a34667388e7fd84e60edba4130d557c5e195f Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:33:30 +0200 Subject: [PATCH 019/113] remove Notion MCP gating from agent --- .../app/agents/new_chat/chat_deepagent.py | 21 ------------------- 1 file changed, 21 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/chat_deepagent.py b/surfsense_backend/app/agents/new_chat/chat_deepagent.py index 480cae8c9..17334d66a 100644 --- a/surfsense_backend/app/agents/new_chat/chat_deepagent.py +++ b/surfsense_backend/app/agents/new_chat/chat_deepagent.py @@ -290,27 +290,6 @@ async def create_surfsense_deep_agent( get_connector_gated_tools(available_connectors) ) - # TODO(phase-1): Remove Notion MCP gating after revert. - has_notion_connector = ( - available_connectors is not None and "NOTION_CONNECTOR" in available_connectors - ) - if has_notion_connector: - from app.services.notion_mcp import has_mcp_notion_connector - - _use_mcp = await has_mcp_notion_connector(db_session, search_space_id) - if _use_mcp: - modified_disabled_tools.extend([ - "create_notion_page", - "update_notion_page", - "delete_notion_page", - ]) - else: - modified_disabled_tools.extend([ - "create_notion_page_mcp", - "update_notion_page_mcp", - "delete_notion_page_mcp", - ]) - # Remove direct KB search tool; we now pre-seed a scoped filesystem via middleware. 
if "search_knowledge_base" not in modified_disabled_tools: modified_disabled_tools.append("search_knowledge_base") From 978a8e2e071bedbc97557db4c5c6d4cf9e58879e Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:33:35 +0200 Subject: [PATCH 020/113] remove MCP health check branch from Notion metadata service --- .../services/notion/tool_metadata_service.py | 21 ------------------- 1 file changed, 21 deletions(-) diff --git a/surfsense_backend/app/services/notion/tool_metadata_service.py b/surfsense_backend/app/services/notion/tool_metadata_service.py index 8a58d5e62..19dc1fd89 100644 --- a/surfsense_backend/app/services/notion/tool_metadata_service.py +++ b/surfsense_backend/app/services/notion/tool_metadata_service.py @@ -227,30 +227,9 @@ class NotionToolMetadataService: async def _check_account_health(self, connector_id: int) -> bool: """Check if a Notion connector's token is still valid. - For regular connectors: uses ``users.me()`` via the Notion SDK. - For MCP-mode connectors: uses ``notion-get-self`` via the MCP adapter. - Returns True if the token is expired/invalid, False if healthy. 
""" try: - result = await self._db_session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == connector_id - ) - ) - db_connector = result.scalars().first() - if not db_connector: - return True - - if (db_connector.config or {}).get("mcp_mode"): - from app.services.notion_mcp.adapter import NotionMCPAdapter - - adapter = NotionMCPAdapter( - session=self._db_session, connector_id=connector_id - ) - health = await adapter.health_check() - return health.get("status") != "success" - connector = NotionHistoryConnector( session=self._db_session, connector_id=connector_id ) From e02fbbef6c5c2054e9ed0db3452e5742de68715c Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:33:57 +0200 Subject: [PATCH 021/113] remove MCP-mode skip from Notion indexer --- .../app/tasks/connector_indexers/notion_indexer.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py b/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py index 6a3a99b5c..77aac795a 100644 --- a/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py +++ b/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py @@ -129,18 +129,6 @@ async def index_notion_pages( f"Connector with ID {connector_id} not found or is not a Notion connector", ) - if (connector.config or {}).get("mcp_mode"): - msg = ( - f"Connector {connector_id} is an MCP-mode connector. " - "Background indexing is not supported for MCP connectors — " - "use a regular Notion connector for indexing." 
- ) - logger.info(msg) - await task_logger.log_task_completion( - log_entry, msg, {"skipped": True, "reason": "mcp_mode"} - ) - return 0, 0, None - if not connector.config.get("access_token") and not connector.config.get( "NOTION_INTEGRATION_TOKEN" ): From c70f0ccf49b21343ddae20153e2caa4ce45a60e4 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:34:08 +0200 Subject: [PATCH 022/113] revert Notion auth URLs to classic OAuth endpoints --- .../connector-configs/views/connector-edit-view.tsx | 2 +- .../connector-popup/constants/connector-constants.ts | 2 +- .../connector-popup/views/connector-accounts-list-view.tsx | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx index 274fc0fc7..e19600ab2 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx @@ -21,7 +21,7 @@ import { getConnectorConfigComponent } from "../index"; const REAUTH_ENDPOINTS: Partial> = { [EnumConnectorName.LINEAR_CONNECTOR]: "/api/v1/auth/linear/connector/reauth", - [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion-mcp/connector/reauth", + [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion/connector/reauth", [EnumConnectorName.GOOGLE_DRIVE_CONNECTOR]: "/api/v1/auth/google/drive/connector/reauth", [EnumConnectorName.GOOGLE_GMAIL_CONNECTOR]: "/api/v1/auth/google/gmail/connector/reauth", [EnumConnectorName.GOOGLE_CALENDAR_CONNECTOR]: "/api/v1/auth/google/calendar/connector/reauth", diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 
0e517b38e..5b61e8bdf 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -38,7 +38,7 @@ export const OAUTH_CONNECTORS = [ title: "Notion", description: "Search your Notion pages", connectorType: EnumConnectorName.NOTION_CONNECTOR, - authEndpoint: "/api/v1/auth/notion-mcp/connector/add", + authEndpoint: "/api/v1/auth/notion/connector/add", }, { id: "linear-connector", diff --git a/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx index 6cdd535db..b4c049c5c 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx @@ -18,7 +18,7 @@ import { getConnectorDisplayName } from "../tabs/all-connectors-tab"; const REAUTH_ENDPOINTS: Partial> = { [EnumConnectorName.LINEAR_CONNECTOR]: "/api/v1/auth/linear/connector/reauth", - [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion-mcp/connector/reauth", + [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion/connector/reauth", [EnumConnectorName.GOOGLE_DRIVE_CONNECTOR]: "/api/v1/auth/google/drive/connector/reauth", [EnumConnectorName.GOOGLE_GMAIL_CONNECTOR]: "/api/v1/auth/google/gmail/connector/reauth", [EnumConnectorName.GOOGLE_CALENDAR_CONNECTOR]: "/api/v1/auth/google/calendar/connector/reauth", From d2cb778c08bf6f8dbc81d06b2d422ab8f5f51b44 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:48:40 +0200 Subject: [PATCH 023/113] add Gmail search and read email tools --- .../agents/new_chat/tools/gmail/__init__.py | 8 + .../agents/new_chat/tools/gmail/read_email.py | 87 ++++++++++ .../new_chat/tools/gmail/search_emails.py | 148 ++++++++++++++++++ 3 files changed, 243 insertions(+) 
create mode 100644 surfsense_backend/app/agents/new_chat/tools/gmail/read_email.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/gmail/search_emails.py diff --git a/surfsense_backend/app/agents/new_chat/tools/gmail/__init__.py b/surfsense_backend/app/agents/new_chat/tools/gmail/__init__.py index efb2fb0fa..294840122 100644 --- a/surfsense_backend/app/agents/new_chat/tools/gmail/__init__.py +++ b/surfsense_backend/app/agents/new_chat/tools/gmail/__init__.py @@ -1,6 +1,12 @@ from app.agents.new_chat.tools.gmail.create_draft import ( create_create_gmail_draft_tool, ) +from app.agents.new_chat.tools.gmail.read_email import ( + create_read_gmail_email_tool, +) +from app.agents.new_chat.tools.gmail.search_emails import ( + create_search_gmail_tool, +) from app.agents.new_chat.tools.gmail.send_email import ( create_send_gmail_email_tool, ) @@ -13,6 +19,8 @@ from app.agents.new_chat.tools.gmail.update_draft import ( __all__ = [ "create_create_gmail_draft_tool", + "create_read_gmail_email_tool", + "create_search_gmail_tool", "create_send_gmail_email_tool", "create_trash_gmail_email_tool", "create_update_gmail_draft_tool", diff --git a/surfsense_backend/app/agents/new_chat/tools/gmail/read_email.py b/surfsense_backend/app/agents/new_chat/tools/gmail/read_email.py new file mode 100644 index 000000000..9071f129a --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/gmail/read_email.py @@ -0,0 +1,87 @@ +import logging +from typing import Any + +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.db import SearchSourceConnector, SearchSourceConnectorType + +logger = logging.getLogger(__name__) + +_GMAIL_TYPES = [ + SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR, + SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR, +] + + +def create_read_gmail_email_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + user_id: str | None = None, 
+): + @tool + async def read_gmail_email(message_id: str) -> dict[str, Any]: + """Read the full content of a specific Gmail email by its message ID. + + Use after search_gmail to get the complete body of an email. + + Args: + message_id: The Gmail message ID (from search_gmail results). + + Returns: + Dictionary with status and the full email content formatted as markdown. + """ + if db_session is None or search_space_id is None or user_id is None: + return {"status": "error", "message": "Gmail tool not properly configured."} + + try: + result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type.in_(_GMAIL_TYPES), + ) + ) + connector = result.scalars().first() + if not connector: + return { + "status": "error", + "message": "No Gmail connector found. Please connect Gmail in your workspace settings.", + } + + from app.agents.new_chat.tools.gmail.search_emails import _build_credentials + + creds = _build_credentials(connector) + + from app.connectors.google_gmail_connector import GoogleGmailConnector + + gmail = GoogleGmailConnector( + credentials=creds, + session=db_session, + user_id=user_id, + connector_id=connector.id, + ) + + detail, error = await gmail.get_message_details(message_id) + if error: + if "re-authenticate" in error.lower() or "authentication failed" in error.lower(): + return {"status": "auth_error", "message": error, "connector_type": "gmail"} + return {"status": "error", "message": error} + + if not detail: + return {"status": "not_found", "message": f"Email with ID '{message_id}' not found."} + + content = gmail.format_message_to_markdown(detail) + + return {"status": "success", "message_id": message_id, "content": content} + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + logger.error("Error reading Gmail email: %s", e, 
exc_info=True) + return {"status": "error", "message": "Failed to read email. Please try again."} + + return read_gmail_email diff --git a/surfsense_backend/app/agents/new_chat/tools/gmail/search_emails.py b/surfsense_backend/app/agents/new_chat/tools/gmail/search_emails.py new file mode 100644 index 000000000..bfc328389 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/gmail/search_emails.py @@ -0,0 +1,148 @@ +import logging +from datetime import datetime +from typing import Any + +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.db import SearchSourceConnector, SearchSourceConnectorType + +logger = logging.getLogger(__name__) + +_GMAIL_TYPES = [ + SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR, + SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR, +] + + +def _build_credentials(connector: SearchSourceConnector): + """Build Google OAuth Credentials from a Gmail connector's config.""" + if connector.connector_type == SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR: + from app.utils.google_credentials import build_composio_credentials + + cca_id = connector.config.get("composio_connected_account_id") + if not cca_id: + raise ValueError("Composio connected account ID not found.") + return build_composio_credentials(cca_id) + + from google.oauth2.credentials import Credentials + + from app.config import config + from app.utils.oauth_security import TokenEncryption + + cfg = dict(connector.config) + if cfg.get("_token_encrypted") and config.SECRET_KEY: + enc = TokenEncryption(config.SECRET_KEY) + for key in ("token", "refresh_token", "client_secret"): + if cfg.get(key): + cfg[key] = enc.decrypt_token(cfg[key]) + + exp = (cfg.get("expiry") or "").replace("Z", "") + return Credentials( + token=cfg.get("token"), + refresh_token=cfg.get("refresh_token"), + token_uri=cfg.get("token_uri"), + client_id=cfg.get("client_id"), + client_secret=cfg.get("client_secret"), + 
scopes=cfg.get("scopes", []), + expiry=datetime.fromisoformat(exp) if exp else None, + ) + + +def create_search_gmail_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + user_id: str | None = None, +): + @tool + async def search_gmail( + query: str, + max_results: int = 10, + ) -> dict[str, Any]: + """Search emails in the user's Gmail inbox using Gmail search syntax. + + Args: + query: Gmail search query, same syntax as the Gmail search bar. + Examples: "from:alice@example.com", "subject:meeting", + "is:unread", "after:2024/01/01 before:2024/02/01", + "has:attachment", "in:sent". + max_results: Number of emails to return (default 10, max 20). + + Returns: + Dictionary with status and a list of email summaries including + message_id, subject, from, date, snippet. + """ + if db_session is None or search_space_id is None or user_id is None: + return {"status": "error", "message": "Gmail tool not properly configured."} + + max_results = min(max_results, 20) + + try: + result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type.in_(_GMAIL_TYPES), + ) + ) + connector = result.scalars().first() + if not connector: + return { + "status": "error", + "message": "No Gmail connector found. 
Please connect Gmail in your workspace settings.", + } + + creds = _build_credentials(connector) + + from app.connectors.google_gmail_connector import GoogleGmailConnector + + gmail = GoogleGmailConnector( + credentials=creds, + session=db_session, + user_id=user_id, + connector_id=connector.id, + ) + + messages_list, error = await gmail.get_messages_list( + max_results=max_results, query=query + ) + if error: + if "re-authenticate" in error.lower() or "authentication failed" in error.lower(): + return {"status": "auth_error", "message": error, "connector_type": "gmail"} + return {"status": "error", "message": error} + + if not messages_list: + return {"status": "success", "emails": [], "total": 0, "message": "No emails found."} + + emails = [] + for msg in messages_list: + detail, err = await gmail.get_message_details(msg["id"]) + if err: + continue + headers = { + h["name"].lower(): h["value"] + for h in detail.get("payload", {}).get("headers", []) + } + emails.append({ + "message_id": detail.get("id"), + "thread_id": detail.get("threadId"), + "subject": headers.get("subject", "No Subject"), + "from": headers.get("from", "Unknown"), + "to": headers.get("to", ""), + "date": headers.get("date", ""), + "snippet": detail.get("snippet", ""), + "labels": detail.get("labelIds", []), + }) + + return {"status": "success", "emails": emails, "total": len(emails)} + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + logger.error("Error searching Gmail: %s", e, exc_info=True) + return {"status": "error", "message": "Failed to search Gmail. 
Please try again."} + + return search_gmail From 07a5fac15d5f5a10722c9febed527bd2632e3023 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:48:47 +0200 Subject: [PATCH 024/113] add Calendar search events tool --- .../tools/google_calendar/__init__.py | 4 + .../tools/google_calendar/search_events.py | 148 ++++++++++++++++++ 2 files changed, 152 insertions(+) create mode 100644 surfsense_backend/app/agents/new_chat/tools/google_calendar/search_events.py diff --git a/surfsense_backend/app/agents/new_chat/tools/google_calendar/__init__.py b/surfsense_backend/app/agents/new_chat/tools/google_calendar/__init__.py index d1ce4e795..13d4c06cb 100644 --- a/surfsense_backend/app/agents/new_chat/tools/google_calendar/__init__.py +++ b/surfsense_backend/app/agents/new_chat/tools/google_calendar/__init__.py @@ -4,6 +4,9 @@ from app.agents.new_chat.tools.google_calendar.create_event import ( from app.agents.new_chat.tools.google_calendar.delete_event import ( create_delete_calendar_event_tool, ) +from app.agents.new_chat.tools.google_calendar.search_events import ( + create_search_calendar_events_tool, +) from app.agents.new_chat.tools.google_calendar.update_event import ( create_update_calendar_event_tool, ) @@ -11,5 +14,6 @@ from app.agents.new_chat.tools.google_calendar.update_event import ( __all__ = [ "create_create_calendar_event_tool", "create_delete_calendar_event_tool", + "create_search_calendar_events_tool", "create_update_calendar_event_tool", ] diff --git a/surfsense_backend/app/agents/new_chat/tools/google_calendar/search_events.py b/surfsense_backend/app/agents/new_chat/tools/google_calendar/search_events.py new file mode 100644 index 000000000..ad66775ef --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/google_calendar/search_events.py @@ -0,0 +1,148 @@ +import logging +from datetime import datetime +from typing import Any + +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future 
import select + +from app.db import SearchSourceConnector, SearchSourceConnectorType + +logger = logging.getLogger(__name__) + +_CALENDAR_TYPES = [ + SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR, + SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR, +] + + +def _build_credentials(connector: SearchSourceConnector): + """Build Google OAuth Credentials from a Calendar connector's config.""" + if connector.connector_type == SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR: + from app.utils.google_credentials import build_composio_credentials + + cca_id = connector.config.get("composio_connected_account_id") + if not cca_id: + raise ValueError("Composio connected account ID not found.") + return build_composio_credentials(cca_id) + + from google.oauth2.credentials import Credentials + + from app.config import config + from app.utils.oauth_security import TokenEncryption + + cfg = dict(connector.config) + if cfg.get("_token_encrypted") and config.SECRET_KEY: + enc = TokenEncryption(config.SECRET_KEY) + for key in ("token", "refresh_token", "client_secret"): + if cfg.get(key): + cfg[key] = enc.decrypt_token(cfg[key]) + + exp = (cfg.get("expiry") or "").replace("Z", "") + return Credentials( + token=cfg.get("token"), + refresh_token=cfg.get("refresh_token"), + token_uri=cfg.get("token_uri"), + client_id=cfg.get("client_id"), + client_secret=cfg.get("client_secret"), + scopes=cfg.get("scopes", []), + expiry=datetime.fromisoformat(exp) if exp else None, + ) + + +def create_search_calendar_events_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + user_id: str | None = None, +): + @tool + async def search_calendar_events( + start_date: str, + end_date: str, + max_results: int = 25, + ) -> dict[str, Any]: + """Search Google Calendar events within a date range. + + Args: + start_date: Start date in YYYY-MM-DD format (e.g. "2026-04-01"). + end_date: End date in YYYY-MM-DD format (e.g. "2026-04-30"). 
+ max_results: Maximum number of events to return (default 25, max 50). + + Returns: + Dictionary with status and a list of events including + event_id, summary, start, end, location, attendees. + """ + if db_session is None or search_space_id is None or user_id is None: + return {"status": "error", "message": "Calendar tool not properly configured."} + + max_results = min(max_results, 50) + + try: + result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type.in_(_CALENDAR_TYPES), + ) + ) + connector = result.scalars().first() + if not connector: + return { + "status": "error", + "message": "No Google Calendar connector found. Please connect Google Calendar in your workspace settings.", + } + + creds = _build_credentials(connector) + + from app.connectors.google_calendar_connector import GoogleCalendarConnector + + cal = GoogleCalendarConnector( + credentials=creds, + session=db_session, + user_id=user_id, + connector_id=connector.id, + ) + + events_raw, error = await cal.get_all_primary_calendar_events( + start_date=start_date, + end_date=end_date, + max_results=max_results, + ) + + if error: + if "re-authenticate" in error.lower() or "authentication failed" in error.lower(): + return {"status": "auth_error", "message": error, "connector_type": "google_calendar"} + if "no events found" in error.lower(): + return {"status": "success", "events": [], "total": 0, "message": error} + return {"status": "error", "message": error} + + events = [] + for ev in events_raw: + start = ev.get("start", {}) + end = ev.get("end", {}) + attendees_raw = ev.get("attendees", []) + events.append({ + "event_id": ev.get("id"), + "summary": ev.get("summary", "No Title"), + "start": start.get("dateTime") or start.get("date", ""), + "end": end.get("dateTime") or end.get("date", ""), + "location": ev.get("location", ""), + 
"description": ev.get("description", ""), + "html_link": ev.get("htmlLink", ""), + "attendees": [ + a.get("email", "") for a in attendees_raw[:10] + ], + "status": ev.get("status", ""), + }) + + return {"status": "success", "events": events, "total": len(events)} + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + logger.error("Error searching calendar events: %s", e, exc_info=True) + return {"status": "error", "message": "Failed to search calendar events. Please try again."} + + return search_calendar_events From 1de2517eae9b381d6fec4dd8a7ffa21f3de7ce18 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:49:02 +0200 Subject: [PATCH 025/113] add Discord list channels, read messages, send message tools --- .../agents/new_chat/tools/discord/__init__.py | 15 +++ .../agents/new_chat/tools/discord/_auth.py | 46 +++++++++ .../new_chat/tools/discord/list_channels.py | 67 +++++++++++++ .../new_chat/tools/discord/read_messages.py | 80 ++++++++++++++++ .../new_chat/tools/discord/send_message.py | 96 +++++++++++++++++++ 5 files changed, 304 insertions(+) create mode 100644 surfsense_backend/app/agents/new_chat/tools/discord/__init__.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/discord/_auth.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/discord/list_channels.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/discord/read_messages.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/discord/send_message.py diff --git a/surfsense_backend/app/agents/new_chat/tools/discord/__init__.py b/surfsense_backend/app/agents/new_chat/tools/discord/__init__.py new file mode 100644 index 000000000..b4eaec1f0 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/discord/__init__.py @@ -0,0 +1,15 @@ +from app.agents.new_chat.tools.discord.list_channels import ( + create_list_discord_channels_tool, +) +from 
app.agents.new_chat.tools.discord.read_messages import ( + create_read_discord_messages_tool, +) +from app.agents.new_chat.tools.discord.send_message import ( + create_send_discord_message_tool, +) + +__all__ = [ + "create_list_discord_channels_tool", + "create_read_discord_messages_tool", + "create_send_discord_message_tool", +] diff --git a/surfsense_backend/app/agents/new_chat/tools/discord/_auth.py b/surfsense_backend/app/agents/new_chat/tools/discord/_auth.py new file mode 100644 index 000000000..b369c10f1 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/discord/_auth.py @@ -0,0 +1,46 @@ +"""Shared auth helper for Discord agent tools (REST API, not gateway bot).""" + +import logging + +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.config import config +from app.db import SearchSourceConnector, SearchSourceConnectorType +from app.utils.oauth_security import TokenEncryption + +logger = logging.getLogger(__name__) + +DISCORD_API = "https://discord.com/api/v10" + + +async def get_discord_connector( + db_session: AsyncSession, + search_space_id: int, + user_id: str, +) -> SearchSourceConnector | None: + result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR, + ) + ) + return result.scalars().first() + + +def get_bot_token(connector: SearchSourceConnector) -> str: + """Extract and decrypt the bot token from connector config.""" + cfg = dict(connector.config) + if cfg.get("_token_encrypted") and config.SECRET_KEY: + enc = TokenEncryption(config.SECRET_KEY) + if cfg.get("bot_token"): + cfg["bot_token"] = enc.decrypt_token(cfg["bot_token"]) + token = cfg.get("bot_token") + if not token: + raise ValueError("Discord bot token not found in connector config.") + return token + + +def 
get_guild_id(connector: SearchSourceConnector) -> str | None: + return connector.config.get("guild_id") diff --git a/surfsense_backend/app/agents/new_chat/tools/discord/list_channels.py b/surfsense_backend/app/agents/new_chat/tools/discord/list_channels.py new file mode 100644 index 000000000..a33b88aa0 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/discord/list_channels.py @@ -0,0 +1,67 @@ +import logging +from typing import Any + +import httpx +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession + +from ._auth import DISCORD_API, get_bot_token, get_discord_connector, get_guild_id + +logger = logging.getLogger(__name__) + + +def create_list_discord_channels_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + user_id: str | None = None, +): + @tool + async def list_discord_channels() -> dict[str, Any]: + """List text channels in the connected Discord server. + + Returns: + Dictionary with status and a list of channels (id, name). 
+ """ + if db_session is None or search_space_id is None or user_id is None: + return {"status": "error", "message": "Discord tool not properly configured."} + + try: + connector = await get_discord_connector(db_session, search_space_id, user_id) + if not connector: + return {"status": "error", "message": "No Discord connector found."} + + guild_id = get_guild_id(connector) + if not guild_id: + return {"status": "error", "message": "No guild ID in Discord connector config."} + + token = get_bot_token(connector) + + async with httpx.AsyncClient() as client: + resp = await client.get( + f"{DISCORD_API}/guilds/{guild_id}/channels", + headers={"Authorization": f"Bot {token}"}, + timeout=15.0, + ) + + if resp.status_code == 401: + return {"status": "auth_error", "message": "Discord bot token is invalid.", "connector_type": "discord"} + if resp.status_code != 200: + return {"status": "error", "message": f"Discord API error: {resp.status_code}"} + + # Type 0 = text channel + channels = [ + {"id": ch["id"], "name": ch["name"]} + for ch in resp.json() + if ch.get("type") == 0 + ] + return {"status": "success", "guild_id": guild_id, "channels": channels, "total": len(channels)} + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + logger.error("Error listing Discord channels: %s", e, exc_info=True) + return {"status": "error", "message": "Failed to list Discord channels."} + + return list_discord_channels diff --git a/surfsense_backend/app/agents/new_chat/tools/discord/read_messages.py b/surfsense_backend/app/agents/new_chat/tools/discord/read_messages.py new file mode 100644 index 000000000..852a9297b --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/discord/read_messages.py @@ -0,0 +1,80 @@ +import logging +from typing import Any + +import httpx +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession + +from ._auth import DISCORD_API, get_bot_token, 
get_discord_connector + +logger = logging.getLogger(__name__) + + +def create_read_discord_messages_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + user_id: str | None = None, +): + @tool + async def read_discord_messages( + channel_id: str, + limit: int = 25, + ) -> dict[str, Any]: + """Read recent messages from a Discord text channel. + + Args: + channel_id: The Discord channel ID (from list_discord_channels). + limit: Number of messages to fetch (default 25, max 50). + + Returns: + Dictionary with status and a list of messages including + id, author, content, timestamp. + """ + if db_session is None or search_space_id is None or user_id is None: + return {"status": "error", "message": "Discord tool not properly configured."} + + limit = min(limit, 50) + + try: + connector = await get_discord_connector(db_session, search_space_id, user_id) + if not connector: + return {"status": "error", "message": "No Discord connector found."} + + token = get_bot_token(connector) + + async with httpx.AsyncClient() as client: + resp = await client.get( + f"{DISCORD_API}/channels/{channel_id}/messages", + headers={"Authorization": f"Bot {token}"}, + params={"limit": limit}, + timeout=15.0, + ) + + if resp.status_code == 401: + return {"status": "auth_error", "message": "Discord bot token is invalid.", "connector_type": "discord"} + if resp.status_code == 403: + return {"status": "error", "message": "Bot lacks permission to read this channel."} + if resp.status_code != 200: + return {"status": "error", "message": f"Discord API error: {resp.status_code}"} + + messages = [ + { + "id": m["id"], + "author": m.get("author", {}).get("username", "Unknown"), + "content": m.get("content", ""), + "timestamp": m.get("timestamp", ""), + } + for m in resp.json() + ] + + return {"status": "success", "channel_id": channel_id, "messages": messages, "total": len(messages)} + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if 
isinstance(e, GraphInterrupt): + raise + logger.error("Error reading Discord messages: %s", e, exc_info=True) + return {"status": "error", "message": "Failed to read Discord messages."} + + return read_discord_messages diff --git a/surfsense_backend/app/agents/new_chat/tools/discord/send_message.py b/surfsense_backend/app/agents/new_chat/tools/discord/send_message.py new file mode 100644 index 000000000..be4e6fdb2 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/discord/send_message.py @@ -0,0 +1,96 @@ +import logging +from typing import Any + +import httpx +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession + +from app.agents.new_chat.tools.hitl import request_approval + +from ._auth import DISCORD_API, get_bot_token, get_discord_connector + +logger = logging.getLogger(__name__) + + +def create_send_discord_message_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + user_id: str | None = None, +): + @tool + async def send_discord_message( + channel_id: str, + content: str, + ) -> dict[str, Any]: + """Send a message to a Discord text channel. + + Args: + channel_id: The Discord channel ID (from list_discord_channels). + content: The message text (max 2000 characters). + + Returns: + Dictionary with status, message_id on success. + + IMPORTANT: + - If status is "rejected", the user explicitly declined. Do NOT retry. 
+ """ + if db_session is None or search_space_id is None or user_id is None: + return {"status": "error", "message": "Discord tool not properly configured."} + + if len(content) > 2000: + return {"status": "error", "message": "Message exceeds Discord's 2000-character limit."} + + try: + connector = await get_discord_connector(db_session, search_space_id, user_id) + if not connector: + return {"status": "error", "message": "No Discord connector found."} + + result = request_approval( + action_type="discord_send_message", + tool_name="send_discord_message", + params={"channel_id": channel_id, "content": content}, + context={"connector_id": connector.id}, + ) + + if result.rejected: + return {"status": "rejected", "message": "User declined. Message was not sent."} + + final_content = result.params.get("content", content) + final_channel = result.params.get("channel_id", channel_id) + + token = get_bot_token(connector) + + async with httpx.AsyncClient() as client: + resp = await client.post( + f"{DISCORD_API}/channels/{final_channel}/messages", + headers={ + "Authorization": f"Bot {token}", + "Content-Type": "application/json", + }, + json={"content": final_content}, + timeout=15.0, + ) + + if resp.status_code == 401: + return {"status": "auth_error", "message": "Discord bot token is invalid.", "connector_type": "discord"} + if resp.status_code == 403: + return {"status": "error", "message": "Bot lacks permission to send messages in this channel."} + if resp.status_code not in (200, 201): + return {"status": "error", "message": f"Discord API error: {resp.status_code}"} + + msg_data = resp.json() + return { + "status": "success", + "message_id": msg_data.get("id"), + "message": f"Message sent to channel {final_channel}.", + } + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + logger.error("Error sending Discord message: %s", e, exc_info=True) + return {"status": "error", "message": "Failed to send 
Discord message."} + + return send_discord_message From 49f8d1abd449d4eb24a5db4e9de93ec850fefa32 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:49:50 +0200 Subject: [PATCH 026/113] add Teams list channels, read messages, send message tools --- .../agents/new_chat/tools/teams/__init__.py | 15 +++ .../app/agents/new_chat/tools/teams/_auth.py | 43 ++++++++ .../new_chat/tools/teams/list_channels.py | 77 +++++++++++++ .../new_chat/tools/teams/read_messages.py | 91 ++++++++++++++++ .../new_chat/tools/teams/send_message.py | 101 ++++++++++++++++++ .../app/routes/teams_add_connector_route.py | 1 + 6 files changed, 328 insertions(+) create mode 100644 surfsense_backend/app/agents/new_chat/tools/teams/__init__.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/teams/_auth.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/teams/list_channels.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/teams/read_messages.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/teams/send_message.py diff --git a/surfsense_backend/app/agents/new_chat/tools/teams/__init__.py b/surfsense_backend/app/agents/new_chat/tools/teams/__init__.py new file mode 100644 index 000000000..60e2add49 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/teams/__init__.py @@ -0,0 +1,15 @@ +from app.agents.new_chat.tools.teams.list_channels import ( + create_list_teams_channels_tool, +) +from app.agents.new_chat.tools.teams.read_messages import ( + create_read_teams_messages_tool, +) +from app.agents.new_chat.tools.teams.send_message import ( + create_send_teams_message_tool, +) + +__all__ = [ + "create_list_teams_channels_tool", + "create_read_teams_messages_tool", + "create_send_teams_message_tool", +] diff --git a/surfsense_backend/app/agents/new_chat/tools/teams/_auth.py b/surfsense_backend/app/agents/new_chat/tools/teams/_auth.py new file mode 100644 index 000000000..989fce7c6 --- /dev/null +++ 
b/surfsense_backend/app/agents/new_chat/tools/teams/_auth.py @@ -0,0 +1,43 @@ +"""Shared auth helper for Teams agent tools (Microsoft Graph REST API).""" + +import logging + +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.config import config +from app.db import SearchSourceConnector, SearchSourceConnectorType +from app.utils.oauth_security import TokenEncryption + +logger = logging.getLogger(__name__) + +GRAPH_API = "https://graph.microsoft.com/v1.0" + + +async def get_teams_connector( + db_session: AsyncSession, + search_space_id: int, + user_id: str, +) -> SearchSourceConnector | None: + result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.TEAMS_CONNECTOR, + ) + ) + return result.scalars().first() + + +async def get_access_token( + db_session: AsyncSession, + connector: SearchSourceConnector, +) -> str: + """Get a valid Microsoft Graph access token, refreshing if expired.""" + from app.connectors.teams_connector import TeamsConnector + + tc = TeamsConnector( + session=db_session, + connector_id=connector.id, + ) + return await tc._get_valid_token() diff --git a/surfsense_backend/app/agents/new_chat/tools/teams/list_channels.py b/surfsense_backend/app/agents/new_chat/tools/teams/list_channels.py new file mode 100644 index 000000000..a676595c1 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/teams/list_channels.py @@ -0,0 +1,77 @@ +import logging +from typing import Any + +import httpx +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession + +from ._auth import GRAPH_API, get_access_token, get_teams_connector + +logger = logging.getLogger(__name__) + + +def create_list_teams_channels_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + 
def create_list_teams_channels_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Build the ``list_teams_channels`` agent tool bound to the given deps."""

    @tool
    async def list_teams_channels() -> dict[str, Any]:
        """List all Microsoft Teams and their channels the user has access to.

        Returns:
            Dictionary with status and a list of teams, each containing
            team_id, team_name, and a list of channels (id, name).
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Teams tool not properly configured."}

        try:
            connector = await get_teams_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Teams connector found."}

            token = await get_access_token(db_session, connector)
            headers = {"Authorization": f"Bearer {token}"}

            # Reuse one HTTP client (and its connection pool) for the team
            # listing and every per-team channel fetch; the original opened
            # a second client for the channel loop.
            async with httpx.AsyncClient(timeout=20.0) as client:
                teams_resp = await client.get(f"{GRAPH_API}/me/joinedTeams", headers=headers)

                if teams_resp.status_code == 401:
                    return {
                        "status": "auth_error",
                        "message": "Teams token expired. Please re-authenticate.",
                        "connector_type": "teams",
                    }
                if teams_resp.status_code != 200:
                    return {"status": "error", "message": f"Graph API error: {teams_resp.status_code}"}

                result_teams = []
                for team in teams_resp.json().get("value", []):
                    team_id = team["id"]
                    ch_resp = await client.get(
                        f"{GRAPH_API}/teams/{team_id}/channels",
                        headers=headers,
                    )
                    # A failed channel fetch degrades to an empty channel
                    # list rather than failing the whole listing.
                    channels = []
                    if ch_resp.status_code == 200:
                        channels = [
                            {"id": ch["id"], "name": ch.get("displayName", "")}
                            for ch in ch_resp.json().get("value", [])
                        ]
                    result_teams.append(
                        {
                            "team_id": team_id,
                            "team_name": team.get("displayName", ""),
                            "channels": channels,
                        }
                    )

            return {"status": "success", "teams": result_teams, "total_teams": len(result_teams)}

        except Exception as e:
            from langgraph.errors import GraphInterrupt

            # HITL interrupts must propagate to the graph runtime untouched.
            if isinstance(e, GraphInterrupt):
                raise
            logger.error("Error listing Teams channels: %s", e, exc_info=True)
            return {"status": "error", "message": "Failed to list Teams channels."}

    return list_teams_channels
def create_read_teams_messages_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Build the ``read_teams_messages`` agent tool bound to the given deps."""

    @tool
    async def read_teams_messages(
        team_id: str,
        channel_id: str,
        limit: int = 25,
    ) -> dict[str, Any]:
        """Read recent messages from a Microsoft Teams channel.

        Args:
            team_id: The team ID (from list_teams_channels).
            channel_id: The channel ID (from list_teams_channels).
            limit: Number of messages to fetch (default 25, clamped to 1-50).

        Returns:
            Dictionary with status and a list of messages including
            id, sender, content, timestamp.
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Teams tool not properly configured."}

        # Clamp to a sane range: the original only capped at 50, so a
        # non-positive limit would be passed straight to Graph as $top.
        limit = max(1, min(limit, 50))

        try:
            connector = await get_teams_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Teams connector found."}

            token = await get_access_token(db_session, connector)

            async with httpx.AsyncClient(timeout=20.0) as client:
                resp = await client.get(
                    f"{GRAPH_API}/teams/{team_id}/channels/{channel_id}/messages",
                    headers={"Authorization": f"Bearer {token}"},
                    params={"$top": limit},
                )

            if resp.status_code == 401:
                return {
                    "status": "auth_error",
                    "message": "Teams token expired. Please re-authenticate.",
                    "connector_type": "teams",
                }
            if resp.status_code == 403:
                return {"status": "error", "message": "Insufficient permissions to read this channel."}
            if resp.status_code != 200:
                return {"status": "error", "message": f"Graph API error: {resp.status_code}"}

            messages = []
            for m in resp.json().get("value", []):
                # "from" may be null for system messages; guard before
                # drilling into the nested user object.
                sender = m.get("from", {})
                user_info = sender.get("user", {}) if sender else {}
                body = m.get("body", {})
                messages.append(
                    {
                        "id": m.get("id"),
                        "sender": user_info.get("displayName", "Unknown"),
                        "content": body.get("content", ""),
                        "content_type": body.get("contentType", "text"),
                        "timestamp": m.get("createdDateTime", ""),
                    }
                )

            return {
                "status": "success",
                "team_id": team_id,
                "channel_id": channel_id,
                "messages": messages,
                "total": len(messages),
            }

        except Exception as e:
            from langgraph.errors import GraphInterrupt

            if isinstance(e, GraphInterrupt):
                raise
            logger.error("Error reading Teams messages: %s", e, exc_info=True)
            return {"status": "error", "message": "Failed to read Teams messages."}

    return read_teams_messages
def create_send_teams_message_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Build the ``send_teams_message`` agent tool bound to the given deps."""

    @tool
    async def send_teams_message(
        team_id: str,
        channel_id: str,
        content: str,
    ) -> dict[str, Any]:
        """Send a message to a Microsoft Teams channel.

        Requires the ChannelMessage.Send OAuth scope. If the user gets a
        permission error, they may need to re-authenticate with updated scopes.

        Args:
            team_id: The team ID (from list_teams_channels).
            channel_id: The channel ID (from list_teams_channels).
            content: The message text (HTML supported).

        Returns:
            Dictionary with status, message_id on success.

        IMPORTANT:
        - If status is "rejected", the user explicitly declined. Do NOT retry.
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Teams tool not properly configured."}

        try:
            connector = await get_teams_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Teams connector found."}

            # Human-in-the-loop gate: the user may approve, edit, or reject.
            result = request_approval(
                action_type="teams_send_message",
                tool_name="send_teams_message",
                params={"team_id": team_id, "channel_id": channel_id, "content": content},
                context={"connector_id": connector.id},
            )

            if result.rejected:
                return {"status": "rejected", "message": "User declined. Message was not sent."}

            # The user may have edited any field during approval; fall back
            # to the originally proposed values.
            final_content = result.params.get("content", content)
            final_team = result.params.get("team_id", team_id)
            final_channel = result.params.get("channel_id", channel_id)

            token = await get_access_token(db_session, connector)

            async with httpx.AsyncClient(timeout=20.0) as client:
                resp = await client.post(
                    f"{GRAPH_API}/teams/{final_team}/channels/{final_channel}/messages",
                    headers={
                        "Authorization": f"Bearer {token}",
                        "Content-Type": "application/json",
                    },
                    json={"body": {"content": final_content}},
                )

            if resp.status_code == 401:
                return {
                    "status": "auth_error",
                    "message": "Teams token expired. Please re-authenticate.",
                    "connector_type": "teams",
                }
            if resp.status_code == 403:
                return {
                    "status": "insufficient_permissions",
                    "message": "Missing ChannelMessage.Send permission. Please re-authenticate with updated scopes.",
                }
            if resp.status_code not in (200, 201):
                return {"status": "error", "message": f"Graph API error: {resp.status_code} — {resp.text[:200]}"}

            msg_data = resp.json()
            return {
                "status": "success",
                "message_id": msg_data.get("id"),
                # Plain literal: the original used an f-string with no
                # placeholders (ruff F541); the runtime text is unchanged.
                "message": "Message sent to Teams channel.",
            }

        except Exception as e:
            from langgraph.errors import GraphInterrupt

            if isinstance(e, GraphInterrupt):
                raise
            logger.error("Error sending Teams message: %s", e, exc_info=True)
            return {"status": "error", "message": "Failed to send Teams message."}

    return send_teams_message
"""Shared auth helper for Luma agent tools."""

from __future__ import annotations

import logging

logger = logging.getLogger(__name__)

# Base URL for Luma's public REST API.
LUMA_API = "https://public-api.luma.com/v1"


async def get_luma_connector(
    db_session: AsyncSession,
    search_space_id: int,
    user_id: str,
) -> SearchSourceConnector | None:
    """Return the user's Luma connector for this search space, or None."""
    query = select(SearchSourceConnector).filter(
        SearchSourceConnector.search_space_id == search_space_id,
        SearchSourceConnector.user_id == user_id,
        SearchSourceConnector.connector_type
        == SearchSourceConnectorType.LUMA_CONNECTOR,
    )
    rows = await db_session.execute(query)
    return rows.scalars().first()


def get_api_key(connector: SearchSourceConnector) -> str:
    """Extract the API key from connector config (handles both key names).

    Prefers ``api_key`` and falls back to the legacy ``LUMA_API_KEY`` name.

    Raises:
        ValueError: If neither key is present (or both are empty).
    """
    for candidate in ("api_key", "LUMA_API_KEY"):
        value = connector.config.get(candidate)
        if value:
            return value
    raise ValueError("Luma API key not found in connector config.")


def luma_headers(api_key: str) -> dict[str, str]:
    """Build the standard request headers for Luma API calls."""
    return {
        "Content-Type": "application/json",
        "x-luma-api-key": api_key,
    }
def create_create_luma_event_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Build the ``create_luma_event`` agent tool bound to the given deps."""

    @tool
    async def create_luma_event(
        name: str,
        start_at: str,
        end_at: str,
        description: str | None = None,
        timezone: str = "UTC",
    ) -> dict[str, Any]:
        """Create a new event on Luma.

        Args:
            name: The event title.
            start_at: Start time in ISO 8601 format (e.g. "2026-05-01T18:00:00").
            end_at: End time in ISO 8601 format (e.g. "2026-05-01T20:00:00").
            description: Optional event description (markdown supported).
            timezone: Timezone string (default "UTC", e.g. "America/New_York").

        Returns:
            Dictionary with status, event_id on success.

        IMPORTANT:
        - If status is "rejected", the user explicitly declined. Do NOT retry.
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Luma tool not properly configured."}

        try:
            connector = await get_luma_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Luma connector found."}

            # Human-in-the-loop gate: the user may approve, edit, or reject.
            proposed = {
                "name": name,
                "start_at": start_at,
                "end_at": end_at,
                "description": description,
                "timezone": timezone,
            }
            result = request_approval(
                action_type="luma_create_event",
                tool_name="create_luma_event",
                params=proposed,
                context={"connector_id": connector.id},
            )

            if result.rejected:
                return {"status": "rejected", "message": "User declined. Event was not created."}

            # Any field may have been edited during approval; fall back to
            # the proposed value when absent.
            approved = {key: result.params.get(key, value) for key, value in proposed.items()}

            payload: dict[str, Any] = {
                "name": approved["name"],
                "start_at": approved["start_at"],
                "end_at": approved["end_at"],
                "timezone": approved["timezone"],
            }
            if approved["description"]:
                payload["description_md"] = approved["description"]

            headers = luma_headers(get_api_key(connector))

            async with httpx.AsyncClient(timeout=20.0) as client:
                resp = await client.post(
                    f"{LUMA_API}/event/create",
                    headers=headers,
                    json=payload,
                )

            if resp.status_code == 401:
                return {"status": "auth_error", "message": "Luma API key is invalid.", "connector_type": "luma"}
            if resp.status_code == 403:
                return {"status": "error", "message": "Luma Plus subscription required to create events via API."}
            if resp.status_code not in (200, 201):
                return {"status": "error", "message": f"Luma API error: {resp.status_code} — {resp.text[:200]}"}

            # The event id may be top-level or nested under "event".
            data = resp.json()
            event_id = data.get("api_id") or data.get("event", {}).get("api_id")

            return {
                "status": "success",
                "event_id": event_id,
                "message": f"Event '{approved['name']}' created on Luma.",
            }

        except Exception as e:
            from langgraph.errors import GraphInterrupt

            if isinstance(e, GraphInterrupt):
                raise
            logger.error("Error creating Luma event: %s", e, exc_info=True)
            return {"status": "error", "message": "Failed to create Luma event."}

    return create_luma_event
def create_list_luma_events_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Build the ``list_luma_events`` agent tool bound to the given deps."""

    @tool
    async def list_luma_events(
        max_results: int = 25,
    ) -> dict[str, Any]:
        """List upcoming and recent Luma events.

        Args:
            max_results: Maximum events to return (default 25, clamped to 1-50).

        Returns:
            Dictionary with status and a list of events including
            event_id, name, start_at, end_at, location, url.
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Luma tool not properly configured."}

        # Clamp to a sane range: the original only capped at 50, so a
        # non-positive value silently skipped the fetch loop (or sent a
        # non-positive page limit to the API).
        max_results = max(1, min(max_results, 50))

        try:
            connector = await get_luma_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Luma connector found."}

            headers = luma_headers(get_api_key(connector))

            all_entries: list[dict] = []
            cursor = None

            async with httpx.AsyncClient(timeout=20.0) as client:
                # Follow cursor pagination until we have enough entries or
                # the API stops returning pages.
                while len(all_entries) < max_results:
                    # Note: the original also capped the page size at 100,
                    # which is dead code since max_results <= 50.
                    params: dict[str, Any] = {"limit": max_results - len(all_entries)}
                    if cursor:
                        params["cursor"] = cursor

                    resp = await client.get(
                        f"{LUMA_API}/calendar/list-events",
                        headers=headers,
                        params=params,
                    )

                    if resp.status_code == 401:
                        return {"status": "auth_error", "message": "Luma API key is invalid.", "connector_type": "luma"}
                    if resp.status_code != 200:
                        return {"status": "error", "message": f"Luma API error: {resp.status_code}"}

                    data = resp.json()
                    entries = data.get("entries", [])
                    if not entries:
                        break
                    all_entries.extend(entries)

                    cursor = data.get("next_cursor")
                    if not cursor:
                        break

            events = []
            for entry in all_entries[:max_results]:
                ev = entry.get("event", {})
                geo = ev.get("geo_info", {})
                events.append(
                    {
                        "event_id": entry.get("api_id"),
                        "name": ev.get("name", "Untitled"),
                        "start_at": ev.get("start_at", ""),
                        "end_at": ev.get("end_at", ""),
                        "timezone": ev.get("timezone", ""),
                        "location": geo.get("name", ""),
                        "url": ev.get("url", ""),
                        "visibility": ev.get("visibility", ""),
                    }
                )

            return {"status": "success", "events": events, "total": len(events)}

        except Exception as e:
            from langgraph.errors import GraphInterrupt

            if isinstance(e, GraphInterrupt):
                raise
            logger.error("Error listing Luma events: %s", e, exc_info=True)
            return {"status": "error", "message": "Failed to list Luma events."}

    return list_luma_events
def create_read_luma_event_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Build the ``read_luma_event`` agent tool bound to the given deps."""

    @tool
    async def read_luma_event(event_id: str) -> dict[str, Any]:
        """Read detailed information about a specific Luma event.

        Args:
            event_id: The Luma event API ID (from list_luma_events).

        Returns:
            Dictionary with status and full event details including
            description, attendees count, meeting URL.
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Luma tool not properly configured."}

        try:
            connector = await get_luma_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Luma connector found."}

            headers = luma_headers(get_api_key(connector))

            async with httpx.AsyncClient(timeout=15.0) as client:
                response = await client.get(
                    f"{LUMA_API}/events/{event_id}",
                    headers=headers,
                )

            status = response.status_code
            if status == 401:
                return {"status": "auth_error", "message": "Luma API key is invalid.", "connector_type": "luma"}
            if status == 404:
                return {"status": "not_found", "message": f"Event '{event_id}' not found."}
            if status != 200:
                return {"status": "error", "message": f"Luma API error: {status}"}

            # The event payload may be top-level or wrapped under "event".
            payload = response.json()
            event = payload.get("event", payload)
            geo = event.get("geo_info", {})

            detail = {
                "event_id": event_id,
                "name": event.get("name", ""),
                "description": event.get("description", ""),
                "start_at": event.get("start_at", ""),
                "end_at": event.get("end_at", ""),
                "timezone": event.get("timezone", ""),
                "location_name": geo.get("name", ""),
                "address": geo.get("address", ""),
                "url": event.get("url", ""),
                "meeting_url": event.get("meeting_url", ""),
                "visibility": event.get("visibility", ""),
                "cover_url": event.get("cover_url", ""),
            }

            return {"status": "success", "event": detail}

        except Exception as e:
            from langgraph.errors import GraphInterrupt

            if isinstance(e, GraphInterrupt):
                raise
            logger.error("Error reading Luma event: %s", e, exc_info=True)
            return {"status": "error", "message": "Failed to read Luma event."}

    return read_luma_event
.../app/agents/new_chat/tools/registry.py | 166 +++++++++++++++++- 1 file changed, 164 insertions(+), 2 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/tools/registry.py b/surfsense_backend/app/agents/new_chat/tools/registry.py index 6f7a5a03f..f74b4271f 100644 --- a/surfsense_backend/app/agents/new_chat/tools/registry.py +++ b/surfsense_backend/app/agents/new_chat/tools/registry.py @@ -50,6 +50,11 @@ from .confluence import ( create_delete_confluence_page_tool, create_update_confluence_page_tool, ) +from .discord import ( + create_list_discord_channels_tool, + create_read_discord_messages_tool, + create_send_discord_message_tool, +) from .dropbox import ( create_create_dropbox_file_tool, create_delete_dropbox_file_tool, @@ -57,6 +62,8 @@ from .dropbox import ( from .generate_image import create_generate_image_tool from .gmail import ( create_create_gmail_draft_tool, + create_read_gmail_email_tool, + create_search_gmail_tool, create_send_gmail_email_tool, create_trash_gmail_email_tool, create_update_gmail_draft_tool, @@ -64,6 +71,7 @@ from .gmail import ( from .google_calendar import ( create_create_calendar_event_tool, create_delete_calendar_event_tool, + create_search_calendar_events_tool, create_update_calendar_event_tool, ) from .google_drive import ( @@ -80,6 +88,11 @@ from .linear import ( create_delete_linear_issue_tool, create_update_linear_issue_tool, ) +from .luma import ( + create_create_luma_event_tool, + create_list_luma_events_tool, + create_read_luma_event_tool, +) from .mcp_tool import load_mcp_tools from .notion import ( create_create_notion_page_tool, @@ -95,6 +108,11 @@ from .report import create_generate_report_tool from .resume import create_generate_resume_tool from .scrape_webpage import create_scrape_webpage_tool from .search_surfsense_docs import create_search_surfsense_docs_tool +from .teams import ( + create_list_teams_channels_tool, + create_read_teams_messages_tool, + create_send_teams_message_tool, +) from 
.update_memory import create_update_memory_tool, create_update_team_memory_tool from .video_presentation import create_generate_video_presentation_tool from .web_search import create_web_search_tool @@ -403,9 +421,20 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ required_connector="ONEDRIVE_FILE", ), # ========================================================================= - # GOOGLE CALENDAR TOOLS - create, update, delete events + # GOOGLE CALENDAR TOOLS - search, create, update, delete events # Auto-disabled when no Google Calendar connector is configured # ========================================================================= + ToolDefinition( + name="search_calendar_events", + description="Search Google Calendar events within a date range", + factory=lambda deps: create_search_calendar_events_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_CALENDAR_CONNECTOR", + ), ToolDefinition( name="create_calendar_event", description="Create a new event on Google Calendar", @@ -440,9 +469,31 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ required_connector="GOOGLE_CALENDAR_CONNECTOR", ), # ========================================================================= - # GMAIL TOOLS - create drafts, update drafts, send emails, trash emails + # GMAIL TOOLS - search, read, create drafts, update drafts, send, trash # Auto-disabled when no Gmail connector is configured # ========================================================================= + ToolDefinition( + name="search_gmail", + description="Search emails in Gmail using Gmail search syntax", + factory=lambda deps: create_search_gmail_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_GMAIL_CONNECTOR", + ), + ToolDefinition( + 
name="read_gmail_email", + description="Read the full content of a specific Gmail email", + factory=lambda deps: create_read_gmail_email_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_GMAIL_CONNECTOR", + ), ToolDefinition( name="create_gmail_draft", description="Create a draft email in Gmail", @@ -561,6 +612,117 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ requires=["db_session", "search_space_id", "user_id"], required_connector="CONFLUENCE_CONNECTOR", ), + # ========================================================================= + # DISCORD TOOLS - list channels, read messages, send messages + # Auto-disabled when no Discord connector is configured + # ========================================================================= + ToolDefinition( + name="list_discord_channels", + description="List text channels in the connected Discord server", + factory=lambda deps: create_list_discord_channels_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="DISCORD_CONNECTOR", + ), + ToolDefinition( + name="read_discord_messages", + description="Read recent messages from a Discord text channel", + factory=lambda deps: create_read_discord_messages_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="DISCORD_CONNECTOR", + ), + ToolDefinition( + name="send_discord_message", + description="Send a message to a Discord text channel", + factory=lambda deps: create_send_discord_message_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + 
required_connector="DISCORD_CONNECTOR", + ), + # ========================================================================= + # TEAMS TOOLS - list channels, read messages, send messages + # Auto-disabled when no Teams connector is configured + # ========================================================================= + ToolDefinition( + name="list_teams_channels", + description="List Microsoft Teams and their channels", + factory=lambda deps: create_list_teams_channels_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="TEAMS_CONNECTOR", + ), + ToolDefinition( + name="read_teams_messages", + description="Read recent messages from a Microsoft Teams channel", + factory=lambda deps: create_read_teams_messages_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="TEAMS_CONNECTOR", + ), + ToolDefinition( + name="send_teams_message", + description="Send a message to a Microsoft Teams channel", + factory=lambda deps: create_send_teams_message_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="TEAMS_CONNECTOR", + ), + # ========================================================================= + # LUMA TOOLS - list events, read event details, create events + # Auto-disabled when no Luma connector is configured + # ========================================================================= + ToolDefinition( + name="list_luma_events", + description="List upcoming and recent Luma events", + factory=lambda deps: create_list_luma_events_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + 
"""Registry of MCP services with OAuth 2.1 support.

Each entry maps a URL-safe service key to its MCP server endpoint and
authentication strategy. Services with ``supports_dcr=True`` will use
RFC 7591 Dynamic Client Registration; the rest require pre-configured
credentials via environment variables.
"""

from __future__ import annotations

from dataclasses import dataclass, field


@dataclass(frozen=True)
class MCPServiceConfig:
    """Static description of one MCP service's endpoint and auth strategy."""

    name: str                              # Human-readable display name
    mcp_url: str                           # MCP server endpoint URL
    supports_dcr: bool = True              # RFC 7591 dynamic registration
    client_id_env: str | None = None       # Env var for a pre-issued client id
    client_secret_env: str | None = None   # Env var for a pre-issued secret
    scopes: list[str] = field(default_factory=list)  # OAuth scopes to request


# Keyed by URL-safe service key as used in routes; all current entries
# support DCR with default scopes.
MCP_SERVICES: dict[str, MCPServiceConfig] = {
    key: MCPServiceConfig(name=display, mcp_url=endpoint)
    for key, display, endpoint in (
        ("linear", "Linear", "https://mcp.linear.app/mcp"),
        ("jira", "Jira", "https://mcp.atlassian.com/v1/mcp"),
        ("clickup", "ClickUp", "https://mcp.clickup.com/mcp"),
    )
}


def get_service(key: str) -> MCPServiceConfig | None:
    """Look up a service config by key; None when the key is unknown."""
    try:
        return MCP_SERVICES[key]
    except KeyError:
        return None
"""MCP OAuth 2.1 metadata discovery, Dynamic Client Registration, and token exchange."""

from __future__ import annotations

import base64
import logging
from urllib.parse import urlparse

import httpx

logger = logging.getLogger(__name__)


def _basic_auth_header(client_id: str, client_secret: str) -> str:
    """Build an HTTP Basic ``Authorization`` header value from client creds."""
    token = base64.b64encode(f"{client_id}:{client_secret}".encode()).decode()
    return f"Basic {token}"


async def discover_oauth_metadata(mcp_url: str, *, timeout: float = 15.0) -> dict:
    """Fetch OAuth 2.1 metadata from the MCP server's well-known endpoint.

    Per the MCP spec the discovery document lives at the *origin* of the
    MCP server URL, not at the MCP endpoint path.
    """
    parts = urlparse(mcp_url)
    well_known = f"{parts.scheme}://{parts.netloc}/.well-known/oauth-authorization-server"

    async with httpx.AsyncClient(follow_redirects=True) as client:
        response = await client.get(well_known, timeout=timeout)
        response.raise_for_status()
        return response.json()


async def register_client(
    registration_endpoint: str,
    redirect_uri: str,
    *,
    client_name: str = "SurfSense",
    timeout: float = 15.0,
) -> dict:
    """Perform Dynamic Client Registration (RFC 7591)."""
    registration = {
        "client_name": client_name,
        "redirect_uris": [redirect_uri],
        "grant_types": ["authorization_code", "refresh_token"],
        "response_types": ["code"],
        "token_endpoint_auth_method": "client_secret_basic",
    }

    async with httpx.AsyncClient(follow_redirects=True) as client:
        response = await client.post(registration_endpoint, json=registration, timeout=timeout)
        response.raise_for_status()
        return response.json()


async def exchange_code_for_tokens(
    token_endpoint: str,
    code: str,
    redirect_uri: str,
    client_id: str,
    client_secret: str,
    code_verifier: str,
    *,
    timeout: float = 30.0,
) -> dict:
    """Exchange an authorization code for access + refresh tokens."""
    form = {
        "grant_type": "authorization_code",
        "code": code,
        "redirect_uri": redirect_uri,
        "code_verifier": code_verifier,
    }
    headers = {
        "Content-Type": "application/x-www-form-urlencoded",
        "Authorization": _basic_auth_header(client_id, client_secret),
    }

    async with httpx.AsyncClient(follow_redirects=True) as client:
        response = await client.post(token_endpoint, data=form, headers=headers, timeout=timeout)
        response.raise_for_status()
        return response.json()


async def refresh_access_token(
    token_endpoint: str,
    refresh_token: str,
    client_id: str,
    client_secret: str,
    *,
    timeout: float = 30.0,
) -> dict:
    """Refresh an expired access token."""
    form = {
        "grant_type": "refresh_token",
        "refresh_token": refresh_token,
    }
    headers = {
        "Content-Type": "application/x-www-form-urlencoded",
        "Authorization": _basic_auth_header(client_id, client_secret),
    }

    async with httpx.AsyncClient(follow_redirects=True) as client:
        response = await client.post(token_endpoint, data=form, headers=headers, timeout=timeout)
        response.raise_for_status()
        return response.json()
+""" + +from __future__ import annotations + +import logging +from datetime import UTC, datetime, timedelta +from urllib.parse import urlencode +from uuid import UUID + +from fastapi import APIRouter, Depends, HTTPException +from fastapi.responses import RedirectResponse +from sqlalchemy import select +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm.attributes import flag_modified + +from app.config import config +from app.db import ( + SearchSourceConnector, + SearchSourceConnectorType, + User, + get_async_session, +) +from app.users import current_active_user +from app.utils.connector_naming import generate_unique_connector_name +from app.utils.oauth_security import OAuthStateManager, TokenEncryption, generate_pkce_pair + +logger = logging.getLogger(__name__) + +router = APIRouter() + +_state_manager: OAuthStateManager | None = None +_token_encryption: TokenEncryption | None = None + + +def _get_state_manager() -> OAuthStateManager: + global _state_manager + if _state_manager is None: + if not config.SECRET_KEY: + raise HTTPException(status_code=500, detail="SECRET_KEY not configured.") + _state_manager = OAuthStateManager(config.SECRET_KEY) + return _state_manager + + +def _get_token_encryption() -> TokenEncryption: + global _token_encryption + if _token_encryption is None: + if not config.SECRET_KEY: + raise HTTPException(status_code=500, detail="SECRET_KEY not configured.") + _token_encryption = TokenEncryption(config.SECRET_KEY) + return _token_encryption + + +def _build_redirect_uri(service: str) -> str: + base = config.BACKEND_URL + if not base: + raise HTTPException(status_code=500, detail="BACKEND_URL not configured.") + return f"{base.rstrip('/')}/api/v1/auth/mcp/{service}/connector/callback" + + +def _frontend_redirect( + space_id: int | None, + *, + success: bool = False, + connector_id: int | None = None, + error: str | None = None, + service: str = "mcp", +) -> RedirectResponse: + if 
async def _prepare_mcp_oauth(svc, service: str) -> dict:
    """Discover OAuth metadata and resolve client credentials for *svc*.

    Tries Dynamic Client Registration (RFC 7591) when the service supports
    it; otherwise falls back to statically configured credentials named by
    ``svc.client_id_env`` / ``svc.client_secret_env``.

    Returns a dict with ``auth_endpoint``, ``token_endpoint``, ``client_id``,
    ``client_secret`` and ``redirect_uri``.

    Raises:
        HTTPException: 502 for incomplete/failed server metadata or DCR,
            500 for missing local configuration.
    """
    from app.services.mcp_oauth.discovery import (
        discover_oauth_metadata,
        register_client,
    )

    metadata = await discover_oauth_metadata(svc.mcp_url)
    auth_endpoint = metadata.get("authorization_endpoint")
    token_endpoint = metadata.get("token_endpoint")
    registration_endpoint = metadata.get("registration_endpoint")

    if not auth_endpoint or not token_endpoint:
        raise HTTPException(
            status_code=502,
            detail=f"{svc.name} MCP server returned incomplete OAuth metadata.",
        )

    redirect_uri = _build_redirect_uri(service)

    if svc.supports_dcr and registration_endpoint:
        dcr = await register_client(registration_endpoint, redirect_uri)
        client_id = dcr.get("client_id")
        client_secret = dcr.get("client_secret", "")
        if not client_id:
            raise HTTPException(
                status_code=502,
                detail=f"DCR for {svc.name} did not return a client_id.",
            )
    elif not svc.supports_dcr and svc.client_id_env:
        client_id = getattr(config, svc.client_id_env, None)
        client_secret = getattr(config, svc.client_secret_env or "", None) or ""
        if not client_id:
            raise HTTPException(
                status_code=500,
                detail=f"{svc.name} MCP OAuth not configured ({svc.client_id_env}).",
            )
    else:
        raise HTTPException(
            status_code=502,
            detail=f"{svc.name} MCP server has no DCR and no fallback credentials.",
        )

    return {
        "auth_endpoint": auth_endpoint,
        "token_endpoint": token_endpoint,
        "client_id": client_id,
        "client_secret": client_secret,
        "redirect_uri": redirect_uri,
    }


# ---------------------------------------------------------------------------
# /add — start MCP OAuth flow
# ---------------------------------------------------------------------------

@router.get("/auth/mcp/{service}/connector/add")
async def connect_mcp_service(
    service: str,
    space_id: int,
    user: User = Depends(current_active_user),
):
    """Start the MCP OAuth 2.1 flow for *service* and return the auth URL.

    Performs discovery and client registration, generates a PKCE pair,
    signs all flow context (verifier, client credentials, endpoints) into
    the ``state`` parameter, and returns the authorization URL for the
    frontend to open.
    """
    from app.services.mcp_oauth.registry import get_service

    svc = get_service(service)
    if not svc:
        raise HTTPException(status_code=404, detail=f"Unknown MCP service: {service}")

    try:
        flow = await _prepare_mcp_oauth(svc, service)

        verifier, challenge = generate_pkce_pair()
        enc = _get_token_encryption()

        # The client secret travels inside the signed state, so it is
        # encrypted before being embedded.
        state = _get_state_manager().generate_secure_state(
            space_id,
            user.id,
            service=service,
            code_verifier=verifier,
            mcp_client_id=flow["client_id"],
            mcp_client_secret=(
                enc.encrypt_token(flow["client_secret"])
                if flow["client_secret"]
                else ""
            ),
            mcp_token_endpoint=flow["token_endpoint"],
            mcp_url=svc.mcp_url,
        )

        auth_params: dict[str, str] = {
            "client_id": flow["client_id"],
            "response_type": "code",
            "redirect_uri": flow["redirect_uri"],
            "code_challenge": challenge,
            "code_challenge_method": "S256",
            "state": state,
        }
        if svc.scopes:
            auth_params["scope"] = " ".join(svc.scopes)

        auth_url = f"{flow['auth_endpoint']}?{urlencode(auth_params)}"

        logger.info(
            "Generated %s MCP OAuth URL for user %s, space %s",
            svc.name, user.id, space_id,
        )
        return {"auth_url": auth_url}

    except HTTPException:
        raise
    except Exception as e:
        logger.error("Failed to initiate %s MCP OAuth: %s", service, e, exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to initiate {service} MCP OAuth: {e!s}",
        ) from e
@router.get("/auth/mcp/{service}/connector/callback")
async def mcp_oauth_callback(
    service: str,
    code: str | None = None,
    error: str | None = None,
    state: str | None = None,
    session: AsyncSession = Depends(get_async_session),
):
    """Handle the OAuth redirect from the MCP service's authorization server.

    Exchanges the authorization code (with the PKCE verifier recovered from
    the signed state) for tokens, then either updates an existing connector
    (re-auth path) or creates a new MCP_CONNECTOR, and finally redirects the
    browser back to the frontend.
    """
    if error:
        logger.warning("%s MCP OAuth error: %s", service, error)
        space_id = None
        if state:
            try:
                data = _get_state_manager().validate_state(state)
                space_id = data.get("space_id")
            except Exception:
                # Best effort only: we still want a friendly redirect even
                # when the state cannot be recovered.
                pass
        return _frontend_redirect(
            space_id, error=f"{service}_mcp_oauth_denied", service=service,
        )

    if not code:
        raise HTTPException(status_code=400, detail="Missing authorization code")
    if not state:
        raise HTTPException(status_code=400, detail="Missing state parameter")

    # A forged or expired state must produce a client error, not an
    # unhandled 500 from the state manager.
    try:
        data = _get_state_manager().validate_state(state)
    except Exception as e:
        raise HTTPException(
            status_code=400, detail="Invalid state parameter"
        ) from e

    user_id = UUID(data["user_id"])
    space_id = data["space_id"]
    svc_key = data.get("service", service)

    from app.services.mcp_oauth.registry import get_service

    svc = get_service(svc_key)
    if not svc:
        raise HTTPException(status_code=404, detail=f"Unknown MCP service: {svc_key}")

    try:
        from app.services.mcp_oauth.discovery import exchange_code_for_tokens

        enc = _get_token_encryption()
        client_id = data["mcp_client_id"]
        client_secret = (
            enc.decrypt_token(data["mcp_client_secret"])
            if data.get("mcp_client_secret")
            else ""
        )
        token_endpoint = data["mcp_token_endpoint"]
        code_verifier = data["code_verifier"]
        mcp_url = data["mcp_url"]
        redirect_uri = _build_redirect_uri(service)

        token_json = await exchange_code_for_tokens(
            token_endpoint=token_endpoint,
            code=code,
            redirect_uri=redirect_uri,
            client_id=client_id,
            client_secret=client_secret,
            code_verifier=code_verifier,
        )

        access_token = token_json.get("access_token")
        if not access_token:
            raise HTTPException(
                status_code=400,
                detail=f"No access token received from {svc.name}.",
            )

        refresh_token = token_json.get("refresh_token")
        expires_at = None
        if token_json.get("expires_in"):
            expires_at = datetime.now(UTC) + timedelta(
                seconds=int(token_json["expires_in"])
            )

        # server_config carries the live Authorization header for the MCP
        # client; mcp_oauth keeps the encrypted credentials needed to
        # refresh the token later.
        connector_config = {
            "server_config": {
                "transport": "streamable-http",
                "url": mcp_url,
                "headers": {"Authorization": f"Bearer {access_token}"},
            },
            "mcp_service": svc_key,
            "mcp_oauth": {
                "client_id": client_id,
                "client_secret": enc.encrypt_token(client_secret) if client_secret else "",
                "token_endpoint": token_endpoint,
                "access_token": enc.encrypt_token(access_token),
                "refresh_token": enc.encrypt_token(refresh_token) if refresh_token else None,
                "expires_at": expires_at.isoformat() if expires_at else None,
                "scope": token_json.get("scope"),
            },
            "_token_encrypted": True,
        }

        # ---- Re-auth path: update the existing connector in place ----
        reauth_connector_id = data.get("connector_id")
        if reauth_connector_id:
            result = await session.execute(
                select(SearchSourceConnector).filter(
                    SearchSourceConnector.id == reauth_connector_id,
                    SearchSourceConnector.user_id == user_id,
                    SearchSourceConnector.search_space_id == space_id,
                    SearchSourceConnector.connector_type
                    == SearchSourceConnectorType.MCP_CONNECTOR,
                )
            )
            db_connector = result.scalars().first()
            if not db_connector:
                raise HTTPException(
                    status_code=404,
                    detail="Connector not found during re-auth",
                )

            db_connector.config = connector_config
            flag_modified(db_connector, "config")
            await session.commit()
            await session.refresh(db_connector)

            _invalidate_cache(space_id)

            logger.info(
                "Re-authenticated %s MCP connector %s for user %s",
                svc.name, db_connector.id, user_id,
            )
            # Only same-origin relative return URLs are honored (no open
            # redirect).
            reauth_return_url = data.get("return_url")
            if reauth_return_url and reauth_return_url.startswith("/"):
                return RedirectResponse(
                    url=f"{config.NEXT_FRONTEND_URL}{reauth_return_url}"
                )
            return _frontend_redirect(
                space_id, success=True, connector_id=db_connector.id, service=service,
            )

        # ---- New connector path ----
        connector_name = await generate_unique_connector_name(
            session,
            SearchSourceConnectorType.MCP_CONNECTOR,
            space_id,
            user_id,
            f"{svc.name} MCP",
        )

        new_connector = SearchSourceConnector(
            name=connector_name,
            connector_type=SearchSourceConnectorType.MCP_CONNECTOR,
            is_indexable=False,
            config=connector_config,
            search_space_id=space_id,
            user_id=user_id,
        )
        session.add(new_connector)

        try:
            await session.commit()
        except IntegrityError as e:
            await session.rollback()
            raise HTTPException(
                status_code=409, detail=f"Database integrity error: {e!s}",
            ) from e

        _invalidate_cache(space_id)

        logger.info(
            "Created %s MCP connector %s for user %s in space %s",
            svc.name, new_connector.id, user_id, space_id,
        )
        return _frontend_redirect(
            space_id, success=True, connector_id=new_connector.id, service=service,
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Failed to complete %s MCP OAuth: %s", service, e, exc_info=True,
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to complete {service} MCP OAuth: {e!s}",
        ) from e
# ---------------------------------------------------------------------------
# /reauth — re-authenticate an existing MCP connector
# ---------------------------------------------------------------------------

@router.get("/auth/mcp/{service}/connector/reauth")
async def reauth_mcp_service(
    service: str,
    space_id: int,
    connector_id: int,
    return_url: str | None = None,
    user: User = Depends(current_active_user),
    session: AsyncSession = Depends(get_async_session),
):
    """Re-run the MCP OAuth flow for an existing MCP_CONNECTOR.

    Verifies ownership of the connector, repeats discovery + client
    registration, and returns a fresh authorization URL whose state
    embeds ``connector_id`` so the callback updates the connector in
    place instead of creating a new one.
    """
    ownership_check = await session.execute(
        select(SearchSourceConnector).filter(
            SearchSourceConnector.id == connector_id,
            SearchSourceConnector.user_id == user.id,
            SearchSourceConnector.search_space_id == space_id,
            SearchSourceConnector.connector_type
            == SearchSourceConnectorType.MCP_CONNECTOR,
        )
    )
    if ownership_check.scalars().first() is None:
        raise HTTPException(
            status_code=404, detail="MCP connector not found or access denied",
        )

    from app.services.mcp_oauth.registry import get_service

    svc = get_service(service)
    if svc is None:
        raise HTTPException(status_code=404, detail=f"Unknown MCP service: {service}")

    try:
        from app.services.mcp_oauth.discovery import (
            discover_oauth_metadata,
            register_client,
        )

        oauth_meta = await discover_oauth_metadata(svc.mcp_url)
        authorize_url = oauth_meta.get("authorization_endpoint")
        token_url = oauth_meta.get("token_endpoint")
        dcr_url = oauth_meta.get("registration_endpoint")

        if not (authorize_url and token_url):
            raise HTTPException(
                status_code=502,
                detail=f"{svc.name} MCP server returned incomplete OAuth metadata.",
            )

        callback_uri = _build_redirect_uri(service)

        # Resolve client credentials: prefer DCR, fall back to env config.
        if svc.supports_dcr and dcr_url:
            registration = await register_client(dcr_url, callback_uri)
            oauth_client_id = registration.get("client_id")
            oauth_client_secret = registration.get("client_secret", "")
            if not oauth_client_id:
                raise HTTPException(
                    status_code=502,
                    detail=f"DCR for {svc.name} did not return a client_id.",
                )
        elif not svc.supports_dcr and svc.client_id_env:
            oauth_client_id = getattr(config, svc.client_id_env, None)
            oauth_client_secret = (
                getattr(config, svc.client_secret_env or "", None) or ""
            )
            if not oauth_client_id:
                raise HTTPException(
                    status_code=500,
                    detail=f"{svc.name} MCP OAuth not configured ({svc.client_id_env}).",
                )
        else:
            raise HTTPException(
                status_code=502,
                detail=f"{svc.name} MCP server has no DCR and no fallback credentials.",
            )

        pkce_verifier, pkce_challenge = generate_pkce_pair()
        crypto = _get_token_encryption()

        state_payload: dict = {
            "service": service,
            "code_verifier": pkce_verifier,
            "mcp_client_id": oauth_client_id,
            "mcp_client_secret": (
                crypto.encrypt_token(oauth_client_secret)
                if oauth_client_secret
                else ""
            ),
            "mcp_token_endpoint": token_url,
            "mcp_url": svc.mcp_url,
            "connector_id": connector_id,
        }
        # Only same-origin relative return URLs are carried through.
        if return_url and return_url.startswith("/"):
            state_payload["return_url"] = return_url

        state = _get_state_manager().generate_secure_state(
            space_id, user.id, **state_payload,
        )

        query: dict[str, str] = {
            "client_id": oauth_client_id,
            "response_type": "code",
            "redirect_uri": callback_uri,
            "code_challenge": pkce_challenge,
            "code_challenge_method": "S256",
            "state": state,
        }
        if svc.scopes:
            query["scope"] = " ".join(svc.scopes)

        logger.info(
            "Initiating %s MCP re-auth for user %s, connector %s",
            svc.name, user.id, connector_id,
        )
        return {"auth_url": f"{authorize_url}?{urlencode(query)}"}

    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            "Failed to initiate %s MCP re-auth: %s", service, e, exc_info=True,
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to initiate {service} MCP re-auth: {e!s}",
        ) from e
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------

def _invalidate_cache(space_id: int) -> None:
    """Best-effort invalidation of the cached MCP tools for a search space.

    Any failure (import error, cache backend issue) is logged at debug
    level and swallowed: cache invalidation must never break the OAuth
    flow that triggered it.
    """
    try:
        from app.agents.new_chat.tools.mcp_tool import invalidate_mcp_tools_cache
    except Exception:
        logger.debug("MCP cache invalidation skipped", exc_info=True)
        return
    try:
        invalidate_mcp_tools_cache(space_id)
    except Exception:
        logger.debug("MCP cache invalidation skipped", exc_info=True)
_TOKEN_REFRESH_BUFFER_SECONDS = 300  # refresh 5 min before expiry


async def _maybe_refresh_mcp_oauth_token(
    session: AsyncSession,
    connector: "SearchSourceConnector",
    cfg: dict[str, Any],
    server_config: dict[str, Any],
) -> dict[str, Any]:
    """Refresh the access token for an MCP OAuth connector if it is about
    to expire (within ``_TOKEN_REFRESH_BUFFER_SECONDS`` of ``expires_at``).

    On success the connector's config is persisted with the new encrypted
    access token (and rotated refresh token, if the server returned one)
    and the updated ``server_config`` is returned. On any failure the
    original ``server_config`` is returned unchanged so tool loading can
    proceed with the stored (possibly stale) token.
    """
    from datetime import UTC, datetime, timedelta

    mcp_oauth = cfg.get("mcp_oauth", {})
    expires_at_str = mcp_oauth.get("expires_at")
    if not expires_at_str:
        # No expiry recorded — nothing to decide against.
        return server_config

    try:
        expires_at = datetime.fromisoformat(expires_at_str)
        if expires_at.tzinfo is None:
            # Stored timestamps are written as UTC; tolerate naive values.
            expires_at = expires_at.replace(tzinfo=UTC)

        if datetime.now(UTC) < expires_at - timedelta(
            seconds=_TOKEN_REFRESH_BUFFER_SECONDS
        ):
            return server_config
    except (ValueError, TypeError):
        # Unparseable expiry — treat as non-expiring rather than failing.
        return server_config

    refresh_token = mcp_oauth.get("refresh_token")
    if not refresh_token:
        logger.warning(
            "MCP connector %s token expired but no refresh_token available",
            connector.id,
        )
        return server_config

    try:
        from app.config import config as app_config
        from app.services.mcp_oauth.discovery import refresh_access_token
        from app.utils.oauth_security import TokenEncryption

        enc = TokenEncryption(app_config.SECRET_KEY)
        decrypted_refresh = enc.decrypt_token(refresh_token)
        decrypted_secret = (
            enc.decrypt_token(mcp_oauth["client_secret"])
            if mcp_oauth.get("client_secret")
            else ""
        )

        token_json = await refresh_access_token(
            token_endpoint=mcp_oauth["token_endpoint"],
            refresh_token=decrypted_refresh,
            client_id=mcp_oauth["client_id"],
            client_secret=decrypted_secret,
        )

        new_access = token_json.get("access_token")
        if not new_access:
            logger.warning(
                "MCP connector %s token refresh returned no access_token",
                connector.id,
            )
            return server_config

        new_expires_at = None
        if token_json.get("expires_in"):
            new_expires_at = datetime.now(UTC) + timedelta(
                seconds=int(token_json["expires_in"])
            )

        # Persist rotated credentials: servers may issue a new refresh token.
        updated_oauth = dict(mcp_oauth)
        updated_oauth["access_token"] = enc.encrypt_token(new_access)
        if token_json.get("refresh_token"):
            updated_oauth["refresh_token"] = enc.encrypt_token(
                token_json["refresh_token"]
            )
        updated_oauth["expires_at"] = (
            new_expires_at.isoformat() if new_expires_at else None
        )

        updated_server_config = dict(server_config)
        updated_server_config["headers"] = {
            **server_config.get("headers", {}),
            "Authorization": f"Bearer {new_access}",
        }

        from sqlalchemy.orm.attributes import flag_modified

        connector.config = {
            **cfg,
            "server_config": updated_server_config,
            "mcp_oauth": updated_oauth,
        }
        # config is a JSON column: in-place mutation would not be detected.
        flag_modified(connector, "config")
        await session.commit()
        await session.refresh(connector)

        logger.info("Refreshed MCP OAuth token for connector %s", connector.id)
        return updated_server_config

    except Exception:
        logger.warning(
            "Failed to refresh MCP OAuth token for connector %s",
            connector.id,
            exc_info=True,
        )
        return server_config
// MCP OAuth Connectors (one-click connect via official MCP servers).
// Each entry's `authEndpoint` is called by the frontend to obtain the
// provider's authorization URL from the backend MCP OAuth route
// (/api/v1/auth/mcp/{service}/connector/add).
export const MCP_OAUTH_CONNECTORS = [
	{
		id: "linear-mcp-connector",
		title: "Linear (MCP)",
		description: "Interact with Linear issues via MCP",
		connectorType: EnumConnectorName.MCP_CONNECTOR,
		authEndpoint: "/api/v1/auth/mcp/linear/connector/add/",
	},
	{
		id: "jira-mcp-connector",
		title: "Jira (MCP)",
		description: "Interact with Jira issues via MCP",
		connectorType: EnumConnectorName.MCP_CONNECTOR,
		authEndpoint: "/api/v1/auth/mcp/jira/connector/add/",
	},
	{
		id: "clickup-mcp-connector",
		title: "ClickUp (MCP)",
		description: "Interact with ClickUp tasks via MCP",
		connectorType: EnumConnectorName.MCP_CONNECTOR,
		authEndpoint: "/api/v1/auth/mcp/clickup/connector/add/",
	},
] as const;
"../constants/connector-constants"; import { getDocumentCountForConnector } from "../utils/connector-document-mapping"; type OAuthConnector = (typeof OAUTH_CONNECTORS)[number]; +type MCPOAuthConnector = (typeof MCP_OAUTH_CONNECTORS)[number]; type ComposioConnector = (typeof COMPOSIO_CONNECTORS)[number]; type OtherConnector = (typeof OTHER_CONNECTORS)[number]; type CrawlerConnector = (typeof CRAWLERS)[number]; @@ -128,6 +130,10 @@ export const AllConnectorsTab: FC = ({ (c) => c.connectorType === EnumConnectorName.AIRTABLE_CONNECTOR ); + const filteredMCPOAuth = MCP_OAUTH_CONNECTORS.filter( + (c) => matchesSearch(c.title, c.description), + ); + const moreIntegrationsComposio = filteredComposio.filter( (c) => !DOCUMENT_FILE_CONNECTOR_TYPES.has(c.connectorType) && @@ -279,6 +285,7 @@ export const AllConnectorsTab: FC = ({ nativeGoogleDriveConnectors.length > 0 || composioGoogleDriveConnectors.length > 0 || fileStorageConnectors.length > 0; + const hasMCPOAuth = filteredMCPOAuth.length > 0; const hasMoreIntegrations = otherDocumentYouTubeConnectors.length > 0 || otherDocumentNotionConnectors.length > 0 || @@ -288,7 +295,7 @@ export const AllConnectorsTab: FC = ({ moreIntegrationsOther.length > 0 || moreIntegrationsCrawlers.length > 0; - const hasAnyResults = hasDocumentFileConnectors || hasMoreIntegrations; + const hasAnyResults = hasDocumentFileConnectors || hasMCPOAuth || hasMoreIntegrations; if (!hasAnyResults && searchQuery) { return ( @@ -318,6 +325,20 @@ export const AllConnectorsTab: FC = ({ )} + {/* Live MCP Integrations */} + {hasMCPOAuth && ( +
+
+

+ Live MCP Integrations +

+
+
+ {filteredMCPOAuth.map((connector) => renderOAuthCard(connector as OAuthConnector | ComposioConnector))} +
+
+ )} + {/* More Integrations */} {hasMoreIntegrations && (
From 8b8c9b1f5dd8b8c88e0d351c91adbc1fda5030a0 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:38:24 +0200 Subject: [PATCH 035/113] add Slack and Airtable MCP OAuth support --- .../app/routes/mcp_oauth_route.py | 8 ++++++-- .../app/services/mcp_oauth/discovery.py | 18 ++++++++++++++---- .../app/services/mcp_oauth/registry.py | 13 +++++++++++++ .../constants/connector-constants.ts | 14 ++++++++++++++ 4 files changed, 47 insertions(+), 6 deletions(-) diff --git a/surfsense_backend/app/routes/mcp_oauth_route.py b/surfsense_backend/app/routes/mcp_oauth_route.py index 689914ee8..e47dc0a62 100644 --- a/surfsense_backend/app/routes/mcp_oauth_route.py +++ b/surfsense_backend/app/routes/mcp_oauth_route.py @@ -106,7 +106,9 @@ async def connect_mcp_service( register_client, ) - metadata = await discover_oauth_metadata(svc.mcp_url) + metadata = await discover_oauth_metadata( + svc.mcp_url, origin_override=svc.oauth_discovery_origin, + ) auth_endpoint = metadata.get("authorization_endpoint") token_endpoint = metadata.get("token_endpoint") registration_endpoint = metadata.get("registration_endpoint") @@ -409,7 +411,9 @@ async def reauth_mcp_service( register_client, ) - metadata = await discover_oauth_metadata(svc.mcp_url) + metadata = await discover_oauth_metadata( + svc.mcp_url, origin_override=svc.oauth_discovery_origin, + ) auth_endpoint = metadata.get("authorization_endpoint") token_endpoint = metadata.get("token_endpoint") registration_endpoint = metadata.get("registration_endpoint") diff --git a/surfsense_backend/app/services/mcp_oauth/discovery.py b/surfsense_backend/app/services/mcp_oauth/discovery.py index e8bcd7076..b0f3fef2a 100644 --- a/surfsense_backend/app/services/mcp_oauth/discovery.py +++ b/surfsense_backend/app/services/mcp_oauth/discovery.py @@ -11,14 +11,24 @@ import httpx logger = logging.getLogger(__name__) -async def discover_oauth_metadata(mcp_url: str, *, timeout: float = 15.0) -> dict: +async def discover_oauth_metadata( + mcp_url: str, 
+ *, + origin_override: str | None = None, + timeout: float = 15.0, +) -> dict: """Fetch OAuth 2.1 metadata from the MCP server's well-known endpoint. Per the MCP spec the discovery document lives at the *origin* of the - MCP server URL, not at the MCP endpoint path. + MCP server URL. ``origin_override`` can be used when the OAuth server + lives on a different domain (e.g. Airtable: MCP at ``mcp.airtable.com``, + OAuth at ``airtable.com``). """ - parsed = urlparse(mcp_url) - origin = f"{parsed.scheme}://{parsed.netloc}" + if origin_override: + origin = origin_override.rstrip("/") + else: + parsed = urlparse(mcp_url) + origin = f"{parsed.scheme}://{parsed.netloc}" discovery_url = f"{origin}/.well-known/oauth-authorization-server" async with httpx.AsyncClient(follow_redirects=True) as client: diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index 93d5d5448..3f9a03fbc 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -16,6 +16,7 @@ class MCPServiceConfig: name: str mcp_url: str supports_dcr: bool = True + oauth_discovery_origin: str | None = None client_id_env: str | None = None client_secret_env: str | None = None scopes: list[str] = field(default_factory=list) @@ -34,6 +35,18 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { name="ClickUp", mcp_url="https://mcp.clickup.com/mcp", ), + "slack": MCPServiceConfig( + name="Slack", + mcp_url="https://mcp.slack.com/mcp", + supports_dcr=False, + client_id_env="SLACK_CLIENT_ID", + client_secret_env="SLACK_CLIENT_SECRET", + ), + "airtable": MCPServiceConfig( + name="Airtable", + mcp_url="https://mcp.airtable.com/mcp", + oauth_discovery_origin="https://airtable.com", + ), } diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 
5ce94809a..dcd63f525 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -128,6 +128,20 @@ export const MCP_OAUTH_CONNECTORS = [ connectorType: EnumConnectorName.MCP_CONNECTOR, authEndpoint: "/api/v1/auth/mcp/clickup/connector/add/", }, + { + id: "slack-mcp-connector", + title: "Slack (MCP)", + description: "Interact with Slack channels via MCP", + connectorType: EnumConnectorName.MCP_CONNECTOR, + authEndpoint: "/api/v1/auth/mcp/slack/connector/add/", + }, + { + id: "airtable-mcp-connector", + title: "Airtable (MCP)", + description: "Interact with Airtable bases via MCP", + connectorType: EnumConnectorName.MCP_CONNECTOR, + authEndpoint: "/api/v1/auth/mcp/airtable/connector/add/", + }, ] as const; // Content Sources (tools that extract and import content from external sources) From 5ff0ec5d5de7ab9d880cb9e6911ecebdf54fed14 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:51:40 +0200 Subject: [PATCH 036/113] disable periodic indexing for live connectors --- .../celery_tasks/schedule_checker_task.py | 22 +++---------------- 1 file changed, 3 insertions(+), 19 deletions(-) diff --git a/surfsense_backend/app/tasks/celery_tasks/schedule_checker_task.py b/surfsense_backend/app/tasks/celery_tasks/schedule_checker_task.py index e6890b0a8..3aee5a4ca 100644 --- a/surfsense_backend/app/tasks/celery_tasks/schedule_checker_task.py +++ b/surfsense_backend/app/tasks/celery_tasks/schedule_checker_task.py @@ -51,43 +51,27 @@ async def _check_and_trigger_schedules(): logger.info(f"Found {len(due_connectors)} connectors due for indexing") - # Import all indexing tasks + # Import indexing tasks for KB connectors only. + # Live connectors (Linear, Slack, Jira, ClickUp, Airtable, Discord, + # Teams, Gmail, Calendar, Luma) use real-time tools instead. 
from app.tasks.celery_tasks.connector_tasks import ( - index_airtable_records_task, - index_clickup_tasks_task, index_confluence_pages_task, index_crawled_urls_task, - index_discord_messages_task, index_elasticsearch_documents_task, index_github_repos_task, index_google_calendar_events_task, index_google_drive_files_task, index_google_gmail_messages_task, - index_jira_issues_task, - index_linear_issues_task, - index_luma_events_task, index_notion_pages_task, - index_slack_messages_task, ) - # Map connector types to their tasks task_map = { - SearchSourceConnectorType.SLACK_CONNECTOR: index_slack_messages_task, SearchSourceConnectorType.NOTION_CONNECTOR: index_notion_pages_task, SearchSourceConnectorType.GITHUB_CONNECTOR: index_github_repos_task, - SearchSourceConnectorType.LINEAR_CONNECTOR: index_linear_issues_task, - SearchSourceConnectorType.JIRA_CONNECTOR: index_jira_issues_task, SearchSourceConnectorType.CONFLUENCE_CONNECTOR: index_confluence_pages_task, - SearchSourceConnectorType.CLICKUP_CONNECTOR: index_clickup_tasks_task, - SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR: index_google_calendar_events_task, - SearchSourceConnectorType.AIRTABLE_CONNECTOR: index_airtable_records_task, - SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR: index_google_gmail_messages_task, - SearchSourceConnectorType.DISCORD_CONNECTOR: index_discord_messages_task, - SearchSourceConnectorType.LUMA_CONNECTOR: index_luma_events_task, SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR: index_elasticsearch_documents_task, SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: index_crawled_urls_task, SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR: index_google_drive_files_task, - # Composio connector types (unified with native Google tasks) SearchSourceConnectorType.COMPOSIO_GOOGLE_DRIVE_CONNECTOR: index_google_drive_files_task, SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR: index_google_gmail_messages_task, SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR: 
index_google_calendar_events_task, From 328219e46fdc9c0b88d194c1e47b5dbc9d4b5d91 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:52:17 +0200 Subject: [PATCH 037/113] disable first-run indexing for live connectors --- .../app/utils/periodic_scheduler.py | 30 ------------------- 1 file changed, 30 deletions(-) diff --git a/surfsense_backend/app/utils/periodic_scheduler.py b/surfsense_backend/app/utils/periodic_scheduler.py index 9ea45df63..923f969d5 100644 --- a/surfsense_backend/app/utils/periodic_scheduler.py +++ b/surfsense_backend/app/utils/periodic_scheduler.py @@ -18,19 +18,9 @@ logger = logging.getLogger(__name__) # Mapping of connector types to their corresponding Celery task names CONNECTOR_TASK_MAP = { - SearchSourceConnectorType.SLACK_CONNECTOR: "index_slack_messages", - SearchSourceConnectorType.TEAMS_CONNECTOR: "index_teams_messages", SearchSourceConnectorType.NOTION_CONNECTOR: "index_notion_pages", SearchSourceConnectorType.GITHUB_CONNECTOR: "index_github_repos", - SearchSourceConnectorType.LINEAR_CONNECTOR: "index_linear_issues", - SearchSourceConnectorType.JIRA_CONNECTOR: "index_jira_issues", SearchSourceConnectorType.CONFLUENCE_CONNECTOR: "index_confluence_pages", - SearchSourceConnectorType.CLICKUP_CONNECTOR: "index_clickup_tasks", - SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR: "index_google_calendar_events", - SearchSourceConnectorType.AIRTABLE_CONNECTOR: "index_airtable_records", - SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR: "index_google_gmail_messages", - SearchSourceConnectorType.DISCORD_CONNECTOR: "index_discord_messages", - SearchSourceConnectorType.LUMA_CONNECTOR: "index_luma_events", SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR: "index_elasticsearch_documents", SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: "index_crawled_urls", SearchSourceConnectorType.BOOKSTACK_CONNECTOR: "index_bookstack_pages", @@ -84,40 +74,20 @@ def create_periodic_schedule( f"(frequency: {frequency_minutes} minutes). 
Triggering first run..." ) - # Import all indexing tasks from app.tasks.celery_tasks.connector_tasks import ( - index_airtable_records_task, index_bookstack_pages_task, - index_clickup_tasks_task, index_confluence_pages_task, index_crawled_urls_task, - index_discord_messages_task, index_elasticsearch_documents_task, index_github_repos_task, - index_google_calendar_events_task, - index_google_gmail_messages_task, - index_jira_issues_task, - index_linear_issues_task, - index_luma_events_task, index_notion_pages_task, index_obsidian_vault_task, - index_slack_messages_task, ) - # Map connector type to task task_map = { - SearchSourceConnectorType.SLACK_CONNECTOR: index_slack_messages_task, SearchSourceConnectorType.NOTION_CONNECTOR: index_notion_pages_task, SearchSourceConnectorType.GITHUB_CONNECTOR: index_github_repos_task, - SearchSourceConnectorType.LINEAR_CONNECTOR: index_linear_issues_task, - SearchSourceConnectorType.JIRA_CONNECTOR: index_jira_issues_task, SearchSourceConnectorType.CONFLUENCE_CONNECTOR: index_confluence_pages_task, - SearchSourceConnectorType.CLICKUP_CONNECTOR: index_clickup_tasks_task, - SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR: index_google_calendar_events_task, - SearchSourceConnectorType.AIRTABLE_CONNECTOR: index_airtable_records_task, - SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR: index_google_gmail_messages_task, - SearchSourceConnectorType.DISCORD_CONNECTOR: index_discord_messages_task, - SearchSourceConnectorType.LUMA_CONNECTOR: index_luma_events_task, SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR: index_elasticsearch_documents_task, SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: index_crawled_urls_task, SearchSourceConnectorType.BOOKSTACK_CONNECTOR: index_bookstack_pages_task, From 53a173a8fdc78a35889ceb028a5e102a11a7ecb8 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:52:23 +0200 Subject: [PATCH 038/113] guard manual indexing for live connectors --- .../routes/search_source_connectors_routes.py | 
175 +++--------------- 1 file changed, 28 insertions(+), 147 deletions(-) diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py index b87ce28c9..7ce3ca9a3 100644 --- a/surfsense_backend/app/routes/search_source_connectors_routes.py +++ b/surfsense_backend/app/routes/search_source_connectors_routes.py @@ -693,27 +693,10 @@ async def index_connector_content( user: User = Depends(current_active_user), ): """ - Index content from a connector to a search space. - Requires CONNECTORS_UPDATE permission (to trigger indexing). + Index content from a KB connector to a search space. - Currently supports: - - SLACK_CONNECTOR: Indexes messages from all accessible Slack channels - - TEAMS_CONNECTOR: Indexes messages from all accessible Microsoft Teams channels - - NOTION_CONNECTOR: Indexes pages from all accessible Notion pages - - GITHUB_CONNECTOR: Indexes code and documentation from GitHub repositories - - LINEAR_CONNECTOR: Indexes issues and comments from Linear - - JIRA_CONNECTOR: Indexes issues and comments from Jira - - DISCORD_CONNECTOR: Indexes messages from all accessible Discord channels - - LUMA_CONNECTOR: Indexes events from Luma - - ELASTICSEARCH_CONNECTOR: Indexes documents from Elasticsearch - - WEBCRAWLER_CONNECTOR: Indexes web pages from crawled websites - - Args: - connector_id: ID of the connector to use - search_space_id: ID of the search space to store indexed content - - Returns: - Dictionary with indexing status + Live connectors (Slack, Teams, Linear, Jira, ClickUp, Calendar, Airtable, + Gmail, Discord, Luma) use real-time agent tools instead. 
""" try: # Get the connector first @@ -770,9 +753,7 @@ async def index_connector_content( # For calendar connectors, default to today but allow future dates if explicitly provided if connector.connector_type in [ - SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR, SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR, - SearchSourceConnectorType.LUMA_CONNECTOR, ]: # Default to today if no end_date provided (users can manually select future dates) indexing_to = today_str if end_date is None else end_date @@ -796,33 +777,32 @@ async def index_connector_content( # For non-calendar connectors, cap at today indexing_to = end_date if end_date else today_str - if connector.connector_type == SearchSourceConnectorType.SLACK_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import ( - index_slack_messages_task, - ) + _LIVE_CONNECTOR_TYPES = { + SearchSourceConnectorType.SLACK_CONNECTOR, + SearchSourceConnectorType.TEAMS_CONNECTOR, + SearchSourceConnectorType.LINEAR_CONNECTOR, + SearchSourceConnectorType.JIRA_CONNECTOR, + SearchSourceConnectorType.CLICKUP_CONNECTOR, + SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR, + SearchSourceConnectorType.AIRTABLE_CONNECTOR, + SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR, + SearchSourceConnectorType.DISCORD_CONNECTOR, + SearchSourceConnectorType.LUMA_CONNECTOR, + } + if connector.connector_type in _LIVE_CONNECTOR_TYPES: + return { + "message": ( + f"{connector.connector_type.value} uses real-time agent tools; " + "background indexing is disabled." 
+ ), + "indexing_started": False, + "connector_id": connector_id, + "search_space_id": search_space_id, + "indexing_from": indexing_from, + "indexing_to": indexing_to, + } - logger.info( - f"Triggering Slack indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_slack_messages_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Slack indexing started in the background." - - elif connector.connector_type == SearchSourceConnectorType.TEAMS_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import ( - index_teams_messages_task, - ) - - logger.info( - f"Triggering Teams indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_teams_messages_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Teams indexing started in the background." - - elif connector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR: + if connector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR: from app.tasks.celery_tasks.connector_tasks import index_notion_pages_task logger.info( @@ -844,28 +824,6 @@ async def index_connector_content( ) response_message = "GitHub indexing started in the background." - elif connector.connector_type == SearchSourceConnectorType.LINEAR_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import index_linear_issues_task - - logger.info( - f"Triggering Linear indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_linear_issues_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Linear indexing started in the background." 
- - elif connector.connector_type == SearchSourceConnectorType.JIRA_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import index_jira_issues_task - - logger.info( - f"Triggering Jira indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_jira_issues_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Jira indexing started in the background." - elif connector.connector_type == SearchSourceConnectorType.CONFLUENCE_CONNECTOR: from app.tasks.celery_tasks.connector_tasks import ( index_confluence_pages_task, @@ -892,59 +850,6 @@ async def index_connector_content( ) response_message = "BookStack indexing started in the background." - elif connector.connector_type == SearchSourceConnectorType.CLICKUP_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import index_clickup_tasks_task - - logger.info( - f"Triggering ClickUp indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_clickup_tasks_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "ClickUp indexing started in the background." - - elif ( - connector.connector_type - == SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR - ): - from app.tasks.celery_tasks.connector_tasks import ( - index_google_calendar_events_task, - ) - - logger.info( - f"Triggering Google Calendar indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_google_calendar_events_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Google Calendar indexing started in the background." 
- elif connector.connector_type == SearchSourceConnectorType.AIRTABLE_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import ( - index_airtable_records_task, - ) - - logger.info( - f"Triggering Airtable indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_airtable_records_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Airtable indexing started in the background." - elif ( - connector.connector_type == SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR - ): - from app.tasks.celery_tasks.connector_tasks import ( - index_google_gmail_messages_task, - ) - - logger.info( - f"Triggering Google Gmail indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_google_gmail_messages_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Google Gmail indexing started in the background." - elif ( connector.connector_type == SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR ): @@ -1089,30 +994,6 @@ async def index_connector_content( ) response_message = "Dropbox indexing started in the background." - elif connector.connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import ( - index_discord_messages_task, - ) - - logger.info( - f"Triggering Discord indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_discord_messages_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Discord indexing started in the background." 
- - elif connector.connector_type == SearchSourceConnectorType.LUMA_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import index_luma_events_task - - logger.info( - f"Triggering Luma indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_luma_events_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Luma indexing started in the background." - elif ( connector.connector_type == SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR From 0ab7d6a5e385d071befec0c386121181288b0228 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:52:43 +0200 Subject: [PATCH 039/113] set is_indexable=False for all live connector add routes --- surfsense_backend/app/routes/airtable_add_connector_route.py | 2 +- surfsense_backend/app/routes/clickup_add_connector_route.py | 4 ++-- surfsense_backend/app/routes/discord_add_connector_route.py | 2 +- .../app/routes/google_calendar_add_connector_route.py | 2 +- .../app/routes/google_gmail_add_connector_route.py | 2 +- surfsense_backend/app/routes/jira_add_connector_route.py | 2 +- surfsense_backend/app/routes/linear_add_connector_route.py | 2 +- surfsense_backend/app/routes/luma_add_connector_route.py | 4 ++-- surfsense_backend/app/routes/slack_add_connector_route.py | 2 +- surfsense_backend/app/routes/teams_add_connector_route.py | 2 +- 10 files changed, 12 insertions(+), 12 deletions(-) diff --git a/surfsense_backend/app/routes/airtable_add_connector_route.py b/surfsense_backend/app/routes/airtable_add_connector_route.py index 1e0b1eb5d..f70b9166b 100644 --- a/surfsense_backend/app/routes/airtable_add_connector_route.py +++ b/surfsense_backend/app/routes/airtable_add_connector_route.py @@ -311,7 +311,7 @@ async def airtable_callback( new_connector = SearchSourceConnector( name=connector_name, connector_type=SearchSourceConnectorType.AIRTABLE_CONNECTOR, - is_indexable=True, + is_indexable=False, 
config=credentials_dict, search_space_id=space_id, user_id=user_id, diff --git a/surfsense_backend/app/routes/clickup_add_connector_route.py b/surfsense_backend/app/routes/clickup_add_connector_route.py index 2cd63eca2..f7b0876e5 100644 --- a/surfsense_backend/app/routes/clickup_add_connector_route.py +++ b/surfsense_backend/app/routes/clickup_add_connector_route.py @@ -301,7 +301,7 @@ async def clickup_callback( # Update existing connector existing_connector.config = connector_config existing_connector.name = "ClickUp Connector" - existing_connector.is_indexable = True + existing_connector.is_indexable = False logger.info( f"Updated existing ClickUp connector for user {user_id} in space {space_id}" ) @@ -310,7 +310,7 @@ async def clickup_callback( new_connector = SearchSourceConnector( name="ClickUp Connector", connector_type=SearchSourceConnectorType.CLICKUP_CONNECTOR, - is_indexable=True, + is_indexable=False, config=connector_config, search_space_id=space_id, user_id=user_id, diff --git a/surfsense_backend/app/routes/discord_add_connector_route.py b/surfsense_backend/app/routes/discord_add_connector_route.py index 27bfffc90..4ab48f544 100644 --- a/surfsense_backend/app/routes/discord_add_connector_route.py +++ b/surfsense_backend/app/routes/discord_add_connector_route.py @@ -326,7 +326,7 @@ async def discord_callback( new_connector = SearchSourceConnector( name=connector_name, connector_type=SearchSourceConnectorType.DISCORD_CONNECTOR, - is_indexable=True, + is_indexable=False, config=connector_config, search_space_id=space_id, user_id=user_id, diff --git a/surfsense_backend/app/routes/google_calendar_add_connector_route.py b/surfsense_backend/app/routes/google_calendar_add_connector_route.py index d7ccf62ca..a143fd50d 100644 --- a/surfsense_backend/app/routes/google_calendar_add_connector_route.py +++ b/surfsense_backend/app/routes/google_calendar_add_connector_route.py @@ -340,7 +340,7 @@ async def calendar_callback( config=creds_dict, 
search_space_id=space_id, user_id=user_id, - is_indexable=True, + is_indexable=False, ) session.add(db_connector) await session.commit() diff --git a/surfsense_backend/app/routes/google_gmail_add_connector_route.py b/surfsense_backend/app/routes/google_gmail_add_connector_route.py index dd8feb1c7..9b807a556 100644 --- a/surfsense_backend/app/routes/google_gmail_add_connector_route.py +++ b/surfsense_backend/app/routes/google_gmail_add_connector_route.py @@ -371,7 +371,7 @@ async def gmail_callback( config=creds_dict, search_space_id=space_id, user_id=user_id, - is_indexable=True, + is_indexable=False, ) session.add(db_connector) await session.commit() diff --git a/surfsense_backend/app/routes/jira_add_connector_route.py b/surfsense_backend/app/routes/jira_add_connector_route.py index 6cd6283d7..eeb4f91d9 100644 --- a/surfsense_backend/app/routes/jira_add_connector_route.py +++ b/surfsense_backend/app/routes/jira_add_connector_route.py @@ -386,7 +386,7 @@ async def jira_callback( new_connector = SearchSourceConnector( name=connector_name, connector_type=SearchSourceConnectorType.JIRA_CONNECTOR, - is_indexable=True, + is_indexable=False, config=connector_config, search_space_id=space_id, user_id=user_id, diff --git a/surfsense_backend/app/routes/linear_add_connector_route.py b/surfsense_backend/app/routes/linear_add_connector_route.py index 9345ae495..f59c17d25 100644 --- a/surfsense_backend/app/routes/linear_add_connector_route.py +++ b/surfsense_backend/app/routes/linear_add_connector_route.py @@ -399,7 +399,7 @@ async def linear_callback( new_connector = SearchSourceConnector( name=connector_name, connector_type=SearchSourceConnectorType.LINEAR_CONNECTOR, - is_indexable=True, + is_indexable=False, config=connector_config, search_space_id=space_id, user_id=user_id, diff --git a/surfsense_backend/app/routes/luma_add_connector_route.py b/surfsense_backend/app/routes/luma_add_connector_route.py index 04d840a08..7040581bc 100644 --- 
a/surfsense_backend/app/routes/luma_add_connector_route.py +++ b/surfsense_backend/app/routes/luma_add_connector_route.py @@ -61,7 +61,7 @@ async def add_luma_connector( if existing_connector: # Update existing connector with new API key existing_connector.config = {"api_key": request.api_key} - existing_connector.is_indexable = True + existing_connector.is_indexable = False await session.commit() await session.refresh(existing_connector) @@ -82,7 +82,7 @@ async def add_luma_connector( config={"api_key": request.api_key}, search_space_id=request.space_id, user_id=user.id, - is_indexable=True, + is_indexable=False, ) session.add(db_connector) diff --git a/surfsense_backend/app/routes/slack_add_connector_route.py b/surfsense_backend/app/routes/slack_add_connector_route.py index 405ab2c4f..f6a1458a0 100644 --- a/surfsense_backend/app/routes/slack_add_connector_route.py +++ b/surfsense_backend/app/routes/slack_add_connector_route.py @@ -312,7 +312,7 @@ async def slack_callback( new_connector = SearchSourceConnector( name=connector_name, connector_type=SearchSourceConnectorType.SLACK_CONNECTOR, - is_indexable=True, + is_indexable=False, config=connector_config, search_space_id=space_id, user_id=user_id, diff --git a/surfsense_backend/app/routes/teams_add_connector_route.py b/surfsense_backend/app/routes/teams_add_connector_route.py index bbaae3a5f..9d0f5144f 100644 --- a/surfsense_backend/app/routes/teams_add_connector_route.py +++ b/surfsense_backend/app/routes/teams_add_connector_route.py @@ -321,7 +321,7 @@ async def teams_callback( new_connector = SearchSourceConnector( name=connector_name, connector_type=SearchSourceConnectorType.TEAMS_CONNECTOR, - is_indexable=True, + is_indexable=False, config=connector_config, search_space_id=space_id, user_id=user_id, From e676ebfabeb0584cee14232eb90575646dd8b040 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:52:54 +0200 Subject: [PATCH 040/113] remove live connectors from AUTO_INDEX_DEFAULTS --- 
.../constants/connector-constants.ts | 54 ------------------- 1 file changed, 54 deletions(-) diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index dcd63f525..39e827d1a 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -297,66 +297,18 @@ export interface AutoIndexConfig { } export const AUTO_INDEX_DEFAULTS: Record = { - [EnumConnectorName.GOOGLE_GMAIL_CONNECTOR]: { - daysBack: 30, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 30 days of emails.", - }, [EnumConnectorName.COMPOSIO_GMAIL_CONNECTOR]: { daysBack: 30, daysForward: 0, frequencyMinutes: 1440, syncDescription: "Syncing your last 30 days of emails.", }, - [EnumConnectorName.SLACK_CONNECTOR]: { - daysBack: 30, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 30 days of messages.", - }, - [EnumConnectorName.DISCORD_CONNECTOR]: { - daysBack: 30, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 30 days of messages.", - }, - [EnumConnectorName.TEAMS_CONNECTOR]: { - daysBack: 30, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 30 days of messages.", - }, - [EnumConnectorName.GOOGLE_CALENDAR_CONNECTOR]: { - daysBack: 90, - daysForward: 90, - frequencyMinutes: 1440, - syncDescription: "Syncing 90 days of past and upcoming events.", - }, [EnumConnectorName.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR]: { daysBack: 90, daysForward: 90, frequencyMinutes: 1440, syncDescription: "Syncing 90 days of past and upcoming events.", }, - [EnumConnectorName.LINEAR_CONNECTOR]: { - daysBack: 90, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 90 days of issues.", - }, - 
[EnumConnectorName.JIRA_CONNECTOR]: { - daysBack: 90, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 90 days of issues.", - }, - [EnumConnectorName.CLICKUP_CONNECTOR]: { - daysBack: 90, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 90 days of tasks.", - }, [EnumConnectorName.NOTION_CONNECTOR]: { daysBack: 365, daysForward: 0, @@ -369,12 +321,6 @@ export const AUTO_INDEX_DEFAULTS: Record = { frequencyMinutes: 1440, syncDescription: "Syncing your documentation.", }, - [EnumConnectorName.AIRTABLE_CONNECTOR]: { - daysBack: 365, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your bases.", - }, }; export const AUTO_INDEX_CONNECTOR_TYPES = new Set(Object.keys(AUTO_INDEX_DEFAULTS)); From bd2d6c07ff3487797d0a0a921259469f2c60a9b1 Mon Sep 17 00:00:00 2001 From: "DESKTOP-RTLN3BA\\$punk" Date: Tue, 21 Apr 2026 22:13:41 -0700 Subject: [PATCH 041/113] fix: summarization middleware usage --- .../app/agents/new_chat/chat_deepagent.py | 8 +- .../new_chat/middleware/safe_summarization.py | 125 ++++++++++++ surfsense_backend/pyproject.toml | 2 +- surfsense_backend/uv.lock | 2 +- surfsense_web/pnpm-lock.yaml | 183 +++++++----------- 5 files changed, 206 insertions(+), 114 deletions(-) create mode 100644 surfsense_backend/app/agents/new_chat/middleware/safe_summarization.py diff --git a/surfsense_backend/app/agents/new_chat/chat_deepagent.py b/surfsense_backend/app/agents/new_chat/chat_deepagent.py index ab47b49ce..a901a7519 100644 --- a/surfsense_backend/app/agents/new_chat/chat_deepagent.py +++ b/surfsense_backend/app/agents/new_chat/chat_deepagent.py @@ -24,7 +24,6 @@ from deepagents.backends import StateBackend from deepagents.graph import BASE_AGENT_PROMPT from deepagents.middleware.patch_tool_calls import PatchToolCallsMiddleware from deepagents.middleware.subagents import GENERAL_PURPOSE_SUBAGENT -from deepagents.middleware.summarization import create_summarization_middleware from 
langchain.agents import create_agent from langchain.agents.middleware import TodoListMiddleware from langchain_anthropic.middleware import AnthropicPromptCachingMiddleware @@ -41,6 +40,9 @@ from app.agents.new_chat.middleware import ( MemoryInjectionMiddleware, SurfSenseFilesystemMiddleware, ) +from app.agents.new_chat.middleware.safe_summarization import ( + create_safe_summarization_middleware, +) from app.agents.new_chat.system_prompt import ( build_configurable_system_prompt, build_surfsense_system_prompt, @@ -442,7 +444,7 @@ async def create_surfsense_deep_agent( created_by_id=user_id, thread_id=thread_id, ), - create_summarization_middleware(llm, StateBackend), + create_safe_summarization_middleware(llm, StateBackend), PatchToolCallsMiddleware(), AnthropicPromptCachingMiddleware(unsupported_model_behavior="ignore"), ] @@ -472,7 +474,7 @@ async def create_surfsense_deep_agent( thread_id=thread_id, ), SubAgentMiddleware(backend=StateBackend, subagents=[general_purpose_spec]), - create_summarization_middleware(llm, StateBackend), + create_safe_summarization_middleware(llm, StateBackend), PatchToolCallsMiddleware(), DedupHITLToolCallsMiddleware(agent_tools=tools), AnthropicPromptCachingMiddleware(unsupported_model_behavior="ignore"), diff --git a/surfsense_backend/app/agents/new_chat/middleware/safe_summarization.py b/surfsense_backend/app/agents/new_chat/middleware/safe_summarization.py new file mode 100644 index 000000000..8248f5c8c --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/middleware/safe_summarization.py @@ -0,0 +1,125 @@ +"""Safe wrapper around deepagents' SummarizationMiddleware. + +Upstream issue +-------------- +`deepagents.middleware.summarization.SummarizationMiddleware._aoffload_to_backend` +(and its sync counterpart) call +``get_buffer_string(filtered_messages)`` before writing the evicted history +to the backend file. 
In recent ``langchain-core`` versions, ``get_buffer_string`` +accesses ``m.text`` which iterates ``self.content`` — this raises +``TypeError: 'NoneType' object is not iterable`` whenever an ``AIMessage`` +has ``content=None`` (common when a model returns *only* tool_calls, seen +frequently with Azure OpenAI ``gpt-5.x`` responses streamed through +LiteLLM). + +The exception aborts the whole agent turn, so the user just sees "Error during +chat" with no assistant response. + +Fix +--- +We subclass ``SummarizationMiddleware`` and override +``_filter_summary_messages`` — the only call site that feeds messages into +``get_buffer_string`` — to return *copies* of messages whose ``content`` is +``None`` with ``content=""``. The originals flowing through the rest of the +agent state are untouched. + +We also expose a drop-in ``create_safe_summarization_middleware`` factory +that mirrors ``deepagents.middleware.summarization.create_summarization_middleware`` +but instantiates our safe subclass. +""" + +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING + +from deepagents.middleware.summarization import ( + SummarizationMiddleware, + compute_summarization_defaults, +) + +if TYPE_CHECKING: + from deepagents.backends.protocol import BACKEND_TYPES + from langchain_core.language_models import BaseChatModel + from langchain_core.messages import AnyMessage + +logger = logging.getLogger(__name__) + + +def _sanitize_message_content(msg: "AnyMessage") -> "AnyMessage": + """Return ``msg`` with ``content`` coerced to a non-``None`` value. + + ``get_buffer_string`` reads ``m.text`` which iterates ``self.content``; + when a provider streams back an ``AIMessage`` with only tool_calls and + no text, ``content`` can be ``None`` and the iteration explodes. We + replace ``None`` with an empty string so downstream consumers that only + care about text see an empty body. 
+ + The original message is left untouched — we return a copy via + pydantic's ``model_copy`` when available, otherwise we fall back to + re-setting the attribute on a shallow copy. + """ + + if getattr(msg, "content", "not-missing") is not None: + return msg + + try: + return msg.model_copy(update={"content": ""}) + except AttributeError: + import copy + + new_msg = copy.copy(msg) + try: + new_msg.content = "" + except Exception: # pragma: no cover - defensive + logger.debug( + "Could not sanitize content=None on message of type %s", + type(msg).__name__, + ) + return msg + return new_msg + + +class SafeSummarizationMiddleware(SummarizationMiddleware): + """`SummarizationMiddleware` that tolerates messages with ``content=None``. + + Only ``_filter_summary_messages`` is overridden — this is the single + helper invoked by both the sync and async offload paths immediately + before ``get_buffer_string``. Normalising here means we get coverage + for both without having to copy the (long, rapidly-changing) offload + implementations from upstream. + """ + + def _filter_summary_messages( + self, messages: "list[AnyMessage]" + ) -> "list[AnyMessage]": + filtered = super()._filter_summary_messages(messages) + return [_sanitize_message_content(m) for m in filtered] + + +def create_safe_summarization_middleware( + model: "BaseChatModel", + backend: "BACKEND_TYPES", +) -> SafeSummarizationMiddleware: + """Drop-in replacement for ``create_summarization_middleware``. + + Mirrors the defaults computed by ``deepagents`` but returns our + ``SafeSummarizationMiddleware`` subclass so the + ``content=None`` crash in ``get_buffer_string`` is avoided. 
+ """ + + defaults = compute_summarization_defaults(model) + return SafeSummarizationMiddleware( + model=model, + backend=backend, + trigger=defaults["trigger"], + keep=defaults["keep"], + trim_tokens_to_summarize=None, + truncate_args_settings=defaults["truncate_args_settings"], + ) + + +__all__ = [ + "SafeSummarizationMiddleware", + "create_safe_summarization_middleware", +] diff --git a/surfsense_backend/pyproject.toml b/surfsense_backend/pyproject.toml index 01f5ddc1b..131627386 100644 --- a/surfsense_backend/pyproject.toml +++ b/surfsense_backend/pyproject.toml @@ -74,7 +74,7 @@ dependencies = [ "deepagents>=0.4.12", "stripe>=15.0.0", "azure-ai-documentintelligence>=1.0.2", - "litellm>=1.83.0", + "litellm>=1.83.4", "langchain-litellm>=0.6.4", ] diff --git a/surfsense_backend/uv.lock b/surfsense_backend/uv.lock index ac2784668..209c42a9c 100644 --- a/surfsense_backend/uv.lock +++ b/surfsense_backend/uv.lock @@ -8070,7 +8070,7 @@ requires-dist = [ { name = "langgraph", specifier = ">=1.1.3" }, { name = "langgraph-checkpoint-postgres", specifier = ">=3.0.2" }, { name = "linkup-sdk", specifier = ">=0.2.4" }, - { name = "litellm", specifier = ">=1.83.0" }, + { name = "litellm", specifier = ">=1.83.4" }, { name = "llama-cloud-services", specifier = ">=0.6.25" }, { name = "markdown", specifier = ">=3.7" }, { name = "markdownify", specifier = ">=0.14.1" }, diff --git a/surfsense_web/pnpm-lock.yaml b/surfsense_web/pnpm-lock.yaml index 7cb492a05..1c3dd61e0 100644 --- a/surfsense_web/pnpm-lock.yaml +++ b/surfsense_web/pnpm-lock.yaml @@ -1088,10 +1088,6 @@ packages: peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/runtime@7.28.6': - resolution: {integrity: sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==} - engines: {node: '>=6.9.0'} - '@babel/runtime@7.29.2': resolution: {integrity: sha512-JiDShH45zKHWyGe4ZNVRrCjBz8Nh9TMmZG1kh4QTK8hCBTWBi8Da+i7s1fJw7/lYpM4ccepSNfqzZ/QvABBi5g==} engines: {node: '>=6.9.0'} @@ -2188,12 
+2184,6 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.10.0' - '@opentelemetry/core@2.5.1': - resolution: {integrity: sha512-Dwlc+3HAZqpgTYq0MUyZABjFkcrKTePwuiFVLjahGD8cx3enqihmpAmdgNFO1R4m/sIe5afjJrA25Prqy4NXlA==} - engines: {node: ^18.19.0 || >=20.6.0} - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.10.0' - '@opentelemetry/core@2.6.0': resolution: {integrity: sha512-HLM1v2cbZ4TgYN6KEOj+Bbj8rAKriOdkF9Ed3tG25FoprSiQl7kYc+RRT6fUZGOvx0oMi5U67GoFdT+XUn8zEg==} engines: {node: ^18.19.0 || >=20.6.0} @@ -2606,12 +2596,6 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.3.0 <1.10.0' - '@opentelemetry/resources@2.5.1': - resolution: {integrity: sha512-BViBCdE/GuXRlp9k7nS1w6wJvY5fnFX5XvuEtWsTAOQFIO89Eru7lGW3WbfbxtCuZ/GbrJfAziXG0w0dpxL7eQ==} - engines: {node: ^18.19.0 || >=20.6.0} - peerDependencies: - '@opentelemetry/api': '>=1.3.0 <1.10.0' - '@opentelemetry/resources@2.6.0': resolution: {integrity: sha512-D4y/+OGe3JSuYUCBxtH5T9DSAWNcvCb/nQWIga8HNtXTVPQn59j0nTBAgaAXxUVBDl40mG3Tc76b46wPlZaiJQ==} engines: {node: ^18.19.0 || >=20.6.0} @@ -4372,8 +4356,8 @@ packages: peerDependencies: react: '>= 16' - '@tabler/icons@3.37.1': - resolution: {integrity: sha512-neLCWkuyNHEPXCyYu6nbN4S3g/59BTa4qyITAugYVpq1YzYNDOZooW7/vRWH98ZItXAudxdKU8muFT7y1PqzuA==} + '@tabler/icons@3.41.1': + resolution: {integrity: sha512-OaRnVbRmH2nHtFeg+RmMJ/7m2oBIF9XCJAUD5gQnMrpK9f05ydj8MZrAf3NZQqOXyxGN1UBL0D5IKLLEUfr74Q==} '@tailwindcss/node@4.2.1': resolution: {integrity: sha512-jlx6sLk4EOwO6hHe1oCGm1Q4AN/s0rSrTTPBGPM0/RQ6Uylwq17FuU8IeJJKEjtc6K6O07zsvP+gDO6MMWo7pg==} @@ -4814,6 +4798,7 @@ packages: '@xmldom/xmldom@0.8.11': resolution: {integrity: sha512-cQzWCtO6C8TQiYl1ruKNn2U6Ao4o4WBBcbL61yJl84x+j5sOWWFU9X7DpND8XZG3daDppSsigMdfAIl2upQBRw==} engines: {node: '>=10.0.0'} + deprecated: this version has critical issues, please update to the latest version abstract-logging@2.0.1: resolution: {integrity: 
sha512-2BjRTZxTPvheOvGbBslFSYOUkr+SjPtOnrLP33f+VIWLzezQpZcqVg7ja3L4dBXmzzgwT+a029jRx5PCi3JuiA==} @@ -7012,11 +6997,6 @@ packages: engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true - nanoid@5.1.6: - resolution: {integrity: sha512-c7+7RQ+dMB5dPwwCp4ee1/iV/q2P6aK1mTZcfr1BTuVlyW9hJYiMPybJCcnBlQtuSmTIWNeazm/zqNoZSSElBg==} - engines: {node: ^18 || >=20} - hasBin: true - nanoid@5.1.7: resolution: {integrity: sha512-ua3NDgISf6jdwezAheMOk4mbE1LXjm1DfMUDMuJf4AqxLFK3ccGpgWizwa5YV7Yz9EpXwEaWoRXSb/BnV0t5dQ==} engines: {node: ^18 || >=20} @@ -7421,8 +7401,8 @@ packages: property-information@7.1.0: resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==} - protobufjs@7.5.4: - resolution: {integrity: sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==} + protobufjs@7.5.5: + resolution: {integrity: sha512-3wY1AxV+VBNW8Yypfd1yQY9pXnqTAN+KwQxL8iYm3/BjKYMNg4i0owhEe26PWDOMaIrzeeF98Lqd5NGz4omiIg==} engines: {node: '>=12.0.0'} proxy-compare@2.6.0: @@ -9387,8 +9367,6 @@ snapshots: transitivePeerDependencies: - supports-color - '@babel/runtime@7.28.6': {} - '@babel/runtime@7.29.2': {} '@babel/standalone@7.29.2': {} @@ -9886,7 +9864,7 @@ snapshots: dependencies: lodash.camelcase: 4.3.0 long: 5.3.2 - protobufjs: 7.5.4 + protobufjs: 7.5.5 yargs: 17.7.2 '@hookform/resolvers@5.2.2(react-hook-form@7.71.2(react@19.2.4))': @@ -10246,7 +10224,7 @@ snapshots: '@opentelemetry/resource-detector-azure': 0.10.0(@opentelemetry/api@1.9.0) '@opentelemetry/resource-detector-container': 0.7.11(@opentelemetry/api@1.9.0) '@opentelemetry/resource-detector-gcp': 0.37.0(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-node': 0.203.0(@opentelemetry/api@1.9.0) transitivePeerDependencies: - encoding @@ -10270,11 +10248,6 @@ snapshots: '@opentelemetry/api': 1.9.0 
'@opentelemetry/semantic-conventions': 1.39.0 - '@opentelemetry/core@2.5.1(@opentelemetry/api@1.9.0)': - dependencies: - '@opentelemetry/api': 1.9.0 - '@opentelemetry/semantic-conventions': 1.39.0 - '@opentelemetry/core@2.6.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -10397,7 +10370,7 @@ snapshots: '@opentelemetry/instrumentation-amqplib@0.50.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.203.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 transitivePeerDependencies: @@ -10415,7 +10388,7 @@ snapshots: '@opentelemetry/instrumentation-aws-sdk@0.58.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.203.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 transitivePeerDependencies: @@ -10441,7 +10414,7 @@ snapshots: '@opentelemetry/instrumentation-connect@0.47.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.203.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 '@types/connect': 3.4.38 @@ -10473,7 +10446,7 @@ snapshots: '@opentelemetry/instrumentation-express@0.52.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.203.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 transitivePeerDependencies: @@ -10482,7 +10455,7 @@ snapshots: 
'@opentelemetry/instrumentation-fastify@0.48.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.203.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 transitivePeerDependencies: @@ -10491,7 +10464,7 @@ snapshots: '@opentelemetry/instrumentation-fs@0.23.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.203.0(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color @@ -10521,7 +10494,7 @@ snapshots: '@opentelemetry/instrumentation-hapi@0.50.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.203.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 transitivePeerDependencies: @@ -10565,7 +10538,7 @@ snapshots: '@opentelemetry/instrumentation-koa@0.51.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.203.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 transitivePeerDependencies: @@ -10598,7 +10571,7 @@ snapshots: '@opentelemetry/instrumentation-mongoose@0.50.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.203.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 transitivePeerDependencies: @@ -10650,7 +10623,7 @@ snapshots: 
'@opentelemetry/instrumentation-pg@0.56.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.203.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 '@opentelemetry/sql-common': 0.41.2(@opentelemetry/api@1.9.0) @@ -10663,7 +10636,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/api-logs': 0.203.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.203.0(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color @@ -10680,7 +10653,7 @@ snapshots: '@opentelemetry/instrumentation-restify@0.49.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.203.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 transitivePeerDependencies: @@ -10721,7 +10694,7 @@ snapshots: '@opentelemetry/instrumentation-undici@0.14.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation': 0.203.0(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color @@ -10772,7 +10745,7 @@ snapshots: '@opentelemetry/sdk-logs': 0.203.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-metrics': 2.0.1(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 2.0.1(@opentelemetry/api@1.9.0) - protobufjs: 7.5.4 + protobufjs: 7.5.5 '@opentelemetry/otlp-transformer@0.208.0(@opentelemetry/api@1.9.0)': dependencies: @@ -10783,7 +10756,7 @@ snapshots: '@opentelemetry/sdk-logs': 0.208.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-metrics': 
2.2.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 2.2.0(@opentelemetry/api@1.9.0) - protobufjs: 7.5.4 + protobufjs: 7.5.5 '@opentelemetry/propagator-b3@2.0.1(@opentelemetry/api@1.9.0)': dependencies: @@ -10800,34 +10773,34 @@ snapshots: '@opentelemetry/resource-detector-alibaba-cloud@0.31.11(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/resource-detector-aws@2.13.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 '@opentelemetry/resource-detector-azure@0.10.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 '@opentelemetry/resource-detector-container@0.7.11(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/resource-detector-gcp@0.37.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 
2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 gcp-metadata: 6.1.1 transitivePeerDependencies: @@ -10846,12 +10819,6 @@ snapshots: '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 - '@opentelemetry/resources@2.5.1(@opentelemetry/api@1.9.0)': - dependencies: - '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.39.0 - '@opentelemetry/resources@2.6.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -10952,7 +10919,7 @@ snapshots: '@opentelemetry/sql-common@0.41.2(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) '@orama/orama@3.1.18': {} @@ -11067,7 +11034,7 @@ snapshots: jotai-optics: 0.4.0(jotai@2.8.4(@types/react@19.2.14)(react@19.2.4))(optics-ts@2.4.1) jotai-x: 2.3.3(@types/react@19.2.14)(jotai@2.8.4(@types/react@19.2.14)(react@19.2.4))(react@19.2.4) lodash: 4.17.23 - nanoid: 5.1.6 + nanoid: 5.1.7 optics-ts: 2.4.1 react: 19.2.4 react-compiler-runtime: 1.0.0(react@19.2.4) @@ -11265,11 +11232,11 @@ snapshots: '@radix-ui/primitive@1.0.0': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/primitive@1.0.1': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/primitive@1.1.3': {} @@ -11403,12 +11370,12 @@ snapshots: '@radix-ui/react-compose-refs@1.0.0(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 react: 19.2.4 '@radix-ui/react-compose-refs@1.0.1(@types/react@19.2.14)(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 react: 19.2.4 optionalDependencies: '@types/react': 19.2.14 @@ -11435,12 +11402,12 @@ snapshots: 
'@radix-ui/react-context@1.0.0(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 react: 19.2.4 '@radix-ui/react-context@1.0.1(@types/react@19.2.14)(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 react: 19.2.4 optionalDependencies: '@types/react': 19.2.14 @@ -11459,7 +11426,7 @@ snapshots: '@radix-ui/react-dialog@1.0.0(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/primitive': 1.0.0 '@radix-ui/react-compose-refs': 1.0.0(react@19.2.4) '@radix-ui/react-context': 1.0.0(react@19.2.4) @@ -11481,7 +11448,7 @@ snapshots: '@radix-ui/react-dialog@1.0.4(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/primitive': 1.0.1 '@radix-ui/react-compose-refs': 1.0.1(@types/react@19.2.14)(react@19.2.4) '@radix-ui/react-context': 1.0.1(@types/react@19.2.14)(react@19.2.4) @@ -11532,7 +11499,7 @@ snapshots: '@radix-ui/react-dismissable-layer@1.0.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/primitive': 1.0.0 '@radix-ui/react-compose-refs': 1.0.0(react@19.2.4) '@radix-ui/react-primitive': 1.0.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) @@ -11543,7 +11510,7 @@ snapshots: '@radix-ui/react-dismissable-layer@1.0.4(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/primitive': 1.0.1 '@radix-ui/react-compose-refs': 1.0.1(@types/react@19.2.14)(react@19.2.4) '@radix-ui/react-primitive': 1.0.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) @@ -11585,12 +11552,12 @@ snapshots: 
'@radix-ui/react-focus-guards@1.0.0(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 react: 19.2.4 '@radix-ui/react-focus-guards@1.0.1(@types/react@19.2.14)(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 react: 19.2.4 optionalDependencies: '@types/react': 19.2.14 @@ -11603,7 +11570,7 @@ snapshots: '@radix-ui/react-focus-scope@1.0.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-compose-refs': 1.0.0(react@19.2.4) '@radix-ui/react-primitive': 1.0.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-use-callback-ref': 1.0.0(react@19.2.4) @@ -11612,7 +11579,7 @@ snapshots: '@radix-ui/react-focus-scope@1.0.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-compose-refs': 1.0.1(@types/react@19.2.14)(react@19.2.4) '@radix-ui/react-primitive': 1.0.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@radix-ui/react-use-callback-ref': 1.0.1(@types/react@19.2.14)(react@19.2.4) @@ -11666,13 +11633,13 @@ snapshots: '@radix-ui/react-id@1.0.0(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-use-layout-effect': 1.0.0(react@19.2.4) react: 19.2.4 '@radix-ui/react-id@1.0.1(@types/react@19.2.14)(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-use-layout-effect': 1.0.1(@types/react@19.2.14)(react@19.2.4) react: 19.2.4 optionalDependencies: @@ -11848,14 +11815,14 @@ snapshots: '@radix-ui/react-portal@1.0.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-primitive': 1.0.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) 
react: 19.2.4 react-dom: 19.2.4(react@19.2.4) '@radix-ui/react-portal@1.0.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-primitive': 1.0.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) react: 19.2.4 react-dom: 19.2.4(react@19.2.4) @@ -11875,7 +11842,7 @@ snapshots: '@radix-ui/react-presence@1.0.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-compose-refs': 1.0.0(react@19.2.4) '@radix-ui/react-use-layout-effect': 1.0.0(react@19.2.4) react: 19.2.4 @@ -11883,7 +11850,7 @@ snapshots: '@radix-ui/react-presence@1.0.1(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-compose-refs': 1.0.1(@types/react@19.2.14)(react@19.2.4) '@radix-ui/react-use-layout-effect': 1.0.1(@types/react@19.2.14)(react@19.2.4) react: 19.2.4 @@ -11904,14 +11871,14 @@ snapshots: '@radix-ui/react-primitive@1.0.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-slot': 1.0.0(react@19.2.4) react: 19.2.4 react-dom: 19.2.4(react@19.2.4) '@radix-ui/react-primitive@1.0.3(@types/react-dom@19.2.3(@types/react@19.2.14))(@types/react@19.2.14)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-slot': 1.0.2(@types/react@19.2.14)(react@19.2.4) react: 19.2.4 react-dom: 19.2.4(react@19.2.4) @@ -12067,13 +12034,13 @@ snapshots: '@radix-ui/react-slot@1.0.0(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-compose-refs': 1.0.0(react@19.2.4) react: 
19.2.4 '@radix-ui/react-slot@1.0.2(@types/react@19.2.14)(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-compose-refs': 1.0.1(@types/react@19.2.14)(react@19.2.4) react: 19.2.4 optionalDependencies: @@ -12207,12 +12174,12 @@ snapshots: '@radix-ui/react-use-callback-ref@1.0.0(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 react: 19.2.4 '@radix-ui/react-use-callback-ref@1.0.1(@types/react@19.2.14)(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 react: 19.2.4 optionalDependencies: '@types/react': 19.2.14 @@ -12225,13 +12192,13 @@ snapshots: '@radix-ui/react-use-controllable-state@1.0.0(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-use-callback-ref': 1.0.0(react@19.2.4) react: 19.2.4 '@radix-ui/react-use-controllable-state@1.0.1(@types/react@19.2.14)(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-use-callback-ref': 1.0.1(@types/react@19.2.14)(react@19.2.4) react: 19.2.4 optionalDependencies: @@ -12254,13 +12221,13 @@ snapshots: '@radix-ui/react-use-escape-keydown@1.0.0(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-use-callback-ref': 1.0.0(react@19.2.4) react: 19.2.4 '@radix-ui/react-use-escape-keydown@1.0.3(@types/react@19.2.14)(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 '@radix-ui/react-use-callback-ref': 1.0.1(@types/react@19.2.14)(react@19.2.4) react: 19.2.4 optionalDependencies: @@ -12282,12 +12249,12 @@ snapshots: '@radix-ui/react-use-layout-effect@1.0.0(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 react: 19.2.4 '@radix-ui/react-use-layout-effect@1.0.1(@types/react@19.2.14)(react@19.2.4)': dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 react: 19.2.4 optionalDependencies: 
'@types/react': 19.2.14 @@ -12389,7 +12356,7 @@ snapshots: '@opentelemetry/api-logs': 0.203.0 '@opentelemetry/auto-instrumentations-node': 0.62.2(@opentelemetry/api@1.9.0)(@opentelemetry/core@2.6.0(@opentelemetry/api@1.9.0)) '@opentelemetry/exporter-metrics-otlp-http': 0.203.0(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-metrics': 2.2.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-node': 0.203.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-node': 2.6.0(@opentelemetry/api@1.9.0) @@ -12417,7 +12384,7 @@ snapshots: js-xxhash: 4.0.0 json-custom-numbers: 3.1.1 kasi: 1.1.2 - nanoid: 5.1.6 + nanoid: 5.1.7 parse-prometheus-text-format: 1.1.1 pg-format: pg-format-fix@1.0.5 postgres: 3.4.7 @@ -12756,10 +12723,10 @@ snapshots: '@tabler/icons-react@3.37.1(react@19.2.4)': dependencies: - '@tabler/icons': 3.37.1 + '@tabler/icons': 3.41.1 react: 19.2.4 - '@tabler/icons@3.37.1': {} + '@tabler/icons@3.41.1': {} '@tailwindcss/node@4.2.1': dependencies: @@ -15875,8 +15842,6 @@ snapshots: nanoid@3.3.11: {} - nanoid@5.1.6: {} - nanoid@5.1.7: {} napi-build-utils@2.0.0: {} @@ -16256,7 +16221,7 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/api-logs': 0.208.0 '@opentelemetry/exporter-logs-otlp-http': 0.208.0(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 2.5.1(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-logs': 0.208.0(@opentelemetry/api@1.9.0) '@posthog/core': 1.23.1 '@posthog/types': 1.352.1 @@ -16323,7 +16288,7 @@ snapshots: property-information@7.1.0: {} - protobufjs@7.5.4: + protobufjs@7.5.5: dependencies: '@protobufjs/aspromise': 1.1.2 '@protobufjs/base64': 1.1.2 @@ -16560,7 +16525,7 @@ snapshots: react-syntax-highlighter@15.6.6(react@19.2.4): dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 highlight.js: 10.7.3 highlightjs-vue: 1.0.0 lowlight: 1.20.0 
@@ -16645,7 +16610,7 @@ snapshots: redux@4.2.1: dependencies: - '@babel/runtime': 7.28.6 + '@babel/runtime': 7.29.2 reflect.getprototypeof@1.0.10: dependencies: From 940889c291fd8ca6fa445efa5bf73fdde1949d0c Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 08:42:38 +0200 Subject: [PATCH 042/113] fix open redirect, error leaking, unused imports, state validation --- .../app/agents/new_chat/tools/discord/_auth.py | 4 ---- .../app/agents/new_chat/tools/luma/_auth.py | 4 ---- .../app/agents/new_chat/tools/teams/_auth.py | 6 ------ surfsense_backend/app/routes/__init__.py | 2 +- surfsense_backend/app/routes/mcp_oauth_route.py | 13 ++++++++----- .../app/routes/oauth_connector_base.py | 8 ++++---- 6 files changed, 13 insertions(+), 24 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/tools/discord/_auth.py b/surfsense_backend/app/agents/new_chat/tools/discord/_auth.py index b369c10f1..1f51e3660 100644 --- a/surfsense_backend/app/agents/new_chat/tools/discord/_auth.py +++ b/surfsense_backend/app/agents/new_chat/tools/discord/_auth.py @@ -1,7 +1,5 @@ """Shared auth helper for Discord agent tools (REST API, not gateway bot).""" -import logging - from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select @@ -9,8 +7,6 @@ from app.config import config from app.db import SearchSourceConnector, SearchSourceConnectorType from app.utils.oauth_security import TokenEncryption -logger = logging.getLogger(__name__) - DISCORD_API = "https://discord.com/api/v10" diff --git a/surfsense_backend/app/agents/new_chat/tools/luma/_auth.py b/surfsense_backend/app/agents/new_chat/tools/luma/_auth.py index ef2fa8540..1d88161d6 100644 --- a/surfsense_backend/app/agents/new_chat/tools/luma/_auth.py +++ b/surfsense_backend/app/agents/new_chat/tools/luma/_auth.py @@ -1,14 +1,10 @@ """Shared auth helper for Luma agent tools.""" -import logging - from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select from app.db import 
SearchSourceConnector, SearchSourceConnectorType -logger = logging.getLogger(__name__) - LUMA_API = "https://public-api.luma.com/v1" diff --git a/surfsense_backend/app/agents/new_chat/tools/teams/_auth.py b/surfsense_backend/app/agents/new_chat/tools/teams/_auth.py index 989fce7c6..f24f5502e 100644 --- a/surfsense_backend/app/agents/new_chat/tools/teams/_auth.py +++ b/surfsense_backend/app/agents/new_chat/tools/teams/_auth.py @@ -1,15 +1,9 @@ """Shared auth helper for Teams agent tools (Microsoft Graph REST API).""" -import logging - from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select -from app.config import config from app.db import SearchSourceConnector, SearchSourceConnectorType -from app.utils.oauth_security import TokenEncryption - -logger = logging.getLogger(__name__) GRAPH_API = "https://graph.microsoft.com/v1.0" diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py index 925c207a6..40ca7a7e8 100644 --- a/surfsense_backend/app/routes/__init__.py +++ b/surfsense_backend/app/routes/__init__.py @@ -96,7 +96,7 @@ router.include_router(logs_router) router.include_router(circleback_webhook_router) # Circleback meeting webhooks router.include_router(surfsense_docs_router) # Surfsense documentation for citations router.include_router(notifications_router) # Notifications with Zero sync -router.include_router(mcp_oauth_router) # MCP OAuth 2.1 for Linear, Jira, ClickUp +router.include_router(mcp_oauth_router) # MCP OAuth 2.1 for Linear, Jira, ClickUp, Slack, Airtable router.include_router(composio_router) # Composio OAuth and toolkit management router.include_router(public_chat_router) # Public chat sharing and cloning router.include_router(incentive_tasks_router) # Incentive tasks for earning free pages diff --git a/surfsense_backend/app/routes/mcp_oauth_route.py b/surfsense_backend/app/routes/mcp_oauth_route.py index e47dc0a62..0870d52fe 100644 --- 
a/surfsense_backend/app/routes/mcp_oauth_route.py +++ b/surfsense_backend/app/routes/mcp_oauth_route.py @@ -182,7 +182,7 @@ async def connect_mcp_service( except Exception as e: logger.error("Failed to initiate %s MCP OAuth: %s", service, e, exc_info=True) raise HTTPException( - status_code=500, detail=f"Failed to initiate {service} MCP OAuth: {e!s}", + status_code=500, detail=f"Failed to initiate {service} MCP OAuth.", ) from e @@ -221,6 +221,9 @@ async def mcp_oauth_callback( space_id = data["space_id"] svc_key = data.get("service", service) + if svc_key != service: + raise HTTPException(status_code=400, detail="State/path service mismatch") + from app.services.mcp_oauth.registry import get_service svc = get_service(svc_key) @@ -315,7 +318,7 @@ async def mcp_oauth_callback( svc.name, db_connector.id, user_id, ) reauth_return_url = data.get("return_url") - if reauth_return_url and reauth_return_url.startswith("/"): + if reauth_return_url and reauth_return_url.startswith("/") and not reauth_return_url.startswith("//"): return RedirectResponse( url=f"{config.NEXT_FRONTEND_URL}{reauth_return_url}" ) @@ -347,7 +350,7 @@ async def mcp_oauth_callback( except IntegrityError as e: await session.rollback() raise HTTPException( - status_code=409, detail=f"Database integrity error: {e!s}", + status_code=409, detail="A connector for this service already exists.", ) from e _invalidate_cache(space_id) @@ -368,7 +371,7 @@ async def mcp_oauth_callback( ) raise HTTPException( status_code=500, - detail=f"Failed to complete {service} MCP OAuth: {e!s}", + detail=f"Failed to complete {service} MCP OAuth.", ) from e @@ -495,7 +498,7 @@ async def reauth_mcp_service( ) raise HTTPException( status_code=500, - detail=f"Failed to initiate {service} MCP re-auth: {e!s}", + detail=f"Failed to initiate {service} MCP re-auth.", ) from e diff --git a/surfsense_backend/app/routes/oauth_connector_base.py b/surfsense_backend/app/routes/oauth_connector_base.py index 0483d2540..0638e8f34 100644 --- 
a/surfsense_backend/app/routes/oauth_connector_base.py +++ b/surfsense_backend/app/routes/oauth_connector_base.py @@ -430,7 +430,7 @@ class OAuthConnectorRoute: state_mgr = oauth._get_state_manager() extra: dict[str, Any] = {"connector_id": connector_id} - if return_url and return_url.startswith("/"): + if return_url and return_url.startswith("/") and not return_url.startswith("//"): extra["return_url"] = return_url auth_params: dict[str, str] = { @@ -498,7 +498,7 @@ class OAuthConnectorRoute: data = state_mgr.validate_state(state) except Exception as e: raise HTTPException( - status_code=400, detail=f"Invalid state parameter: {e!s}" + status_code=400, detail="Invalid or expired state parameter." ) from e user_id = UUID(data["user_id"]) @@ -552,7 +552,7 @@ class OAuthConnectorRoute: db_connector.id, user_id, ) - if reauth_return_url and reauth_return_url.startswith("/"): + if reauth_return_url and reauth_return_url.startswith("/") and not reauth_return_url.startswith("//"): return RedirectResponse( url=f"{config.NEXT_FRONTEND_URL}{reauth_return_url}" ) @@ -603,7 +603,7 @@ class OAuthConnectorRoute: except IntegrityError as e: await session.rollback() raise HTTPException( - status_code=409, detail=f"Database integrity error: {e!s}" + status_code=409, detail="A connector for this service already exists." 
) from e logger.info( From ea3508cb25db5369dc01c5443fc318830089673f Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 08:57:28 +0200 Subject: [PATCH 043/113] use native connector types for MCP OAuth, restore original UI --- .../app/agents/new_chat/tools/mcp_tool.py | 5 +- .../app/routes/mcp_oauth_route.py | 42 ++++++++--------- .../app/services/mcp_oauth/registry.py | 6 +++ .../constants/connector-constants.ts | 47 ++----------------- .../tabs/all-connectors-tab.tsx | 23 +-------- 5 files changed, 34 insertions(+), 89 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py b/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py index cf3e51166..47ee16f7d 100644 --- a/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py +++ b/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py @@ -530,11 +530,12 @@ async def load_mcp_tools( return list(cached_tools) try: + # Find all connectors with MCP server config: generic MCP_CONNECTOR type + # and service-specific types (LINEAR_CONNECTOR, etc.) created via MCP OAuth. 
result = await session.execute( select(SearchSourceConnector).filter( - SearchSourceConnector.connector_type - == SearchSourceConnectorType.MCP_CONNECTOR, SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.config.has_key("server_config"), # noqa: W601 ), ) diff --git a/surfsense_backend/app/routes/mcp_oauth_route.py b/surfsense_backend/app/routes/mcp_oauth_route.py index 0870d52fe..f7164eab3 100644 --- a/surfsense_backend/app/routes/mcp_oauth_route.py +++ b/surfsense_backend/app/routes/mcp_oauth_route.py @@ -56,9 +56,7 @@ def _get_token_encryption() -> TokenEncryption: def _build_redirect_uri(service: str) -> str: - base = config.BACKEND_URL - if not base: - raise HTTPException(status_code=500, detail="BACKEND_URL not configured.") + base = config.BACKEND_URL or "http://localhost:8000" return f"{base.rstrip('/')}/api/v1/auth/mcp/{service}/connector/callback" @@ -288,6 +286,7 @@ async def mcp_oauth_callback( } # ---- Re-auth path ---- + db_connector_type = SearchSourceConnectorType(svc.connector_type) reauth_connector_id = data.get("connector_id") if reauth_connector_id: result = await session.execute( @@ -295,8 +294,7 @@ async def mcp_oauth_callback( SearchSourceConnector.id == reauth_connector_id, SearchSourceConnector.user_id == user_id, SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.connector_type - == SearchSourceConnectorType.MCP_CONNECTOR, + SearchSourceConnector.connector_type == db_connector_type, ) ) db_connector = result.scalars().first() @@ -329,15 +327,15 @@ async def mcp_oauth_callback( # ---- New connector path ---- connector_name = await generate_unique_connector_name( session, - SearchSourceConnectorType.MCP_CONNECTOR, + db_connector_type, space_id, user_id, - f"{svc.name} MCP", + svc.name, ) new_connector = SearchSourceConnector( name=connector_name, - connector_type=SearchSourceConnectorType.MCP_CONNECTOR, + connector_type=db_connector_type, is_indexable=False, config=connector_config, 
search_space_id=space_id, @@ -388,26 +386,26 @@ async def reauth_mcp_service( user: User = Depends(current_active_user), session: AsyncSession = Depends(get_async_session), ): - result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == connector_id, - SearchSourceConnector.user_id == user.id, - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.connector_type - == SearchSourceConnectorType.MCP_CONNECTOR, - ) - ) - if not result.scalars().first(): - raise HTTPException( - status_code=404, detail="MCP connector not found or access denied", - ) - from app.services.mcp_oauth.registry import get_service svc = get_service(service) if not svc: raise HTTPException(status_code=404, detail=f"Unknown MCP service: {service}") + db_connector_type = SearchSourceConnectorType(svc.connector_type) + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == connector_id, + SearchSourceConnector.user_id == user.id, + SearchSourceConnector.search_space_id == space_id, + SearchSourceConnector.connector_type == db_connector_type, + ) + ) + if not result.scalars().first(): + raise HTTPException( + status_code=404, detail="Connector not found or access denied", + ) + try: from app.services.mcp_oauth.discovery import ( discover_oauth_metadata, diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index 3f9a03fbc..e6a9d20a5 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -15,6 +15,7 @@ from dataclasses import dataclass, field class MCPServiceConfig: name: str mcp_url: str + connector_type: str supports_dcr: bool = True oauth_discovery_origin: str | None = None client_id_env: str | None = None @@ -26,18 +27,22 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { "linear": MCPServiceConfig( name="Linear", mcp_url="https://mcp.linear.app/mcp", 
+ connector_type="LINEAR_CONNECTOR", ), "jira": MCPServiceConfig( name="Jira", mcp_url="https://mcp.atlassian.com/v1/mcp", + connector_type="JIRA_CONNECTOR", ), "clickup": MCPServiceConfig( name="ClickUp", mcp_url="https://mcp.clickup.com/mcp", + connector_type="CLICKUP_CONNECTOR", ), "slack": MCPServiceConfig( name="Slack", mcp_url="https://mcp.slack.com/mcp", + connector_type="SLACK_CONNECTOR", supports_dcr=False, client_id_env="SLACK_CLIENT_ID", client_secret_env="SLACK_CLIENT_SECRET", @@ -45,6 +50,7 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { "airtable": MCPServiceConfig( name="Airtable", mcp_url="https://mcp.airtable.com/mcp", + connector_type="AIRTABLE_CONNECTOR", oauth_discovery_origin="https://airtable.com", ), } diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 39e827d1a..08ffde9ae 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -31,7 +31,7 @@ export const OAUTH_CONNECTORS = [ title: "Airtable", description: "Search your Airtable bases", connectorType: EnumConnectorName.AIRTABLE_CONNECTOR, - authEndpoint: "/api/v1/auth/airtable/connector/add/", + authEndpoint: "/api/v1/auth/mcp/airtable/connector/add/", }, { id: "notion-connector", @@ -45,14 +45,14 @@ export const OAUTH_CONNECTORS = [ title: "Linear", description: "Search issues & projects", connectorType: EnumConnectorName.LINEAR_CONNECTOR, - authEndpoint: "/api/v1/auth/linear/connector/add/", + authEndpoint: "/api/v1/auth/mcp/linear/connector/add/", }, { id: "slack-connector", title: "Slack", description: "Search Slack messages", connectorType: EnumConnectorName.SLACK_CONNECTOR, - authEndpoint: "/api/v1/auth/slack/connector/add/", + authEndpoint: "/api/v1/auth/mcp/slack/connector/add/", }, { id: 
"teams-connector", @@ -87,7 +87,7 @@ export const OAUTH_CONNECTORS = [ title: "Jira", description: "Search Jira issues", connectorType: EnumConnectorName.JIRA_CONNECTOR, - authEndpoint: "/api/v1/auth/jira/connector/add/", + authEndpoint: "/api/v1/auth/mcp/jira/connector/add/", }, { id: "confluence-connector", @@ -101,47 +101,8 @@ export const OAUTH_CONNECTORS = [ title: "ClickUp", description: "Search ClickUp tasks", connectorType: EnumConnectorName.CLICKUP_CONNECTOR, - authEndpoint: "/api/v1/auth/clickup/connector/add/", - }, -] as const; - -// MCP OAuth Connectors (one-click connect via official MCP servers) -export const MCP_OAUTH_CONNECTORS = [ - { - id: "linear-mcp-connector", - title: "Linear (MCP)", - description: "Interact with Linear issues via MCP", - connectorType: EnumConnectorName.MCP_CONNECTOR, - authEndpoint: "/api/v1/auth/mcp/linear/connector/add/", - }, - { - id: "jira-mcp-connector", - title: "Jira (MCP)", - description: "Interact with Jira issues via MCP", - connectorType: EnumConnectorName.MCP_CONNECTOR, - authEndpoint: "/api/v1/auth/mcp/jira/connector/add/", - }, - { - id: "clickup-mcp-connector", - title: "ClickUp (MCP)", - description: "Interact with ClickUp tasks via MCP", - connectorType: EnumConnectorName.MCP_CONNECTOR, authEndpoint: "/api/v1/auth/mcp/clickup/connector/add/", }, - { - id: "slack-mcp-connector", - title: "Slack (MCP)", - description: "Interact with Slack channels via MCP", - connectorType: EnumConnectorName.MCP_CONNECTOR, - authEndpoint: "/api/v1/auth/mcp/slack/connector/add/", - }, - { - id: "airtable-mcp-connector", - title: "Airtable (MCP)", - description: "Interact with Airtable bases via MCP", - connectorType: EnumConnectorName.MCP_CONNECTOR, - authEndpoint: "/api/v1/auth/mcp/airtable/connector/add/", - }, ] as const; // Content Sources (tools that extract and import content from external sources) diff --git a/surfsense_web/components/assistant-ui/connector-popup/tabs/all-connectors-tab.tsx 
b/surfsense_web/components/assistant-ui/connector-popup/tabs/all-connectors-tab.tsx index d4f5e2fc1..814959ec4 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/tabs/all-connectors-tab.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/tabs/all-connectors-tab.tsx @@ -10,14 +10,12 @@ import { ConnectorCard } from "../components/connector-card"; import { COMPOSIO_CONNECTORS, CRAWLERS, - MCP_OAUTH_CONNECTORS, OAUTH_CONNECTORS, OTHER_CONNECTORS, } from "../constants/connector-constants"; import { getDocumentCountForConnector } from "../utils/connector-document-mapping"; type OAuthConnector = (typeof OAUTH_CONNECTORS)[number]; -type MCPOAuthConnector = (typeof MCP_OAUTH_CONNECTORS)[number]; type ComposioConnector = (typeof COMPOSIO_CONNECTORS)[number]; type OtherConnector = (typeof OTHER_CONNECTORS)[number]; type CrawlerConnector = (typeof CRAWLERS)[number]; @@ -130,10 +128,6 @@ export const AllConnectorsTab: FC = ({ (c) => c.connectorType === EnumConnectorName.AIRTABLE_CONNECTOR ); - const filteredMCPOAuth = MCP_OAUTH_CONNECTORS.filter( - (c) => matchesSearch(c.title, c.description), - ); - const moreIntegrationsComposio = filteredComposio.filter( (c) => !DOCUMENT_FILE_CONNECTOR_TYPES.has(c.connectorType) && @@ -285,7 +279,6 @@ export const AllConnectorsTab: FC = ({ nativeGoogleDriveConnectors.length > 0 || composioGoogleDriveConnectors.length > 0 || fileStorageConnectors.length > 0; - const hasMCPOAuth = filteredMCPOAuth.length > 0; const hasMoreIntegrations = otherDocumentYouTubeConnectors.length > 0 || otherDocumentNotionConnectors.length > 0 || @@ -295,7 +288,7 @@ export const AllConnectorsTab: FC = ({ moreIntegrationsOther.length > 0 || moreIntegrationsCrawlers.length > 0; - const hasAnyResults = hasDocumentFileConnectors || hasMCPOAuth || hasMoreIntegrations; + const hasAnyResults = hasDocumentFileConnectors || hasMoreIntegrations; if (!hasAnyResults && searchQuery) { return ( @@ -325,20 +318,6 @@ export const AllConnectorsTab: FC = 
({
)} - {/* Live MCP Integrations */} - {hasMCPOAuth && ( -
-
-

- Live MCP Integrations -

-
-
- {filteredMCPOAuth.map((connector) => renderOAuthCard(connector as OAuthConnector | ComposioConnector))} -
-
- )} - {/* More Integrations */} {hasMoreIntegrations && (
From c277b6c1219bd4794d7f89da72034e0161e2326e Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 09:01:35 +0200 Subject: [PATCH 044/113] skip indexing config dialog for non-indexable connectors --- .../assistant-ui/connector-popup/hooks/use-connector-dialog.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts index caa85ba2d..4a07693ce 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts @@ -314,6 +314,9 @@ export const useConnectorDialog = () => { oauthConnector.title, oauthConnector.connectorType ); + } else if (!newConnector.is_indexable) { + toast.success(`${oauthConnector.title} connected successfully!`); + await refetchAllConnectors(); } else { toast.dismiss("auto-index"); const config = validateIndexingConfigState({ From 2f4052aa71cfea2ea1d77ba9815eca4634b491ca Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 09:15:06 +0200 Subject: [PATCH 045/113] use pre-configured credentials for Airtable MCP OAuth --- surfsense_backend/app/services/mcp_oauth/registry.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index e6a9d20a5..769f2c88a 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -52,6 +52,9 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { mcp_url="https://mcp.airtable.com/mcp", connector_type="AIRTABLE_CONNECTOR", oauth_discovery_origin="https://airtable.com", + supports_dcr=False, + client_id_env="AIRTABLE_CLIENT_ID", + client_secret_env="AIRTABLE_CLIENT_SECRET", ), } From 0cc2475f6b766f990ff49cca1903c3305c035543 Mon Sep 17 00:00:00 2001 From: 
CREDO23 Date: Wed, 22 Apr 2026 09:21:19 +0200 Subject: [PATCH 046/113] add required OAuth scopes for Airtable MCP --- surfsense_backend/app/services/mcp_oauth/registry.py | 1 + 1 file changed, 1 insertion(+) diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index 769f2c88a..173fcf49d 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -55,6 +55,7 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { supports_dcr=False, client_id_env="AIRTABLE_CLIENT_ID", client_secret_env="AIRTABLE_CLIENT_SECRET", + scopes=["data.records:read", "data.records:write", "schema.bases:read", "schema.bases:write"], ), } From 225236e6f1d4a5de2a11280321cb213d4d22471b Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 09:35:15 +0200 Subject: [PATCH 047/113] add required OAuth scopes for Slack MCP --- surfsense_backend/app/services/mcp_oauth/registry.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index 173fcf49d..ea7832f70 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -46,6 +46,14 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { supports_dcr=False, client_id_env="SLACK_CLIENT_ID", client_secret_env="SLACK_CLIENT_SECRET", + scopes=[ + "search:read.public", "search:read.private", "search:read.mpim", + "search:read.im", "search:read.files", "search:read.users", + "chat:write", "channels:history", "groups:history", + "mpim:history", "im:history", + "canvases:read", "canvases:write", + "users:read", "users:read.email", + ], ), "airtable": MCPServiceConfig( name="Airtable", From 3638d72b298e2cebab7cce4d46f80b7bce787d08 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 09:41:19 +0200 Subject: [PATCH 048/113] restore full Slack MCP 
scopes for all MCP tools --- surfsense_backend/app/services/mcp_oauth/registry.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index ea7832f70..4d87ceb40 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -49,8 +49,8 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { scopes=[ "search:read.public", "search:read.private", "search:read.mpim", "search:read.im", "search:read.files", "search:read.users", - "chat:write", "channels:history", "groups:history", - "mpim:history", "im:history", + "chat:write", + "channels:history", "groups:history", "mpim:history", "im:history", "canvases:read", "canvases:write", "users:read", "users:read.email", ], From 820326e3ee53386cc5c6605e00d4602cb57c7b16 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 09:54:16 +0200 Subject: [PATCH 049/113] use user_scope param for Slack OAuth --- surfsense_backend/app/routes/mcp_oauth_route.py | 4 ++-- surfsense_backend/app/services/mcp_oauth/registry.py | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/surfsense_backend/app/routes/mcp_oauth_route.py b/surfsense_backend/app/routes/mcp_oauth_route.py index f7164eab3..98ca2be0f 100644 --- a/surfsense_backend/app/routes/mcp_oauth_route.py +++ b/surfsense_backend/app/routes/mcp_oauth_route.py @@ -165,7 +165,7 @@ async def connect_mcp_service( "state": state, } if svc.scopes: - auth_params["scope"] = " ".join(svc.scopes) + auth_params[svc.scope_param] = " ".join(svc.scopes) auth_url = f"{auth_endpoint}?{urlencode(auth_params)}" @@ -478,7 +478,7 @@ async def reauth_mcp_service( "state": state, } if svc.scopes: - auth_params["scope"] = " ".join(svc.scopes) + auth_params[svc.scope_param] = " ".join(svc.scopes) auth_url = f"{auth_endpoint}?{urlencode(auth_params)}" diff --git 
a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index 4d87ceb40..62eb2077f 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -21,6 +21,7 @@ class MCPServiceConfig: client_id_env: str | None = None client_secret_env: str | None = None scopes: list[str] = field(default_factory=list) + scope_param: str = "scope" MCP_SERVICES: dict[str, MCPServiceConfig] = { @@ -46,6 +47,7 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { supports_dcr=False, client_id_env="SLACK_CLIENT_ID", client_secret_env="SLACK_CLIENT_SECRET", + scope_param="user_scope", scopes=[ "search:read.public", "search:read.private", "search:read.mpim", "search:read.im", "search:read.files", "search:read.users", From 95de0547f4157fe88b7add1b406d63e74f417508 Mon Sep 17 00:00:00 2001 From: "DESKTOP-RTLN3BA\\$punk" Date: Wed, 22 Apr 2026 00:56:05 -0700 Subject: [PATCH 050/113] feat: add default API base URLs for LiteLLM providers and enhance fallback handling in LLMRouterService --- .../app/services/llm_router_service.py | 74 +++++++++++++++++-- 1 file changed, 68 insertions(+), 6 deletions(-) diff --git a/surfsense_backend/app/services/llm_router_service.py b/surfsense_backend/app/services/llm_router_service.py index 35dfdd44e..c9eeff01b 100644 --- a/surfsense_backend/app/services/llm_router_service.py +++ b/surfsense_backend/app/services/llm_router_service.py @@ -133,6 +133,44 @@ PROVIDER_MAP = { } +# Default ``api_base`` per LiteLLM provider prefix. Used as a safety net when +# a global LLM config does *not* specify ``api_base``: without this, LiteLLM +# happily picks up provider-agnostic env vars (e.g. ``AZURE_API_BASE``, +# ``OPENAI_API_BASE``) and routes, say, an ``openrouter/anthropic/claude-3-haiku`` +# request to an Azure endpoint, which then 404s with ``Resource not found``. 
+# Only providers with a well-known, stable public base URL are listed here — +# self-hosted / BYO-endpoint providers (ollama, custom, bedrock, vertex_ai, +# huggingface, databricks, cloudflare, replicate) are intentionally omitted +# so their existing config-driven behaviour is preserved. +PROVIDER_DEFAULT_API_BASE = { + "openrouter": "https://openrouter.ai/api/v1", + "groq": "https://api.groq.com/openai/v1", + "mistral": "https://api.mistral.ai/v1", + "perplexity": "https://api.perplexity.ai", + "xai": "https://api.x.ai/v1", + "cerebras": "https://api.cerebras.ai/v1", + "deepinfra": "https://api.deepinfra.com/v1/openai", + "fireworks_ai": "https://api.fireworks.ai/inference/v1", + "together_ai": "https://api.together.xyz/v1", + "anyscale": "https://api.endpoints.anyscale.com/v1", + "cometapi": "https://api.cometapi.com/v1", + "sambanova": "https://api.sambanova.ai/v1", +} + + +# Canonical provider → base URL when a config uses a generic ``openai``-style +# prefix but the ``provider`` field tells us which API it really is +# (e.g. DeepSeek/Alibaba/Moonshot/Zhipu/MiniMax all use ``openai`` compat but +# each has its own base URL). +PROVIDER_KEY_DEFAULT_API_BASE = { + "DEEPSEEK": "https://api.deepseek.com/v1", + "ALIBABA_QWEN": "https://dashscope-intl.aliyuncs.com/compatible-mode/v1", + "MOONSHOT": "https://api.moonshot.ai/v1", + "ZHIPU": "https://open.bigmodel.cn/api/paas/v4", + "MINIMAX": "https://api.minimax.io/v1", +} + + class LLMRouterService: """ Singleton service for managing LiteLLM Router. @@ -224,6 +262,16 @@ class LLMRouterService: # hits ContextWindowExceededError. full_model_list, ctx_fallbacks = cls._build_context_fallback_groups(model_list) + # Build a general-purpose fallback list so NotFound/timeout/rate-limit + # style failures on one deployment don't bubble up as hard errors — + # the router retries with a sibling deployment in ``auto-large``. 
+ # ``auto-large`` is the large-context subset of ``auto``; if it is + # empty we fall back to ``auto`` itself so the router at least picks a + # different deployment in the same group. + fallbacks: list[dict[str, list[str]]] | None = None + if ctx_fallbacks: + fallbacks = [{"auto": ["auto-large"]}] + try: router_kwargs: dict[str, Any] = { "model_list": full_model_list, @@ -237,15 +285,18 @@ class LLMRouterService: } if ctx_fallbacks: router_kwargs["context_window_fallbacks"] = ctx_fallbacks + if fallbacks: + router_kwargs["fallbacks"] = fallbacks instance._router = Router(**router_kwargs) instance._initialized = True logger.info( "LLM Router initialized with %d deployments, " - "strategy: %s, context_window_fallbacks: %s", + "strategy: %s, context_window_fallbacks: %s, fallbacks: %s", len(model_list), final_settings.get("routing_strategy"), ctx_fallbacks or "none", + fallbacks or "none", ) except Exception as e: logger.error(f"Failed to initialize LLM Router: {e}") @@ -348,10 +399,11 @@ class LLMRouterService: return None # Build model string + provider = config.get("provider", "").upper() if config.get("custom_provider"): - model_string = f"{config['custom_provider']}/{config['model_name']}" + provider_prefix = config["custom_provider"] + model_string = f"{provider_prefix}/{config['model_name']}" else: - provider = config.get("provider", "").upper() provider_prefix = PROVIDER_MAP.get(provider, provider.lower()) model_string = f"{provider_prefix}/{config['model_name']}" @@ -361,9 +413,19 @@ class LLMRouterService: "api_key": config.get("api_key"), } - # Add optional api_base - if config.get("api_base"): - litellm_params["api_base"] = config["api_base"] + # Resolve ``api_base``. Config value wins; otherwise apply a + # provider-aware default so the deployment does not silently + # inherit unrelated env vars (e.g. ``AZURE_API_BASE``) and route + # requests to the wrong endpoint. 
See ``PROVIDER_DEFAULT_API_BASE`` + # docstring for the motivating bug (OpenRouter models 404-ing + # against an Azure endpoint). + api_base = config.get("api_base") + if not api_base: + api_base = PROVIDER_KEY_DEFAULT_API_BASE.get(provider) + if not api_base: + api_base = PROVIDER_DEFAULT_API_BASE.get(provider_prefix) + if api_base: + litellm_params["api_base"] = api_base # Add any additional litellm parameters if config.get("litellm_params"): From 970f62278b3677541526e6eeba78bf27bb15cbe0 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 09:56:18 +0200 Subject: [PATCH 051/113] revert scope_param, use standard scope for Slack v2_user endpoint --- surfsense_backend/app/routes/mcp_oauth_route.py | 4 ++-- surfsense_backend/app/services/mcp_oauth/registry.py | 2 -- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/surfsense_backend/app/routes/mcp_oauth_route.py b/surfsense_backend/app/routes/mcp_oauth_route.py index 98ca2be0f..f7164eab3 100644 --- a/surfsense_backend/app/routes/mcp_oauth_route.py +++ b/surfsense_backend/app/routes/mcp_oauth_route.py @@ -165,7 +165,7 @@ async def connect_mcp_service( "state": state, } if svc.scopes: - auth_params[svc.scope_param] = " ".join(svc.scopes) + auth_params["scope"] = " ".join(svc.scopes) auth_url = f"{auth_endpoint}?{urlencode(auth_params)}" @@ -478,7 +478,7 @@ async def reauth_mcp_service( "state": state, } if svc.scopes: - auth_params[svc.scope_param] = " ".join(svc.scopes) + auth_params["scope"] = " ".join(svc.scopes) auth_url = f"{auth_endpoint}?{urlencode(auth_params)}" diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index 62eb2077f..4d87ceb40 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -21,7 +21,6 @@ class MCPServiceConfig: client_id_env: str | None = None client_secret_env: str | None = None scopes: list[str] = field(default_factory=list) - 
scope_param: str = "scope" MCP_SERVICES: dict[str, MCPServiceConfig] = { @@ -47,7 +46,6 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { supports_dcr=False, client_id_env="SLACK_CLIENT_ID", client_secret_env="SLACK_CLIENT_SECRET", - scope_param="user_scope", scopes=[ "search:read.public", "search:read.private", "search:read.mpim", "search:read.im", "search:read.files", "search:read.users", From 80d3f624d4a42ed2e75d9eccfde64bdba1d0f4a0 Mon Sep 17 00:00:00 2001 From: "DESKTOP-RTLN3BA\\$punk" Date: Wed, 22 Apr 2026 01:05:31 -0700 Subject: [PATCH 052/113] chore: linting --- .../new_chat/middleware/safe_summarization.py | 10 +- .../tests/unit/test_error_contract.py | 4 +- .../components/DesktopContent.tsx | 7 +- .../components/PurchaseHistoryContent.tsx | 4 +- .../constants/connector-constants.ts | 80 +++--- .../hooks/use-connector-dialog.ts | 12 +- .../components/free-chat/free-chat-page.tsx | 3 +- .../homepage/features-bento-grid.tsx | 262 ++++++++++++++++-- .../components/sources/DocumentUploadTab.tsx | 55 ++-- 9 files changed, 310 insertions(+), 127 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/middleware/safe_summarization.py b/surfsense_backend/app/agents/new_chat/middleware/safe_summarization.py index 8248f5c8c..4ddcf334f 100644 --- a/surfsense_backend/app/agents/new_chat/middleware/safe_summarization.py +++ b/surfsense_backend/app/agents/new_chat/middleware/safe_summarization.py @@ -46,7 +46,7 @@ if TYPE_CHECKING: logger = logging.getLogger(__name__) -def _sanitize_message_content(msg: "AnyMessage") -> "AnyMessage": +def _sanitize_message_content(msg: AnyMessage) -> AnyMessage: """Return ``msg`` with ``content`` coerced to a non-``None`` value. ``get_buffer_string`` reads ``m.text`` which iterates ``self.content``; @@ -90,16 +90,14 @@ class SafeSummarizationMiddleware(SummarizationMiddleware): implementations from upstream. 
""" - def _filter_summary_messages( - self, messages: "list[AnyMessage]" - ) -> "list[AnyMessage]": + def _filter_summary_messages(self, messages: list[AnyMessage]) -> list[AnyMessage]: filtered = super()._filter_summary_messages(messages) return [_sanitize_message_content(m) for m in filtered] def create_safe_summarization_middleware( - model: "BaseChatModel", - backend: "BACKEND_TYPES", + model: BaseChatModel, + backend: BACKEND_TYPES, ) -> SafeSummarizationMiddleware: """Drop-in replacement for ``create_summarization_middleware``. diff --git a/surfsense_backend/tests/unit/test_error_contract.py b/surfsense_backend/tests/unit/test_error_contract.py index 81ec08b2d..ec8021290 100644 --- a/surfsense_backend/tests/unit/test_error_contract.py +++ b/surfsense_backend/tests/unit/test_error_contract.py @@ -202,9 +202,7 @@ class TestHTTPExceptionHandler: # Intentional 503s (e.g. feature flag off) must surface the developer # message so the frontend can render actionable copy. body = _assert_envelope(client.get("/http-503"), 503) - assert ( - body["error"]["message"] == "Page purchases are temporarily unavailable." - ) + assert body["error"]["message"] == "Page purchases are temporarily unavailable." assert body["error"]["message"] != GENERIC_5XX_MESSAGE def test_502_preserves_detail(self, client): diff --git a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopContent.tsx b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopContent.tsx index 3175268d2..63ca9f5df 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopContent.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopContent.tsx @@ -200,8 +200,8 @@ export function DesktopContent() { Launch on Startup - Automatically start SurfSense when you sign in to your computer so global - shortcuts and folder sync are always available. 
+ Automatically start SurfSense when you sign in to your computer so global shortcuts and + folder sync are always available. @@ -232,8 +232,7 @@ export function DesktopContent() { Start minimized to tray

- Skip the main window on boot — SurfSense lives in the system tray until you need - it. + Skip the main window on boot — SurfSense lives in the system tray until you need it.

new Date(b.created_at).getTime() - new Date(a.created_at).getTime() - ); + ].sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime()); }, [pagesQuery.data, tokensQuery.data]); if (isLoading) { diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index d430e0f6c..6f60c63d6 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -349,12 +349,7 @@ export const AUTO_INDEX_CONNECTOR_TYPES = new Set(Object.keys(AUTO_INDEX // `lib/posthog/events.ts` or per-connector tracking code. // ============================================================================ -export type ConnectorTelemetryGroup = - | "oauth" - | "composio" - | "crawler" - | "other" - | "unknown"; +export type ConnectorTelemetryGroup = "oauth" | "composio" | "crawler" | "other" | "unknown"; export interface ConnectorTelemetryMeta { connector_type: string; @@ -363,45 +358,44 @@ export interface ConnectorTelemetryMeta { is_oauth: boolean; } -const CONNECTOR_TELEMETRY_REGISTRY: ReadonlyMap = - (() => { - const map = new Map(); +const CONNECTOR_TELEMETRY_REGISTRY: ReadonlyMap = (() => { + const map = new Map(); - for (const c of OAUTH_CONNECTORS) { - map.set(c.connectorType, { - connector_type: c.connectorType, - connector_title: c.title, - connector_group: "oauth", - is_oauth: true, - }); - } - for (const c of COMPOSIO_CONNECTORS) { - map.set(c.connectorType, { - connector_type: c.connectorType, - connector_title: c.title, - connector_group: "composio", - is_oauth: true, - }); - } - for (const c of CRAWLERS) { - map.set(c.connectorType, { - connector_type: c.connectorType, - connector_title: c.title, - connector_group: "crawler", - is_oauth: false, - }); - } - for (const c of OTHER_CONNECTORS) { - 
map.set(c.connectorType, { - connector_type: c.connectorType, - connector_title: c.title, - connector_group: "other", - is_oauth: false, - }); - } + for (const c of OAUTH_CONNECTORS) { + map.set(c.connectorType, { + connector_type: c.connectorType, + connector_title: c.title, + connector_group: "oauth", + is_oauth: true, + }); + } + for (const c of COMPOSIO_CONNECTORS) { + map.set(c.connectorType, { + connector_type: c.connectorType, + connector_title: c.title, + connector_group: "composio", + is_oauth: true, + }); + } + for (const c of CRAWLERS) { + map.set(c.connectorType, { + connector_type: c.connectorType, + connector_title: c.title, + connector_group: "crawler", + is_oauth: false, + }); + } + for (const c of OTHER_CONNECTORS) { + map.set(c.connectorType, { + connector_type: c.connectorType, + connector_title: c.title, + connector_group: "other", + is_oauth: false, + }); + } - return map; - })(); + return map; +})(); /** * Returns telemetry metadata for a connector_type, or a minimal "unknown" diff --git a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts index 7ac903342..404ee16f0 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts @@ -360,11 +360,7 @@ export const useConnectorDialog = () => { // Set connecting state immediately to disable button and show spinner setConnectingId(connector.id); - trackConnectorSetupStarted( - Number(searchSpaceId), - connector.connectorType, - "oauth_click" - ); + trackConnectorSetupStarted(Number(searchSpaceId), connector.connectorType, "oauth_click"); try { // Check if authEndpoint already has query parameters @@ -487,11 +483,7 @@ export const useConnectorDialog = () => { (connectorType: string) => { if (!searchSpaceId) return; - trackConnectorSetupStarted( - 
Number(searchSpaceId), - connectorType, - "non_oauth_click" - ); + trackConnectorSetupStarted(Number(searchSpaceId), connectorType, "non_oauth_click"); // Handle Obsidian specifically on Desktop & Cloud if (connectorType === EnumConnectorName.OBSIDIAN_CONNECTOR && !selfHosted && isDesktop) { diff --git a/surfsense_web/components/free-chat/free-chat-page.tsx b/surfsense_web/components/free-chat/free-chat-page.tsx index b389a8489..deac1fd00 100644 --- a/surfsense_web/components/free-chat/free-chat-page.tsx +++ b/surfsense_web/components/free-chat/free-chat-page.tsx @@ -210,8 +210,7 @@ export function FreeChatPage() { trackAnonymousChatMessageSent({ modelSlug, messageLength: userQuery.trim().length, - hasUploadedDoc: - anonMode.isAnonymous && anonMode.uploadedDoc !== null ? true : false, + hasUploadedDoc: anonMode.isAnonymous && anonMode.uploadedDoc !== null ? true : false, surface: "free_chat_page", }); diff --git a/surfsense_web/components/homepage/features-bento-grid.tsx b/surfsense_web/components/homepage/features-bento-grid.tsx index 835ccd2c2..7406223de 100644 --- a/surfsense_web/components/homepage/features-bento-grid.tsx +++ b/surfsense_web/components/homepage/features-bento-grid.tsx @@ -426,15 +426,50 @@ const AiSortIllustration = () => ( AI File Sorting illustration showing automatic folder organization {/* Scattered documents on the left */} - - - + + + {/* AI sparkle / magic in the center */} - - + + @@ -442,51 +477,208 @@ const AiSortIllustration = () => ( {/* Animated sorting arrows */} - + - + - + - + {/* Organized folder tree on the right */} {/* Root folder */} - - - + + + {/* Subfolder 1 */} - - - - - + + + + + {/* Subfolder 2 */} - - - - - + + + + + {/* Subfolder 3 */} - - - - - + + + + + {/* Sparkle accents */} @@ -495,10 +687,22 @@ const AiSortIllustration = () => ( - + - + diff --git a/surfsense_web/components/sources/DocumentUploadTab.tsx b/surfsense_web/components/sources/DocumentUploadTab.tsx index 65fa117f7..42fa72847 100644 --- 
a/surfsense_web/components/sources/DocumentUploadTab.tsx +++ b/surfsense_web/components/sources/DocumentUploadTab.tsx @@ -546,35 +546,36 @@ export function DocumentUploadTab({ ) ) : ( -
{ - if (!isElectron) fileInputRef.current?.click(); - }} - onKeyDown={(e) => { - if (e.key === "Enter" || e.key === " ") { - e.preventDefault(); + // biome-ignore lint/a11y/useSemanticElements: cannot use
)} From dde1948a5c8782d96e9a478518940439f1114373 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 10:06:12 +0200 Subject: [PATCH 053/113] fix Slack MCP OAuth: v2 endpoint, user_scope param, nested token extraction --- .../app/routes/mcp_oauth_route.py | 30 ++++++++++++------- .../app/services/mcp_oauth/registry.py | 6 ++++ 2 files changed, 26 insertions(+), 10 deletions(-) diff --git a/surfsense_backend/app/routes/mcp_oauth_route.py b/surfsense_backend/app/routes/mcp_oauth_route.py index f7164eab3..efe928fd1 100644 --- a/surfsense_backend/app/routes/mcp_oauth_route.py +++ b/surfsense_backend/app/routes/mcp_oauth_route.py @@ -107,8 +107,8 @@ async def connect_mcp_service( metadata = await discover_oauth_metadata( svc.mcp_url, origin_override=svc.oauth_discovery_origin, ) - auth_endpoint = metadata.get("authorization_endpoint") - token_endpoint = metadata.get("token_endpoint") + auth_endpoint = svc.auth_endpoint_override or metadata.get("authorization_endpoint") + token_endpoint = svc.token_endpoint_override or metadata.get("token_endpoint") registration_endpoint = metadata.get("registration_endpoint") if not auth_endpoint or not token_endpoint: @@ -165,7 +165,7 @@ async def connect_mcp_service( "state": state, } if svc.scopes: - auth_params["scope"] = " ".join(svc.scopes) + auth_params[svc.scope_param] = " ".join(svc.scopes) auth_url = f"{auth_endpoint}?{urlencode(auth_params)}" @@ -253,17 +253,27 @@ async def mcp_oauth_callback( ) access_token = token_json.get("access_token") + refresh_token = token_json.get("refresh_token") + expires_in = token_json.get("expires_in") + scope = token_json.get("scope") + + if not access_token and "authed_user" in token_json: + authed = token_json["authed_user"] + access_token = authed.get("access_token") + refresh_token = refresh_token or authed.get("refresh_token") + scope = scope or authed.get("scope") + expires_in = expires_in or authed.get("expires_in") + if not access_token: raise HTTPException( status_code=400, 
detail=f"No access token received from {svc.name}.", ) - refresh_token = token_json.get("refresh_token") expires_at = None - if token_json.get("expires_in"): + if expires_in: expires_at = datetime.now(UTC) + timedelta( - seconds=int(token_json["expires_in"]) + seconds=int(expires_in) ) connector_config = { @@ -280,7 +290,7 @@ async def mcp_oauth_callback( "access_token": enc.encrypt_token(access_token), "refresh_token": enc.encrypt_token(refresh_token) if refresh_token else None, "expires_at": expires_at.isoformat() if expires_at else None, - "scope": token_json.get("scope"), + "scope": scope, }, "_token_encrypted": True, } @@ -415,8 +425,8 @@ async def reauth_mcp_service( metadata = await discover_oauth_metadata( svc.mcp_url, origin_override=svc.oauth_discovery_origin, ) - auth_endpoint = metadata.get("authorization_endpoint") - token_endpoint = metadata.get("token_endpoint") + auth_endpoint = svc.auth_endpoint_override or metadata.get("authorization_endpoint") + token_endpoint = svc.token_endpoint_override or metadata.get("token_endpoint") registration_endpoint = metadata.get("registration_endpoint") if not auth_endpoint or not token_endpoint: @@ -478,7 +488,7 @@ async def reauth_mcp_service( "state": state, } if svc.scopes: - auth_params["scope"] = " ".join(svc.scopes) + auth_params[svc.scope_param] = " ".join(svc.scopes) auth_url = f"{auth_endpoint}?{urlencode(auth_params)}" diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index 4d87ceb40..df6c6bb18 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -21,6 +21,9 @@ class MCPServiceConfig: client_id_env: str | None = None client_secret_env: str | None = None scopes: list[str] = field(default_factory=list) + scope_param: str = "scope" + auth_endpoint_override: str | None = None + token_endpoint_override: str | None = None MCP_SERVICES: dict[str, MCPServiceConfig] = { 
@@ -46,6 +49,9 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { supports_dcr=False, client_id_env="SLACK_CLIENT_ID", client_secret_env="SLACK_CLIENT_SECRET", + scope_param="user_scope", + auth_endpoint_override="https://slack.com/oauth/v2/authorize", + token_endpoint_override="https://slack.com/api/oauth.v2.access", scopes=[ "search:read.public", "search:read.private", "search:read.mpim", "search:read.im", "search:read.files", "search:read.users", From dfa40b88018e09f1e4f743d1cedd8e1bb4744441 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 10:50:43 +0200 Subject: [PATCH 054/113] fix MCP OAuth for all 5 services, add MCP connector edit view --- .../app/routes/mcp_oauth_route.py | 4 +-- .../app/services/mcp_oauth/registry.py | 10 +++---- .../components/mcp-service-config.tsx | 30 +++++++++++++++++++ .../views/connector-edit-view.tsx | 25 ++++++++++------ 4 files changed, 53 insertions(+), 16 deletions(-) create mode 100644 surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx diff --git a/surfsense_backend/app/routes/mcp_oauth_route.py b/surfsense_backend/app/routes/mcp_oauth_route.py index efe928fd1..b7c605089 100644 --- a/surfsense_backend/app/routes/mcp_oauth_route.py +++ b/surfsense_backend/app/routes/mcp_oauth_route.py @@ -128,7 +128,7 @@ async def connect_mcp_service( status_code=502, detail=f"DCR for {svc.name} did not return a client_id.", ) - elif not svc.supports_dcr and svc.client_id_env: + elif svc.client_id_env: client_id = getattr(config, svc.client_id_env, None) client_secret = getattr(config, svc.client_secret_env or "", None) or "" if not client_id: @@ -446,7 +446,7 @@ async def reauth_mcp_service( status_code=502, detail=f"DCR for {svc.name} did not return a client_id.", ) - elif not svc.supports_dcr and svc.client_id_env: + elif svc.client_id_env: client_id = getattr(config, svc.client_id_env, None) client_secret = getattr(config, svc.client_secret_env or "", None) or "" if not 
client_id: diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index df6c6bb18..cd1a0ae8c 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -1,9 +1,9 @@ -"""Registry of MCP services with OAuth 2.1 support. +"""Registry of MCP services with OAuth support. Each entry maps a URL-safe service key to its MCP server endpoint and -authentication strategy. Services with ``supports_dcr=True`` will use -RFC 7591 Dynamic Client Registration; the rest require pre-configured -credentials via environment variables. +authentication configuration. Services with ``supports_dcr=True`` use +RFC 7591 Dynamic Client Registration (the MCP server issues its own +credentials); the rest use pre-configured credentials via env vars. """ from __future__ import annotations @@ -65,8 +65,8 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { name="Airtable", mcp_url="https://mcp.airtable.com/mcp", connector_type="AIRTABLE_CONNECTOR", - oauth_discovery_origin="https://airtable.com", supports_dcr=False, + oauth_discovery_origin="https://airtable.com", client_id_env="AIRTABLE_CLIENT_ID", client_secret_env="AIRTABLE_CLIENT_SECRET", scopes=["data.records:read", "data.records:write", "schema.bases:read", "schema.bases:write"], diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx new file mode 100644 index 000000000..4f43694ad --- /dev/null +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx @@ -0,0 +1,30 @@ +"use client"; + +import { CheckCircle2 } from "lucide-react"; +import type { FC } from "react"; +import type { ConnectorConfigProps } from "../index"; + +export const MCPServiceConfig: FC = ({ connector }) => { + const 
serviceName = connector.config?.mcp_service as string | undefined; + + return ( +
+
+
+ +
+
+

Connected via MCP

+

+ Your agent can search, read, and take actions in{" "} + {serviceName + ? serviceName.charAt(0).toUpperCase() + serviceName.slice(1) + : "this service"}{" "} + in real time. No background indexing needed. +

+
+
+ +
+ ); +}; diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx index e19600ab2..3c92320da 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx @@ -17,7 +17,7 @@ import { PeriodicSyncConfig } from "../../components/periodic-sync-config"; import { SummaryConfig } from "../../components/summary-config"; import { VisionLLMConfig } from "../../components/vision-llm-config"; import { getConnectorDisplayName } from "../../tabs/all-connectors-tab"; -import { getConnectorConfigComponent } from "../index"; +import { type ConnectorConfigProps, getConnectorConfigComponent } from "../index"; const REAUTH_ENDPOINTS: Partial> = { [EnumConnectorName.LINEAR_CONNECTOR]: "/api/v1/auth/linear/connector/reauth", @@ -118,11 +118,16 @@ export const ConnectorEditView: FC = ({ } }, [searchSpaceId, searchSpaceIdAtom, reauthEndpoint, connector.id]); - // Get connector-specific config component - const ConnectorConfigComponent = useMemo( - () => getConnectorConfigComponent(connector.connector_type), - [connector.connector_type] - ); + const isMCPBacked = Boolean(connector.config?.server_config); + + // Get connector-specific config component (MCP-backed connectors use a generic view) + const ConnectorConfigComponent = useMemo(() => { + if (isMCPBacked) { + const { MCPServiceConfig } = require("../components/mcp-service-config"); + return MCPServiceConfig as FC; + } + return getConnectorConfigComponent(connector.connector_type); + }, [connector.connector_type, isMCPBacked]); const [isScrolled, setIsScrolled] = useState(false); const [hasMoreContent, setHasMoreContent] = useState(false); const [showDisconnectConfirm, setShowDisconnectConfirm] = useState(false); @@ 
-223,7 +228,9 @@ export const ConnectorEditView: FC = ({ {getConnectorDisplayName(connector.name)}

- Manage your connector settings and sync configuration + {isMCPBacked + ? "Connected — your agent can interact with this service in real time" + : "Manage your connector settings and sync configuration"}

@@ -421,7 +428,7 @@ export const ConnectorEditView: FC = ({ Re-authenticate - ) : ( + ) : !isMCPBacked ? ( - )} + ) : null} ); From a4bc621c2acae3a1305da77c3ff8046d7ab40c68 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 11:22:04 +0200 Subject: [PATCH 055/113] uniform connector UX across all connector types --- .../components/connector-card.tsx | 16 ++++---- .../components/discord-config.tsx | 17 ++++----- .../components/mcp-service-config.tsx | 14 +++---- .../components/teams-config.tsx | 6 +-- .../views/connector-edit-view.tsx | 14 ++++--- .../constants/connector-constants.ts | 37 ++++++++++++++----- .../tabs/active-connectors-tab.tsx | 14 +++++-- .../views/connector-accounts-list-view.tsx | 25 +++++-------- 8 files changed, 82 insertions(+), 61 deletions(-) diff --git a/surfsense_web/components/assistant-ui/connector-popup/components/connector-card.tsx b/surfsense_web/components/assistant-ui/connector-popup/components/connector-card.tsx index d24057b1c..e0df73e66 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/components/connector-card.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/components/connector-card.tsx @@ -8,6 +8,7 @@ import { Spinner } from "@/components/ui/spinner"; import { EnumConnectorName } from "@/contracts/enums/connector"; import { getConnectorIcon } from "@/contracts/enums/connectorIcons"; import { cn } from "@/lib/utils"; +import { LIVE_CONNECTOR_TYPES } from "../constants/connector-constants"; import { useConnectorStatus } from "../hooks/use-connector-status"; import { ConnectorStatusBadge } from "./connector-status-badge"; @@ -55,6 +56,7 @@ export const ConnectorCard: FC = ({ onManage, }) => { const isMCP = connectorType === EnumConnectorName.MCP_CONNECTOR; + const isLive = !!connectorType && LIVE_CONNECTOR_TYPES.has(connectorType); // Get connector status const { getConnectorStatus, isConnectorEnabled, getConnectorStatusMessage, shouldShowWarnings } = useConnectorStatus(); @@ -123,14 
+125,14 @@ export const ConnectorCard: FC = ({ ) : ( <> - {formatDocumentCount(documentCount)} + {!isLive && {formatDocumentCount(documentCount)}} + {!isLive && accountCount !== undefined && accountCount > 0 && ( + + )} {accountCount !== undefined && accountCount > 0 && ( - <> - - - {accountCount} {accountCount === 1 ? "Account" : "Accounts"} - - + + {accountCount} {accountCount === 1 ? "Account" : "Accounts"} + )} )} diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/discord-config.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/discord-config.tsx index f782a6f4d..c8714ba40 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/discord-config.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/discord-config.tsx @@ -53,8 +53,7 @@ export const DiscordConfig: FC = ({ connector }) => { return () => document.removeEventListener("visibilitychange", handleVisibilityChange); }, [connector?.id, fetchChannels]); - // Separate channels by indexing capability - const readyToIndex = channels.filter((ch) => ch.can_index); + const accessible = channels.filter((ch) => ch.can_index); const needsPermissions = channels.filter((ch) => !ch.can_index); // Format last fetched time @@ -80,7 +79,7 @@ export const DiscordConfig: FC = ({ connector }) => {

- The bot needs "Read Message History" permission to index channels. Ask a + The bot needs "Read Message History" permission to access channels. Ask a server admin to grant this permission for channels shown below.

@@ -127,18 +126,18 @@ export const DiscordConfig: FC = ({ connector }) => { ) : (
- {/* Ready to index */} - {readyToIndex.length > 0 && ( + {/* Accessible channels */} + {accessible.length > 0 && (
0 && "border-b border-border")}>
- Ready to index + Accessible - {readyToIndex.length} {readyToIndex.length === 1 ? "channel" : "channels"} + {accessible.length} {accessible.length === 1 ? "channel" : "channels"}
- {readyToIndex.map((channel) => ( + {accessible.map((channel) => ( ))}
@@ -150,7 +149,7 @@ export const DiscordConfig: FC = ({ connector }) => {
- Grant permissions to index + Needs permissions {needsPermissions.length}{" "} {needsPermissions.length === 1 ? "channel" : "channels"} diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx index 4f43694ad..71d0e31a8 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx @@ -6,25 +6,23 @@ import type { ConnectorConfigProps } from "../index"; export const MCPServiceConfig: FC = ({ connector }) => { const serviceName = connector.config?.mcp_service as string | undefined; + const displayName = serviceName + ? serviceName.charAt(0).toUpperCase() + serviceName.slice(1) + : "this service"; return (
-
+
-

Connected via MCP

+

Connected

- Your agent can search, read, and take actions in{" "} - {serviceName - ? serviceName.charAt(0).toUpperCase() + serviceName.slice(1) - : "this service"}{" "} - in real time. No background indexing needed. + Your agent can search, read, and take actions in {displayName}.

-
); }; diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx index ac08a6c03..e96ddfd29 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx @@ -18,9 +18,9 @@ export const TeamsConfig: FC = () => {

Microsoft Teams Access

- SurfSense will index messages from Teams channels that you have access to. The app can - only read messages from teams and channels where you are a member. Make sure you're a - member of the teams you want to index before connecting. + Your agent can search and read messages from Teams channels you have access to, + and send messages on your behalf. Make sure you're a member of the teams + you want to interact with.

diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx index 3c92320da..aa3c8d193 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx @@ -16,6 +16,7 @@ import { DateRangeSelector } from "../../components/date-range-selector"; import { PeriodicSyncConfig } from "../../components/periodic-sync-config"; import { SummaryConfig } from "../../components/summary-config"; import { VisionLLMConfig } from "../../components/vision-llm-config"; +import { LIVE_CONNECTOR_TYPES } from "../../constants/connector-constants"; import { getConnectorDisplayName } from "../../tabs/all-connectors-tab"; import { type ConnectorConfigProps, getConnectorConfigComponent } from "../index"; @@ -119,6 +120,7 @@ export const ConnectorEditView: FC = ({ }, [searchSpaceId, searchSpaceIdAtom, reauthEndpoint, connector.id]); const isMCPBacked = Boolean(connector.config?.server_config); + const isLive = isMCPBacked || LIVE_CONNECTOR_TYPES.has(connector.connector_type); // Get connector-specific config component (MCP-backed connectors use a generic view) const ConnectorConfigComponent = useMemo(() => { @@ -228,8 +230,8 @@ export const ConnectorEditView: FC = ({ {getConnectorDisplayName(connector.name)}

- {isMCPBacked - ? "Connected — your agent can interact with this service in real time" + {isLive + ? "Manage your connected account" : "Manage your connector settings and sync configuration"}

@@ -381,10 +383,12 @@ export const ConnectorEditView: FC = ({ {/* Fixed Footer - Action buttons */}
- {showDisconnectConfirm ? ( -
+ {showDisconnectConfirm ? ( +
- Are you sure? + {isLive + ? "Your agent will lose access to this service." + : "This will remove all indexed data."}
@@ -234,15 +231,13 @@ export const ConnectorAccountsListView: FC = ({ Syncing

- ) : ( -

- {isIndexableConnector(connector.connector_type) - ? connector.last_indexed_at - ? `Last indexed: ${formatRelativeDate(connector.last_indexed_at)}` - : "Never indexed" - : "Active"} + ) : !isLiveConnector(connector.connector_type) ? ( +

+ {connector.last_indexed_at + ? `Last indexed: ${formatRelativeDate(connector.last_indexed_at)}` + : "Never indexed"}

- )} + ) : null}
{isAuthExpired ? (
- {/* Quick Index Button - hidden when auth is expired */} - {connector.is_indexable && onQuickIndex && !isAuthExpired && ( + {/* Quick Index Button - hidden for live connectors and when auth is expired */} + {connector.is_indexable && !isLive && onQuickIndex && !isAuthExpired && ( - ) : !isMCPBacked ? ( + ) : !isLive ? (

- Configure when to start syncing your data + {isLive + ? "Your account is ready to use" + : "Configure when to start syncing your data"}

@@ -157,8 +161,8 @@ export const IndexingConfigurationView: FC = ({ )} - {/* Summary and sync settings - only shown for indexable connectors */} - {connector?.is_indexable && ( + {/* Summary and sync settings - hidden for live connectors */} + {connector?.is_indexable && !isLive && ( <> {/* AI Summary toggle */} @@ -209,8 +213,8 @@ export const IndexingConfigurationView: FC = ({ )} - {/* Info box - only shown for indexable connectors */} - {connector?.is_indexable && ( + {/* Info box - hidden for live connectors */} + {connector?.is_indexable && !isLive && (
@@ -238,14 +242,20 @@ export const IndexingConfigurationView: FC = ({ {/* Fixed Footer - Action buttons */}
- + {isLive ? ( + + ) : ( + + )}
); diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 1f324d53e..05f866d0f 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -13,7 +13,9 @@ export const LIVE_CONNECTOR_TYPES = new Set([ EnumConnectorName.DISCORD_CONNECTOR, EnumConnectorName.TEAMS_CONNECTOR, EnumConnectorName.GOOGLE_CALENDAR_CONNECTOR, + EnumConnectorName.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR, EnumConnectorName.GOOGLE_GMAIL_CONNECTOR, + EnumConnectorName.COMPOSIO_GMAIL_CONNECTOR, EnumConnectorName.LUMA_CONNECTOR, ]); @@ -30,7 +32,7 @@ export const OAUTH_CONNECTORS = [ { id: "google-gmail-connector", title: "Gmail", - description: "Search and read your emails", + description: "Search, read, draft, and send emails", connectorType: EnumConnectorName.GOOGLE_GMAIL_CONNECTOR, authEndpoint: "/api/v1/auth/google/gmail/connector/add/", selfHostedOnly: true, @@ -46,7 +48,7 @@ export const OAUTH_CONNECTORS = [ { id: "airtable-connector", title: "Airtable", - description: "Search, read, and manage records", + description: "Browse bases, tables, and records", connectorType: EnumConnectorName.AIRTABLE_CONNECTOR, authEndpoint: "/api/v1/auth/mcp/airtable/connector/add/", }, @@ -67,7 +69,7 @@ export const OAUTH_CONNECTORS = [ { id: "slack-connector", title: "Slack", - description: "Search, read, and send messages", + description: "Search and read channels and threads", connectorType: EnumConnectorName.SLACK_CONNECTOR, authEndpoint: "/api/v1/auth/mcp/slack/connector/add/", }, @@ -116,7 +118,7 @@ export const OAUTH_CONNECTORS = [ { id: "clickup-connector", title: "ClickUp", - description: "Search, read, and manage tasks", + description: "Search and read tasks", connectorType: EnumConnectorName.CLICKUP_CONNECTOR, authEndpoint: 
"/api/v1/auth/mcp/clickup/connector/add/", }, @@ -155,7 +157,7 @@ export const OTHER_CONNECTORS = [ { id: "luma-connector", title: "Luma", - description: "Search and manage events", + description: "Browse, read, and create events", connectorType: EnumConnectorName.LUMA_CONNECTOR, }, { @@ -214,14 +216,14 @@ export const COMPOSIO_CONNECTORS = [ { id: "composio-gmail", title: "Gmail", - description: "Search through your emails via Composio", + description: "Search, read, draft, and send emails via Composio", connectorType: EnumConnectorName.COMPOSIO_GMAIL_CONNECTOR, authEndpoint: "/api/v1/auth/composio/connector/add/?toolkit_id=gmail", }, { id: "composio-googlecalendar", title: "Google Calendar", - description: "Search through your events via Composio", + description: "Search and manage your events via Composio", connectorType: EnumConnectorName.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR, authEndpoint: "/api/v1/auth/composio/connector/add/?toolkit_id=googlecalendar", }, @@ -238,14 +240,14 @@ export const COMPOSIO_TOOLKITS = [ { id: "gmail", name: "Gmail", - description: "Search through your emails", - isIndexable: true, + description: "Search, read, draft, and send emails", + isIndexable: false, }, { id: "googlecalendar", name: "Google Calendar", - description: "Search through your events", - isIndexable: true, + description: "Search and manage your events", + isIndexable: false, }, { id: "slack", @@ -275,18 +277,6 @@ export interface AutoIndexConfig { } export const AUTO_INDEX_DEFAULTS: Record = { - [EnumConnectorName.COMPOSIO_GMAIL_CONNECTOR]: { - daysBack: 30, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 30 days of emails.", - }, - [EnumConnectorName.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR]: { - daysBack: 90, - daysForward: 90, - frequencyMinutes: 1440, - syncDescription: "Syncing 90 days of past and upcoming events.", - }, [EnumConnectorName.NOTION_CONNECTOR]: { daysBack: 365, daysForward: 0, diff --git 
a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts index 9f968e2a7..a8d395e5c 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts @@ -38,6 +38,7 @@ import { AUTO_INDEX_CONNECTOR_TYPES, AUTO_INDEX_DEFAULTS, COMPOSIO_CONNECTORS, + LIVE_CONNECTOR_TYPES, OAUTH_CONNECTORS, OTHER_CONNECTORS, } from "../constants/connector-constants"; @@ -317,7 +318,12 @@ export const useConnectorDialog = () => { newConnector.id ); - if ( + const isLiveConnector = LIVE_CONNECTOR_TYPES.has(oauthConnector.connectorType); + + if (isLiveConnector) { + toast.success(`${oauthConnector.title} connected successfully!`); + await refetchAllConnectors(); + } else if ( newConnector.is_indexable && AUTO_INDEX_CONNECTOR_TYPES.has(oauthConnector.connectorType) ) { From 16f47578d787e59dcdbc7f4aebc7597be16fc7b4 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Thu, 23 Apr 2026 08:03:32 +0200 Subject: [PATCH 069/113] Enhance MCP tool trust functionality to support OAuth-backed connectors and improve error handling in the UI. Refactor API calls to use baseApiService for consistency. --- .../routes/search_source_connectors_routes.py | 17 +++++++++---- .../tool-ui/generic-hitl-approval.tsx | 5 ++-- .../lib/apis/connectors-api.service.ts | 24 ++++--------------- 3 files changed, 20 insertions(+), 26 deletions(-) diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py index 989894003..b8142c192 100644 --- a/surfsense_backend/app/routes/search_source_connectors_routes.py +++ b/surfsense_backend/app/routes/search_source_connectors_routes.py @@ -3105,13 +3105,18 @@ async def trust_mcp_tool( """Add a tool to the MCP connector's trusted (always-allow) list. 
Once trusted, the tool executes without HITL approval on subsequent calls. + Works for both generic MCP_CONNECTOR and OAuth-backed MCP connectors + (LINEAR_CONNECTOR, JIRA_CONNECTOR, etc.) by checking for ``server_config``. """ try: + from sqlalchemy import cast + from sqlalchemy.dialects.postgresql import JSONB as PG_JSONB + result = await session.execute( select(SearchSourceConnector).filter( SearchSourceConnector.id == connector_id, - SearchSourceConnector.connector_type - == SearchSourceConnectorType.MCP_CONNECTOR, + SearchSourceConnector.user_id == user.id, + cast(SearchSourceConnector.config, PG_JSONB).has_key("server_config"), # noqa: W601 ) ) connector = result.scalars().first() @@ -3156,13 +3161,17 @@ async def untrust_mcp_tool( """Remove a tool from the MCP connector's trusted list. The tool will require HITL approval again on subsequent calls. + Works for both generic MCP_CONNECTOR and OAuth-backed MCP connectors. """ try: + from sqlalchemy import cast + from sqlalchemy.dialects.postgresql import JSONB as PG_JSONB + result = await session.execute( select(SearchSourceConnector).filter( SearchSourceConnector.id == connector_id, - SearchSourceConnector.connector_type - == SearchSourceConnectorType.MCP_CONNECTOR, + SearchSourceConnector.user_id == user.id, + cast(SearchSourceConnector.config, PG_JSONB).has_key("server_config"), # noqa: W601 ) ) connector = result.scalars().first() diff --git a/surfsense_web/components/tool-ui/generic-hitl-approval.tsx b/surfsense_web/components/tool-ui/generic-hitl-approval.tsx index 809b76c38..d21f249ee 100644 --- a/surfsense_web/components/tool-ui/generic-hitl-approval.tsx +++ b/surfsense_web/components/tool-ui/generic-hitl-approval.tsx @@ -3,6 +3,7 @@ import type { ToolCallMessagePartComponent } from "@assistant-ui/react"; import { CornerDownLeftIcon, Pen } from "lucide-react"; import { useCallback, useEffect, useMemo, useState } from "react"; +import { toast } from "sonner"; import { TextShimmerLoader } from 
"@/components/prompt-kit/loader"; import { Button } from "@/components/ui/button"; import { Input } from "@/components/ui/input"; @@ -116,8 +117,8 @@ function GenericApprovalCard({ if (phase !== "pending" || !isMCPTool) return; setProcessing(); onDecision({ type: "approve" }); - connectorsApiService.trustMCPTool(mcpConnectorId, toolName).catch((err) => { - console.error("Failed to trust MCP tool:", err); + connectorsApiService.trustMCPTool(mcpConnectorId, toolName).catch(() => { + toast.error("Failed to save 'Always Allow' preference. The tool will still require approval next time."); }); }, [phase, setProcessing, onDecision, isMCPTool, mcpConnectorId, toolName]); diff --git a/surfsense_web/lib/apis/connectors-api.service.ts b/surfsense_web/lib/apis/connectors-api.service.ts index 3eaa767c5..f4137c787 100644 --- a/surfsense_web/lib/apis/connectors-api.service.ts +++ b/surfsense_web/lib/apis/connectors-api.service.ts @@ -414,16 +414,8 @@ class ConnectorsApiService { * Subsequent calls to this tool will skip HITL approval. */ trustMCPTool = async (connectorId: number, toolName: string): Promise => { - const backendUrl = process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL || "http://localhost:8000"; - const token = - typeof window !== "undefined" ? document.cookie.match(/fapiToken=([^;]+)/)?.[1] : undefined; - await fetch(`${backendUrl}/api/v1/connectors/mcp/${connectorId}/trust-tool`, { - method: "POST", - headers: { - "Content-Type": "application/json", - ...(token ? { Authorization: `Bearer ${token}` } : {}), - }, - body: JSON.stringify({ tool_name: toolName }), + await baseApiService.post(`/api/v1/connectors/mcp/${connectorId}/trust-tool`, undefined, { + body: { tool_name: toolName }, }); }; @@ -431,16 +423,8 @@ class ConnectorsApiService { * Remove a tool from the MCP connector's "Always Allow" list. 
*/ untrustMCPTool = async (connectorId: number, toolName: string): Promise => { - const backendUrl = process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL || "http://localhost:8000"; - const token = - typeof window !== "undefined" ? document.cookie.match(/fapiToken=([^;]+)/)?.[1] : undefined; - await fetch(`${backendUrl}/api/v1/connectors/mcp/${connectorId}/untrust-tool`, { - method: "POST", - headers: { - "Content-Type": "application/json", - ...(token ? { Authorization: `Bearer ${token}` } : {}), - }, - body: JSON.stringify({ tool_name: toolName }), + await baseApiService.post(`/api/v1/connectors/mcp/${connectorId}/untrust-tool`, undefined, { + body: { tool_name: toolName }, }); }; } From e3172dc282dbd31115b0ac5696dba74bf5bfbad7 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Thu, 23 Apr 2026 08:27:11 +0200 Subject: [PATCH 070/113] fix: reactive 401 recovery for live MCP connectors and unified reauth endpoints --- .../app/agents/new_chat/tools/mcp_tool.py | 480 +++++++++++++----- .../views/connector-edit-view.tsx | 18 +- .../constants/connector-constants.ts | 39 ++ .../views/connector-accounts-list-view.tsx | 37 +- 4 files changed, 396 insertions(+), 178 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py b/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py index 8f8e5007f..ddd65c7a7 100644 --- a/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py +++ b/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py @@ -194,6 +194,31 @@ async def _create_mcp_tool_from_definition_http( input_model = _create_dynamic_input_model_from_schema(exposed_name, input_schema) + async def _do_mcp_call( + call_headers: dict[str, str], + call_kwargs: dict[str, Any], + ) -> str: + """Execute a single MCP HTTP call with the given headers.""" + async with ( + streamablehttp_client(url, headers=call_headers) as (read, write, _), + ClientSession(read, write) as session, + ): + await session.initialize() + response = await session.call_tool( + original_tool_name, 
arguments=call_kwargs, + ) + + result = [] + for content in response.content: + if hasattr(content, "text"): + result.append(content.text) + elif hasattr(content, "data"): + result.append(str(content.data)) + else: + result.append(str(content)) + + return "\n".join(result) if result else "" + async def mcp_http_tool_call(**kwargs) -> str: """Execute the MCP tool call via HTTP transport.""" logger.debug("MCP HTTP tool '%s' called", exposed_name) @@ -218,31 +243,46 @@ async def _create_mcp_tool_from_definition_http( call_kwargs = {k: v for k, v in hitl_result.params.items() if v is not None} try: - async with ( - streamablehttp_client(url, headers=headers) as (read, write, _), - ClientSession(read, write) as session, - ): - await session.initialize() - response = await session.call_tool( - original_tool_name, arguments=call_kwargs, + result_str = await _do_mcp_call(headers, call_kwargs) + logger.debug("MCP HTTP tool '%s' succeeded (len=%d)", exposed_name, len(result_str)) + return result_str + + except Exception as first_err: + if not _is_auth_error(first_err) or connector_id is None: + logger.exception("MCP HTTP tool '%s' execution failed: %s", exposed_name, first_err) + return f"Error: MCP HTTP tool '{exposed_name}' execution failed: {first_err!s}" + + logger.warning( + "MCP HTTP tool '%s' got 401 — attempting token refresh for connector %s", + exposed_name, connector_id, + ) + fresh_headers = await _force_refresh_and_get_headers(connector_id) + if fresh_headers is None: + await _mark_connector_auth_expired(connector_id) + return ( + f"Error: MCP tool '{exposed_name}' authentication expired. " + "Please re-authenticate the connector in your settings." 
) - result = [] - for content in response.content: - if hasattr(content, "text"): - result.append(content.text) - elif hasattr(content, "data"): - result.append(str(content.data)) - else: - result.append(str(content)) - - result_str = "\n".join(result) if result else "" - logger.debug("MCP HTTP tool '%s' succeeded (len=%d)", exposed_name, len(result_str)) + try: + result_str = await _do_mcp_call(fresh_headers, call_kwargs) + logger.info( + "MCP HTTP tool '%s' succeeded after 401 recovery", + exposed_name, + ) return result_str - - except Exception as e: - logger.exception("MCP HTTP tool '%s' execution failed: %s", exposed_name, e) - return f"Error: MCP HTTP tool '{exposed_name}' execution failed: {e!s}" + except Exception as retry_err: + logger.exception( + "MCP HTTP tool '%s' still failing after token refresh: %s", + exposed_name, retry_err, + ) + if _is_auth_error(retry_err): + await _mark_connector_auth_expired(connector_id) + return ( + f"Error: MCP tool '{exposed_name}' authentication expired. " + "Please re-authenticate the connector in your settings." 
+ ) + return f"Error: MCP HTTP tool '{exposed_name}' execution failed: {retry_err!s}" tool = StructuredTool( name=exposed_name, @@ -365,66 +405,98 @@ async def _load_http_mcp_tools( allowed_set = set(allowed_tools) if allowed_tools else None - try: + async def _discover(disc_headers: dict[str, str]) -> list[dict[str, Any]]: + """Connect, initialize, and list tools from the MCP server.""" async with ( - streamablehttp_client(url, headers=headers) as (read, write, _), + streamablehttp_client(url, headers=disc_headers) as (read, write, _), ClientSession(read, write) as session, ): await session.initialize() - response = await session.list_tools() - tool_definitions = [] - for tool in response.tools: - tool_definitions.append( - { - "name": tool.name, - "description": tool.description or "", - "input_schema": tool.inputSchema - if hasattr(tool, "inputSchema") - else {}, - } - ) + return [ + { + "name": tool.name, + "description": tool.description or "", + "input_schema": tool.inputSchema + if hasattr(tool, "inputSchema") + else {}, + } + for tool in response.tools + ] - total_discovered = len(tool_definitions) + try: + tool_definitions = await _discover(headers) + except Exception as first_err: + if not _is_auth_error(first_err) or connector_id is None: + logger.exception( + "Failed to connect to HTTP MCP server at '%s' (connector %d): %s", + url, connector_id, first_err, + ) + return tools - if allowed_set: - tool_definitions = [ - td for td in tool_definitions if td["name"] in allowed_set - ] - logger.info( - "HTTP MCP server '%s' (connector %d): %d/%d tools after allowlist filter", - url, connector_id, len(tool_definitions), total_discovered, - ) - else: - logger.info( - "Discovered %d tools from HTTP MCP server '%s' (connector %d) — no allowlist, loading all", - total_discovered, url, connector_id, - ) - - for tool_def in tool_definitions: - try: - tool = await _create_mcp_tool_from_definition_http( - tool_def, - url, - headers, - connector_name=connector_name, - 
connector_id=connector_id, - trusted_tools=trusted_tools, - readonly_tools=readonly_tools, - tool_name_prefix=tool_name_prefix, - ) - tools.append(tool) - except Exception as e: - logger.exception( - "Failed to create HTTP tool '%s' from connector %d: %s", - tool_def.get("name"), connector_id, e, - ) - - except Exception as e: - logger.exception( - "Failed to connect to HTTP MCP server at '%s' (connector %d): %s", - url, connector_id, e, + logger.warning( + "HTTP MCP discovery for connector %d got 401 — attempting token refresh", + connector_id, ) + fresh_headers = await _force_refresh_and_get_headers(connector_id) + if fresh_headers is None: + await _mark_connector_auth_expired(connector_id) + logger.error( + "HTTP MCP discovery for connector %d: token refresh failed, marking auth_expired", + connector_id, + ) + return tools + + try: + tool_definitions = await _discover(fresh_headers) + headers = fresh_headers + logger.info( + "HTTP MCP discovery for connector %d succeeded after 401 recovery", + connector_id, + ) + except Exception as retry_err: + logger.exception( + "HTTP MCP discovery for connector %d still failing after refresh: %s", + connector_id, retry_err, + ) + if _is_auth_error(retry_err): + await _mark_connector_auth_expired(connector_id) + return tools + + total_discovered = len(tool_definitions) + + if allowed_set: + tool_definitions = [ + td for td in tool_definitions if td["name"] in allowed_set + ] + logger.info( + "HTTP MCP server '%s' (connector %d): %d/%d tools after allowlist filter", + url, connector_id, len(tool_definitions), total_discovered, + ) + else: + logger.info( + "Discovered %d tools from HTTP MCP server '%s' (connector %d) — no allowlist, loading all", + total_discovered, url, connector_id, + ) + + for tool_def in tool_definitions: + try: + tool = await _create_mcp_tool_from_definition_http( + tool_def, + url, + headers, + connector_name=connector_name, + connector_id=connector_id, + trusted_tools=trusted_tools, + 
readonly_tools=readonly_tools, + tool_name_prefix=tool_name_prefix, + ) + tools.append(tool) + except Exception as e: + logger.exception( + "Failed to create HTTP tool '%s' from connector %d: %s", + tool_def.get("name"), connector_id, e, + ) return tools @@ -476,6 +548,91 @@ def _inject_oauth_headers( return None +async def _refresh_connector_token( + session: AsyncSession, + connector: "SearchSourceConnector", +) -> str | None: + """Refresh the OAuth token for an MCP connector and persist the result. + + This is the shared core used by both proactive (pre-expiry) and reactive + (401 recovery) refresh paths. It handles: + - Decrypting the current refresh token / client secret + - Calling the token endpoint + - Encrypting and persisting the new tokens + - Clearing ``auth_expired`` if it was set + - Invalidating the MCP tools cache + + Returns the **plaintext** new access token on success, or ``None`` on + failure (no refresh token, IdP error, etc.). + """ + from datetime import UTC, datetime, timedelta + + from sqlalchemy.orm.attributes import flag_modified + + from app.services.mcp_oauth.discovery import refresh_access_token + + cfg = connector.config or {} + mcp_oauth = cfg.get("mcp_oauth", {}) + + refresh_token = mcp_oauth.get("refresh_token") + if not refresh_token: + logger.warning( + "MCP connector %s: no refresh_token available", + connector.id, + ) + return None + + enc = _get_token_enc() + decrypted_refresh = enc.decrypt_token(refresh_token) + decrypted_secret = ( + enc.decrypt_token(mcp_oauth["client_secret"]) + if mcp_oauth.get("client_secret") + else "" + ) + + token_json = await refresh_access_token( + token_endpoint=mcp_oauth["token_endpoint"], + refresh_token=decrypted_refresh, + client_id=mcp_oauth["client_id"], + client_secret=decrypted_secret, + ) + + new_access = token_json.get("access_token") + if not new_access: + logger.warning( + "MCP connector %s: token refresh returned no access_token", + connector.id, + ) + return None + + new_expires_at = 
None + if token_json.get("expires_in"): + new_expires_at = datetime.now(UTC) + timedelta( + seconds=int(token_json["expires_in"]) + ) + + updated_oauth = dict(mcp_oauth) + updated_oauth["access_token"] = enc.encrypt_token(new_access) + if token_json.get("refresh_token"): + updated_oauth["refresh_token"] = enc.encrypt_token( + token_json["refresh_token"] + ) + updated_oauth["expires_at"] = ( + new_expires_at.isoformat() if new_expires_at else None + ) + + updated_cfg = {**cfg, "mcp_oauth": updated_oauth} + updated_cfg.pop("auth_expired", None) + connector.config = updated_cfg + flag_modified(connector, "config") + await session.commit() + await session.refresh(connector) + + invalidate_mcp_tools_cache(connector.search_space_id) + + return new_access + + async def _maybe_refresh_mcp_oauth_token( session: AsyncSession, connector: "SearchSourceConnector", @@ -504,73 +661,13 @@ async def _maybe_refresh_mcp_oauth_token( except (ValueError, TypeError): return server_config - refresh_token = mcp_oauth.get("refresh_token") - if not refresh_token: - logger.warning( - "MCP connector %s token expired but no refresh_token available", - connector.id, - ) - return server_config - try: - from app.services.mcp_oauth.discovery import refresh_access_token - - enc = _get_token_enc() - decrypted_refresh = enc.decrypt_token(refresh_token) - decrypted_secret = ( - enc.decrypt_token(mcp_oauth["client_secret"]) - if mcp_oauth.get("client_secret") - else "" - ) - - token_json = await refresh_access_token( - token_endpoint=mcp_oauth["token_endpoint"], - refresh_token=decrypted_refresh, - client_id=mcp_oauth["client_id"], - client_secret=decrypted_secret, - ) - - new_access = token_json.get("access_token") + new_access = await _refresh_connector_token(session, connector) if not new_access: - logger.warning( - "MCP connector %s token refresh returned no access_token", - connector.id, - ) return server_config - new_expires_at = None - if token_json.get("expires_in"): - new_expires_at = 
datetime.now(UTC) + timedelta( - seconds=int(token_json["expires_in"]) - ) + logger.info("Proactively refreshed MCP OAuth token for connector %s", connector.id) - updated_oauth = dict(mcp_oauth) - updated_oauth["access_token"] = enc.encrypt_token(new_access) - if token_json.get("refresh_token"): - updated_oauth["refresh_token"] = enc.encrypt_token( - token_json["refresh_token"] - ) - updated_oauth["expires_at"] = ( - new_expires_at.isoformat() if new_expires_at else None - ) - - from sqlalchemy.orm.attributes import flag_modified - - connector.config = { - **cfg, - "server_config": server_config, - "mcp_oauth": updated_oauth, - } - flag_modified(connector, "config") - await session.commit() - await session.refresh(connector) - - logger.info("Refreshed MCP OAuth token for connector %s", connector.id) - - # Invalidate cache so next call picks up the new token. - invalidate_mcp_tools_cache(connector.search_space_id) - - # Return server_config with the fresh token injected for immediate use. refreshed_config = dict(server_config) refreshed_config["headers"] = { **server_config.get("headers", {}), @@ -587,6 +684,117 @@ async def _maybe_refresh_mcp_oauth_token( return server_config +# --------------------------------------------------------------------------- +# Reactive 401 handling helpers +# --------------------------------------------------------------------------- + + +def _is_auth_error(exc: Exception) -> bool: + """Check if an exception indicates an HTTP 401 authentication failure.""" + try: + import httpx + + if isinstance(exc, httpx.HTTPStatusError): + return exc.response.status_code == 401 + except ImportError: + pass + err_str = str(exc).lower() + return "401" in err_str or "unauthorized" in err_str + + +async def _force_refresh_and_get_headers( + connector_id: int, +) -> dict[str, str] | None: + """Force-refresh OAuth token for a connector and return fresh HTTP headers. 
+ + Opens a **new** DB session so this can be called from inside tool closures + that don't have access to the original session. + + Returns ``None`` when the connector is not OAuth-backed, has no + refresh token, or the refresh itself fails. + """ + from app.db import async_session_maker + + try: + async with async_session_maker() as session: + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == connector_id, + ) + ) + connector = result.scalars().first() + if not connector: + return None + + cfg = connector.config or {} + if not cfg.get("mcp_oauth"): + return None + + server_config = cfg.get("server_config", {}) + + new_access = await _refresh_connector_token(session, connector) + if not new_access: + return None + + logger.info( + "Force-refreshed MCP OAuth token for connector %s (401 recovery)", + connector_id, + ) + return { + **server_config.get("headers", {}), + "Authorization": f"Bearer {new_access}", + } + + except Exception: + logger.warning( + "Failed to force-refresh MCP OAuth token for connector %s", + connector_id, + exc_info=True, + ) + return None + + +async def _mark_connector_auth_expired(connector_id: int) -> None: + """Set ``config.auth_expired = True`` so the frontend shows re-auth UI.""" + from app.db import async_session_maker + + try: + async with async_session_maker() as session: + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == connector_id, + ) + ) + connector = result.scalars().first() + if not connector: + return + + cfg = dict(connector.config or {}) + if cfg.get("auth_expired"): + return + + cfg["auth_expired"] = True + connector.config = cfg + + from sqlalchemy.orm.attributes import flag_modified + + flag_modified(connector, "config") + await session.commit() + + logger.info( + "Marked MCP connector %s as auth_expired after unrecoverable 401", + connector_id, + ) + invalidate_mcp_tools_cache(connector.search_space_id) + + except 
Exception: + logger.warning( + "Failed to mark connector %s as auth_expired", + connector_id, + exc_info=True, + ) + + def invalidate_mcp_tools_cache(search_space_id: int | None = None) -> None: """Invalidate cached MCP tools. diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx index a69cf968f..16e7bd0d5 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx @@ -7,7 +7,6 @@ import { toast } from "sonner"; import { activeSearchSpaceIdAtom } from "@/atoms/search-spaces/search-space-query.atoms"; import { Button } from "@/components/ui/button"; import { Spinner } from "@/components/ui/spinner"; -import { EnumConnectorName } from "@/contracts/enums/connector"; import { getConnectorIcon } from "@/contracts/enums/connectorIcons"; import type { SearchSourceConnector } from "@/contracts/types/connector.types"; import { authenticatedFetch } from "@/lib/auth-utils"; @@ -16,23 +15,10 @@ import { DateRangeSelector } from "../../components/date-range-selector"; import { PeriodicSyncConfig } from "../../components/periodic-sync-config"; import { SummaryConfig } from "../../components/summary-config"; import { VisionLLMConfig } from "../../components/vision-llm-config"; -import { LIVE_CONNECTOR_TYPES } from "../../constants/connector-constants"; +import { LIVE_CONNECTOR_TYPES, getReauthEndpoint } from "../../constants/connector-constants"; import { getConnectorDisplayName } from "../../tabs/all-connectors-tab"; import { type ConnectorConfigProps, getConnectorConfigComponent } from "../index"; -const REAUTH_ENDPOINTS: Partial> = { - [EnumConnectorName.LINEAR_CONNECTOR]: "/api/v1/auth/linear/connector/reauth", - [EnumConnectorName.NOTION_CONNECTOR]: 
"/api/v1/auth/notion/connector/reauth", - [EnumConnectorName.GOOGLE_DRIVE_CONNECTOR]: "/api/v1/auth/google/drive/connector/reauth", - [EnumConnectorName.GOOGLE_GMAIL_CONNECTOR]: "/api/v1/auth/google/gmail/connector/reauth", - [EnumConnectorName.GOOGLE_CALENDAR_CONNECTOR]: "/api/v1/auth/google/calendar/connector/reauth", - [EnumConnectorName.COMPOSIO_GOOGLE_DRIVE_CONNECTOR]: "/api/v1/auth/composio/connector/reauth", - [EnumConnectorName.COMPOSIO_GMAIL_CONNECTOR]: "/api/v1/auth/composio/connector/reauth", - [EnumConnectorName.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR]: "/api/v1/auth/composio/connector/reauth", - [EnumConnectorName.ONEDRIVE_CONNECTOR]: "/api/v1/auth/onedrive/connector/reauth", - [EnumConnectorName.DROPBOX_CONNECTOR]: "/api/v1/auth/dropbox/connector/reauth", -}; - interface ConnectorEditViewProps { connector: SearchSourceConnector; startDate: Date | undefined; @@ -86,7 +72,7 @@ export const ConnectorEditView: FC = ({ }) => { const searchSpaceIdAtom = useAtomValue(activeSearchSpaceIdAtom); const isAuthExpired = connector.config?.auth_expired === true; - const reauthEndpoint = REAUTH_ENDPOINTS[connector.connector_type]; + const reauthEndpoint = getReauthEndpoint(connector); const [reauthing, setReauthing] = useState(false); const handleReauth = useCallback(async () => { diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 05f866d0f..621b71411 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -1,4 +1,5 @@ import { EnumConnectorName } from "@/contracts/enums/connector"; +import type { SearchSourceConnector } from "@/contracts/types/connector.types"; /** * Connectors that operate in real time (no background indexing). 
@@ -367,5 +368,43 @@ export function getConnectorTelemetryMeta(connectorType: string): ConnectorTelem }; } +// ============================================================================= +// REAUTH ENDPOINTS +// ============================================================================= + +/** + * Legacy (non-MCP) OAuth reauth endpoints, keyed by connector type. + * These are used for connectors that were NOT created via MCP OAuth. + */ +export const LEGACY_REAUTH_ENDPOINTS: Partial> = { + [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion/connector/reauth", + [EnumConnectorName.GOOGLE_DRIVE_CONNECTOR]: "/api/v1/auth/google/drive/connector/reauth", + [EnumConnectorName.GOOGLE_GMAIL_CONNECTOR]: "/api/v1/auth/google/gmail/connector/reauth", + [EnumConnectorName.GOOGLE_CALENDAR_CONNECTOR]: "/api/v1/auth/google/calendar/connector/reauth", + [EnumConnectorName.COMPOSIO_GOOGLE_DRIVE_CONNECTOR]: "/api/v1/auth/composio/connector/reauth", + [EnumConnectorName.COMPOSIO_GMAIL_CONNECTOR]: "/api/v1/auth/composio/connector/reauth", + [EnumConnectorName.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR]: "/api/v1/auth/composio/connector/reauth", + [EnumConnectorName.ONEDRIVE_CONNECTOR]: "/api/v1/auth/onedrive/connector/reauth", + [EnumConnectorName.DROPBOX_CONNECTOR]: "/api/v1/auth/dropbox/connector/reauth", + [EnumConnectorName.CONFLUENCE_CONNECTOR]: "/api/v1/auth/confluence/connector/reauth", + [EnumConnectorName.TEAMS_CONNECTOR]: "/api/v1/auth/teams/connector/reauth", + [EnumConnectorName.DISCORD_CONNECTOR]: "/api/v1/auth/discord/connector/reauth", +}; + +/** + * Resolve the reauth endpoint for a connector. + * + * MCP OAuth connectors (those with ``config.mcp_service``) dynamically build + * the URL from the service key. Legacy OAuth connectors fall back to the + * static ``LEGACY_REAUTH_ENDPOINTS`` map. 
+ */ +export function getReauthEndpoint(connector: SearchSourceConnector): string | undefined { + const mcpService = connector.config?.mcp_service as string | undefined; + if (mcpService) { + return `/api/v1/auth/mcp/${mcpService}/connector/reauth`; + } + return LEGACY_REAUTH_ENDPOINTS[connector.connector_type]; +} + // Re-export IndexingConfigState from schemas for backward compatibility export type { IndexingConfigState } from "./connector-popup.schemas"; diff --git a/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx index b48b14ed2..a1ae96a40 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx @@ -13,25 +13,10 @@ import type { SearchSourceConnector } from "@/contracts/types/connector.types"; import { authenticatedFetch } from "@/lib/auth-utils"; import { formatRelativeDate } from "@/lib/format-date"; import { cn } from "@/lib/utils"; -import { LIVE_CONNECTOR_TYPES } from "../constants/connector-constants"; +import { LIVE_CONNECTOR_TYPES, getReauthEndpoint } from "../constants/connector-constants"; import { useConnectorStatus } from "../hooks/use-connector-status"; import { getConnectorDisplayName } from "../tabs/all-connectors-tab"; -const REAUTH_ENDPOINTS: Partial> = { - [EnumConnectorName.LINEAR_CONNECTOR]: "/api/v1/auth/linear/connector/reauth", - [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion/connector/reauth", - [EnumConnectorName.GOOGLE_DRIVE_CONNECTOR]: "/api/v1/auth/google/drive/connector/reauth", - [EnumConnectorName.GOOGLE_GMAIL_CONNECTOR]: "/api/v1/auth/google/gmail/connector/reauth", - [EnumConnectorName.GOOGLE_CALENDAR_CONNECTOR]: "/api/v1/auth/google/calendar/connector/reauth", - [EnumConnectorName.COMPOSIO_GOOGLE_DRIVE_CONNECTOR]: 
"/api/v1/auth/composio/connector/reauth", - [EnumConnectorName.COMPOSIO_GMAIL_CONNECTOR]: "/api/v1/auth/composio/connector/reauth", - [EnumConnectorName.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR]: "/api/v1/auth/composio/connector/reauth", - [EnumConnectorName.ONEDRIVE_CONNECTOR]: "/api/v1/auth/onedrive/connector/reauth", - [EnumConnectorName.JIRA_CONNECTOR]: "/api/v1/auth/jira/connector/reauth", - [EnumConnectorName.DROPBOX_CONNECTOR]: "/api/v1/auth/dropbox/connector/reauth", - [EnumConnectorName.CONFLUENCE_CONNECTOR]: "/api/v1/auth/confluence/connector/reauth", -}; - interface ConnectorAccountsListViewProps { connectorType: string; connectorTitle: string; @@ -68,16 +53,15 @@ export const ConnectorAccountsListView: FC = ({ const isEnabled = isConnectorEnabled(connectorType); const statusMessage = getConnectorStatusMessage(connectorType); - const reauthEndpoint = REAUTH_ENDPOINTS[connectorType]; - const handleReauth = useCallback( - async (connectorId: number) => { - if (!searchSpaceId || !reauthEndpoint) return; - setReauthingId(connectorId); + async (connector: SearchSourceConnector) => { + const endpoint = getReauthEndpoint(connector); + if (!searchSpaceId || !endpoint) return; + setReauthingId(connector.id); try { const backendUrl = process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL || "http://localhost:8000"; - const url = new URL(`${backendUrl}${reauthEndpoint}`); - url.searchParams.set("connector_id", String(connectorId)); + const url = new URL(`${backendUrl}${endpoint}`); + url.searchParams.set("connector_id", String(connector.id)); url.searchParams.set("space_id", String(searchSpaceId)); url.searchParams.set("return_url", window.location.pathname); const response = await authenticatedFetch(url.toString()); @@ -99,7 +83,7 @@ export const ConnectorAccountsListView: FC = ({ setReauthingId(null); } }, - [searchSpaceId, reauthEndpoint] + [searchSpaceId] ); // Filter connectors to only show those of this type @@ -200,7 +184,8 @@ export const ConnectorAccountsListView: FC = ({
{typeConnectors.map((connector) => { const isIndexing = indexingConnectorIds.has(connector.id); - const isAuthExpired = !!reauthEndpoint && connector.config?.auth_expired === true; + const connectorReauthEndpoint = getReauthEndpoint(connector); + const isAuthExpired = !!connectorReauthEndpoint && connector.config?.auth_expired === true; return (
= ({
) : (
- {typeConnectors.map((connector) => { - const isIndexing = indexingConnectorIds.has(connector.id); - const connectorReauthEndpoint = getReauthEndpoint(connector); - const isAuthExpired = !!connectorReauthEndpoint && connector.config?.auth_expired === true; + {typeConnectors.map((connector) => { + const isIndexing = indexingConnectorIds.has(connector.id); + const connectorReauthEndpoint = getReauthEndpoint(connector); + const isAuthExpired = !!connectorReauthEndpoint && connector.config?.auth_expired === true; + const isLive = LIVE_CONNECTOR_TYPES.has(connector.connector_type) || Boolean(connector.config?.server_config); return (
= ({ Syncing

- ) : !isLiveConnector(connector.connector_type) ? ( + ) : !isLive ? (

{connector.last_indexed_at ? `Last indexed: ${formatRelativeDate(connector.last_indexed_at)}` @@ -224,28 +225,73 @@ export const ConnectorAccountsListView: FC = ({

) : null}
- {isAuthExpired ? ( - + {isAuthExpired ? ( + + ) : isLive && onDisconnect ? ( + confirmDisconnectId === connector.id ? ( +
+ + +
) : ( - )} + ) + ) : ( + + )}
); })} From 9bb117ffa7542870296fd5584f3a8fce3a7b4abd Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Thu, 23 Apr 2026 08:51:31 +0200 Subject: [PATCH 073/113] feat: skip edit view for live connectors, disconnect directly from accounts list --- .../assistant-ui/connector-popup.tsx | 43 ++++++++++--------- .../views/connector-edit-view.tsx | 6 +-- .../hooks/use-connector-dialog.ts | 20 +++++++++ 3 files changed, 45 insertions(+), 24 deletions(-) diff --git a/surfsense_web/components/assistant-ui/connector-popup.tsx b/surfsense_web/components/assistant-ui/connector-popup.tsx index 84361e25b..66333a9ef 100644 --- a/surfsense_web/components/assistant-ui/connector-popup.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup.tsx @@ -123,8 +123,9 @@ export const ConnectorIndicator = forwardRef ) : viewingMCPList ? ( - + handleDisconnectFromList(connector, () => refreshConnectors())} + onAddAccount={handleAddNewMCPFromList} + addButtonText="Add New MCP Server" + /> ) : viewingAccountsType ? 
( - { + handleDisconnectFromList(connector, () => refreshConnectors())} + onAddAccount={() => { // Check both OAUTH_CONNECTORS and COMPOSIO_CONNECTORS const oauthConnector = OAUTH_CONNECTORS.find( diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx index 16e7bd0d5..44461c351 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx @@ -17,6 +17,7 @@ import { SummaryConfig } from "../../components/summary-config"; import { VisionLLMConfig } from "../../components/vision-llm-config"; import { LIVE_CONNECTOR_TYPES, getReauthEndpoint } from "../../constants/connector-constants"; import { getConnectorDisplayName } from "../../tabs/all-connectors-tab"; +import { MCPServiceConfig } from "../components/mcp-service-config"; import { type ConnectorConfigProps, getConnectorConfigComponent } from "../index"; interface ConnectorEditViewProps { @@ -110,10 +111,7 @@ export const ConnectorEditView: FC = ({ // Get connector-specific config component (MCP-backed connectors use a generic view) const ConnectorConfigComponent = useMemo(() => { - if (isMCPBacked) { - const { MCPServiceConfig } = require("../components/mcp-service-config"); - return MCPServiceConfig as FC; - } + if (isMCPBacked) return MCPServiceConfig; return getConnectorConfigComponent(connector.connector_type); }, [connector.connector_type, isMCPBacked]); const [isScrolled, setIsScrolled] = useState(false); diff --git a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts index a8d395e5c..a9223fee5 100644 --- 
a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts @@ -1311,6 +1311,25 @@ export const useConnectorDialog = () => { [editingConnector, searchSpaceId, deleteConnector, cameFromMCPList, setIsOpen] ); + const handleDisconnectFromList = useCallback( + async (connector: SearchSourceConnector, refreshConnectors: () => void) => { + if (!searchSpaceId) return; + try { + await deleteConnector({ id: connector.id }); + trackConnectorDeleted(Number(searchSpaceId), connector.connector_type, connector.id); + toast.success(`${connector.name} disconnected successfully`); + refreshConnectors(); + queryClient.invalidateQueries({ + queryKey: cacheKeys.logs.summary(Number(searchSpaceId)), + }); + } catch (error) { + console.error("Error disconnecting connector:", error); + toast.error("Failed to disconnect connector"); + } + }, + [searchSpaceId, deleteConnector] + ); + // Handle quick index (index with selected date range, or backend defaults if none selected) const handleQuickIndexConnector = useCallback( async ( @@ -1484,6 +1503,7 @@ export const useConnectorDialog = () => { handleStartEdit, handleSaveConnector, handleDisconnectConnector, + handleDisconnectFromList, handleBackFromEdit, handleBackFromConnect, handleBackFromYouTube, From 2eb0ff9e5e108760b5d846590450802ff6325022 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Thu, 23 Apr 2026 08:57:56 +0200 Subject: [PATCH 074/113] feat: add reauthentication endpoints for Linear and JIRA connectors --- .../connector-popup/constants/connector-constants.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 621b71411..2ee811c19 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ 
b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -377,6 +377,8 @@ export function getConnectorTelemetryMeta(connectorType: string): ConnectorTelem * These are used for connectors that were NOT created via MCP OAuth. */ export const LEGACY_REAUTH_ENDPOINTS: Partial> = { + [EnumConnectorName.LINEAR_CONNECTOR]: "/api/v1/auth/linear/connector/reauth", + [EnumConnectorName.JIRA_CONNECTOR]: "/api/v1/auth/jira/connector/reauth", [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion/connector/reauth", [EnumConnectorName.GOOGLE_DRIVE_CONNECTOR]: "/api/v1/auth/google/drive/connector/reauth", [EnumConnectorName.GOOGLE_GMAIL_CONNECTOR]: "/api/v1/auth/google/gmail/connector/reauth", From cf7c14cf44b9887d88b28c6718221c2da4a1fb9d Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Thu, 23 Apr 2026 09:27:03 +0200 Subject: [PATCH 075/113] fix: mark connector auth_expired on token decryption failure --- surfsense_backend/app/agents/new_chat/tools/mcp_tool.py | 1 + 1 file changed, 1 insertion(+) diff --git a/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py b/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py index 7909657e0..b0dcd72b6 100644 --- a/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py +++ b/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py @@ -895,6 +895,7 @@ async def load_mcp_tools( "Skipping MCP connector %d — OAuth token decryption failed", connector.id, ) + await _mark_connector_auth_expired(connector.id) continue trusted_tools = cfg.get("trusted_tools", []) From 1712f454f82f35d228d8765341617346673c2a48 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Thu, 23 Apr 2026 09:45:25 +0200 Subject: [PATCH 076/113] fix: add spinner loading state to MCP test connection button --- .../connect-forms/components/mcp-connect-form.tsx | 11 +++++++++-- .../connector-configs/components/mcp-config.tsx | 11 +++++++++-- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git 
a/surfsense_web/components/assistant-ui/connector-popup/connect-forms/components/mcp-connect-form.tsx b/surfsense_web/components/assistant-ui/connector-popup/connect-forms/components/mcp-connect-form.tsx index 58d365128..fc9812240 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connect-forms/components/mcp-connect-form.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connect-forms/components/mcp-connect-form.tsx @@ -1,6 +1,6 @@ "use client"; -import { CheckCircle2, ChevronDown, ChevronUp, Server, XCircle } from "lucide-react"; +import { CheckCircle2, ChevronDown, ChevronUp, Loader2, Server, XCircle } from "lucide-react"; import { type FC, useRef, useState } from "react"; import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert"; import { Button } from "@/components/ui/button"; @@ -212,7 +212,14 @@ export const MCPConnectForm: FC = ({ onSubmit, isSubmitting }) variant="secondary" className="w-full h-8 text-[13px] px-3 rounded-lg font-medium bg-white text-slate-700 hover:bg-slate-50 border-0 shadow-xs dark:bg-secondary dark:text-secondary-foreground dark:hover:bg-secondary/80" > - {isTesting ? "Testing Connection" : "Test Connection"} + {isTesting ? ( + <> + + Testing Connection... + + ) : ( + "Test Connection" + )}
diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-config.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-config.tsx index ca997a9ba..d6f60e824 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-config.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-config.tsx @@ -1,6 +1,6 @@ "use client"; -import { CheckCircle2, ChevronDown, ChevronUp, Server, XCircle } from "lucide-react"; +import { CheckCircle2, ChevronDown, ChevronUp, Loader2, Server, XCircle } from "lucide-react"; import type { FC } from "react"; import { useCallback, useEffect, useRef, useState } from "react"; import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert"; @@ -217,7 +217,14 @@ export const MCPConfig: FC = ({ connector, onConfigChange, onNam variant="secondary" className="w-full h-8 text-[13px] px-3 rounded-lg font-medium bg-white text-slate-700 hover:bg-slate-50 border-0 shadow-xs dark:bg-secondary dark:text-secondary-foreground dark:hover:bg-secondary/80" > - {isTesting ? "Testing Connection" : "Test Connection"} + {isTesting ? ( + <> + + Testing Connection... + + ) : ( + "Test Connection" + )}
From 45b72de481f5d3a1645de6545b39d23d4b776a7f Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Thu, 23 Apr 2026 11:30:58 +0200 Subject: [PATCH 077/113] fix: robust generic MCP tool routing, retry, and empty-schema handling --- .../app/agents/new_chat/chat_deepagent.py | 16 +++ .../app/agents/new_chat/system_prompt.py | 35 +++++ .../app/agents/new_chat/tools/mcp_client.py | 58 +++++--- .../app/agents/new_chat/tools/mcp_tool.py | 124 ++++++++++++++---- 4 files changed, 191 insertions(+), 42 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/chat_deepagent.py b/surfsense_backend/app/agents/new_chat/chat_deepagent.py index 4b204ffa9..89aa13620 100644 --- a/surfsense_backend/app/agents/new_chat/chat_deepagent.py +++ b/surfsense_backend/app/agents/new_chat/chat_deepagent.py @@ -314,6 +314,20 @@ async def create_surfsense_deep_agent( _t0 = time.perf_counter() _enabled_tool_names = {t.name for t in tools} _user_disabled_tool_names = set(disabled_tools) if disabled_tools else set() + + # Collect generic MCP connector info so the system prompt can route queries + # to their tools instead of falling back to "not in knowledge base". 
+ _mcp_connector_tools: dict[str, list[str]] = {} + for t in tools: + meta = getattr(t, "metadata", None) or {} + if meta.get("mcp_is_generic") and meta.get("mcp_connector_name"): + _mcp_connector_tools.setdefault( + meta["mcp_connector_name"], [], + ).append(t.name) + + if _mcp_connector_tools: + _perf_log.info("MCP connector tool routing: %s", _mcp_connector_tools) + if agent_config is not None: system_prompt = build_configurable_system_prompt( custom_system_instructions=agent_config.system_instructions, @@ -322,12 +336,14 @@ async def create_surfsense_deep_agent( thread_visibility=thread_visibility, enabled_tool_names=_enabled_tool_names, disabled_tool_names=_user_disabled_tool_names, + mcp_connector_tools=_mcp_connector_tools, ) else: system_prompt = build_surfsense_system_prompt( thread_visibility=thread_visibility, enabled_tool_names=_enabled_tool_names, disabled_tool_names=_user_disabled_tool_names, + mcp_connector_tools=_mcp_connector_tools, ) _perf_log.info( "[create_agent] System prompt built in %.3fs", time.perf_counter() - _t0 diff --git a/surfsense_backend/app/agents/new_chat/system_prompt.py b/surfsense_backend/app/agents/new_chat/system_prompt.py index 3182735d9..e77132182 100644 --- a/surfsense_backend/app/agents/new_chat/system_prompt.py +++ b/surfsense_backend/app/agents/new_chat/system_prompt.py @@ -815,11 +815,36 @@ Your goal is to provide helpful, informative answers in a clean, readable format """ +def _build_mcp_routing_block( + mcp_connector_tools: dict[str, list[str]] | None, +) -> str: + """Build an additional tool routing block for generic MCP connectors. + + When users add MCP servers (e.g. GitLab, GitHub), the LLM needs to know + those tools exist and should be called directly — not searched in the + knowledge base. 
+ """ + if not mcp_connector_tools: + return "" + + lines = [ + "\n", + "You also have direct tools from these user-connected MCP servers.", + "Their data is NEVER in the knowledge base — call their tools directly.", + "", + ] + for server_name, tool_names in mcp_connector_tools.items(): + lines.append(f"- {server_name} → {', '.join(tool_names)}") + lines.append("\n") + return "\n".join(lines) + + def build_surfsense_system_prompt( today: datetime | None = None, thread_visibility: ChatVisibility | None = None, enabled_tool_names: set[str] | None = None, disabled_tool_names: set[str] | None = None, + mcp_connector_tools: dict[str, list[str]] | None = None, ) -> str: """ Build the SurfSense system prompt with default settings. @@ -834,6 +859,9 @@ def build_surfsense_system_prompt( thread_visibility: Optional; when provided, used for conditional prompt (e.g. private vs shared memory wording). Defaults to private behavior when None. enabled_tool_names: Set of tool names actually bound to the agent. When None all tools are included. disabled_tool_names: Set of tool names the user explicitly disabled. Included as a note so the model can inform the user. + mcp_connector_tools: Mapping of MCP server display name → list of tool names + for generic MCP connectors. Injected into the system prompt so the LLM + knows to call these tools directly. 
Returns: Complete system prompt string @@ -841,6 +869,7 @@ def build_surfsense_system_prompt( visibility = thread_visibility or ChatVisibility.PRIVATE system_instructions = _get_system_instructions(visibility, today) + system_instructions += _build_mcp_routing_block(mcp_connector_tools) tools_instructions = _get_tools_instructions( visibility, enabled_tool_names, disabled_tool_names ) @@ -856,6 +885,7 @@ def build_configurable_system_prompt( thread_visibility: ChatVisibility | None = None, enabled_tool_names: set[str] | None = None, disabled_tool_names: set[str] | None = None, + mcp_connector_tools: dict[str, list[str]] | None = None, ) -> str: """ Build a configurable SurfSense system prompt based on NewLLMConfig settings. @@ -877,6 +907,9 @@ def build_configurable_system_prompt( thread_visibility: Optional; when provided, used for conditional prompt (e.g. private vs shared memory wording). Defaults to private behavior when None. enabled_tool_names: Set of tool names actually bound to the agent. When None all tools are included. disabled_tool_names: Set of tool names the user explicitly disabled. Included as a note so the model can inform the user. + mcp_connector_tools: Mapping of MCP server display name → list of tool names + for generic MCP connectors. Injected into the system prompt so the LLM + knows to call these tools directly. 
Returns: Complete system prompt string @@ -894,6 +927,8 @@ def build_configurable_system_prompt( else: system_instructions = "" + system_instructions += _build_mcp_routing_block(mcp_connector_tools) + # Tools instructions: only include enabled tools, note disabled ones tools_instructions = _get_tools_instructions( thread_visibility, enabled_tool_names, disabled_tool_names diff --git a/surfsense_backend/app/agents/new_chat/tools/mcp_client.py b/surfsense_backend/app/agents/new_chat/tools/mcp_client.py index 44c48344c..b46ddbcc5 100644 --- a/surfsense_backend/app/agents/new_chat/tools/mcp_client.py +++ b/surfsense_backend/app/agents/new_chat/tools/mcp_client.py @@ -45,6 +45,18 @@ class MCPClient: async def connect(self, max_retries: int = MAX_RETRIES): """Connect to the MCP server and manage its lifecycle. + Retries only apply to the **connection** phase (spawning the process, + initialising the session). Once the session is yielded to the caller, + any exception raised by the caller propagates normally -- the context + manager will NOT retry after ``yield``. + + Previous implementation wrapped both connection AND yield inside the + retry loop. Because ``@asynccontextmanager`` only allows a single + ``yield``, a failure after yield caused the generator to attempt a + second yield on retry, triggering + ``RuntimeError("generator didn't stop after athrow()")`` and orphaning + the stdio subprocess. 
+ Args: max_retries: Maximum number of connection retry attempts @@ -57,26 +69,22 @@ class MCPClient: """ last_error = None delay = RETRY_DELAY + connected = False for attempt in range(max_retries): try: - # Merge env vars with current environment server_env = os.environ.copy() server_env.update(self.env) - # Create server parameters with env server_params = StdioServerParameters( command=self.command, args=self.args, env=server_env ) - # Spawn server process and create session - # Note: Cannot combine these context managers because ClientSession - # needs the read/write streams from stdio_client async with stdio_client(server=server_params) as (read, write): # noqa: SIM117 async with ClientSession(read, write) as session: - # Initialize the connection await session.initialize() self.session = session + connected = True if attempt > 0: logger.info( @@ -91,10 +99,16 @@ class MCPClient: self.command, " ".join(self.args), ) - yield session - return # Success, exit retry loop + try: + yield session + finally: + self.session = None + return except Exception as e: + self.session = None + if connected: + raise last_error = e if attempt < max_retries - 1: logger.warning( @@ -105,7 +119,7 @@ class MCPClient: delay, ) await asyncio.sleep(delay) - delay *= RETRY_BACKOFF # Exponential backoff + delay *= RETRY_BACKOFF else: logger.error( "Failed to connect to MCP server after %d attempts: %s", @@ -113,10 +127,7 @@ class MCPClient: e, exc_info=True, ) - finally: - self.session = None - # All retries exhausted error_msg = f"Failed to connect to MCP server '{self.command}' after {max_retries} attempts" if last_error: error_msg += f": {last_error}" @@ -161,12 +172,18 @@ class MCPClient: logger.error("Failed to list tools from MCP server: %s", e, exc_info=True) raise - async def call_tool(self, tool_name: str, arguments: dict[str, Any]) -> Any: + async def call_tool( + self, + tool_name: str, + arguments: dict[str, Any], + timeout: float = 60.0, + ) -> Any: """Call a tool on the MCP 
server. Args: tool_name: Name of the tool to call arguments: Arguments to pass to the tool + timeout: Maximum seconds to wait for the tool to respond Returns: Tool execution result @@ -185,10 +202,11 @@ class MCPClient: "Calling MCP tool '%s' with arguments: %s", tool_name, arguments ) - # Call tools/call RPC method - response = await self.session.call_tool(tool_name, arguments=arguments) + response = await asyncio.wait_for( + self.session.call_tool(tool_name, arguments=arguments), + timeout=timeout, + ) - # Extract content from response result = [] for content in response.content: if hasattr(content, "text"): @@ -202,15 +220,17 @@ class MCPClient: logger.info("MCP tool '%s' succeeded: %s", tool_name, result_str[:200]) return result_str + except asyncio.TimeoutError: + logger.error( + "MCP tool '%s' timed out after %.0fs", tool_name, timeout + ) + return f"Error: MCP tool '{tool_name}' timed out after {timeout:.0f}s" except RuntimeError as e: - # Handle validation errors from MCP server responses - # Some MCP servers (like server-memory) return extra fields not in their schema if "Invalid structured content" in str(e): logger.warning( "MCP server returned data not matching its schema, but continuing: %s", e, ) - # Try to extract result from error message or return a success message return "Operation completed (server returned unexpected format)" raise except (ValueError, TypeError, AttributeError, KeyError) as e: diff --git a/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py b/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py index b0dcd72b6..dfee24516 100644 --- a/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py +++ b/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py @@ -28,7 +28,7 @@ if TYPE_CHECKING: from langchain_core.tools import StructuredTool from mcp import ClientSession from mcp.client.streamable_http import streamablehttp_client -from pydantic import BaseModel, Field, create_model +from pydantic import BaseModel, ConfigDict, 
Field, create_model from sqlalchemy import cast, select from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.ext.asyncio import AsyncSession @@ -43,6 +43,8 @@ logger = logging.getLogger(__name__) _MCP_CACHE_TTL_SECONDS = 300 # 5 minutes _MCP_CACHE_MAX_SIZE = 50 _MCP_DISCOVERY_TIMEOUT_SECONDS = 30 +_TOOL_CALL_MAX_RETRIES = 3 +_TOOL_CALL_RETRY_DELAY = 1.5 # seconds, doubles per attempt _mcp_tools_cache: dict[int, tuple[float, list[StructuredTool]]] = {} @@ -64,7 +66,18 @@ def _create_dynamic_input_model_from_schema( tool_name: str, input_schema: dict[str, Any], ) -> type[BaseModel]: - """Create a Pydantic model from MCP tool's JSON schema.""" + """Create a Pydantic model from MCP tool's JSON schema. + + Models always allow extra fields (``extra="allow"``) so that parameters + missing from a broken or incomplete JSON schema (e.g. ``zod-to-json-schema`` + producing an empty ``$schema``-only object) can still be forwarded to the + MCP server. + + When the schema declares **no** properties, a synthetic ``input_data`` + field of type ``dict`` is injected so the LLM has a visible parameter to + populate. The caller should unpack ``input_data`` before forwarding to + the MCP server (see ``_unpack_synthetic_input_data``). + """ properties = input_schema.get("properties", {}) required_fields = input_schema.get("required", []) @@ -84,8 +97,35 @@ def _create_dynamic_input_model_from_schema( Field(None, description=param_description), ) + if not properties: + field_definitions["input_data"] = ( + dict[str, Any] | None, + Field( + None, + description=( + "Arguments to pass to this tool as a JSON object. " + "Infer sensible key names from the tool name and description " + "(e.g. {\"search\": \"my query\"} for a search tool)." 
+ ), + ), + ) + model_name = f"{tool_name.replace(' ', '').replace('-', '_')}Input" - return create_model(model_name, **field_definitions) + model = create_model(model_name, __config__=ConfigDict(extra="allow"), **field_definitions) + return model + + +def _unpack_synthetic_input_data(kwargs: dict[str, Any]) -> dict[str, Any]: + """Unpack the synthetic ``input_data`` field into top-level kwargs. + + When the MCP tool schema is empty, ``_create_dynamic_input_model_from_schema`` + adds a catch-all ``input_data: dict`` field. This helper merges that dict + back into the top-level kwargs so the MCP server receives flat arguments. + """ + input_data = kwargs.pop("input_data", None) + if isinstance(input_data, dict): + kwargs.update(input_data) + return kwargs async def _create_mcp_tool_from_definition_stdio( @@ -103,7 +143,12 @@ async def _create_mcp_tool_from_definition_stdio( ``GraphInterrupt`` propagates cleanly to LangGraph. """ tool_name = tool_def.get("name", "unnamed_tool") - tool_description = tool_def.get("description", "No description provided") + raw_description = tool_def.get("description", "No description provided") + tool_description = ( + f"[MCP server: {connector_name}] {raw_description}" + if connector_name + else raw_description + ) input_schema = tool_def.get("input_schema", {"type": "object", "properties": {}}) logger.debug("MCP tool '%s' input schema: %s", tool_name, input_schema) @@ -121,7 +166,7 @@ async def _create_mcp_tool_from_definition_stdio( params=kwargs, context={ "mcp_server": connector_name, - "tool_description": tool_description, + "tool_description": raw_description, "mcp_transport": "stdio", "mcp_connector_id": connector_id, }, @@ -129,18 +174,32 @@ async def _create_mcp_tool_from_definition_stdio( ) if hitl_result.rejected: return "Tool call rejected by user." 
- call_kwargs = {k: v for k, v in hitl_result.params.items() if v is not None} + call_kwargs = _unpack_synthetic_input_data( + {k: v for k, v in hitl_result.params.items() if v is not None} + ) - try: - async with mcp_client.connect(): - result = await mcp_client.call_tool(tool_name, call_kwargs) - return str(result) - except RuntimeError as e: - logger.error("MCP tool '%s' connection failed after retries: %s", tool_name, e) - return f"Error: MCP tool '{tool_name}' connection failed after retries: {e!s}" - except Exception as e: - logger.exception("MCP tool '%s' execution failed: %s", tool_name, e) - return f"Error: MCP tool '{tool_name}' execution failed: {e!s}" + last_error: Exception | None = None + for attempt in range(_TOOL_CALL_MAX_RETRIES): + try: + async with mcp_client.connect(): + result = await mcp_client.call_tool(tool_name, call_kwargs) + return str(result) + except Exception as e: + last_error = e + if attempt < _TOOL_CALL_MAX_RETRIES - 1: + delay = _TOOL_CALL_RETRY_DELAY * (2 ** attempt) + logger.warning( + "MCP tool '%s' failed (attempt %d/%d): %s. 
Retrying in %.1fs...", + tool_name, attempt + 1, _TOOL_CALL_MAX_RETRIES, e, delay, + ) + await asyncio.sleep(delay) + else: + logger.error( + "MCP tool '%s' failed after %d attempts: %s", + tool_name, _TOOL_CALL_MAX_RETRIES, e, exc_info=True, + ) + + return f"Error: MCP tool '{tool_name}' failed after {_TOOL_CALL_MAX_RETRIES} attempts: {last_error!s}" tool = StructuredTool( name=tool_name, @@ -150,6 +209,8 @@ async def _create_mcp_tool_from_definition_stdio( metadata={ "mcp_input_schema": input_schema, "mcp_transport": "stdio", + "mcp_connector_name": connector_name or None, + "mcp_is_generic": True, "hitl": True, "hitl_dedup_key": next(iter(input_schema.get("required", [])), None), }, @@ -169,6 +230,7 @@ async def _create_mcp_tool_from_definition_http( trusted_tools: list[str] | None = None, readonly_tools: frozenset[str] | None = None, tool_name_prefix: str | None = None, + is_generic_mcp: bool = False, ) -> StructuredTool: """Create a LangChain tool from an MCP tool definition (HTTP transport). @@ -180,7 +242,7 @@ async def _create_mcp_tool_from_definition_http( but the actual MCP ``call_tool`` still uses the original name. 
""" original_tool_name = tool_def.get("name", "unnamed_tool") - tool_description = tool_def.get("description", "No description provided") + raw_description = tool_def.get("description", "No description provided") input_schema = tool_def.get("input_schema", {"type": "object", "properties": {}}) is_readonly = readonly_tools is not None and original_tool_name in readonly_tools @@ -190,7 +252,11 @@ async def _create_mcp_tool_from_definition_http( else original_tool_name ) if tool_name_prefix: - tool_description = f"[Account: {connector_name}] {tool_description}" + tool_description = f"[Account: {connector_name}] {raw_description}" + elif is_generic_mcp and connector_name: + tool_description = f"[MCP server: {connector_name}] {raw_description}" + else: + tool_description = raw_description logger.debug("MCP HTTP tool '%s' input schema: %s", exposed_name, input_schema) @@ -199,6 +265,7 @@ async def _create_mcp_tool_from_definition_http( async def _do_mcp_call( call_headers: dict[str, str], call_kwargs: dict[str, Any], + timeout: float = 60.0, ) -> str: """Execute a single MCP HTTP call with the given headers.""" async with ( @@ -206,8 +273,9 @@ async def _create_mcp_tool_from_definition_http( ClientSession(read, write) as session, ): await session.initialize() - response = await session.call_tool( - original_tool_name, arguments=call_kwargs, + response = await asyncio.wait_for( + session.call_tool(original_tool_name, arguments=call_kwargs), + timeout=timeout, ) result = [] @@ -226,7 +294,9 @@ async def _create_mcp_tool_from_definition_http( logger.debug("MCP HTTP tool '%s' called", exposed_name) if is_readonly: - call_kwargs = {k: v for k, v in kwargs.items() if v is not None} + call_kwargs = _unpack_synthetic_input_data( + {k: v for k, v in kwargs.items() if v is not None} + ) else: hitl_result = request_approval( action_type="mcp_tool_call", @@ -234,7 +304,7 @@ async def _create_mcp_tool_from_definition_http( params=kwargs, context={ "mcp_server": connector_name, - 
"tool_description": tool_description, + "tool_description": raw_description, "mcp_transport": "http", "mcp_connector_id": connector_id, }, @@ -242,7 +312,9 @@ async def _create_mcp_tool_from_definition_http( ) if hitl_result.rejected: return "Tool call rejected by user." - call_kwargs = {k: v for k, v in hitl_result.params.items() if v is not None} + call_kwargs = _unpack_synthetic_input_data( + {k: v for k, v in hitl_result.params.items() if v is not None} + ) try: result_str = await _do_mcp_call(headers, call_kwargs) @@ -295,6 +367,8 @@ async def _create_mcp_tool_from_definition_http( "mcp_input_schema": input_schema, "mcp_transport": "http", "mcp_url": url, + "mcp_connector_name": connector_name or None, + "mcp_is_generic": is_generic_mcp, "hitl": not is_readonly, "hitl_dedup_key": next(iter(input_schema.get("required", [])), None), "mcp_original_tool_name": original_tool_name, @@ -376,6 +450,7 @@ async def _load_http_mcp_tools( allowed_tools: list[str] | None = None, readonly_tools: frozenset[str] | None = None, tool_name_prefix: str | None = None, + is_generic_mcp: bool = False, ) -> list[StructuredTool]: """Load tools from an HTTP-based MCP server. 
@@ -492,6 +567,7 @@ async def _load_http_mcp_tools( trusted_tools=trusted_tools, readonly_tools=readonly_tools, tool_name_prefix=tool_name_prefix, + is_generic_mcp=is_generic_mcp, ) tools.append(tool) except Exception as e: @@ -928,6 +1004,7 @@ async def load_mcp_tools( "readonly_tools": readonly_tools, "tool_name_prefix": tool_name_prefix, "transport": server_config.get("transport", "stdio"), + "is_generic_mcp": svc_cfg is None, }) except Exception as e: @@ -948,6 +1025,7 @@ async def load_mcp_tools( allowed_tools=task["allowed_tools"], readonly_tools=task["readonly_tools"], tool_name_prefix=task["tool_name_prefix"], + is_generic_mcp=task.get("is_generic_mcp", False), ), timeout=_MCP_DISCOVERY_TIMEOUT_SECONDS, ) From 749116e830122dcd4f6e3dbaf1a1e29d8ea6b726 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 15:02:58 +0530 Subject: [PATCH 078/113] feat(new-chat): add filesystem backend interfaces and selection helpers --- .../agents/new_chat/filesystem_backends.py | 38 +++++++++++++++++++ .../agents/new_chat/filesystem_selection.py | 33 ++++++++++++++++ .../agents/new_chat/middleware/__init__.py | 4 ++ 3 files changed, 75 insertions(+) create mode 100644 surfsense_backend/app/agents/new_chat/filesystem_backends.py create mode 100644 surfsense_backend/app/agents/new_chat/filesystem_selection.py diff --git a/surfsense_backend/app/agents/new_chat/filesystem_backends.py b/surfsense_backend/app/agents/new_chat/filesystem_backends.py new file mode 100644 index 000000000..8af7e8558 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/filesystem_backends.py @@ -0,0 +1,38 @@ +"""Filesystem backend resolver for cloud and desktop-local modes.""" + +from __future__ import annotations + +from collections.abc import Callable +from functools import lru_cache + +from deepagents.backends.state import StateBackend +from langgraph.prebuilt.tool_node import ToolRuntime + +from 
app.agents.new_chat.filesystem_selection import FilesystemMode, FilesystemSelection +from app.agents.new_chat.middleware.local_folder_backend import LocalFolderBackend + + +@lru_cache(maxsize=64) +def _cached_local_backend(root_path: str) -> LocalFolderBackend: + return LocalFolderBackend(root_path) + + +def build_backend_resolver( + selection: FilesystemSelection, +) -> Callable[[ToolRuntime], StateBackend | LocalFolderBackend]: + """Create deepagents backend resolver for the selected filesystem mode.""" + + if ( + selection.mode == FilesystemMode.DESKTOP_LOCAL_FOLDER + and selection.local_root_path is not None + ): + + def _resolve_local(_runtime: ToolRuntime) -> LocalFolderBackend: + return _cached_local_backend(selection.local_root_path or "") + + return _resolve_local + + def _resolve_cloud(runtime: ToolRuntime) -> StateBackend: + return StateBackend(runtime) + + return _resolve_cloud diff --git a/surfsense_backend/app/agents/new_chat/filesystem_selection.py b/surfsense_backend/app/agents/new_chat/filesystem_selection.py new file mode 100644 index 000000000..3094a0b29 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/filesystem_selection.py @@ -0,0 +1,33 @@ +"""Filesystem mode contracts and selection helpers for chat sessions.""" + +from __future__ import annotations + +from dataclasses import dataclass +from enum import StrEnum + + +class FilesystemMode(StrEnum): + """Supported filesystem backends for agent tool execution.""" + + CLOUD = "cloud" + DESKTOP_LOCAL_FOLDER = "desktop_local_folder" + + +class ClientPlatform(StrEnum): + """Client runtime reported by the caller.""" + + WEB = "web" + DESKTOP = "desktop" + + +@dataclass(slots=True) +class FilesystemSelection: + """Resolved filesystem selection for a single chat request.""" + + mode: FilesystemMode = FilesystemMode.CLOUD + client_platform: ClientPlatform = ClientPlatform.WEB + local_root_path: str | None = None + + @property + def is_local_mode(self) -> bool: + return self.mode == 
FilesystemMode.DESKTOP_LOCAL_FOLDER diff --git a/surfsense_backend/app/agents/new_chat/middleware/__init__.py b/surfsense_backend/app/agents/new_chat/middleware/__init__.py index 1f6b12852..5a24b2f9e 100644 --- a/surfsense_backend/app/agents/new_chat/middleware/__init__.py +++ b/surfsense_backend/app/agents/new_chat/middleware/__init__.py @@ -6,6 +6,9 @@ from app.agents.new_chat.middleware.dedup_tool_calls import ( from app.agents.new_chat.middleware.filesystem import ( SurfSenseFilesystemMiddleware, ) +from app.agents.new_chat.middleware.file_intent import ( + FileIntentMiddleware, +) from app.agents.new_chat.middleware.knowledge_search import ( KnowledgeBaseSearchMiddleware, ) @@ -15,6 +18,7 @@ from app.agents.new_chat.middleware.memory_injection import ( __all__ = [ "DedupHITLToolCallsMiddleware", + "FileIntentMiddleware", "KnowledgeBaseSearchMiddleware", "MemoryInjectionMiddleware", "SurfSenseFilesystemMiddleware", From 15a9e8b085f36ceb2065b32ed8f3f0238878617f Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 15:03:32 +0530 Subject: [PATCH 079/113] feat(middleware): detect file intent in chat messages --- .../agents/new_chat/middleware/file_intent.py | 253 ++++++++++++++++++ .../middleware/test_file_intent_middleware.py | 116 ++++++++ 2 files changed, 369 insertions(+) create mode 100644 surfsense_backend/app/agents/new_chat/middleware/file_intent.py create mode 100644 surfsense_backend/tests/unit/middleware/test_file_intent_middleware.py diff --git a/surfsense_backend/app/agents/new_chat/middleware/file_intent.py b/surfsense_backend/app/agents/new_chat/middleware/file_intent.py new file mode 100644 index 000000000..e264a939c --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/middleware/file_intent.py @@ -0,0 +1,253 @@ +"""Semantic file-intent routing middleware for new chat turns. 
+ +This middleware classifies the latest human turn into a small intent set: +- chat_only +- file_write +- file_read + +For ``file_write`` turns it injects a strict system contract so the model +uses filesystem tools before claiming success, and provides a deterministic +fallback path when no filename is specified by the user. +""" + +from __future__ import annotations + +import json +import logging +import re +from datetime import UTC, datetime +from enum import StrEnum +from typing import Any + +from langchain.agents.middleware import AgentMiddleware, AgentState +from langchain_core.language_models import BaseChatModel +from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage +from langgraph.runtime import Runtime +from pydantic import BaseModel, Field, ValidationError + +logger = logging.getLogger(__name__) + + +class FileOperationIntent(StrEnum): + CHAT_ONLY = "chat_only" + FILE_WRITE = "file_write" + FILE_READ = "file_read" + + +class FileIntentPlan(BaseModel): + intent: FileOperationIntent = Field( + description="Primary user intent for this turn." + ) + confidence: float = Field( + ge=0.0, + le=1.0, + default=0.5, + description="Model confidence in the selected intent.", + ) + suggested_filename: str | None = Field( + default=None, + description="Optional filename (e.g. 
notes.md) inferred from user request.", + ) + + +def _extract_text_from_message(message: BaseMessage) -> str: + content = getattr(message, "content", "") + if isinstance(content, str): + return content + if isinstance(content, list): + parts: list[str] = [] + for item in content: + if isinstance(item, str): + parts.append(item) + elif isinstance(item, dict) and item.get("type") == "text": + parts.append(str(item.get("text", ""))) + return "\n".join(part for part in parts if part) + return str(content) + + +def _extract_json_payload(text: str) -> str: + stripped = text.strip() + fenced = re.search(r"```(?:json)?\s*(\{.*?\})\s*```", stripped, re.DOTALL) + if fenced: + return fenced.group(1) + start = stripped.find("{") + end = stripped.rfind("}") + if start != -1 and end != -1 and end > start: + return stripped[start : end + 1] + return stripped + + +def _sanitize_filename(value: str) -> str: + name = re.sub(r"[\\/:*?\"<>|]+", "_", value).strip() + name = re.sub(r"\s+", "-", name) + name = name.strip("._-") + if not name: + name = "note" + if len(name) > 80: + name = name[:80].rstrip("-_.") + return name + + +def _infer_text_file_extension(user_text: str) -> str: + lowered = user_text.lower() + if any(token in lowered for token in ("json", ".json")): + return ".json" + if any(token in lowered for token in ("yaml", "yml", ".yaml", ".yml")): + return ".yaml" + if any(token in lowered for token in ("csv", ".csv")): + return ".csv" + if any(token in lowered for token in ("python", ".py")): + return ".py" + if any(token in lowered for token in ("typescript", ".ts", ".tsx")): + return ".ts" + if any(token in lowered for token in ("javascript", ".js", ".mjs", ".cjs")): + return ".js" + if any(token in lowered for token in ("html", ".html")): + return ".html" + if any(token in lowered for token in ("css", ".css")): + return ".css" + if any(token in lowered for token in ("sql", ".sql")): + return ".sql" + if any(token in lowered for token in ("toml", ".toml")): + return 
".toml" + if any(token in lowered for token in ("ini", ".ini")): + return ".ini" + if any(token in lowered for token in ("xml", ".xml")): + return ".xml" + if any(token in lowered for token in ("markdown", ".md", "readme")): + return ".md" + return ".md" + + +def _fallback_path(suggested_filename: str | None, *, user_text: str) -> str: + default_extension = _infer_text_file_extension(user_text) + if suggested_filename: + sanitized = _sanitize_filename(suggested_filename) + if sanitized.lower().endswith(".txt"): + sanitized = f"{sanitized[:-4]}.md" + if "." not in sanitized: + sanitized = f"{sanitized}{default_extension}" + return f"/{sanitized}" + return f"/notes{default_extension}" + + +def _build_classifier_prompt(*, recent_conversation: str, user_text: str) -> str: + return ( + "Classify the latest user request into a filesystem intent for an AI agent.\n" + "Return JSON only with this exact schema:\n" + '{"intent":"chat_only|file_write|file_read","confidence":0.0,"suggested_filename":"string or null"}\n\n' + "Rules:\n" + "- Use semantic intent, not literal keywords.\n" + "- file_write: user asks to create/save/write/update/edit content as a file.\n" + "- file_read: user asks to open/read/list/search existing files.\n" + "- chat_only: conversational/analysis responses without required file operations.\n" + "- For file_write, choose a concise semantic suggested_filename and match the requested format.\n" + "- Use extensions that match user intent (e.g. 
.md, .json, .yaml, .csv, .py, .ts, .js, .html, .css, .sql).\n" + "- Do not use .txt; prefer .md for generic text notes.\n" + "- Do not include dates or timestamps in suggested_filename unless explicitly requested.\n" + "- Never include markdown or explanation.\n\n" + f"Recent conversation:\n{recent_conversation or '(none)'}\n\n" + f"Latest user message:\n{user_text}" + ) + + +def _build_recent_conversation(messages: list[BaseMessage], *, max_messages: int = 6) -> str: + rows: list[str] = [] + for msg in messages[-max_messages:]: + role = "user" if isinstance(msg, HumanMessage) else "assistant" + text = re.sub(r"\s+", " ", _extract_text_from_message(msg)).strip() + if text: + rows.append(f"{role}: {text[:280]}") + return "\n".join(rows) + + +class FileIntentMiddleware(AgentMiddleware): # type: ignore[type-arg] + """Classify file intent and inject a strict file-write contract.""" + + tools = () + + def __init__(self, *, llm: BaseChatModel | None = None) -> None: + self.llm = llm + + async def _classify_intent( + self, *, messages: list[BaseMessage], user_text: str + ) -> FileIntentPlan: + if self.llm is None: + return FileIntentPlan(intent=FileOperationIntent.CHAT_ONLY, confidence=0.0) + + prompt = _build_classifier_prompt( + recent_conversation=_build_recent_conversation(messages), + user_text=user_text, + ) + try: + response = await self.llm.ainvoke( + [HumanMessage(content=prompt)], + config={"tags": ["surfsense:internal"]}, + ) + payload = json.loads(_extract_json_payload(_extract_text_from_message(response))) + plan = FileIntentPlan.model_validate(payload) + return plan + except (json.JSONDecodeError, ValidationError, ValueError) as exc: + logger.warning("File intent classifier returned invalid output: %s", exc) + except Exception as exc: # pragma: no cover - defensive fallback + logger.warning("File intent classifier failed: %s", exc) + + return FileIntentPlan(intent=FileOperationIntent.CHAT_ONLY, confidence=0.0) + + async def abefore_agent( # type: 
ignore[override] + self, + state: AgentState, + runtime: Runtime[Any], + ) -> dict[str, Any] | None: + del runtime + messages = state.get("messages") or [] + if not messages: + return None + + last_human: HumanMessage | None = None + for msg in reversed(messages): + if isinstance(msg, HumanMessage): + last_human = msg + break + if last_human is None: + return None + + user_text = _extract_text_from_message(last_human).strip() + if not user_text: + return None + + plan = await self._classify_intent(messages=messages, user_text=user_text) + suggested_path = _fallback_path(plan.suggested_filename, user_text=user_text) + contract = { + "intent": plan.intent.value, + "confidence": plan.confidence, + "suggested_path": suggested_path, + "timestamp": datetime.now(UTC).isoformat(), + "turn_id": state.get("turn_id", ""), + } + + if plan.intent != FileOperationIntent.FILE_WRITE: + return {"file_operation_contract": contract} + + contract_msg = SystemMessage( + content=( + "\n" + "This turn intent is file_write.\n" + f"Suggested default path: {suggested_path}\n" + "Rules:\n" + "- You MUST call write_file or edit_file before claiming success.\n" + "- If no path is provided by the user, use the suggested default path.\n" + "- Do not claim a file was created/updated unless tool output confirms it.\n" + "- If the write/edit fails, clearly report failure instead of success.\n" + "- Do not include timestamps or dates in generated file content unless the user explicitly asks for them.\n" + "- For open-ended requests (e.g., random note), generate useful concrete content, not placeholders.\n" + "" + ) + ) + + # Insert just before the latest human turn so it applies to this request. 
+ new_messages = list(messages) + insert_at = max(len(new_messages) - 1, 0) + new_messages.insert(insert_at, contract_msg) + return {"messages": new_messages, "file_operation_contract": contract} + diff --git a/surfsense_backend/tests/unit/middleware/test_file_intent_middleware.py b/surfsense_backend/tests/unit/middleware/test_file_intent_middleware.py new file mode 100644 index 000000000..68876dfeb --- /dev/null +++ b/surfsense_backend/tests/unit/middleware/test_file_intent_middleware.py @@ -0,0 +1,116 @@ +import pytest +from langchain_core.messages import AIMessage, HumanMessage + +from app.agents.new_chat.middleware.file_intent import ( + FileIntentMiddleware, + FileOperationIntent, +) + +pytestmark = pytest.mark.unit + + +class _FakeLLM: + def __init__(self, response_text: str): + self._response_text = response_text + + async def ainvoke(self, *_args, **_kwargs): + return AIMessage(content=self._response_text) + + +@pytest.mark.asyncio +async def test_file_write_intent_injects_contract_message(): + llm = _FakeLLM( + '{"intent":"file_write","confidence":0.93,"suggested_filename":"ideas.md"}' + ) + middleware = FileIntentMiddleware(llm=llm) + state = { + "messages": [HumanMessage(content="Create another random note for me")], + "turn_id": "123:456", + } + + result = await middleware.abefore_agent(state, runtime=None) # type: ignore[arg-type] + + assert result is not None + contract = result["file_operation_contract"] + assert contract["intent"] == FileOperationIntent.FILE_WRITE.value + assert contract["suggested_path"] == "/ideas.md" + assert contract["turn_id"] == "123:456" + assert any( + "file_operation_contract" in str(msg.content) + for msg in result["messages"] + if hasattr(msg, "content") + ) + + +@pytest.mark.asyncio +async def test_non_write_intent_does_not_inject_contract_message(): + llm = _FakeLLM( + '{"intent":"file_read","confidence":0.88,"suggested_filename":null}' + ) + middleware = FileIntentMiddleware(llm=llm) + original_messages = 
[HumanMessage(content="Read /notes.md")] + state = {"messages": original_messages, "turn_id": "abc:def"} + + result = await middleware.abefore_agent(state, runtime=None) # type: ignore[arg-type] + + assert result is not None + assert result["file_operation_contract"]["intent"] == FileOperationIntent.FILE_READ.value + assert "messages" not in result + + +@pytest.mark.asyncio +async def test_file_write_null_filename_uses_semantic_default_path(): + llm = _FakeLLM( + '{"intent":"file_write","confidence":0.74,"suggested_filename":null}' + ) + middleware = FileIntentMiddleware(llm=llm) + state = { + "messages": [HumanMessage(content="create a random markdown file")], + "turn_id": "turn:1", + } + + result = await middleware.abefore_agent(state, runtime=None) # type: ignore[arg-type] + + assert result is not None + contract = result["file_operation_contract"] + assert contract["intent"] == FileOperationIntent.FILE_WRITE.value + assert contract["suggested_path"] == "/notes.md" + + +@pytest.mark.asyncio +async def test_file_write_null_filename_infers_json_extension(): + llm = _FakeLLM( + '{"intent":"file_write","confidence":0.71,"suggested_filename":null}' + ) + middleware = FileIntentMiddleware(llm=llm) + state = { + "messages": [HumanMessage(content="create a sample json config file")], + "turn_id": "turn:2", + } + + result = await middleware.abefore_agent(state, runtime=None) # type: ignore[arg-type] + + assert result is not None + contract = result["file_operation_contract"] + assert contract["intent"] == FileOperationIntent.FILE_WRITE.value + assert contract["suggested_path"] == "/notes.json" + + +@pytest.mark.asyncio +async def test_file_write_txt_suggestion_is_normalized_to_markdown(): + llm = _FakeLLM( + '{"intent":"file_write","confidence":0.82,"suggested_filename":"random.txt"}' + ) + middleware = FileIntentMiddleware(llm=llm) + state = { + "messages": [HumanMessage(content="create a random file")], + "turn_id": "turn:3", + } + + result = await 
middleware.abefore_agent(state, runtime=None) # type: ignore[arg-type] + + assert result is not None + contract = result["file_operation_contract"] + assert contract["intent"] == FileOperationIntent.FILE_WRITE.value + assert contract["suggested_path"] == "/random.md" + From 739345671b06f5b13566e2a726973ecba0fe3f67 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Thu, 23 Apr 2026 11:40:21 +0200 Subject: [PATCH 080/113] fix: break circular import in llm_service and kb_sync_service files --- .../app/services/confluence/kb_sync_service.py | 5 ++++- .../app/services/dropbox/kb_sync_service.py | 3 ++- .../app/services/gmail/kb_sync_service.py | 3 ++- .../app/services/google_calendar/kb_sync_service.py | 5 ++++- .../app/services/google_drive/kb_sync_service.py | 3 ++- .../app/services/jira/kb_sync_service.py | 5 ++++- .../app/services/linear/kb_sync_service.py | 5 ++++- surfsense_backend/app/services/llm_service.py | 11 ++++++++++- .../app/services/notion/kb_sync_service.py | 5 ++++- .../app/services/onedrive/kb_sync_service.py | 3 ++- 10 files changed, 38 insertions(+), 10 deletions(-) diff --git a/surfsense_backend/app/services/confluence/kb_sync_service.py b/surfsense_backend/app/services/confluence/kb_sync_service.py index f786a9920..cae2bef88 100644 --- a/surfsense_backend/app/services/confluence/kb_sync_service.py +++ b/surfsense_backend/app/services/confluence/kb_sync_service.py @@ -5,7 +5,6 @@ from sqlalchemy.ext.asyncio import AsyncSession from app.connectors.confluence_history import ConfluenceHistoryConnector from app.db import Document, DocumentType -from app.services.llm_service import get_user_long_context_llm from app.utils.document_converters import ( create_document_chunks, embed_text, @@ -66,6 +65,8 @@ class ConfluenceKBSyncService: if dup: content_hash = unique_hash + from app.services.llm_service import get_user_long_context_llm + user_llm = await get_user_long_context_llm( self.db_session, user_id, @@ -184,6 +185,8 @@ class ConfluenceKBSyncService: 
space_id = (document.document_metadata or {}).get("space_id", "") + from app.services.llm_service import get_user_long_context_llm + user_llm = await get_user_long_context_llm( self.db_session, user_id, search_space_id, disable_streaming=True ) diff --git a/surfsense_backend/app/services/dropbox/kb_sync_service.py b/surfsense_backend/app/services/dropbox/kb_sync_service.py index 2a74bdf4b..9d1951013 100644 --- a/surfsense_backend/app/services/dropbox/kb_sync_service.py +++ b/surfsense_backend/app/services/dropbox/kb_sync_service.py @@ -5,7 +5,6 @@ from sqlalchemy.ext.asyncio import AsyncSession from app.db import Document, DocumentType from app.indexing_pipeline.document_hashing import compute_identifier_hash -from app.services.llm_service import get_user_long_context_llm from app.utils.document_converters import ( create_document_chunks, embed_text, @@ -73,6 +72,8 @@ class DropboxKBSyncService: ) content_hash = unique_hash + from app.services.llm_service import get_user_long_context_llm + user_llm = await get_user_long_context_llm( self.db_session, user_id, diff --git a/surfsense_backend/app/services/gmail/kb_sync_service.py b/surfsense_backend/app/services/gmail/kb_sync_service.py index b3b50d305..885ee4b94 100644 --- a/surfsense_backend/app/services/gmail/kb_sync_service.py +++ b/surfsense_backend/app/services/gmail/kb_sync_service.py @@ -4,7 +4,6 @@ from datetime import datetime from sqlalchemy.ext.asyncio import AsyncSession from app.db import Document, DocumentType -from app.services.llm_service import get_user_long_context_llm from app.utils.document_converters import ( create_document_chunks, embed_text, @@ -78,6 +77,8 @@ class GmailKBSyncService: ) content_hash = unique_hash + from app.services.llm_service import get_user_long_context_llm + user_llm = await get_user_long_context_llm( self.db_session, user_id, diff --git a/surfsense_backend/app/services/google_calendar/kb_sync_service.py b/surfsense_backend/app/services/google_calendar/kb_sync_service.py 
index 3cda02b9b..20426f3bc 100644 --- a/surfsense_backend/app/services/google_calendar/kb_sync_service.py +++ b/surfsense_backend/app/services/google_calendar/kb_sync_service.py @@ -14,7 +14,6 @@ from app.db import ( SearchSourceConnector, SearchSourceConnectorType, ) -from app.services.llm_service import get_user_long_context_llm from app.utils.document_converters import ( create_document_chunks, embed_text, @@ -91,6 +90,8 @@ class GoogleCalendarKBSyncService: ) content_hash = unique_hash + from app.services.llm_service import get_user_long_context_llm + user_llm = await get_user_long_context_llm( self.db_session, user_id, @@ -249,6 +250,8 @@ class GoogleCalendarKBSyncService: if not indexable_content: return {"status": "error", "message": "Event produced empty content"} + from app.services.llm_service import get_user_long_context_llm + user_llm = await get_user_long_context_llm( self.db_session, user_id, search_space_id, disable_streaming=True ) diff --git a/surfsense_backend/app/services/google_drive/kb_sync_service.py b/surfsense_backend/app/services/google_drive/kb_sync_service.py index 92a39f7b9..0a8eb47a6 100644 --- a/surfsense_backend/app/services/google_drive/kb_sync_service.py +++ b/surfsense_backend/app/services/google_drive/kb_sync_service.py @@ -4,7 +4,6 @@ from datetime import datetime from sqlalchemy.ext.asyncio import AsyncSession from app.db import Document, DocumentType -from app.services.llm_service import get_user_long_context_llm from app.utils.document_converters import ( create_document_chunks, embed_text, @@ -75,6 +74,8 @@ class GoogleDriveKBSyncService: ) content_hash = unique_hash + from app.services.llm_service import get_user_long_context_llm + user_llm = await get_user_long_context_llm( self.db_session, user_id, diff --git a/surfsense_backend/app/services/jira/kb_sync_service.py b/surfsense_backend/app/services/jira/kb_sync_service.py index 4d2a66e52..8e88bee81 100644 --- a/surfsense_backend/app/services/jira/kb_sync_service.py +++ 
b/surfsense_backend/app/services/jira/kb_sync_service.py @@ -6,7 +6,6 @@ from sqlalchemy.ext.asyncio import AsyncSession from app.connectors.jira_history import JiraHistoryConnector from app.db import Document, DocumentType -from app.services.llm_service import get_user_long_context_llm from app.utils.document_converters import ( create_document_chunks, embed_text, @@ -75,6 +74,8 @@ class JiraKBSyncService: if dup: content_hash = unique_hash + from app.services.llm_service import get_user_long_context_llm + user_llm = await get_user_long_context_llm( self.db_session, user_id, @@ -190,6 +191,8 @@ class JiraKBSyncService: state = formatted.get("status", "Unknown") comment_count = len(formatted.get("comments", [])) + from app.services.llm_service import get_user_long_context_llm + user_llm = await get_user_long_context_llm( self.db_session, user_id, search_space_id, disable_streaming=True ) diff --git a/surfsense_backend/app/services/linear/kb_sync_service.py b/surfsense_backend/app/services/linear/kb_sync_service.py index dab42af55..471227602 100644 --- a/surfsense_backend/app/services/linear/kb_sync_service.py +++ b/surfsense_backend/app/services/linear/kb_sync_service.py @@ -5,7 +5,6 @@ from sqlalchemy.ext.asyncio import AsyncSession from app.connectors.linear_connector import LinearConnector from app.db import Document, DocumentType -from app.services.llm_service import get_user_long_context_llm from app.utils.document_converters import ( create_document_chunks, embed_text, @@ -85,6 +84,8 @@ class LinearKBSyncService: ) content_hash = unique_hash + from app.services.llm_service import get_user_long_context_llm + user_llm = await get_user_long_context_llm( self.db_session, user_id, @@ -226,6 +227,8 @@ class LinearKBSyncService: comment_count = len(formatted_issue.get("comments", [])) formatted_issue.get("description", "") + from app.services.llm_service import get_user_long_context_llm + user_llm = await get_user_long_context_llm( self.db_session, user_id, 
search_space_id, disable_streaming=True ) diff --git a/surfsense_backend/app/services/llm_service.py b/surfsense_backend/app/services/llm_service.py index 79a72dd25..942a9b7af 100644 --- a/surfsense_backend/app/services/llm_service.py +++ b/surfsense_backend/app/services/llm_service.py @@ -7,7 +7,6 @@ from langchain_litellm import ChatLiteLLM from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select -from app.agents.new_chat.llm_config import SanitizedChatLiteLLM from app.config import config from app.db import NewLLMConfig, SearchSpace from app.services.llm_router_service import ( @@ -204,6 +203,8 @@ async def validate_llm_config( if litellm_params: litellm_kwargs.update(litellm_params) + from app.agents.new_chat.llm_config import SanitizedChatLiteLLM + llm = SanitizedChatLiteLLM(**litellm_kwargs) # Run the test call in a worker thread with a hard timeout. Some @@ -377,6 +378,8 @@ async def get_search_space_llm_instance( if disable_streaming: litellm_kwargs["disable_streaming"] = True + from app.agents.new_chat.llm_config import SanitizedChatLiteLLM + return SanitizedChatLiteLLM(**litellm_kwargs) # Get the LLM configuration from database (NewLLMConfig) @@ -454,6 +457,8 @@ async def get_search_space_llm_instance( if disable_streaming: litellm_kwargs["disable_streaming"] = True + from app.agents.new_chat.llm_config import SanitizedChatLiteLLM + return SanitizedChatLiteLLM(**litellm_kwargs) except Exception as e: @@ -555,6 +560,8 @@ async def get_vision_llm( if global_cfg.get("litellm_params"): litellm_kwargs.update(global_cfg["litellm_params"]) + from app.agents.new_chat.llm_config import SanitizedChatLiteLLM + return SanitizedChatLiteLLM(**litellm_kwargs) result = await session.execute( @@ -588,6 +595,8 @@ async def get_vision_llm( if vision_cfg.litellm_params: litellm_kwargs.update(vision_cfg.litellm_params) + from app.agents.new_chat.llm_config import SanitizedChatLiteLLM + return SanitizedChatLiteLLM(**litellm_kwargs) except Exception 
as e: diff --git a/surfsense_backend/app/services/notion/kb_sync_service.py b/surfsense_backend/app/services/notion/kb_sync_service.py index be177c7ca..b10d1b157 100644 --- a/surfsense_backend/app/services/notion/kb_sync_service.py +++ b/surfsense_backend/app/services/notion/kb_sync_service.py @@ -4,7 +4,6 @@ from datetime import datetime from sqlalchemy.ext.asyncio import AsyncSession from app.db import Document, DocumentType -from app.services.llm_service import get_user_long_context_llm from app.utils.document_converters import ( create_document_chunks, embed_text, @@ -74,6 +73,8 @@ class NotionKBSyncService: ) content_hash = unique_hash + from app.services.llm_service import get_user_long_context_llm + user_llm = await get_user_long_context_llm( self.db_session, user_id, @@ -244,6 +245,8 @@ class NotionKBSyncService: f"Final content length: {len(full_content)} chars, verified={content_verified}" ) + from app.services.llm_service import get_user_long_context_llm + logger.debug("Generating summary and embeddings") user_llm = await get_user_long_context_llm( self.db_session, diff --git a/surfsense_backend/app/services/onedrive/kb_sync_service.py b/surfsense_backend/app/services/onedrive/kb_sync_service.py index 962c19fc9..e9b2e38ea 100644 --- a/surfsense_backend/app/services/onedrive/kb_sync_service.py +++ b/surfsense_backend/app/services/onedrive/kb_sync_service.py @@ -5,7 +5,6 @@ from sqlalchemy.ext.asyncio import AsyncSession from app.db import Document, DocumentType from app.indexing_pipeline.document_hashing import compute_identifier_hash -from app.services.llm_service import get_user_long_context_llm from app.utils.document_converters import ( create_document_chunks, embed_text, @@ -73,6 +72,8 @@ class OneDriveKBSyncService: ) content_hash = unique_hash + from app.services.llm_service import get_user_long_context_llm + user_llm = await get_user_long_context_llm( self.db_session, user_id, From 42d2d2222ecbe674ebfcb75a64cc97ef96fc5e9b Mon Sep 17 00:00:00 2001 
From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 15:44:12 +0530 Subject: [PATCH 081/113] feat(filesystem): add local folder backend and verification coverage --- .../middleware/local_folder_backend.py | 316 ++++++++++++++++++ .../middleware/test_filesystem_backends.py | 37 ++ .../test_filesystem_verification.py | 64 ++++ .../middleware/test_local_folder_backend.py | 59 ++++ 4 files changed, 476 insertions(+) create mode 100644 surfsense_backend/app/agents/new_chat/middleware/local_folder_backend.py create mode 100644 surfsense_backend/tests/unit/middleware/test_filesystem_backends.py create mode 100644 surfsense_backend/tests/unit/middleware/test_filesystem_verification.py create mode 100644 surfsense_backend/tests/unit/middleware/test_local_folder_backend.py diff --git a/surfsense_backend/app/agents/new_chat/middleware/local_folder_backend.py b/surfsense_backend/app/agents/new_chat/middleware/local_folder_backend.py new file mode 100644 index 000000000..60d967053 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/middleware/local_folder_backend.py @@ -0,0 +1,316 @@ +"""Desktop local-folder filesystem backend for deepagents tools.""" + +from __future__ import annotations + +import asyncio +import fnmatch +import os +import threading +from pathlib import Path + +from deepagents.backends.protocol import ( + EditResult, + FileDownloadResponse, + FileInfo, + FileUploadResponse, + GrepMatch, + WriteResult, +) +from deepagents.backends.utils import ( + create_file_data, + format_read_response, + perform_string_replacement, +) + +_INVALID_PATH = "invalid_path" +_FILE_NOT_FOUND = "file_not_found" +_IS_DIRECTORY = "is_directory" + + +class LocalFolderBackend: + """Filesystem backend rooted to a single local folder.""" + + def __init__(self, root_path: str) -> None: + root = Path(root_path).expanduser().resolve() + if not root.exists() or not root.is_dir(): + msg = f"Local filesystem root does not exist or is not a 
directory: {root_path}" + raise ValueError(msg) + self._root = root + self._locks: dict[str, threading.Lock] = {} + self._locks_mu = threading.Lock() + + def _lock_for(self, path: str) -> threading.Lock: + with self._locks_mu: + if path not in self._locks: + self._locks[path] = threading.Lock() + return self._locks[path] + + def _resolve_virtual(self, virtual_path: str, *, allow_root: bool = False) -> Path: + if not virtual_path.startswith("/"): + msg = f"Invalid path (must be absolute): {virtual_path}" + raise ValueError(msg) + rel = virtual_path.lstrip("/") + candidate = self._root if rel == "" else (self._root / rel) + resolved = candidate.resolve() + if not allow_root and resolved == self._root: + msg = "Path must refer to a file or child directory under root" + raise ValueError(msg) + if not resolved.is_relative_to(self._root): + msg = f"Path escapes local filesystem root: {virtual_path}" + raise ValueError(msg) + return resolved + + @staticmethod + def _to_virtual(path: Path, root: Path) -> str: + rel = path.relative_to(root).as_posix() + return "/" if rel == "." 
else f"/{rel}" + + def _write_text_atomic(self, path: Path, content: str) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + temp_path = path.with_suffix(f"{path.suffix}.tmp") + temp_path.write_text(content, encoding="utf-8") + os.replace(temp_path, path) + + def ls_info(self, path: str) -> list[FileInfo]: + try: + target = self._resolve_virtual(path, allow_root=True) + except ValueError: + return [] + if not target.exists() or not target.is_dir(): + return [] + infos: list[FileInfo] = [] + for child in sorted(target.iterdir(), key=lambda p: (not p.is_dir(), p.name.lower())): + infos.append( + FileInfo( + path=self._to_virtual(child, self._root), + is_dir=child.is_dir(), + size=child.stat().st_size if child.is_file() else 0, + modified_at=str(child.stat().st_mtime), + ) + ) + return infos + + async def als_info(self, path: str) -> list[FileInfo]: + return await asyncio.to_thread(self.ls_info, path) + + def read(self, file_path: str, offset: int = 0, limit: int = 2000) -> str: + try: + path = self._resolve_virtual(file_path) + except ValueError: + return f"Error: Invalid path '{file_path}'" + if not path.exists(): + return f"Error: File '{file_path}' not found" + if not path.is_file(): + return f"Error: Path '{file_path}' is not a file" + content = path.read_text(encoding="utf-8", errors="replace") + file_data = create_file_data(content) + return format_read_response(file_data, offset, limit) + + async def aread(self, file_path: str, offset: int = 0, limit: int = 2000) -> str: + return await asyncio.to_thread(self.read, file_path, offset, limit) + + def read_raw(self, file_path: str) -> str: + """Read raw file text without line-number formatting.""" + try: + path = self._resolve_virtual(file_path) + except ValueError: + return f"Error: Invalid path '{file_path}'" + if not path.exists(): + return f"Error: File '{file_path}' not found" + if not path.is_file(): + return f"Error: Path '{file_path}' is not a file" + return path.read_text(encoding="utf-8", 
errors="replace") + + async def aread_raw(self, file_path: str) -> str: + """Async variant of read_raw.""" + return await asyncio.to_thread(self.read_raw, file_path) + + def write(self, file_path: str, content: str) -> WriteResult: + try: + path = self._resolve_virtual(file_path) + except ValueError: + return WriteResult(error=f"Error: Invalid path '{file_path}'") + lock = self._lock_for(file_path) + with lock: + if path.exists(): + return WriteResult( + error=( + f"Cannot write to {file_path} because it already exists. " + "Read and then make an edit, or write to a new path." + ) + ) + self._write_text_atomic(path, content) + return WriteResult(path=file_path, files_update=None) + + async def awrite(self, file_path: str, content: str) -> WriteResult: + return await asyncio.to_thread(self.write, file_path, content) + + def edit( + self, + file_path: str, + old_string: str, + new_string: str, + replace_all: bool = False, + ) -> EditResult: + try: + path = self._resolve_virtual(file_path) + except ValueError: + return EditResult(error=f"Error: Invalid path '{file_path}'") + lock = self._lock_for(file_path) + with lock: + if not path.exists() or not path.is_file(): + return EditResult(error=f"Error: File '{file_path}' not found") + content = path.read_text(encoding="utf-8", errors="replace") + result = perform_string_replacement(content, old_string, new_string, replace_all) + if isinstance(result, str): + return EditResult(error=result) + updated_content, occurrences = result + self._write_text_atomic(path, updated_content) + return EditResult(path=file_path, files_update=None, occurrences=int(occurrences)) + + async def aedit( + self, + file_path: str, + old_string: str, + new_string: str, + replace_all: bool = False, + ) -> EditResult: + return await asyncio.to_thread( + self.edit, file_path, old_string, new_string, replace_all + ) + + def glob_info(self, pattern: str, path: str = "/") -> list[FileInfo]: + try: + base = self._resolve_virtual(path, allow_root=True) + 
except ValueError: + return [] + + if pattern.startswith("/"): + search_base = self._root + normalized_pattern = pattern.lstrip("/") + else: + search_base = base + normalized_pattern = pattern + + matches: list[FileInfo] = [] + for hit in search_base.glob(normalized_pattern): + try: + resolved = hit.resolve() + if not resolved.is_relative_to(self._root): + continue + except Exception: + continue + matches.append( + FileInfo( + path=self._to_virtual(resolved, self._root), + is_dir=resolved.is_dir(), + size=resolved.stat().st_size if resolved.is_file() else 0, + modified_at=str(resolved.stat().st_mtime), + ) + ) + return matches + + async def aglob_info(self, pattern: str, path: str = "/") -> list[FileInfo]: + return await asyncio.to_thread(self.glob_info, pattern, path) + + def _iter_candidate_files(self, path: str | None, glob: str | None) -> list[Path]: + base_virtual = path or "/" + try: + base = self._resolve_virtual(base_virtual, allow_root=True) + except ValueError: + return [] + if not base.exists(): + return [] + + candidates = [p for p in base.rglob("*") if p.is_file()] + if glob: + candidates = [ + p + for p in candidates + if fnmatch.fnmatch(self._to_virtual(p, self._root), glob) + or fnmatch.fnmatch(p.name, glob) + ] + return candidates + + def grep_raw( + self, pattern: str, path: str | None = None, glob: str | None = None + ) -> list[GrepMatch] | str: + if not pattern: + return "Error: pattern cannot be empty" + matches: list[GrepMatch] = [] + for file_path in self._iter_candidate_files(path, glob): + try: + lines = file_path.read_text(encoding="utf-8", errors="replace").splitlines() + except Exception: + continue + for idx, line in enumerate(lines, start=1): + if pattern in line: + matches.append( + GrepMatch( + path=self._to_virtual(file_path, self._root), + line=idx, + text=line, + ) + ) + return matches + + async def agrep_raw( + self, pattern: str, path: str | None = None, glob: str | None = None + ) -> list[GrepMatch] | str: + return await 
asyncio.to_thread(self.grep_raw, pattern, path, glob) + + def upload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]: + responses: list[FileUploadResponse] = [] + for virtual_path, content in files: + try: + target = self._resolve_virtual(virtual_path) + target.parent.mkdir(parents=True, exist_ok=True) + temp_path = target.with_suffix(f"{target.suffix}.tmp") + temp_path.write_bytes(content) + os.replace(temp_path, target) + responses.append(FileUploadResponse(path=virtual_path, error=None)) + except FileNotFoundError: + responses.append( + FileUploadResponse(path=virtual_path, error=_FILE_NOT_FOUND) + ) + except IsADirectoryError: + responses.append(FileUploadResponse(path=virtual_path, error=_IS_DIRECTORY)) + except Exception: + responses.append(FileUploadResponse(path=virtual_path, error=_INVALID_PATH)) + return responses + + async def aupload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]: + return await asyncio.to_thread(self.upload_files, files) + + def download_files(self, paths: list[str]) -> list[FileDownloadResponse]: + responses: list[FileDownloadResponse] = [] + for virtual_path in paths: + try: + target = self._resolve_virtual(virtual_path) + if not target.exists(): + responses.append( + FileDownloadResponse( + path=virtual_path, content=None, error=_FILE_NOT_FOUND + ) + ) + continue + if target.is_dir(): + responses.append( + FileDownloadResponse( + path=virtual_path, content=None, error=_IS_DIRECTORY + ) + ) + continue + responses.append( + FileDownloadResponse( + path=virtual_path, content=target.read_bytes(), error=None + ) + ) + except Exception: + responses.append( + FileDownloadResponse(path=virtual_path, content=None, error=_INVALID_PATH) + ) + return responses + + async def adownload_files(self, paths: list[str]) -> list[FileDownloadResponse]: + return await asyncio.to_thread(self.download_files, paths) diff --git a/surfsense_backend/tests/unit/middleware/test_filesystem_backends.py 
b/surfsense_backend/tests/unit/middleware/test_filesystem_backends.py new file mode 100644 index 000000000..2377307f8 --- /dev/null +++ b/surfsense_backend/tests/unit/middleware/test_filesystem_backends.py @@ -0,0 +1,37 @@ +from pathlib import Path + +import pytest + +from app.agents.new_chat.filesystem_backends import build_backend_resolver +from app.agents.new_chat.filesystem_selection import ( + ClientPlatform, + FilesystemMode, + FilesystemSelection, +) +from app.agents.new_chat.middleware.local_folder_backend import LocalFolderBackend + +pytestmark = pytest.mark.unit + + +class _RuntimeStub: + state = {"files": {}} + + +def test_backend_resolver_returns_local_backend_for_local_mode(tmp_path: Path): + selection = FilesystemSelection( + mode=FilesystemMode.DESKTOP_LOCAL_FOLDER, + client_platform=ClientPlatform.DESKTOP, + local_root_path=str(tmp_path), + ) + resolver = build_backend_resolver(selection) + + backend = resolver(_RuntimeStub()) + assert isinstance(backend, LocalFolderBackend) + + +def test_backend_resolver_uses_cloud_mode_by_default(): + resolver = build_backend_resolver(FilesystemSelection()) + backend = resolver(_RuntimeStub()) + # StateBackend class name check keeps this test decoupled + # from internal deepagents runtime class identity. 
+ assert backend.__class__.__name__ == "StateBackend" diff --git a/surfsense_backend/tests/unit/middleware/test_filesystem_verification.py b/surfsense_backend/tests/unit/middleware/test_filesystem_verification.py new file mode 100644 index 000000000..9f6b162aa --- /dev/null +++ b/surfsense_backend/tests/unit/middleware/test_filesystem_verification.py @@ -0,0 +1,64 @@ +import pytest + +from app.agents.new_chat.middleware.filesystem import SurfSenseFilesystemMiddleware + +pytestmark = pytest.mark.unit + + +class _BackendWithRawRead: + def __init__(self, content: str) -> None: + self._content = content + + def read(self, file_path: str, offset: int = 0, limit: int = 200000) -> str: + del file_path, offset, limit + return " 1\tline1\n 2\tline2" + + async def aread(self, file_path: str, offset: int = 0, limit: int = 200000) -> str: + return self.read(file_path, offset, limit) + + def read_raw(self, file_path: str) -> str: + del file_path + return self._content + + async def aread_raw(self, file_path: str) -> str: + return self.read_raw(file_path) + + +class _RuntimeNoSuggestedPath: + state = {"file_operation_contract": {}} + + +def test_verify_written_content_prefers_raw_sync() -> None: + middleware = SurfSenseFilesystemMiddleware.__new__(SurfSenseFilesystemMiddleware) + expected = "line1\nline2" + backend = _BackendWithRawRead(expected) + + verify_error = middleware._verify_written_content_sync( + backend=backend, + path="/note.md", + expected_content=expected, + ) + + assert verify_error is None + + +def test_contract_suggested_path_falls_back_to_notes_md() -> None: + suggested = SurfSenseFilesystemMiddleware._get_contract_suggested_path( + _RuntimeNoSuggestedPath() + ) + assert suggested == "/notes.md" + + +@pytest.mark.asyncio +async def test_verify_written_content_prefers_raw_async() -> None: + middleware = SurfSenseFilesystemMiddleware.__new__(SurfSenseFilesystemMiddleware) + expected = "line1\nline2" + backend = _BackendWithRawRead(expected) + + verify_error = 
await middleware._verify_written_content_async( + backend=backend, + path="/note.md", + expected_content=expected, + ) + + assert verify_error is None diff --git a/surfsense_backend/tests/unit/middleware/test_local_folder_backend.py b/surfsense_backend/tests/unit/middleware/test_local_folder_backend.py new file mode 100644 index 000000000..3484a2cc4 --- /dev/null +++ b/surfsense_backend/tests/unit/middleware/test_local_folder_backend.py @@ -0,0 +1,59 @@ +from pathlib import Path + +import pytest + +from app.agents.new_chat.middleware.local_folder_backend import LocalFolderBackend + +pytestmark = pytest.mark.unit + + +def test_local_backend_write_read_edit_roundtrip(tmp_path: Path): + backend = LocalFolderBackend(str(tmp_path)) + + write = backend.write("/notes/test.md", "line1\nline2") + assert write.error is None + assert write.path == "/notes/test.md" + + read = backend.read("/notes/test.md", offset=0, limit=20) + assert "line1" in read + assert "line2" in read + + edit = backend.edit("/notes/test.md", "line2", "updated") + assert edit.error is None + assert edit.occurrences == 1 + + read_after = backend.read("/notes/test.md", offset=0, limit=20) + assert "updated" in read_after + + +def test_local_backend_blocks_path_escape(tmp_path: Path): + backend = LocalFolderBackend(str(tmp_path)) + + result = backend.write("/../../etc/passwd", "bad") + assert result.error is not None + assert "Invalid path" in result.error + + +def test_local_backend_glob_and_grep(tmp_path: Path): + backend = LocalFolderBackend(str(tmp_path)) + (tmp_path / "docs").mkdir() + (tmp_path / "docs" / "a.txt").write_text("hello world\n") + (tmp_path / "docs" / "b.md").write_text("hello markdown\n") + + infos = backend.glob_info("**/*.txt", "/docs") + paths = {info["path"] for info in infos} + assert "/docs/a.txt" in paths + + grep = backend.grep_raw("hello", "/docs", "*.md") + assert isinstance(grep, list) + assert any(match["path"] == "/docs/b.md" for match in grep) + + +def 
test_local_backend_read_raw_returns_exact_content(tmp_path: Path): + backend = LocalFolderBackend(str(tmp_path)) + expected = "# Title\n\nline 1\nline 2\n" + write = backend.write("/notes/raw.md", expected) + assert write.error is None + + raw = backend.read_raw("/notes/raw.md") + assert raw == expected From 1eadecee235924c707221beed860d43994583830 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 15:45:33 +0530 Subject: [PATCH 082/113] feat(new-chat): integrate filesystem flow into agent pipeline --- .../app/agents/new_chat/chat_deepagent.py | 13 + .../app/agents/new_chat/context.py | 13 +- .../agents/new_chat/middleware/filesystem.py | 223 ++++++++++++++++-- .../new_chat/middleware/knowledge_search.py | 6 + surfsense_backend/app/app.py | 18 ++ surfsense_backend/app/config/__init__.py | 3 + .../app/routes/new_chat_routes.py | 80 +++++++ surfsense_backend/app/schemas/new_chat.py | 9 + .../app/tasks/chat/stream_new_chat.py | 186 ++++++++++++++- .../unit/test_stream_new_chat_contract.py | 48 ++++ 10 files changed, 574 insertions(+), 25 deletions(-) create mode 100644 surfsense_backend/tests/unit/test_stream_new_chat_contract.py diff --git a/surfsense_backend/app/agents/new_chat/chat_deepagent.py b/surfsense_backend/app/agents/new_chat/chat_deepagent.py index a901a7519..ff8215eff 100644 --- a/surfsense_backend/app/agents/new_chat/chat_deepagent.py +++ b/surfsense_backend/app/agents/new_chat/chat_deepagent.py @@ -33,9 +33,12 @@ from langgraph.types import Checkpointer from sqlalchemy.ext.asyncio import AsyncSession from app.agents.new_chat.context import SurfSenseContextSchema +from app.agents.new_chat.filesystem_backends import build_backend_resolver +from app.agents.new_chat.filesystem_selection import FilesystemSelection from app.agents.new_chat.llm_config import AgentConfig from app.agents.new_chat.middleware import ( DedupHITLToolCallsMiddleware, + FileIntentMiddleware, 
KnowledgeBaseSearchMiddleware, MemoryInjectionMiddleware, SurfSenseFilesystemMiddleware, @@ -164,6 +167,7 @@ async def create_surfsense_deep_agent( thread_visibility: ChatVisibility | None = None, mentioned_document_ids: list[int] | None = None, anon_session_id: str | None = None, + filesystem_selection: FilesystemSelection | None = None, ): """ Create a SurfSense deep agent with configurable tools and prompts. @@ -238,6 +242,8 @@ async def create_surfsense_deep_agent( ) """ _t_agent_total = time.perf_counter() + filesystem_selection = filesystem_selection or FilesystemSelection() + backend_resolver = build_backend_resolver(filesystem_selection) # Discover available connectors and document types for this search space available_connectors: list[str] | None = None @@ -439,7 +445,10 @@ async def create_surfsense_deep_agent( gp_middleware = [ TodoListMiddleware(), _memory_middleware, + FileIntentMiddleware(llm=llm), SurfSenseFilesystemMiddleware( + backend=backend_resolver, + filesystem_mode=filesystem_selection.mode, search_space_id=search_space_id, created_by_id=user_id, thread_id=thread_id, @@ -460,15 +469,19 @@ async def create_surfsense_deep_agent( deepagent_middleware = [ TodoListMiddleware(), _memory_middleware, + FileIntentMiddleware(llm=llm), KnowledgeBaseSearchMiddleware( llm=llm, search_space_id=search_space_id, + filesystem_mode=filesystem_selection.mode, available_connectors=available_connectors, available_document_types=available_document_types, mentioned_document_ids=mentioned_document_ids, anon_session_id=anon_session_id, ), SurfSenseFilesystemMiddleware( + backend=backend_resolver, + filesystem_mode=filesystem_selection.mode, search_space_id=search_space_id, created_by_id=user_id, thread_id=thread_id, diff --git a/surfsense_backend/app/agents/new_chat/context.py b/surfsense_backend/app/agents/new_chat/context.py index da113adf4..c1fe45aaa 100644 --- a/surfsense_backend/app/agents/new_chat/context.py +++ 
b/surfsense_backend/app/agents/new_chat/context.py @@ -4,7 +4,15 @@ Context schema definitions for SurfSense agents. This module defines the custom state schema used by the SurfSense deep agent. """ -from typing import TypedDict +from typing import NotRequired, TypedDict + + +class FileOperationContractState(TypedDict): + intent: str + confidence: float + suggested_path: str + timestamp: str + turn_id: str class SurfSenseContextSchema(TypedDict): @@ -24,5 +32,8 @@ class SurfSenseContextSchema(TypedDict): """ search_space_id: int + file_operation_contract: NotRequired[FileOperationContractState] + turn_id: NotRequired[str] + request_id: NotRequired[str] # These are runtime-injected and won't be serialized # db_session and connector_service are passed when invoking the agent diff --git a/surfsense_backend/app/agents/new_chat/middleware/filesystem.py b/surfsense_backend/app/agents/new_chat/middleware/filesystem.py index bcd544d61..0fa2085fc 100644 --- a/surfsense_backend/app/agents/new_chat/middleware/filesystem.py +++ b/surfsense_backend/app/agents/new_chat/middleware/filesystem.py @@ -32,6 +32,7 @@ from app.agents.new_chat.sandbox import ( get_or_create_sandbox, is_sandbox_enabled, ) +from app.agents.new_chat.filesystem_selection import FilesystemMode from app.db import Chunk, Document, DocumentType, Folder, shielded_async_session from app.indexing_pipeline.document_chunker import chunk_text from app.utils.document_converters import ( @@ -50,6 +51,8 @@ SURFSENSE_FILESYSTEM_SYSTEM_PROMPT = """## Following Conventions - Read files before editing — understand existing content before making changes. - Mimic existing style, naming conventions, and patterns. +- Never claim a file was created/updated unless filesystem tool output confirms success. +- If a file write/edit fails, explicitly report the failure. ## Filesystem Tools @@ -109,13 +112,20 @@ Usage: - Use chunk IDs (``) as citations in answers. 
""" -SURFSENSE_WRITE_FILE_TOOL_DESCRIPTION = """Writes a new file to the in-memory filesystem (session-only). +SURFSENSE_WRITE_FILE_TOOL_DESCRIPTION = """Writes a new text file to the in-memory filesystem (session-only). Use this to create scratch/working files during the conversation. Files created here are ephemeral and will not be saved to the user's knowledge base. To permanently save a document to the user's knowledge base, use the `save_document` tool instead. + +Supported outputs include common LLM-friendly text formats like markdown, json, +yaml, csv, xml, html, css, sql, and code files. + +When creating content from open-ended prompts, produce concrete and useful text, +not placeholders. Avoid adding dates/timestamps unless the user explicitly asks +for them. """ SURFSENSE_EDIT_FILE_TOOL_DESCRIPTION = """Performs exact string replacements in files. @@ -182,11 +192,14 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): def __init__( self, *, + backend: Any = None, + filesystem_mode: FilesystemMode = FilesystemMode.CLOUD, search_space_id: int | None = None, created_by_id: str | None = None, thread_id: int | str | None = None, tool_token_limit_before_evict: int | None = 20000, ) -> None: + self._filesystem_mode = filesystem_mode self._search_space_id = search_space_id self._created_by_id = created_by_id self._thread_id = thread_id @@ -204,8 +217,15 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): " extract the data, write it as a clean file (CSV, JSON, etc.)," " and then run your code against it." ) + if filesystem_mode == FilesystemMode.DESKTOP_LOCAL_FOLDER: + system_prompt += ( + "\n\n## Local Folder Mode" + "\n\nThis chat is running in desktop local-folder mode." + " Keep all file operations local. Do not use save_document." 
+ ) super().__init__( + backend=backend, system_prompt=system_prompt, custom_tool_descriptions={ "ls": SURFSENSE_LIST_FILES_TOOL_DESCRIPTION, @@ -219,7 +239,8 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): max_execute_timeout=self._MAX_EXECUTE_TIMEOUT, ) self.tools = [t for t in self.tools if t.name != "execute"] - self.tools.append(self._create_save_document_tool()) + if self._should_persist_documents(): + self.tools.append(self._create_save_document_tool()) if self._sandbox_available: self.tools.append(self._create_execute_code_tool()) @@ -637,15 +658,25 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): runtime: ToolRuntime[None, FilesystemState], ) -> Command | str: resolved_backend = self._get_backend(runtime) + target_path = self._resolve_write_target_path(file_path, runtime) try: - validated_path = validate_path(file_path) + validated_path = validate_path(target_path) except ValueError as exc: return f"Error: {exc}" res: WriteResult = resolved_backend.write(validated_path, content) if res.error: return res.error + verify_error = self._verify_written_content_sync( + backend=resolved_backend, + path=validated_path, + expected_content=content, + ) + if verify_error: + return verify_error - if not self._is_kb_document(validated_path): + if self._should_persist_documents() and not self._is_kb_document( + validated_path + ): persist_result = self._run_async_blocking( self._persist_new_document( file_path=validated_path, content=content @@ -682,15 +713,25 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): runtime: ToolRuntime[None, FilesystemState], ) -> Command | str: resolved_backend = self._get_backend(runtime) + target_path = self._resolve_write_target_path(file_path, runtime) try: - validated_path = validate_path(file_path) + validated_path = validate_path(target_path) except ValueError as exc: return f"Error: {exc}" res: WriteResult = await resolved_backend.awrite(validated_path, content) if res.error: return res.error + 
verify_error = await self._verify_written_content_async( + backend=resolved_backend, + path=validated_path, + expected_content=content, + ) + if verify_error: + return verify_error - if not self._is_kb_document(validated_path): + if self._should_persist_documents() and not self._is_kb_document( + validated_path + ): persist_result = await self._persist_new_document( file_path=validated_path, content=content, @@ -726,6 +767,124 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): """Return True for paths under /documents/ (KB-sourced, XML-wrapped).""" return path.startswith("/documents/") + def _should_persist_documents(self) -> bool: + """Only cloud mode persists file content to Document/Chunk tables.""" + return self._filesystem_mode == FilesystemMode.CLOUD + + @staticmethod + def _get_contract_suggested_path(runtime: ToolRuntime[None, FilesystemState]) -> str: + contract = runtime.state.get("file_operation_contract") or {} + suggested = contract.get("suggested_path") + if isinstance(suggested, str) and suggested.strip(): + return suggested.strip() + return "/notes.md" + + def _resolve_write_target_path( + self, + file_path: str, + runtime: ToolRuntime[None, FilesystemState], + ) -> str: + candidate = file_path.strip() + if not candidate: + return self._get_contract_suggested_path(runtime) + if not candidate.startswith("/"): + return f"/{candidate.lstrip('/')}" + return candidate + + @staticmethod + def _is_error_text(value: str) -> bool: + return value.startswith("Error:") + + @staticmethod + def _read_for_verification_sync(backend: Any, path: str) -> str: + read_raw = getattr(backend, "read_raw", None) + if callable(read_raw): + return read_raw(path) + return backend.read(path, offset=0, limit=200000) + + @staticmethod + async def _read_for_verification_async(backend: Any, path: str) -> str: + aread_raw = getattr(backend, "aread_raw", None) + if callable(aread_raw): + return await aread_raw(path) + return await backend.aread(path, offset=0, 
limit=200000) + + def _verify_written_content_sync( + self, + *, + backend: Any, + path: str, + expected_content: str, + ) -> str | None: + actual = self._read_for_verification_sync(backend, path) + if self._is_error_text(actual): + return f"Error: could not verify written file '{path}'." + if actual.rstrip() != expected_content.rstrip(): + return ( + "Error: file write verification failed; expected content was not fully written " + f"to '{path}'." + ) + return None + + async def _verify_written_content_async( + self, + *, + backend: Any, + path: str, + expected_content: str, + ) -> str | None: + actual = await self._read_for_verification_async(backend, path) + if self._is_error_text(actual): + return f"Error: could not verify written file '{path}'." + if actual.rstrip() != expected_content.rstrip(): + return ( + "Error: file write verification failed; expected content was not fully written " + f"to '{path}'." + ) + return None + + def _verify_edited_content_sync( + self, + *, + backend: Any, + path: str, + new_string: str, + ) -> tuple[str | None, str | None]: + updated_content = self._read_for_verification_sync(backend, path) + if self._is_error_text(updated_content): + return ( + f"Error: could not verify edited file '{path}'.", + None, + ) + if new_string and new_string not in updated_content: + return ( + "Error: edit verification failed; updated content was not found in " + f"'{path}'.", + None, + ) + return None, updated_content + + async def _verify_edited_content_async( + self, + *, + backend: Any, + path: str, + new_string: str, + ) -> tuple[str | None, str | None]: + updated_content = await self._read_for_verification_async(backend, path) + if self._is_error_text(updated_content): + return ( + f"Error: could not verify edited file '{path}'.", + None, + ) + if new_string and new_string not in updated_content: + return ( + "Error: edit verification failed; updated content was not found in " + f"'{path}'.", + None, + ) + return None, updated_content + def 
_create_edit_file_tool(self) -> BaseTool: """Create edit_file with DB persistence (skipped for KB documents).""" tool_description = ( @@ -754,8 +913,9 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): ] = False, ) -> Command | str: resolved_backend = self._get_backend(runtime) + target_path = self._resolve_write_target_path(file_path, runtime) try: - validated_path = validate_path(file_path) + validated_path = validate_path(target_path) except ValueError as exc: return f"Error: {exc}" res: EditResult = resolved_backend.edit( @@ -767,13 +927,22 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): if res.error: return res.error - if not self._is_kb_document(validated_path): - read_result = resolved_backend.read( - validated_path, offset=0, limit=200000 - ) - if read_result.error or read_result.file_data is None: - return f"Error: could not reload edited file '{validated_path}' for persistence." - updated_content = read_result.file_data["content"] + verify_error, updated_content = self._verify_edited_content_sync( + backend=resolved_backend, + path=validated_path, + new_string=new_string, + ) + if verify_error: + return verify_error + + if self._should_persist_documents() and not self._is_kb_document( + validated_path + ): + if updated_content is None: + return ( + f"Error: could not reload edited file '{validated_path}' for " + "persistence." 
+ ) persist_result = self._run_async_blocking( self._persist_edited_document( file_path=validated_path, @@ -818,8 +987,9 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): ] = False, ) -> Command | str: resolved_backend = self._get_backend(runtime) + target_path = self._resolve_write_target_path(file_path, runtime) try: - validated_path = validate_path(file_path) + validated_path = validate_path(target_path) except ValueError as exc: return f"Error: {exc}" res: EditResult = await resolved_backend.aedit( @@ -831,13 +1001,22 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): if res.error: return res.error - if not self._is_kb_document(validated_path): - read_result = await resolved_backend.aread( - validated_path, offset=0, limit=200000 - ) - if read_result.error or read_result.file_data is None: - return f"Error: could not reload edited file '{validated_path}' for persistence." - updated_content = read_result.file_data["content"] + verify_error, updated_content = await self._verify_edited_content_async( + backend=resolved_backend, + path=validated_path, + new_string=new_string, + ) + if verify_error: + return verify_error + + if self._should_persist_documents() and not self._is_kb_document( + validated_path + ): + if updated_content is None: + return ( + f"Error: could not reload edited file '{validated_path}' for " + "persistence." 
+ ) persist_error = await self._persist_edited_document( file_path=validated_path, updated_content=updated_content, diff --git a/surfsense_backend/app/agents/new_chat/middleware/knowledge_search.py b/surfsense_backend/app/agents/new_chat/middleware/knowledge_search.py index c7bbe62e0..51378a013 100644 --- a/surfsense_backend/app/agents/new_chat/middleware/knowledge_search.py +++ b/surfsense_backend/app/agents/new_chat/middleware/knowledge_search.py @@ -28,6 +28,7 @@ from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession from app.agents.new_chat.utils import parse_date_or_datetime, resolve_date_range +from app.agents.new_chat.filesystem_selection import FilesystemMode from app.db import ( NATIVE_TO_LEGACY_DOCTYPE, Chunk, @@ -857,6 +858,7 @@ class KnowledgeBaseSearchMiddleware(AgentMiddleware): # type: ignore[type-arg] *, llm: BaseChatModel | None = None, search_space_id: int, + filesystem_mode: FilesystemMode = FilesystemMode.CLOUD, available_connectors: list[str] | None = None, available_document_types: list[str] | None = None, top_k: int = 10, @@ -865,6 +867,7 @@ class KnowledgeBaseSearchMiddleware(AgentMiddleware): # type: ignore[type-arg] ) -> None: self.llm = llm self.search_space_id = search_space_id + self.filesystem_mode = filesystem_mode self.available_connectors = available_connectors self.available_document_types = available_document_types self.top_k = top_k @@ -996,6 +999,9 @@ class KnowledgeBaseSearchMiddleware(AgentMiddleware): # type: ignore[type-arg] messages = state.get("messages") or [] if not messages: return None + if self.filesystem_mode != FilesystemMode.CLOUD: + # Local-folder mode should not seed cloud KB documents into filesystem. 
+ return None last_human = None for msg in reversed(messages): diff --git a/surfsense_backend/app/app.py b/surfsense_backend/app/app.py index a1795853a..016c2de42 100644 --- a/surfsense_backend/app/app.py +++ b/surfsense_backend/app/app.py @@ -141,6 +141,15 @@ def _http_exception_handler(request: Request, exc: HTTPException) -> JSONRespons exc.status_code, message, ) + elif exc.status_code >= 400: + _error_logger.warning( + "[%s] %s %s - HTTPException %d: %s", + rid, + request.method, + request.url.path, + exc.status_code, + message, + ) if should_sanitize: message = GENERIC_5XX_MESSAGE err_code = "INTERNAL_ERROR" @@ -170,6 +179,15 @@ def _http_exception_handler(request: Request, exc: HTTPException) -> JSONRespons exc.status_code, detail, ) + elif exc.status_code >= 400: + _error_logger.warning( + "[%s] %s %s - HTTPException %d: %s", + rid, + request.method, + request.url.path, + exc.status_code, + detail, + ) if should_sanitize: detail = GENERIC_5XX_MESSAGE code = _status_to_code(exc.status_code, detail) diff --git a/surfsense_backend/app/config/__init__.py b/surfsense_backend/app/config/__init__.py index a515e9044..bd97d2bb1 100644 --- a/surfsense_backend/app/config/__init__.py +++ b/surfsense_backend/app/config/__init__.py @@ -339,6 +339,9 @@ class Config: # self-hosted: Full access to local file system connectors (Obsidian, etc.) 
# cloud: Only cloud-based connectors available DEPLOYMENT_MODE = os.getenv("SURFSENSE_DEPLOYMENT_MODE", "self-hosted") + ENABLE_DESKTOP_LOCAL_FILESYSTEM = ( + os.getenv("ENABLE_DESKTOP_LOCAL_FILESYSTEM", "FALSE").upper() == "TRUE" + ) @classmethod def is_self_hosted(cls) -> bool: diff --git a/surfsense_backend/app/routes/new_chat_routes.py b/surfsense_backend/app/routes/new_chat_routes.py index b914b297e..5e8e24c4a 100644 --- a/surfsense_backend/app/routes/new_chat_routes.py +++ b/surfsense_backend/app/routes/new_chat_routes.py @@ -22,6 +22,12 @@ from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select from sqlalchemy.orm import selectinload +from app.agents.new_chat.filesystem_selection import ( + ClientPlatform, + FilesystemMode, + FilesystemSelection, +) +from app.config import config from app.db import ( ChatComment, ChatVisibility, @@ -63,6 +69,51 @@ _background_tasks: set[asyncio.Task] = set() router = APIRouter() +def _resolve_filesystem_selection( + *, + mode: str, + client_platform: str, + local_root: str | None, +) -> FilesystemSelection: + """Validate and normalize filesystem mode settings from request payload.""" + try: + resolved_mode = FilesystemMode(mode) + except ValueError as exc: + raise HTTPException(status_code=400, detail="Invalid filesystem_mode") from exc + try: + resolved_platform = ClientPlatform(client_platform) + except ValueError as exc: + raise HTTPException(status_code=400, detail="Invalid client_platform") from exc + + if resolved_mode == FilesystemMode.DESKTOP_LOCAL_FOLDER: + if not config.ENABLE_DESKTOP_LOCAL_FILESYSTEM: + raise HTTPException( + status_code=400, + detail="Desktop local filesystem mode is disabled on this deployment.", + ) + if resolved_platform != ClientPlatform.DESKTOP: + raise HTTPException( + status_code=400, + detail="desktop_local_folder mode is only available on desktop runtime.", + ) + if not local_root or not local_root.strip(): + raise HTTPException( + status_code=400, + 
detail="local_filesystem_root is required for desktop_local_folder mode.", + ) + return FilesystemSelection( + mode=resolved_mode, + client_platform=resolved_platform, + local_root_path=local_root.strip(), + ) + + return FilesystemSelection( + mode=FilesystemMode.CLOUD, + client_platform=resolved_platform, + local_root_path=None, + ) + + def _try_delete_sandbox(thread_id: int) -> None: """Fire-and-forget sandbox + local file deletion so the HTTP response isn't blocked.""" from app.agents.new_chat.sandbox import ( @@ -474,6 +525,11 @@ async def get_thread_messages( # Check thread-level access based on visibility await check_thread_access(session, thread, user) + filesystem_selection = _resolve_filesystem_selection( + mode=request.filesystem_mode, + client_platform=request.client_platform, + local_root=request.local_filesystem_root, + ) # Get messages with their authors and token usage loaded messages_result = await session.execute( @@ -1098,6 +1154,7 @@ async def list_agent_tools( @router.post("/new_chat") async def handle_new_chat( request: NewChatRequest, + http_request: Request, session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): @@ -1133,6 +1190,11 @@ async def handle_new_chat( # Check thread-level access based on visibility await check_thread_access(session, thread, user) + filesystem_selection = _resolve_filesystem_selection( + mode=request.filesystem_mode, + client_platform=request.client_platform, + local_root=request.local_filesystem_root, + ) # Get search space to check LLM config preferences search_space_result = await session.execute( @@ -1175,6 +1237,8 @@ async def handle_new_chat( thread_visibility=thread.visibility, current_user_display_name=user.display_name or "A team member", disabled_tools=request.disabled_tools, + filesystem_selection=filesystem_selection, + request_id=getattr(http_request.state, "request_id", "unknown"), ), media_type="text/event-stream", headers={ @@ -1202,6 +1266,7 @@ async def 
handle_new_chat( async def regenerate_response( thread_id: int, request: RegenerateRequest, + http_request: Request, session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): @@ -1247,6 +1312,11 @@ async def regenerate_response( # Check thread-level access based on visibility await check_thread_access(session, thread, user) + filesystem_selection = _resolve_filesystem_selection( + mode=request.filesystem_mode, + client_platform=request.client_platform, + local_root=request.local_filesystem_root, + ) # Get the checkpointer and state history checkpointer = await get_checkpointer() @@ -1412,6 +1482,8 @@ async def regenerate_response( thread_visibility=thread.visibility, current_user_display_name=user.display_name or "A team member", disabled_tools=request.disabled_tools, + filesystem_selection=filesystem_selection, + request_id=getattr(http_request.state, "request_id", "unknown"), ): yield chunk streaming_completed = True @@ -1477,6 +1549,7 @@ async def regenerate_response( async def resume_chat( thread_id: int, request: ResumeRequest, + http_request: Request, session: AsyncSession = Depends(get_async_session), user: User = Depends(current_active_user), ): @@ -1498,6 +1571,11 @@ async def resume_chat( ) await check_thread_access(session, thread, user) + filesystem_selection = _resolve_filesystem_selection( + mode=request.filesystem_mode, + client_platform=request.client_platform, + local_root=request.local_filesystem_root, + ) search_space_result = await session.execute( select(SearchSpace).filter(SearchSpace.id == request.search_space_id) @@ -1526,6 +1604,8 @@ async def resume_chat( user_id=str(user.id), llm_config_id=llm_config_id, thread_visibility=thread.visibility, + filesystem_selection=filesystem_selection, + request_id=getattr(http_request.state, "request_id", "unknown"), ), media_type="text/event-stream", headers={ diff --git a/surfsense_backend/app/schemas/new_chat.py b/surfsense_backend/app/schemas/new_chat.py index 
e523657a4..593127c7e 100644 --- a/surfsense_backend/app/schemas/new_chat.py +++ b/surfsense_backend/app/schemas/new_chat.py @@ -184,6 +184,9 @@ class NewChatRequest(BaseModel): disabled_tools: list[str] | None = ( None # Optional list of tool names the user has disabled from the UI ) + filesystem_mode: Literal["cloud", "desktop_local_folder"] = "cloud" + client_platform: Literal["web", "desktop"] = "web" + local_filesystem_root: str | None = None class RegenerateRequest(BaseModel): @@ -204,6 +207,9 @@ class RegenerateRequest(BaseModel): mentioned_document_ids: list[int] | None = None mentioned_surfsense_doc_ids: list[int] | None = None disabled_tools: list[str] | None = None + filesystem_mode: Literal["cloud", "desktop_local_folder"] = "cloud" + client_platform: Literal["web", "desktop"] = "web" + local_filesystem_root: str | None = None # ============================================================================= @@ -227,6 +233,9 @@ class ResumeDecision(BaseModel): class ResumeRequest(BaseModel): search_space_id: int decisions: list[ResumeDecision] + filesystem_mode: Literal["cloud", "desktop_local_folder"] = "cloud" + client_platform: Literal["web", "desktop"] = "web" + local_filesystem_root: str | None = None # ============================================================================= diff --git a/surfsense_backend/app/tasks/chat/stream_new_chat.py b/surfsense_backend/app/tasks/chat/stream_new_chat.py index 4810f02e6..d551f3fd5 100644 --- a/surfsense_backend/app/tasks/chat/stream_new_chat.py +++ b/surfsense_backend/app/tasks/chat/stream_new_chat.py @@ -30,6 +30,8 @@ from sqlalchemy.orm import selectinload from app.agents.new_chat.chat_deepagent import create_surfsense_deep_agent from app.agents.new_chat.checkpointer import get_checkpointer +from app.agents.new_chat.filesystem_selection import FilesystemSelection +from app.config import config from app.agents.new_chat.llm_config import ( AgentConfig, create_chat_litellm_from_agent_config, @@ -145,6 +147,85 
@@ class StreamResult: interrupt_value: dict[str, Any] | None = None sandbox_files: list[str] = field(default_factory=list) agent_called_update_memory: bool = False + request_id: str | None = None + turn_id: str = "" + filesystem_mode: str = "cloud" + client_platform: str = "web" + intent_detected: str = "chat_only" + intent_confidence: float = 0.0 + write_attempted: bool = False + write_succeeded: bool = False + verification_succeeded: bool = False + commit_gate_passed: bool = True + commit_gate_reason: str = "" + + +def _safe_float(value: Any, default: float = 0.0) -> float: + try: + return float(value) + except (TypeError, ValueError): + return default + + +def _tool_output_to_text(tool_output: Any) -> str: + if isinstance(tool_output, dict): + if isinstance(tool_output.get("result"), str): + return tool_output["result"] + if isinstance(tool_output.get("error"), str): + return tool_output["error"] + return json.dumps(tool_output, ensure_ascii=False) + return str(tool_output) + + +def _tool_output_has_error(tool_output: Any) -> bool: + if isinstance(tool_output, dict): + if tool_output.get("error"): + return True + result = tool_output.get("result") + if isinstance(result, str) and result.strip().lower().startswith("error:"): + return True + return False + if isinstance(tool_output, str): + return tool_output.strip().lower().startswith("error:") + return False + + +def _contract_enforcement_active(result: StreamResult) -> bool: + # Keep policy deterministic with no env-driven progression modes: + # enforce the file-operation contract only in desktop local-folder mode. 
+ return result.filesystem_mode == "desktop_local_folder" + + +def _evaluate_file_contract_outcome(result: StreamResult) -> tuple[bool, str]: + if result.intent_detected != "file_write": + return True, "" + if not result.write_attempted: + return False, "no_write_attempt" + if not result.write_succeeded: + return False, "write_failed" + if not result.verification_succeeded: + return False, "verification_failed" + return True, "" + + +def _log_file_contract(stage: str, result: StreamResult, **extra: Any) -> None: + payload: dict[str, Any] = { + "stage": stage, + "request_id": result.request_id or "unknown", + "turn_id": result.turn_id or "unknown", + "chat_id": result.turn_id.split(":", 1)[0] if ":" in result.turn_id else "unknown", + "filesystem_mode": result.filesystem_mode, + "client_platform": result.client_platform, + "intent_detected": result.intent_detected, + "intent_confidence": result.intent_confidence, + "write_attempted": result.write_attempted, + "write_succeeded": result.write_succeeded, + "verification_succeeded": result.verification_succeeded, + "commit_gate_passed": result.commit_gate_passed, + "commit_gate_reason": result.commit_gate_reason or None, + } + payload.update(extra) + _perf_log.info("[file_operation_contract] %s", json.dumps(payload, ensure_ascii=False)) async def _stream_agent_events( @@ -239,6 +320,8 @@ async def _stream_agent_events( tool_name = event.get("name", "unknown_tool") run_id = event.get("run_id", "") tool_input = event.get("data", {}).get("input", {}) + if tool_name in ("write_file", "edit_file"): + result.write_attempted = True if current_text_id is not None: yield streaming_service.format_text_end(current_text_id) @@ -514,6 +597,14 @@ async def _stream_agent_events( else: tool_output = {"result": str(raw_output) if raw_output else "completed"} + if tool_name in ("write_file", "edit_file"): + if _tool_output_has_error(tool_output): + # Keep successful evidence if a previous write/edit in this turn succeeded. 
+ pass + else: + result.write_succeeded = True + result.verification_succeeded = True + tool_call_id = f"call_{run_id[:32]}" if run_id else "call_unknown" original_step_id = tool_step_ids.get( run_id, f"{step_prefix}-unknown-{run_id[:8]}" @@ -1143,10 +1234,59 @@ async def _stream_agent_events( if completion_event: yield completion_event + state = await agent.aget_state(config) + state_values = getattr(state, "values", {}) or {} + contract_state = state_values.get("file_operation_contract") or {} + contract_turn_id = contract_state.get("turn_id") + current_turn_id = config.get("configurable", {}).get("turn_id", "") + intent_value = contract_state.get("intent") + if ( + isinstance(intent_value, str) + and intent_value in ("chat_only", "file_write", "file_read") + and contract_turn_id == current_turn_id + ): + result.intent_detected = intent_value + if ( + isinstance(intent_value, str) + and intent_value in ( + "chat_only", + "file_write", + "file_read", + ) + and contract_turn_id != current_turn_id + ): + # Ignore stale intent contracts from previous turns/checkpoints. + result.intent_detected = "chat_only" + result.intent_confidence = ( + _safe_float(contract_state.get("confidence"), default=0.0) + if contract_turn_id == current_turn_id + else 0.0 + ) + + if result.intent_detected == "file_write": + result.commit_gate_passed, result.commit_gate_reason = ( + _evaluate_file_contract_outcome(result) + ) + if not result.commit_gate_passed: + if _contract_enforcement_active(result): + gate_notice = ( + "I could not complete the requested file write because no successful " + "write_file/edit_file operation was confirmed." 
+ ) + gate_text_id = streaming_service.generate_text_id() + yield streaming_service.format_text_start(gate_text_id) + yield streaming_service.format_text_delta(gate_text_id, gate_notice) + yield streaming_service.format_text_end(gate_text_id) + yield streaming_service.format_terminal_info(gate_notice, "error") + accumulated_text = gate_notice + else: + result.commit_gate_passed = True + result.commit_gate_reason = "" + result.accumulated_text = accumulated_text result.agent_called_update_memory = called_update_memory + _log_file_contract("turn_outcome", result) - state = await agent.aget_state(config) is_interrupted = state.tasks and any(task.interrupts for task in state.tasks) if is_interrupted: result.is_interrupted = True @@ -1167,6 +1307,8 @@ async def stream_new_chat( thread_visibility: ChatVisibility | None = None, current_user_display_name: str | None = None, disabled_tools: list[str] | None = None, + filesystem_selection: FilesystemSelection | None = None, + request_id: str | None = None, ) -> AsyncGenerator[str, None]: """ Stream chat responses from the new SurfSense deep agent. 
@@ -1194,6 +1336,20 @@ async def stream_new_chat( streaming_service = VercelStreamingService() stream_result = StreamResult() _t_total = time.perf_counter() + fs_mode = filesystem_selection.mode.value if filesystem_selection else "cloud" + fs_platform = ( + filesystem_selection.client_platform.value if filesystem_selection else "web" + ) + stream_result.request_id = request_id + stream_result.turn_id = f"{chat_id}:{int(time.time() * 1000)}" + stream_result.filesystem_mode = fs_mode + stream_result.client_platform = fs_platform + _log_file_contract("turn_start", stream_result) + _perf_log.info( + "[stream_new_chat] filesystem_mode=%s client_platform=%s", + fs_mode, + fs_platform, + ) log_system_snapshot("stream_new_chat_START") from app.services.token_tracking_service import start_turn @@ -1329,6 +1485,7 @@ async def stream_new_chat( thread_visibility=visibility, disabled_tools=disabled_tools, mentioned_document_ids=mentioned_document_ids, + filesystem_selection=filesystem_selection, ) _perf_log.info( "[stream_new_chat] Agent created in %.3fs", time.perf_counter() - _t0 @@ -1435,6 +1592,8 @@ async def stream_new_chat( # We will use this to simulate group chat functionality in the future "messages": langchain_messages, "search_space_id": search_space_id, + "request_id": request_id or "unknown", + "turn_id": stream_result.turn_id, } _perf_log.info( @@ -1464,6 +1623,8 @@ async def stream_new_chat( # Configure LangGraph with thread_id for memory # If checkpoint_id is provided, fork from that checkpoint (for edit/reload) configurable = {"thread_id": str(chat_id)} + configurable["request_id"] = request_id or "unknown" + configurable["turn_id"] = stream_result.turn_id if checkpoint_id: configurable["checkpoint_id"] = checkpoint_id @@ -1871,10 +2032,26 @@ async def stream_resume_chat( user_id: str | None = None, llm_config_id: int = -1, thread_visibility: ChatVisibility | None = None, + filesystem_selection: FilesystemSelection | None = None, + request_id: str | None = 
None, ) -> AsyncGenerator[str, None]: streaming_service = VercelStreamingService() stream_result = StreamResult() _t_total = time.perf_counter() + fs_mode = filesystem_selection.mode.value if filesystem_selection else "cloud" + fs_platform = ( + filesystem_selection.client_platform.value if filesystem_selection else "web" + ) + stream_result.request_id = request_id + stream_result.turn_id = f"{chat_id}:{int(time.time() * 1000)}" + stream_result.filesystem_mode = fs_mode + stream_result.client_platform = fs_platform + _log_file_contract("turn_start", stream_result) + _perf_log.info( + "[stream_resume] filesystem_mode=%s client_platform=%s", + fs_mode, + fs_platform, + ) from app.services.token_tracking_service import start_turn @@ -1991,6 +2168,7 @@ async def stream_resume_chat( agent_config=agent_config, firecrawl_api_key=firecrawl_api_key, thread_visibility=visibility, + filesystem_selection=filesystem_selection, ) _perf_log.info( "[stream_resume] Agent created in %.3fs", time.perf_counter() - _t0 @@ -2009,7 +2187,11 @@ async def stream_resume_chat( from langgraph.types import Command config = { - "configurable": {"thread_id": str(chat_id)}, + "configurable": { + "thread_id": str(chat_id), + "request_id": request_id or "unknown", + "turn_id": stream_result.turn_id, + }, "recursion_limit": 80, } diff --git a/surfsense_backend/tests/unit/test_stream_new_chat_contract.py b/surfsense_backend/tests/unit/test_stream_new_chat_contract.py new file mode 100644 index 000000000..f4adc3d73 --- /dev/null +++ b/surfsense_backend/tests/unit/test_stream_new_chat_contract.py @@ -0,0 +1,48 @@ +import pytest + +from app.tasks.chat.stream_new_chat import ( + StreamResult, + _contract_enforcement_active, + _evaluate_file_contract_outcome, + _tool_output_has_error, +) + +pytestmark = pytest.mark.unit + + +def test_tool_output_error_detection(): + assert _tool_output_has_error("Error: failed to write file") + assert _tool_output_has_error({"error": "boom"}) + assert 
_tool_output_has_error({"result": "Error: disk is full"}) + assert not _tool_output_has_error({"result": "Updated file /notes.md"}) + + +def test_file_write_contract_outcome_reasons(): + result = StreamResult(intent_detected="file_write") + passed, reason = _evaluate_file_contract_outcome(result) + assert not passed + assert reason == "no_write_attempt" + + result.write_attempted = True + passed, reason = _evaluate_file_contract_outcome(result) + assert not passed + assert reason == "write_failed" + + result.write_succeeded = True + passed, reason = _evaluate_file_contract_outcome(result) + assert not passed + assert reason == "verification_failed" + + result.verification_succeeded = True + passed, reason = _evaluate_file_contract_outcome(result) + assert passed + assert reason == "" + + +def test_contract_enforcement_local_only(): + result = StreamResult(filesystem_mode="desktop_local_folder") + assert _contract_enforcement_active(result) + + result.filesystem_mode = "cloud" + assert not _contract_enforcement_active(result) + From 5c3a327a0cedc0717f89515e6cf804c792dd1689 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 15:45:59 +0530 Subject: [PATCH 083/113] feat(desktop): expose agent filesystem IPC APIs --- surfsense_desktop/src/ipc/channels.ts | 4 ++++ surfsense_desktop/src/ipc/handlers.ts | 19 +++++++++++++++++++ surfsense_desktop/src/preload.ts | 8 ++++++++ 3 files changed, 31 insertions(+) diff --git a/surfsense_desktop/src/ipc/channels.ts b/surfsense_desktop/src/ipc/channels.ts index 6731ecbfa..177a05fb4 100644 --- a/surfsense_desktop/src/ipc/channels.ts +++ b/surfsense_desktop/src/ipc/channels.ts @@ -51,4 +51,8 @@ export const IPC_CHANNELS = { ANALYTICS_RESET: 'analytics:reset', ANALYTICS_CAPTURE: 'analytics:capture', ANALYTICS_GET_CONTEXT: 'analytics:get-context', + // Agent filesystem mode + AGENT_FILESYSTEM_GET_SETTINGS: 'agent-filesystem:get-settings', + AGENT_FILESYSTEM_SET_SETTINGS: 
'agent-filesystem:set-settings', + AGENT_FILESYSTEM_PICK_ROOT: 'agent-filesystem:pick-root', } as const; diff --git a/surfsense_desktop/src/ipc/handlers.ts b/surfsense_desktop/src/ipc/handlers.ts index 05c327436..3719a0b0f 100644 --- a/surfsense_desktop/src/ipc/handlers.ts +++ b/surfsense_desktop/src/ipc/handlers.ts @@ -36,6 +36,11 @@ import { resetUser as analyticsReset, trackEvent, } from '../modules/analytics'; +import { + getAgentFilesystemSettings, + pickAgentFilesystemRoot, + setAgentFilesystemSettings, +} from '../modules/agent-filesystem'; let authTokens: { bearer: string; refresh: string } | null = null; @@ -191,4 +196,18 @@ export function registerIpcHandlers(): void { platform: process.platform, }; }); + + ipcMain.handle(IPC_CHANNELS.AGENT_FILESYSTEM_GET_SETTINGS, () => + getAgentFilesystemSettings() + ); + + ipcMain.handle( + IPC_CHANNELS.AGENT_FILESYSTEM_SET_SETTINGS, + (_event, settings: { mode?: 'cloud' | 'desktop_local_folder'; localRootPath?: string | null }) => + setAgentFilesystemSettings(settings) + ); + + ipcMain.handle(IPC_CHANNELS.AGENT_FILESYSTEM_PICK_ROOT, () => + pickAgentFilesystemRoot() + ); } diff --git a/surfsense_desktop/src/preload.ts b/surfsense_desktop/src/preload.ts index 3a69f3239..f75cc240e 100644 --- a/surfsense_desktop/src/preload.ts +++ b/surfsense_desktop/src/preload.ts @@ -101,4 +101,12 @@ contextBridge.exposeInMainWorld('electronAPI', { analyticsCapture: (event: string, properties?: Record) => ipcRenderer.invoke(IPC_CHANNELS.ANALYTICS_CAPTURE, { event, properties }), getAnalyticsContext: () => ipcRenderer.invoke(IPC_CHANNELS.ANALYTICS_GET_CONTEXT), + // Agent filesystem mode + getAgentFilesystemSettings: () => + ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_GET_SETTINGS), + setAgentFilesystemSettings: (settings: { + mode?: "cloud" | "desktop_local_folder"; + localRootPath?: string | null; + }) => ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_SET_SETTINGS, settings), + pickAgentFilesystemRoot: () => 
ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_PICK_ROOT), }); From 4899588cd701f41155962a466373b2cfc89d6123 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 15:46:39 +0530 Subject: [PATCH 084/113] feat(web): connect new chat UI to agent filesystem APIs --- .../src/modules/agent-filesystem.ts | 74 ++++++++++++++ .../new-chat/[[...chat_id]]/page.tsx | 20 ++++ .../components/assistant-ui/thread.tsx | 98 ++++++++++++++++++- surfsense_web/lib/apis/base-api.service.ts | 3 + surfsense_web/types/window.d.ts | 15 +++ 5 files changed, 209 insertions(+), 1 deletion(-) create mode 100644 surfsense_desktop/src/modules/agent-filesystem.ts diff --git a/surfsense_desktop/src/modules/agent-filesystem.ts b/surfsense_desktop/src/modules/agent-filesystem.ts new file mode 100644 index 000000000..44f12a465 --- /dev/null +++ b/surfsense_desktop/src/modules/agent-filesystem.ts @@ -0,0 +1,74 @@ +import { app, dialog } from "electron"; +import { mkdir, readFile, writeFile } from "node:fs/promises"; +import { dirname, join } from "node:path"; + +export type AgentFilesystemMode = "cloud" | "desktop_local_folder"; + +export interface AgentFilesystemSettings { + mode: AgentFilesystemMode; + localRootPath: string | null; + updatedAt: string; +} + +const SETTINGS_FILENAME = "agent-filesystem-settings.json"; + +function getSettingsPath(): string { + return join(app.getPath("userData"), SETTINGS_FILENAME); +} + +function getDefaultSettings(): AgentFilesystemSettings { + return { + mode: "cloud", + localRootPath: null, + updatedAt: new Date().toISOString(), + }; +} + +export async function getAgentFilesystemSettings(): Promise { + try { + const raw = await readFile(getSettingsPath(), "utf8"); + const parsed = JSON.parse(raw) as Partial; + if (parsed.mode !== "cloud" && parsed.mode !== "desktop_local_folder") { + return getDefaultSettings(); + } + return { + mode: parsed.mode, + localRootPath: parsed.localRootPath ?? 
null, + updatedAt: parsed.updatedAt ?? new Date().toISOString(), + }; + } catch { + return getDefaultSettings(); + } +} + +export async function setAgentFilesystemSettings( + settings: Partial> +): Promise { + const current = await getAgentFilesystemSettings(); + const nextMode = + settings.mode === "cloud" || settings.mode === "desktop_local_folder" + ? settings.mode + : current.mode; + const next: AgentFilesystemSettings = { + mode: nextMode, + localRootPath: + settings.localRootPath === undefined ? current.localRootPath : settings.localRootPath, + updatedAt: new Date().toISOString(), + }; + + const settingsPath = getSettingsPath(); + await mkdir(dirname(settingsPath), { recursive: true }); + await writeFile(settingsPath, JSON.stringify(next, null, 2), "utf8"); + return next; +} + +export async function pickAgentFilesystemRoot(): Promise { + const result = await dialog.showOpenDialog({ + title: "Select local folder for Agent Filesystem", + properties: ["openDirectory"], + }); + if (result.canceled || result.filePaths.length === 0) { + return null; + } + return result.filePaths[0] ?? 
null; +} diff --git a/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx index 6c94134b7..bdb77ade2 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx @@ -46,6 +46,7 @@ import { import { useChatSessionStateSync } from "@/hooks/use-chat-session-state"; import { useMessagesSync } from "@/hooks/use-messages-sync"; import { documentsApiService } from "@/lib/apis/documents-api.service"; +import { getAgentFilesystemSelection } from "@/lib/agent-filesystem"; import { getBearerToken } from "@/lib/auth-utils"; import { convertToThreadMessage } from "@/lib/chat/message-utils"; import { @@ -656,6 +657,14 @@ export default function NewChatPage() { try { const backendUrl = process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL || "http://localhost:8000"; + const selection = await getAgentFilesystemSelection(); + if ( + selection.filesystem_mode === "desktop_local_folder" && + !selection.local_filesystem_root + ) { + toast.error("Select a local folder before using Local Folder mode."); + return; + } // Build message history for context const messageHistory = messages @@ -691,6 +700,9 @@ export default function NewChatPage() { chat_id: currentThreadId, user_query: userQuery.trim(), search_space_id: searchSpaceId, + filesystem_mode: selection.filesystem_mode, + client_platform: selection.client_platform, + local_filesystem_root: selection.local_filesystem_root, messages: messageHistory, mentioned_document_ids: hasDocumentIds ? 
mentionedDocumentIds.document_ids : undefined, mentioned_surfsense_doc_ids: hasSurfsenseDocIds @@ -1074,6 +1086,7 @@ export default function NewChatPage() { try { const backendUrl = process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL || "http://localhost:8000"; + const selection = await getAgentFilesystemSelection(); const response = await fetch(`${backendUrl}/api/v1/threads/${resumeThreadId}/resume`, { method: "POST", headers: { @@ -1083,6 +1096,9 @@ export default function NewChatPage() { body: JSON.stringify({ search_space_id: searchSpaceId, decisions, + filesystem_mode: selection.filesystem_mode, + client_platform: selection.client_platform, + local_filesystem_root: selection.local_filesystem_root, }), signal: controller.signal, }); @@ -1406,6 +1422,7 @@ export default function NewChatPage() { ]); try { + const selection = await getAgentFilesystemSelection(); const response = await fetch(getRegenerateUrl(threadId), { method: "POST", headers: { @@ -1416,6 +1433,9 @@ export default function NewChatPage() { search_space_id: searchSpaceId, user_query: newUserQuery || null, disabled_tools: disabledTools.length > 0 ? 
disabledTools : undefined, + filesystem_mode: selection.filesystem_mode, + client_platform: selection.client_platform, + local_filesystem_root: selection.local_filesystem_root, }), signal: controller.signal, }); diff --git a/surfsense_web/components/assistant-ui/thread.tsx b/surfsense_web/components/assistant-ui/thread.tsx index 8d60e2c5c..094d99a29 100644 --- a/surfsense_web/components/assistant-ui/thread.tsx +++ b/surfsense_web/components/assistant-ui/thread.tsx @@ -94,6 +94,12 @@ import { cn } from "@/lib/utils"; const COMPOSER_PLACEHOLDER = "Ask anything, type / for prompts, type @ to mention docs"; +type ComposerFilesystemSettings = { + mode: "cloud" | "desktop_local_folder"; + localRootPath: string | null; + updatedAt: string; +}; + export const Thread: FC = () => { return ; }; @@ -362,6 +368,9 @@ const Composer: FC = () => { }, []); const electronAPI = useElectronAPI(); + const [filesystemSettings, setFilesystemSettings] = useState( + null + ); const [clipboardInitialText, setClipboardInitialText] = useState(); const clipboardLoadedRef = useRef(false); useEffect(() => { @@ -374,6 +383,48 @@ const Composer: FC = () => { }); }, [electronAPI]); + useEffect(() => { + if (!electronAPI?.getAgentFilesystemSettings) return; + let mounted = true; + electronAPI + .getAgentFilesystemSettings() + .then((settings) => { + if (!mounted) return; + setFilesystemSettings(settings); + }) + .catch(() => { + if (!mounted) return; + setFilesystemSettings({ + mode: "cloud", + localRootPath: null, + updatedAt: new Date().toISOString(), + }); + }); + return () => { + mounted = false; + }; + }, [electronAPI]); + + const handleFilesystemModeChange = useCallback( + async (mode: "cloud" | "desktop_local_folder") => { + if (!electronAPI?.setAgentFilesystemSettings) return; + const updated = await electronAPI.setAgentFilesystemSettings({ mode }); + setFilesystemSettings(updated); + }, + [electronAPI] + ); + + const handlePickFilesystemRoot = useCallback(async () => { + if 
(!electronAPI?.pickAgentFilesystemRoot || !electronAPI?.setAgentFilesystemSettings) return; + const picked = await electronAPI.pickAgentFilesystemRoot(); + if (!picked) return; + const updated = await electronAPI.setAgentFilesystemSettings({ + mode: "desktop_local_folder", + localRootPath: picked, + }); + setFilesystemSettings(updated); + }, [electronAPI]); + const isThreadEmpty = useAuiState(({ thread }) => thread.isEmpty); const isThreadRunning = useAuiState(({ thread }) => thread.isRunning); @@ -668,6 +719,45 @@ const Composer: FC = () => { currentUserId={currentUser?.id ?? null} members={members ?? []} /> + {electronAPI && filesystemSettings ? ( +
+ + +
+ +
+ ) : null} {showDocumentPopover && (
= ({ isBlockedByOtherUser = false group.tools.flatMap((t, i) => i === 0 ? [t.description] - : [, t.description] + : [ + , + t.description, + ] )} diff --git a/surfsense_web/lib/apis/base-api.service.ts b/surfsense_web/lib/apis/base-api.service.ts index 04e9fad54..269fd916c 100644 --- a/surfsense_web/lib/apis/base-api.service.ts +++ b/surfsense_web/lib/apis/base-api.service.ts @@ -1,4 +1,5 @@ import type { ZodType } from "zod"; +import { getClientPlatform } from "../agent-filesystem"; import { getBearerToken, handleUnauthorized, refreshAccessToken } from "../auth-utils"; import { AbortedError, @@ -75,6 +76,8 @@ class BaseApiService { const defaultOptions: RequestOptions = { headers: { Authorization: `Bearer ${this.bearerToken || ""}`, + "X-SurfSense-Client-Platform": + typeof window === "undefined" ? "web" : getClientPlatform(), }, method: "GET", responseType: ResponseType.JSON, diff --git a/surfsense_web/types/window.d.ts b/surfsense_web/types/window.d.ts index a80520684..661c0f7d6 100644 --- a/surfsense_web/types/window.d.ts +++ b/surfsense_web/types/window.d.ts @@ -41,6 +41,14 @@ interface FolderFileEntry { mtimeMs: number; } +type AgentFilesystemMode = "cloud" | "desktop_local_folder"; + +interface AgentFilesystemSettings { + mode: AgentFilesystemMode; + localRootPath: string | null; + updatedAt: string; +} + interface ElectronAPI { versions: { electron: string; @@ -125,6 +133,13 @@ interface ElectronAPI { appVersion: string; platform: string; }>; + // Agent filesystem mode + getAgentFilesystemSettings: () => Promise; + setAgentFilesystemSettings: (settings: { + mode?: AgentFilesystemMode; + localRootPath?: string | null; + }) => Promise; + pickAgentFilesystemRoot: () => Promise; } declare global { From a2ddf4765012983c7071c569d2b0cdf995542ba1 Mon Sep 17 00:00:00 2001 From: Trevin Chow Date: Thu, 23 Apr 2026 03:26:42 -0700 Subject: [PATCH 085/113] refactor(anon-chat): route upload through anonymousChatApiService Fixes #1245. 
Deduplicate the anonymous-chat file upload request, which was inlined verbatim in DocumentsSidebar.tsx and free-composer.tsx while anonymousChatApiService.uploadDocument already existed. Key change: service now returns a discriminated result instead of throwing on 409. Callers need to distinguish 409 (quota exceeded, -> gate to login) from other non-OK responses (real errors, -> throw). export type AnonUploadResult = | { ok: true; data: { filename: string; size_bytes: number } } | { ok: false; reason: "quota_exceeded" }; Both call sites now do: const result = await anonymousChatApiService.uploadDocument(file); if (!result.ok) { if (result.reason === "quota_exceeded") gate("upload more documents"); return; } const data = result.data; Dropped the BACKEND_URL import in both files (no longer used). Verified zero remaining /api/v1/public/anon-chat/upload references in surfsense_web/. --- .../components/free-chat/free-composer.tsx | 22 +++++-------------- .../layout/ui/sidebar/DocumentsSidebar.tsx | 22 +++++-------------- .../lib/apis/anonymous-chat-api.service.ts | 12 ++++++++-- 3 files changed, 20 insertions(+), 36 deletions(-) diff --git a/surfsense_web/components/free-chat/free-composer.tsx b/surfsense_web/components/free-chat/free-composer.tsx index 57a3e8dd9..a22d2b205 100644 --- a/surfsense_web/components/free-chat/free-composer.tsx +++ b/surfsense_web/components/free-chat/free-composer.tsx @@ -9,7 +9,7 @@ import { Switch } from "@/components/ui/switch"; import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; import { useAnonymousMode } from "@/contexts/anonymous-mode"; import { useLoginGate } from "@/contexts/login-gate"; -import { BACKEND_URL } from "@/lib/env-config"; +import { anonymousChatApiService } from "@/lib/apis/anonymous-chat-api.service"; import { cn } from "@/lib/utils"; const ANON_ALLOWED_EXTENSIONS = new Set([ @@ -128,24 +128,12 @@ export const FreeComposer: FC = () => { } try { - const formData = new FormData(); - 
formData.append("file", file); - const res = await fetch(`${BACKEND_URL}/api/v1/public/anon-chat/upload`, { - method: "POST", - credentials: "include", - body: formData, - }); - - if (res.status === 409) { - gate("upload more documents"); + const result = await anonymousChatApiService.uploadDocument(file); + if (!result.ok) { + if (result.reason === "quota_exceeded") gate("upload more documents"); return; } - if (!res.ok) { - const body = await res.json().catch(() => ({})); - throw new Error(body.detail || `Upload failed: ${res.status}`); - } - - const data = await res.json(); + const data = result.data; if (anonMode.isAnonymous) { anonMode.setUploadedDoc({ filename: data.filename, diff --git a/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx b/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx index daed8747d..b7f4cff07 100644 --- a/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx +++ b/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx @@ -68,11 +68,11 @@ import type { DocumentTypeEnum } from "@/contracts/types/document.types"; import { useDebouncedValue } from "@/hooks/use-debounced-value"; import { useMediaQuery } from "@/hooks/use-media-query"; import { useElectronAPI } from "@/hooks/use-platform"; +import { anonymousChatApiService } from "@/lib/apis/anonymous-chat-api.service"; import { documentsApiService } from "@/lib/apis/documents-api.service"; import { foldersApiService } from "@/lib/apis/folders-api.service"; import { searchSpacesApiService } from "@/lib/apis/search-spaces-api.service"; import { authenticatedFetch } from "@/lib/auth-utils"; -import { BACKEND_URL } from "@/lib/env-config"; import { uploadFolderScan } from "@/lib/folder-sync-upload"; import { getSupportedExtensionsSet } from "@/lib/supported-extensions"; import { queries } from "@/zero/queries/index"; @@ -1312,24 +1312,12 @@ function AnonymousDocumentsSidebar({ setIsUploading(true); try { - const formData = new FormData(); - 
formData.append("file", file); - const res = await fetch(`${BACKEND_URL}/api/v1/public/anon-chat/upload`, { - method: "POST", - credentials: "include", - body: formData, - }); - - if (res.status === 409) { - gate("upload more documents"); + const result = await anonymousChatApiService.uploadDocument(file); + if (!result.ok) { + if (result.reason === "quota_exceeded") gate("upload more documents"); return; } - if (!res.ok) { - const body = await res.json().catch(() => ({})); - throw new Error(body.detail || `Upload failed: ${res.status}`); - } - - const data = await res.json(); + const data = result.data; if (anonMode.isAnonymous) { anonMode.setUploadedDoc({ filename: data.filename, diff --git a/surfsense_web/lib/apis/anonymous-chat-api.service.ts b/surfsense_web/lib/apis/anonymous-chat-api.service.ts index 968f58be2..843576a50 100644 --- a/surfsense_web/lib/apis/anonymous-chat-api.service.ts +++ b/surfsense_web/lib/apis/anonymous-chat-api.service.ts @@ -12,6 +12,10 @@ import { ValidationError } from "../error"; const BASE = "/api/v1/public/anon-chat"; +export type AnonUploadResult = + | { ok: true; data: { filename: string; size_bytes: number } } + | { ok: false; reason: "quota_exceeded" }; + class AnonymousChatApiService { private baseUrl: string; @@ -71,7 +75,7 @@ class AnonymousChatApiService { }); }; - uploadDocument = async (file: File): Promise<{ filename: string; size_bytes: number }> => { + uploadDocument = async (file: File): Promise => { const formData = new FormData(); formData.append("file", file); const res = await fetch(this.fullUrl("/upload"), { @@ -79,11 +83,15 @@ class AnonymousChatApiService { credentials: "include", body: formData, }); + if (res.status === 409) { + return { ok: false, reason: "quota_exceeded" }; + } if (!res.ok) { const body = await res.json().catch(() => ({})); throw new Error(body.detail || `Upload failed: ${res.status}`); } - return res.json(); + const data = await res.json(); + return { ok: true, data }; }; getDocument = 
async (): Promise<{ filename: string; size_bytes: number } | null> => { From 864f6f798ab25d6c4112b5b68b8ebd9aa0abf4ec Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 17:23:38 +0530 Subject: [PATCH 086/113] feat(filesystem): enhance local file handling in editor and IPC integration --- surfsense_backend/.env.example | 3 + .../app/routes/new_chat_routes.py | 5 - surfsense_desktop/src/ipc/channels.ts | 2 + surfsense_desktop/src/ipc/handlers.ts | 25 ++++ .../src/modules/agent-filesystem.ts | 61 +++++++- surfsense_desktop/src/preload.ts | 4 + .../atoms/editor/editor-panel.atom.ts | 38 ++++- .../components/assistant-ui/markdown-text.tsx | 46 ++++++ .../components/editor-panel/editor-panel.tsx | 139 ++++++++++++++---- .../layout/ui/right-panel/RightPanel.tsx | 18 ++- surfsense_web/lib/agent-filesystem.ts | 44 ++++++ surfsense_web/types/window.d.ts | 12 ++ 12 files changed, 350 insertions(+), 47 deletions(-) create mode 100644 surfsense_web/lib/agent-filesystem.ts diff --git a/surfsense_backend/.env.example b/surfsense_backend/.env.example index 7f6389521..86bac0aaf 100644 --- a/surfsense_backend/.env.example +++ b/surfsense_backend/.env.example @@ -239,6 +239,9 @@ LLAMA_CLOUD_API_KEY=llx-nnn # DAYTONA_TARGET=us # DAYTONA_SNAPSHOT_ID= +# Desktop local filesystem mode (chat file tools run against a local folder root) +# ENABLE_DESKTOP_LOCAL_FILESYSTEM=FALSE + # OPTIONAL: Add these for LangSmith Observability LANGSMITH_TRACING=true LANGSMITH_ENDPOINT=https://api.smith.langchain.com diff --git a/surfsense_backend/app/routes/new_chat_routes.py b/surfsense_backend/app/routes/new_chat_routes.py index 5e8e24c4a..548bd1402 100644 --- a/surfsense_backend/app/routes/new_chat_routes.py +++ b/surfsense_backend/app/routes/new_chat_routes.py @@ -525,11 +525,6 @@ async def get_thread_messages( # Check thread-level access based on visibility await check_thread_access(session, thread, user) - filesystem_selection = 
_resolve_filesystem_selection( - mode=request.filesystem_mode, - client_platform=request.client_platform, - local_root=request.local_filesystem_root, - ) # Get messages with their authors and token usage loaded messages_result = await session.execute( diff --git a/surfsense_desktop/src/ipc/channels.ts b/surfsense_desktop/src/ipc/channels.ts index 177a05fb4..5cf6e9001 100644 --- a/surfsense_desktop/src/ipc/channels.ts +++ b/surfsense_desktop/src/ipc/channels.ts @@ -34,6 +34,8 @@ export const IPC_CHANNELS = { FOLDER_SYNC_SEED_MTIMES: 'folder-sync:seed-mtimes', BROWSE_FILES: 'browse:files', READ_LOCAL_FILES: 'browse:read-local-files', + READ_AGENT_LOCAL_FILE_TEXT: 'agent-filesystem:read-local-file-text', + WRITE_AGENT_LOCAL_FILE_TEXT: 'agent-filesystem:write-local-file-text', // Auth token sync across windows GET_AUTH_TOKENS: 'auth:get-tokens', SET_AUTH_TOKENS: 'auth:set-tokens', diff --git a/surfsense_desktop/src/ipc/handlers.ts b/surfsense_desktop/src/ipc/handlers.ts index 3719a0b0f..cc84a46e0 100644 --- a/surfsense_desktop/src/ipc/handlers.ts +++ b/surfsense_desktop/src/ipc/handlers.ts @@ -37,6 +37,8 @@ import { trackEvent, } from '../modules/analytics'; import { + readAgentLocalFileText, + writeAgentLocalFileText, getAgentFilesystemSettings, pickAgentFilesystemRoot, setAgentFilesystemSettings, @@ -123,6 +125,29 @@ export function registerIpcHandlers(): void { readLocalFiles(paths) ); + ipcMain.handle(IPC_CHANNELS.READ_AGENT_LOCAL_FILE_TEXT, async (_event, virtualPath: string) => { + try { + const result = await readAgentLocalFileText(virtualPath); + return { ok: true, path: result.path, content: result.content }; + } catch (error) { + const message = error instanceof Error ? 
error.message : 'Failed to read local file'; + return { ok: false, path: virtualPath, error: message }; + } + }); + + ipcMain.handle( + IPC_CHANNELS.WRITE_AGENT_LOCAL_FILE_TEXT, + async (_event, virtualPath: string, content: string) => { + try { + const result = await writeAgentLocalFileText(virtualPath, content); + return { ok: true, path: result.path }; + } catch (error) { + const message = error instanceof Error ? error.message : 'Failed to write local file'; + return { ok: false, path: virtualPath, error: message }; + } + } + ); + ipcMain.handle(IPC_CHANNELS.SET_AUTH_TOKENS, (_event, tokens: { bearer: string; refresh: string }) => { authTokens = tokens; }); diff --git a/surfsense_desktop/src/modules/agent-filesystem.ts b/surfsense_desktop/src/modules/agent-filesystem.ts index 44f12a465..9dfe79fb0 100644 --- a/surfsense_desktop/src/modules/agent-filesystem.ts +++ b/surfsense_desktop/src/modules/agent-filesystem.ts @@ -1,6 +1,6 @@ import { app, dialog } from "electron"; import { mkdir, readFile, writeFile } from "node:fs/promises"; -import { dirname, join } from "node:path"; +import { dirname, isAbsolute, join, relative, resolve } from "node:path"; export type AgentFilesystemMode = "cloud" | "desktop_local_folder"; @@ -72,3 +72,62 @@ export async function pickAgentFilesystemRoot(): Promise { } return result.filePaths[0] ?? 
null; } + +function resolveVirtualPath(rootPath: string, virtualPath: string): string { + if (!virtualPath.startsWith("/")) { + throw new Error("Path must start with '/'"); + } + const normalizedRoot = resolve(rootPath); + const relativePath = virtualPath.replace(/^\/+/, ""); + if (!relativePath) { + throw new Error("Path must refer to a file under the selected root"); + } + const absolutePath = resolve(normalizedRoot, relativePath); + const rel = relative(normalizedRoot, absolutePath); + if (!rel || rel.startsWith("..") || isAbsolute(rel)) { + throw new Error("Path escapes selected local root"); + } + return absolutePath; +} + +function toVirtualPath(rootPath: string, absolutePath: string): string { + const normalizedRoot = resolve(rootPath); + const rel = relative(normalizedRoot, absolutePath); + if (!rel || rel.startsWith("..") || isAbsolute(rel)) { + return "/"; + } + return `/${rel.replace(/\\/g, "/")}`; +} + +async function resolveCurrentRootPath(): Promise { + const settings = await getAgentFilesystemSettings(); + if (!settings.localRootPath) { + throw new Error("No local filesystem root selected"); + } + return settings.localRootPath; +} + +export async function readAgentLocalFileText( + virtualPath: string +): Promise<{ path: string; content: string }> { + const rootPath = await resolveCurrentRootPath(); + const absolutePath = resolveVirtualPath(rootPath, virtualPath); + const content = await readFile(absolutePath, "utf8"); + return { + path: toVirtualPath(rootPath, absolutePath), + content, + }; +} + +export async function writeAgentLocalFileText( + virtualPath: string, + content: string +): Promise<{ path: string }> { + const rootPath = await resolveCurrentRootPath(); + const absolutePath = resolveVirtualPath(rootPath, virtualPath); + await mkdir(dirname(absolutePath), { recursive: true }); + await writeFile(absolutePath, content, "utf8"); + return { + path: toVirtualPath(rootPath, absolutePath), + }; +} diff --git a/surfsense_desktop/src/preload.ts 
b/surfsense_desktop/src/preload.ts index f75cc240e..9fc213bfa 100644 --- a/surfsense_desktop/src/preload.ts +++ b/surfsense_desktop/src/preload.ts @@ -71,6 +71,10 @@ contextBridge.exposeInMainWorld('electronAPI', { // Browse files via native dialog browseFiles: () => ipcRenderer.invoke(IPC_CHANNELS.BROWSE_FILES), readLocalFiles: (paths: string[]) => ipcRenderer.invoke(IPC_CHANNELS.READ_LOCAL_FILES, paths), + readAgentLocalFileText: (virtualPath: string) => + ipcRenderer.invoke(IPC_CHANNELS.READ_AGENT_LOCAL_FILE_TEXT, virtualPath), + writeAgentLocalFileText: (virtualPath: string, content: string) => + ipcRenderer.invoke(IPC_CHANNELS.WRITE_AGENT_LOCAL_FILE_TEXT, virtualPath, content), // Auth token sync across windows getAuthTokens: () => ipcRenderer.invoke(IPC_CHANNELS.GET_AUTH_TOKENS), diff --git a/surfsense_web/atoms/editor/editor-panel.atom.ts b/surfsense_web/atoms/editor/editor-panel.atom.ts index 7dc6add28..28563e7d3 100644 --- a/surfsense_web/atoms/editor/editor-panel.atom.ts +++ b/surfsense_web/atoms/editor/editor-panel.atom.ts @@ -3,14 +3,18 @@ import { rightPanelCollapsedAtom, rightPanelTabAtom } from "@/atoms/layout/right interface EditorPanelState { isOpen: boolean; + kind: "document" | "local_file"; documentId: number | null; + localFilePath: string | null; searchSpaceId: number | null; title: string | null; } const initialState: EditorPanelState = { isOpen: false, + kind: "document", documentId: null, + localFilePath: null, searchSpaceId: null, title: null, }; @@ -26,20 +30,38 @@ export const openEditorPanelAtom = atom( ( get, set, - { - documentId, - searchSpaceId, - title, - }: { documentId: number; searchSpaceId: number; title?: string } + payload: + | { documentId: number; searchSpaceId: number; title?: string; kind?: "document" } + | { + kind: "local_file"; + localFilePath: string; + title?: string; + searchSpaceId?: number; + } ) => { if (!get(editorPanelAtom).isOpen) { set(preEditorCollapsedAtom, get(rightPanelCollapsedAtom)); } + if 
(payload.kind === "local_file") { + set(editorPanelAtom, { + isOpen: true, + kind: "local_file", + documentId: null, + localFilePath: payload.localFilePath, + searchSpaceId: payload.searchSpaceId ?? null, + title: payload.title ?? null, + }); + set(rightPanelTabAtom, "editor"); + set(rightPanelCollapsedAtom, false); + return; + } set(editorPanelAtom, { isOpen: true, - documentId, - searchSpaceId, - title: title ?? null, + kind: "document", + documentId: payload.documentId, + localFilePath: null, + searchSpaceId: payload.searchSpaceId, + title: payload.title ?? null, }); set(rightPanelTabAtom, "editor"); set(rightPanelCollapsedAtom, false); diff --git a/surfsense_web/components/assistant-ui/markdown-text.tsx b/surfsense_web/components/assistant-ui/markdown-text.tsx index 9d0c8a9ed..a2ce30111 100644 --- a/surfsense_web/components/assistant-ui/markdown-text.tsx +++ b/surfsense_web/components/assistant-ui/markdown-text.tsx @@ -7,16 +7,20 @@ import { unstable_memoizeMarkdownComponents as memoizeMarkdownComponents, useIsMarkdownCodeBlock, } from "@assistant-ui/react-markdown"; +import { useSetAtom } from "jotai"; import { ExternalLinkIcon } from "lucide-react"; import dynamic from "next/dynamic"; +import { useParams } from "next/navigation"; import { useTheme } from "next-themes"; import { memo, type ReactNode } from "react"; import rehypeKatex from "rehype-katex"; import remarkGfm from "remark-gfm"; import remarkMath from "remark-math"; +import { openEditorPanelAtom } from "@/atoms/editor/editor-panel.atom"; import { ImagePreview, ImageRoot, ImageZoom } from "@/components/assistant-ui/image"; import "katex/dist/katex.min.css"; import { InlineCitation, UrlCitation } from "@/components/assistant-ui/inline-citation"; +import { useElectronAPI } from "@/hooks/use-platform"; import { Skeleton } from "@/components/ui/skeleton"; import { Table, @@ -222,6 +226,12 @@ function extractDomain(url: string): string { } } +const LOCAL_FILE_PATH_REGEX = 
/^\/(?:[^/\s`]+\/)*[^/\s`]+\.[^/\s`]+$/; + +function isVirtualFilePathToken(value: string): boolean { + return LOCAL_FILE_PATH_REGEX.test(value); +} + function MarkdownImage({ src, alt }: { src?: string; alt?: string }) { if (!src) return null; @@ -392,7 +402,43 @@ const defaultComponents = memoizeMarkdownComponents({ code: function Code({ className, children, ...props }) { const isCodeBlock = useIsMarkdownCodeBlock(); const { resolvedTheme } = useTheme(); + const openEditorPanel = useSetAtom(openEditorPanelAtom); + const params = useParams(); + const electronAPI = useElectronAPI(); if (!isCodeBlock) { + const inlineValue = String(children ?? "").trim(); + const isLocalPath = + !!electronAPI && isVirtualFilePathToken(inlineValue) && !inlineValue.startsWith("//"); + const displayLocalPath = inlineValue.replace(/^\/+/, ""); + const searchSpaceIdParam = params?.search_space_id; + const parsedSearchSpaceId = Array.isArray(searchSpaceIdParam) + ? Number(searchSpaceIdParam[0]) + : Number(searchSpaceIdParam); + if (isLocalPath) { + return ( + + ); + } return ( void; }) { + const electronAPI = useElectronAPI(); const [editorDoc, setEditorDoc] = useState(null); const [isLoading, setIsLoading] = useState(true); const [error, setError] = useState(null); @@ -75,6 +81,7 @@ export function EditorPanelContent({ const initialLoadDone = useRef(false); const changeCountRef = useRef(0); const [displayTitle, setDisplayTitle] = useState(title || "Untitled"); + const isLocalFileMode = kind === "local_file"; const isLargeDocument = (editorDoc?.content_size_bytes ?? 
0) > LARGE_DOCUMENT_THRESHOLD; @@ -88,13 +95,40 @@ export function EditorPanelContent({ changeCountRef.current = 0; const doFetch = async () => { - const token = getBearerToken(); - if (!token) { - redirectToLogin(); - return; - } - try { + if (isLocalFileMode) { + if (!localFilePath) { + throw new Error("Missing local file path"); + } + if (!electronAPI?.readAgentLocalFileText) { + throw new Error("Local file editor is available only in desktop mode."); + } + const readResult = await electronAPI.readAgentLocalFileText(localFilePath); + if (!readResult.ok) { + throw new Error(readResult.error || "Failed to read local file"); + } + const inferredTitle = localFilePath.split("/").pop() || localFilePath; + const content: EditorContent = { + document_id: -1, + title: inferredTitle, + document_type: "NOTE", + source_markdown: readResult.content, + }; + markdownRef.current = content.source_markdown; + setDisplayTitle(title || inferredTitle); + setEditorDoc(content); + initialLoadDone.current = true; + return; + } + if (!documentId || !searchSpaceId) { + throw new Error("Missing document context"); + } + const token = getBearerToken(); + if (!token) { + redirectToLogin(); + return; + } + const url = new URL( `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${searchSpaceId}/documents/${documentId}/editor-content` ); @@ -136,7 +170,7 @@ export function EditorPanelContent({ doFetch().catch(() => {}); return () => controller.abort(); - }, [documentId, searchSpaceId, title]); + }, [documentId, electronAPI, isLocalFileMode, localFilePath, searchSpaceId, title]); const handleMarkdownChange = useCallback((md: string) => { markdownRef.current = md; @@ -147,15 +181,38 @@ export function EditorPanelContent({ }, []); const handleSave = useCallback(async () => { - const token = getBearerToken(); - if (!token) { - toast.error("Please login to save"); - redirectToLogin(); - return; - } - setSaving(true); try { + if (isLocalFileMode) { + if (!localFilePath) { + throw 
new Error("Missing local file path"); + } + if (!electronAPI?.writeAgentLocalFileText) { + throw new Error("Local file editor is available only in desktop mode."); + } + const writeResult = await electronAPI.writeAgentLocalFileText( + localFilePath, + markdownRef.current + ); + if (!writeResult.ok) { + throw new Error(writeResult.error || "Failed to save local file"); + } + setEditorDoc((prev) => + prev ? { ...prev, source_markdown: markdownRef.current } : prev + ); + setEditedMarkdown(null); + toast.success("File saved"); + return; + } + if (!searchSpaceId || !documentId) { + throw new Error("Missing document context"); + } + const token = getBearerToken(); + if (!token) { + toast.error("Please login to save"); + redirectToLogin(); + return; + } const response = await authenticatedFetch( `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${searchSpaceId}/documents/${documentId}/save`, { @@ -181,10 +238,11 @@ export function EditorPanelContent({ } finally { setSaving(false); } - }, [documentId, searchSpaceId]); + }, [documentId, electronAPI, isLocalFileMode, localFilePath, searchSpaceId]); const isEditableType = editorDoc - ? EDITABLE_DOCUMENT_TYPES.has(editorDoc.document_type ?? "") && !isLargeDocument + ? (isLocalFileMode || EDITABLE_DOCUMENT_TYPES.has(editorDoc.document_type ?? "")) && + !isLargeDocument : false; return ( @@ -197,7 +255,7 @@ export function EditorPanelContent({ )}
- {editorDoc?.document_type && ( + {!isLocalFileMode && editorDoc?.document_type && documentId && ( )} {onClose && ( @@ -234,7 +292,7 @@ export function EditorPanelContent({

- ) : isLargeDocument ? ( + ) : isLargeDocument && !isLocalFileMode ? (
@@ -252,6 +310,9 @@ export function EditorPanelContent({ onClick={async () => { setDownloading(true); try { + if (!searchSpaceId || !documentId) { + throw new Error("Missing document context"); + } const response = await authenticatedFetch( `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-spaces/${searchSpaceId}/documents/${documentId}/download-markdown`, { method: "GET" } @@ -289,7 +350,7 @@ export function EditorPanelContent({
) : isEditableType ? ( document.removeEventListener("keydown", handleKeyDown); }, [closePanel]); - if (!panelState.isOpen || !panelState.documentId || !panelState.searchSpaceId) return null; + const hasTarget = + panelState.kind === "document" + ? !!panelState.documentId && !!panelState.searchSpaceId + : !!panelState.localFilePath; + if (!panelState.isOpen || !hasTarget) return null; return (
@@ -342,7 +409,11 @@ function MobileEditorDrawer() { const panelState = useAtomValue(editorPanelAtom); const closePanel = useSetAtom(closeEditorPanelAtom); - if (!panelState.documentId || !panelState.searchSpaceId) return null; + const hasTarget = + panelState.kind === "document" + ? !!panelState.documentId && !!panelState.searchSpaceId + : !!panelState.localFilePath; + if (!hasTarget) return null; return ( {panelState.title || "Editor"}
@@ -373,8 +446,12 @@ function MobileEditorDrawer() { export function EditorPanel() { const panelState = useAtomValue(editorPanelAtom); const isDesktop = useMediaQuery("(min-width: 1024px)"); + const hasTarget = + panelState.kind === "document" + ? !!panelState.documentId && !!panelState.searchSpaceId + : !!panelState.localFilePath; - if (!panelState.isOpen || !panelState.documentId) return null; + if (!panelState.isOpen || !hasTarget) return null; if (isDesktop) { return ; @@ -386,8 +463,12 @@ export function EditorPanel() { export function MobileEditorPanel() { const panelState = useAtomValue(editorPanelAtom); const isDesktop = useMediaQuery("(min-width: 1024px)"); + const hasTarget = + panelState.kind === "document" + ? !!panelState.documentId && !!panelState.searchSpaceId + : !!panelState.localFilePath; - if (isDesktop || !panelState.isOpen || !panelState.documentId) return null; + if (isDesktop || !panelState.isOpen || !hasTarget) return null; return ; } diff --git a/surfsense_web/components/layout/ui/right-panel/RightPanel.tsx b/surfsense_web/components/layout/ui/right-panel/RightPanel.tsx index febae35d3..f6debed34 100644 --- a/surfsense_web/components/layout/ui/right-panel/RightPanel.tsx +++ b/surfsense_web/components/layout/ui/right-panel/RightPanel.tsx @@ -70,7 +70,11 @@ export function RightPanelExpandButton() { const editorState = useAtomValue(editorPanelAtom); const hitlEditState = useAtomValue(hitlEditPanelAtom); const reportOpen = reportState.isOpen && !!reportState.reportId; - const editorOpen = editorState.isOpen && !!editorState.documentId; + const editorOpen = + editorState.isOpen && + (editorState.kind === "document" + ? 
!!editorState.documentId + : !!editorState.localFilePath); const hitlEditOpen = hitlEditState.isOpen && !!hitlEditState.onSave; const hasContent = documentsOpen || reportOpen || editorOpen || hitlEditOpen; @@ -110,7 +114,11 @@ export function RightPanel({ documentsPanel }: RightPanelProps) { const documentsOpen = documentsPanel?.open ?? false; const reportOpen = reportState.isOpen && !!reportState.reportId; - const editorOpen = editorState.isOpen && !!editorState.documentId; + const editorOpen = + editorState.isOpen && + (editorState.kind === "document" + ? !!editorState.documentId + : !!editorState.localFilePath); const hitlEditOpen = hitlEditState.isOpen && !!hitlEditState.onSave; useEffect(() => { @@ -179,8 +187,10 @@ export function RightPanel({ documentsPanel }: RightPanelProps) { {effectiveTab === "editor" && editorOpen && (
diff --git a/surfsense_web/lib/agent-filesystem.ts b/surfsense_web/lib/agent-filesystem.ts new file mode 100644 index 000000000..6bfb5d131 --- /dev/null +++ b/surfsense_web/lib/agent-filesystem.ts @@ -0,0 +1,44 @@ +export type AgentFilesystemMode = "cloud" | "desktop_local_folder"; +export type ClientPlatform = "web" | "desktop"; + +export interface AgentFilesystemSelection { + filesystem_mode: AgentFilesystemMode; + client_platform: ClientPlatform; + local_filesystem_root?: string; +} + +const DEFAULT_SELECTION: AgentFilesystemSelection = { + filesystem_mode: "cloud", + client_platform: "web", +}; + +export function getClientPlatform(): ClientPlatform { + if (typeof window === "undefined") return "web"; + return window.electronAPI ? "desktop" : "web"; +} + +export async function getAgentFilesystemSelection(): Promise { + const platform = getClientPlatform(); + if (platform !== "desktop" || !window.electronAPI?.getAgentFilesystemSettings) { + return { ...DEFAULT_SELECTION, client_platform: platform }; + } + try { + const settings = await window.electronAPI.getAgentFilesystemSettings(); + if (settings.mode === "desktop_local_folder" && settings.localRootPath) { + return { + filesystem_mode: "desktop_local_folder", + client_platform: "desktop", + local_filesystem_root: settings.localRootPath, + }; + } + return { + filesystem_mode: "cloud", + client_platform: "desktop", + }; + } catch { + return { + filesystem_mode: "cloud", + client_platform: "desktop", + }; + } +} diff --git a/surfsense_web/types/window.d.ts b/surfsense_web/types/window.d.ts index 661c0f7d6..fe80ef8c0 100644 --- a/surfsense_web/types/window.d.ts +++ b/surfsense_web/types/window.d.ts @@ -49,6 +49,13 @@ interface AgentFilesystemSettings { updatedAt: string; } +interface LocalTextFileResult { + ok: boolean; + path: string; + content?: string; + error?: string; +} + interface ElectronAPI { versions: { electron: string; @@ -102,6 +109,11 @@ interface ElectronAPI { // Browse files/folders via native 
dialogs browseFiles: () => Promise; readLocalFiles: (paths: string[]) => Promise; + readAgentLocalFileText: (virtualPath: string) => Promise; + writeAgentLocalFileText: ( + virtualPath: string, + content: string + ) => Promise; // Auth token sync across windows getAuthTokens: () => Promise<{ bearer: string; refresh: string } | null>; setAuthTokens: (bearer: string, refresh: string) => Promise; From bbc1c76c0d75432a85ade3cc2654d2ff0027e414 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 18:00:51 +0530 Subject: [PATCH 087/113] feat(editor): integrate Monaco Editor for local file editing and enhance language inference --- .../components/editor-panel/editor-panel.tsx | 20 +++++++ .../components/editor/local-file-monaco.tsx | 56 +++++++++++++++++++ surfsense_web/lib/editor-language.ts | 34 +++++++++++ surfsense_web/package.json | 2 + surfsense_web/pnpm-lock.yaml | 54 ++++++++++++++++++ 5 files changed, 166 insertions(+) create mode 100644 surfsense_web/components/editor/local-file-monaco.tsx create mode 100644 surfsense_web/lib/editor-language.ts diff --git a/surfsense_web/components/editor-panel/editor-panel.tsx b/surfsense_web/components/editor-panel/editor-panel.tsx index f7829d0cb..081359719 100644 --- a/surfsense_web/components/editor-panel/editor-panel.tsx +++ b/surfsense_web/components/editor-panel/editor-panel.tsx @@ -7,6 +7,7 @@ import { useCallback, useEffect, useRef, useState } from "react"; import { toast } from "sonner"; import { closeEditorPanelAtom, editorPanelAtom } from "@/atoms/editor/editor-panel.atom"; import { VersionHistoryButton } from "@/components/documents/version-history"; +import { LocalFileMonaco } from "@/components/editor/local-file-monaco"; import { MarkdownViewer } from "@/components/markdown-viewer"; import { Alert, AlertDescription } from "@/components/ui/alert"; import { Button } from "@/components/ui/button"; @@ -14,6 +15,7 @@ import { Drawer, DrawerContent, 
DrawerHandle, DrawerTitle } from "@/components/u import { useMediaQuery } from "@/hooks/use-media-query"; import { useElectronAPI } from "@/hooks/use-platform"; import { authenticatedFetch, getBearerToken, redirectToLogin } from "@/lib/auth-utils"; +import { inferMonacoLanguageFromPath } from "@/lib/editor-language"; const PlateEditor = dynamic( () => import("@/components/editor/plate-editor").then((m) => ({ default: m.PlateEditor })), @@ -77,6 +79,7 @@ export function EditorPanelContent({ const [downloading, setDownloading] = useState(false); const [editedMarkdown, setEditedMarkdown] = useState(null); + const [localFileContent, setLocalFileContent] = useState(""); const markdownRef = useRef(""); const initialLoadDone = useRef(false); const changeCountRef = useRef(0); @@ -91,6 +94,7 @@ export function EditorPanelContent({ setError(null); setEditorDoc(null); setEditedMarkdown(null); + setLocalFileContent(""); initialLoadDone.current = false; changeCountRef.current = 0; @@ -115,6 +119,7 @@ export function EditorPanelContent({ source_markdown: readResult.content, }; markdownRef.current = content.source_markdown; + setLocalFileContent(content.source_markdown); setDisplayTitle(title || inferredTitle); setEditorDoc(content); initialLoadDone.current = true; @@ -244,6 +249,7 @@ export function EditorPanelContent({ ? (isLocalFileMode || EDITABLE_DOCUMENT_TYPES.has(editorDoc.document_type ?? "")) && !isLargeDocument : false; + const localFileLanguage = inferMonacoLanguageFromPath(localFilePath); return ( <> @@ -348,6 +354,20 @@ export function EditorPanelContent({
+ ) : isLocalFileMode ? ( +
+ { + markdownRef.current = next; + setLocalFileContent(next); + if (!initialLoadDone.current) return; + setEditedMarkdown(next === (editorDoc?.source_markdown ?? "") ? null : next); + }} + /> +
) : isEditableType ? ( import("@monaco-editor/react"), { + ssr: false, +}); + +interface LocalFileMonacoProps { + filePath: string; + language: string; + value: string; + onChange: (next: string) => void; +} + +export function LocalFileMonaco({ filePath, language, value, onChange }: LocalFileMonacoProps) { + const { resolvedTheme } = useTheme(); + + return ( +
+ onChange(next ?? "")} + options={{ + automaticLayout: true, + minimap: { enabled: false }, + lineNumbers: "on", + lineNumbersMinChars: 3, + lineDecorationsWidth: 12, + glyphMargin: false, + folding: true, + overviewRulerLanes: 0, + hideCursorInOverviewRuler: true, + scrollBeyondLastLine: false, + wordWrap: "off", + scrollbar: { + vertical: "hidden", + horizontal: "hidden", + alwaysConsumeMouseWheel: false, + }, + tabSize: 2, + insertSpaces: true, + fontSize: 12, + fontFamily: + "ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, Liberation Mono, monospace", + renderWhitespace: "selection", + smoothScrolling: true, + }} + /> +
+ ); +} diff --git a/surfsense_web/lib/editor-language.ts b/surfsense_web/lib/editor-language.ts new file mode 100644 index 000000000..17227c15d --- /dev/null +++ b/surfsense_web/lib/editor-language.ts @@ -0,0 +1,34 @@ +const EXTENSION_TO_MONACO_LANGUAGE: Record = { + css: "css", + csv: "plaintext", + cjs: "javascript", + html: "html", + htm: "html", + ini: "ini", + js: "javascript", + json: "json", + markdown: "markdown", + md: "markdown", + mjs: "javascript", + py: "python", + sql: "sql", + toml: "plaintext", + ts: "typescript", + tsx: "typescript", + xml: "xml", + yaml: "yaml", + yml: "yaml", +}; + +export function inferMonacoLanguageFromPath(filePath: string | null | undefined): string { + if (!filePath) return "plaintext"; + + const fileName = filePath.split("/").pop() ?? filePath; + const extensionIndex = fileName.lastIndexOf("."); + if (extensionIndex <= 0 || extensionIndex === fileName.length - 1) { + return "plaintext"; + } + + const extension = fileName.slice(extensionIndex + 1).toLowerCase(); + return EXTENSION_TO_MONACO_LANGUAGE[extension] ?? 
"plaintext"; +} diff --git a/surfsense_web/package.json b/surfsense_web/package.json index a98c21f83..41175daeb 100644 --- a/surfsense_web/package.json +++ b/surfsense_web/package.json @@ -28,6 +28,7 @@ "@babel/standalone": "^7.29.2", "@hookform/resolvers": "^5.2.2", "@marsidev/react-turnstile": "^1.5.0", + "@monaco-editor/react": "^4.7.0", "@number-flow/react": "^0.5.10", "@platejs/autoformat": "^52.0.11", "@platejs/basic-nodes": "^52.0.11", @@ -106,6 +107,7 @@ "lenis": "^1.3.17", "lowlight": "^3.3.0", "lucide-react": "^0.577.0", + "monaco-editor": "^0.55.1", "motion": "^12.23.22", "next": "^16.1.0", "next-intl": "^4.6.1", diff --git a/surfsense_web/pnpm-lock.yaml b/surfsense_web/pnpm-lock.yaml index 1c3dd61e0..b1730e842 100644 --- a/surfsense_web/pnpm-lock.yaml +++ b/surfsense_web/pnpm-lock.yaml @@ -29,6 +29,9 @@ importers: '@marsidev/react-turnstile': specifier: ^1.5.0 version: 1.5.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4) + '@monaco-editor/react': + specifier: ^4.7.0 + version: 4.7.0(monaco-editor@0.55.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4) '@number-flow/react': specifier: ^0.5.10 version: 0.5.14(react-dom@19.2.4(react@19.2.4))(react@19.2.4) @@ -263,6 +266,9 @@ importers: lucide-react: specifier: ^0.577.0 version: 0.577.0(react@19.2.4) + monaco-editor: + specifier: ^0.55.1 + version: 0.55.1 motion: specifier: ^12.23.22 version: 12.34.3(react-dom@19.2.4(react@19.2.4))(react@19.2.4) @@ -1980,6 +1986,16 @@ packages: peerDependencies: mediabunny: ^1.0.0 + '@monaco-editor/loader@1.7.0': + resolution: {integrity: sha512-gIwR1HrJrrx+vfyOhYmCZ0/JcWqG5kbfG7+d3f/C1LXk2EvzAbHSg3MQ5lO2sMlo9izoAZ04shohfKLVT6crVA==} + + '@monaco-editor/react@4.7.0': + resolution: {integrity: sha512-cyzXQCtO47ydzxpQtCGSQGOC8Gk3ZUeBXFAxD+CWXYFo5OqZyZUonFl0DwUlTyAfRHntBfw2p3w4s9R6oe1eCA==} + peerDependencies: + monaco-editor: '>= 0.25.0 < 1' + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + 
'@napi-rs/canvas-android-arm64@0.1.97': resolution: {integrity: sha512-V1c/WVw+NzH8vk7ZK/O8/nyBSCQimU8sfMsB/9qeSvdkGKNU7+mxy/bIF0gTgeBFmHpj30S4E9WHMSrxXGQuVQ==} engines: {node: '>= 10'} @@ -5368,6 +5384,9 @@ packages: resolution: {integrity: sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==} engines: {node: '>= 4'} + dompurify@3.2.7: + resolution: {integrity: sha512-WhL/YuveyGXJaerVlMYGWhvQswa7myDG17P7Vu65EWC05o8vfeNbvNf4d/BOvH99+ZW+LlQsc1GDKMa1vNK6dw==} + dompurify@3.3.1: resolution: {integrity: sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==} @@ -6745,6 +6764,11 @@ packages: markdown-table@3.0.4: resolution: {integrity: sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==} + marked@14.0.0: + resolution: {integrity: sha512-uIj4+faQ+MgHgwUW1l2PsPglZLOLOT1uErt06dAPtx2kjteLAkbsd/0FiYg/MGS+i7ZKLb7w2WClxHkzOOuryQ==} + engines: {node: '>= 18'} + hasBin: true + marked@15.0.12: resolution: {integrity: sha512-8dD6FusOQSrpv9Z1rdNMdlSgQOIP880DHqnohobOmYLElGEqAL/JvxvuxZO16r4HtjTlfPRDC1hbvxC9dPN2nA==} engines: {node: '>= 18'} @@ -6965,6 +6989,9 @@ packages: module-details-from-path@1.0.4: resolution: {integrity: sha512-EGWKgxALGMgzvxYF1UyGTy0HXX/2vHLkw6+NvDKW2jypWbHpjQuj4UMcqQWXHERJhVGKikolT06G3bcKe4fi7w==} + monaco-editor@0.55.1: + resolution: {integrity: sha512-jz4x+TJNFHwHtwuV9vA9rMujcZRb0CEilTEwG2rRSpe/A7Jdkuj8xPKttCgOh+v/lkHy7HsZ64oj+q3xoAFl9A==} + motion-dom@12.34.3: resolution: {integrity: sha512-sYgFe+pR9aIM7o4fhs2aXtOI+oqlUd33N9Yoxcgo1Fv7M20sRkHtCmzE/VRNIcq7uNJ+qio+Xubt1FXH3pQ+eQ==} @@ -7943,6 +7970,9 @@ packages: stable-hash@0.0.5: resolution: {integrity: sha512-+L3ccpzibovGXFK+Ap/f8LOS0ahMrHTf3xu7mMLSpEGU0EO9ucaysSylKo9eRDFNhWve/y275iPmIZ4z39a9iA==} + state-local@1.0.7: + resolution: {integrity: sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==} + stop-iteration-iterator@1.1.0: 
resolution: {integrity: sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==} engines: {node: '>= 0.4'} @@ -10050,6 +10080,17 @@ snapshots: dependencies: mediabunny: 1.39.2 + '@monaco-editor/loader@1.7.0': + dependencies: + state-local: 1.0.7 + + '@monaco-editor/react@4.7.0(monaco-editor@0.55.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)': + dependencies: + '@monaco-editor/loader': 1.7.0 + monaco-editor: 0.55.1 + react: 19.2.4 + react-dom: 19.2.4(react@19.2.4) + '@napi-rs/canvas-android-arm64@0.1.97': optional: true @@ -13748,6 +13789,10 @@ snapshots: dependencies: domelementtype: 2.3.0 + dompurify@3.2.7: + optionalDependencies: + '@types/trusted-types': 2.0.7 + dompurify@3.3.1: optionalDependencies: '@types/trusted-types': 2.0.7 @@ -15327,6 +15372,8 @@ snapshots: markdown-table@3.0.4: {} + marked@14.0.0: {} + marked@15.0.12: {} marked@17.0.3: {} @@ -15822,6 +15869,11 @@ snapshots: module-details-from-path@1.0.4: {} + monaco-editor@0.55.1: + dependencies: + dompurify: 3.2.7 + marked: 14.0.0 + motion-dom@12.34.3: dependencies: motion-utils: 12.29.2 @@ -17073,6 +17125,8 @@ snapshots: stable-hash@0.0.5: {} + state-local@1.0.7: {} + stop-iteration-iterator@1.1.0: dependencies: es-errors: 1.3.0 From d397fec54fd829561967c524ad08638eed263531 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 18:21:50 +0530 Subject: [PATCH 088/113] feat(editor): add SourceCodeEditor component for enhanced code editing experience --- .../components/editor-panel/editor-panel.tsx | 13 +++++---- ...file-monaco.tsx => source-code-editor.tsx} | 28 +++++++++++++++---- 2 files changed, 30 insertions(+), 11 deletions(-) rename surfsense_web/components/editor/{local-file-monaco.tsx => source-code-editor.tsx} (69%) diff --git a/surfsense_web/components/editor-panel/editor-panel.tsx b/surfsense_web/components/editor-panel/editor-panel.tsx index 081359719..137ece5e2 100644 --- 
a/surfsense_web/components/editor-panel/editor-panel.tsx +++ b/surfsense_web/components/editor-panel/editor-panel.tsx @@ -7,7 +7,7 @@ import { useCallback, useEffect, useRef, useState } from "react"; import { toast } from "sonner"; import { closeEditorPanelAtom, editorPanelAtom } from "@/atoms/editor/editor-panel.atom"; import { VersionHistoryButton } from "@/components/documents/version-history"; -import { LocalFileMonaco } from "@/components/editor/local-file-monaco"; +import { SourceCodeEditor } from "@/components/editor/source-code-editor"; import { MarkdownViewer } from "@/components/markdown-viewer"; import { Alert, AlertDescription } from "@/components/ui/alert"; import { Button } from "@/components/ui/button"; @@ -35,6 +35,7 @@ interface EditorContent { } const EDITABLE_DOCUMENT_TYPES = new Set(["FILE", "NOTE"]); +type EditorRenderMode = "rich_markdown" | "source_code"; function EditorPanelSkeleton() { return ( @@ -85,6 +86,7 @@ export function EditorPanelContent({ const changeCountRef = useRef(0); const [displayTitle, setDisplayTitle] = useState(title || "Untitled"); const isLocalFileMode = kind === "local_file"; + const editorRenderMode: EditorRenderMode = isLocalFileMode ? "source_code" : "rich_markdown"; const isLargeDocument = (editorDoc?.content_size_bytes ?? 0) > LARGE_DOCUMENT_THRESHOLD; @@ -246,7 +248,8 @@ export function EditorPanelContent({ }, [documentId, electronAPI, isLocalFileMode, localFilePath, searchSpaceId]); const isEditableType = editorDoc - ? (isLocalFileMode || EDITABLE_DOCUMENT_TYPES.has(editorDoc.document_type ?? "")) && + ? (editorRenderMode === "source_code" || + EDITABLE_DOCUMENT_TYPES.has(editorDoc.document_type ?? "")) && !isLargeDocument : false; const localFileLanguage = inferMonacoLanguageFromPath(localFilePath); @@ -354,10 +357,10 @@ export function EditorPanelContent({
- ) : isLocalFileMode ? ( + ) : editorRenderMode === "source_code" ? (
- { diff --git a/surfsense_web/components/editor/local-file-monaco.tsx b/surfsense_web/components/editor/source-code-editor.tsx similarity index 69% rename from surfsense_web/components/editor/local-file-monaco.tsx rename to surfsense_web/components/editor/source-code-editor.tsx index b27203341..7bb7bee35 100644 --- a/surfsense_web/components/editor/local-file-monaco.tsx +++ b/surfsense_web/components/editor/source-code-editor.tsx @@ -2,29 +2,44 @@ import dynamic from "next/dynamic"; import { useTheme } from "next-themes"; +import { Spinner } from "@/components/ui/spinner"; const MonacoEditor = dynamic(() => import("@monaco-editor/react"), { ssr: false, }); -interface LocalFileMonacoProps { - filePath: string; - language: string; +interface SourceCodeEditorProps { value: string; onChange: (next: string) => void; + path?: string; + language?: string; + readOnly?: boolean; + fontSize?: number; } -export function LocalFileMonaco({ filePath, language, value, onChange }: LocalFileMonacoProps) { +export function SourceCodeEditor({ + value, + onChange, + path, + language = "plaintext", + readOnly = false, + fontSize = 12, +}: SourceCodeEditorProps) { const { resolvedTheme } = useTheme(); return (
onChange(next ?? "")} + loading={ +
+ +
+ } options={{ automaticLayout: true, minimap: { enabled: false }, @@ -44,11 +59,12 @@ export function LocalFileMonaco({ filePath, language, value, onChange }: LocalFi }, tabSize: 2, insertSpaces: true, - fontSize: 12, + fontSize, fontFamily: "ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, Liberation Mono, monospace", renderWhitespace: "selection", smoothScrolling: true, + readOnly, }} />
From 3f203f8c49cace8010d88d9dcf812852196fae16 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 18:29:32 +0530 Subject: [PATCH 089/113] feat(editor): implement auto-save functionality and manual save command in SourceCodeEditor --- .../components/editor-panel/editor-panel.tsx | 12 ++-- .../components/editor/source-code-editor.tsx | 55 +++++++++++++++++++ .../layout/ui/right-panel/RightPanel.tsx | 2 +- 3 files changed, 63 insertions(+), 6 deletions(-) diff --git a/surfsense_web/components/editor-panel/editor-panel.tsx b/surfsense_web/components/editor-panel/editor-panel.tsx index 137ece5e2..739428df3 100644 --- a/surfsense_web/components/editor-panel/editor-panel.tsx +++ b/surfsense_web/components/editor-panel/editor-panel.tsx @@ -187,7 +187,7 @@ export function EditorPanelContent({ setEditedMarkdown(md); }, []); - const handleSave = useCallback(async () => { + const handleSave = useCallback(async (options?: { silent?: boolean }) => { setSaving(true); try { if (isLocalFileMode) { @@ -197,18 +197,18 @@ export function EditorPanelContent({ if (!electronAPI?.writeAgentLocalFileText) { throw new Error("Local file editor is available only in desktop mode."); } + const contentToSave = markdownRef.current; const writeResult = await electronAPI.writeAgentLocalFileText( localFilePath, - markdownRef.current + contentToSave ); if (!writeResult.ok) { throw new Error(writeResult.error || "Failed to save local file"); } setEditorDoc((prev) => - prev ? { ...prev, source_markdown: markdownRef.current } : prev + prev ? { ...prev, source_markdown: contentToSave } : prev ); - setEditedMarkdown(null); - toast.success("File saved"); + setEditedMarkdown(markdownRef.current === contentToSave ? null : markdownRef.current); return; } if (!searchSpaceId || !documentId) { @@ -363,6 +363,8 @@ export function EditorPanelContent({ path={localFilePath ?? 
"local-file.txt"} language={localFileLanguage} value={localFileContent} + onSave={() => handleSave({ silent: true })} + saveMode="auto" onChange={(next) => { markdownRef.current = next; setLocalFileContent(next); diff --git a/surfsense_web/components/editor/source-code-editor.tsx b/surfsense_web/components/editor/source-code-editor.tsx index 7bb7bee35..bd3728721 100644 --- a/surfsense_web/components/editor/source-code-editor.tsx +++ b/surfsense_web/components/editor/source-code-editor.tsx @@ -1,6 +1,7 @@ "use client"; import dynamic from "next/dynamic"; +import { useEffect, useRef } from "react"; import { useTheme } from "next-themes"; import { Spinner } from "@/components/ui/spinner"; @@ -15,6 +16,9 @@ interface SourceCodeEditorProps { language?: string; readOnly?: boolean; fontSize?: number; + onSave?: () => Promise | void; + saveMode?: "manual" | "auto" | "both"; + autoSaveDelayMs?: number; } export function SourceCodeEditor({ @@ -24,8 +28,50 @@ export function SourceCodeEditor({ language = "plaintext", readOnly = false, fontSize = 12, + onSave, + saveMode = "manual", + autoSaveDelayMs = 800, }: SourceCodeEditorProps) { const { resolvedTheme } = useTheme(); + const saveTimerRef = useRef | null>(null); + const onSaveRef = useRef(onSave); + const skipNextAutoSaveRef = useRef(true); + + useEffect(() => { + onSaveRef.current = onSave; + }, [onSave]); + + useEffect(() => { + skipNextAutoSaveRef.current = true; + }, [path]); + + useEffect(() => { + if (readOnly || !onSaveRef.current) return; + if (saveMode !== "auto" && saveMode !== "both") return; + + if (skipNextAutoSaveRef.current) { + skipNextAutoSaveRef.current = false; + return; + } + + if (saveTimerRef.current) { + clearTimeout(saveTimerRef.current); + } + + saveTimerRef.current = setTimeout(() => { + void onSaveRef.current?.(); + saveTimerRef.current = null; + }, autoSaveDelayMs); + + return () => { + if (saveTimerRef.current) { + clearTimeout(saveTimerRef.current); + saveTimerRef.current = null; + } + }; + }, 
[autoSaveDelayMs, readOnly, saveMode, value]); + + const isManualSaveEnabled = !!onSave && !readOnly && (saveMode === "manual" || saveMode === "both"); return (
@@ -40,6 +86,12 @@ export function SourceCodeEditor({
} + onMount={(editor, monaco) => { + if (!isManualSaveEnabled) return; + editor.addCommand(monaco.KeyMod.CtrlCmd | monaco.KeyCode.KeyS, () => { + void onSaveRef.current?.(); + }); + }} options={{ automaticLayout: true, minimap: { enabled: false }, @@ -51,6 +103,9 @@ export function SourceCodeEditor({ overviewRulerLanes: 0, hideCursorInOverviewRuler: true, scrollBeyondLastLine: false, + renderLineHighlight: "none", + selectionHighlight: false, + occurrencesHighlight: "off", wordWrap: "off", scrollbar: { vertical: "hidden", diff --git a/surfsense_web/components/layout/ui/right-panel/RightPanel.tsx b/surfsense_web/components/layout/ui/right-panel/RightPanel.tsx index f6debed34..2394480b2 100644 --- a/surfsense_web/components/layout/ui/right-panel/RightPanel.tsx +++ b/surfsense_web/components/layout/ui/right-panel/RightPanel.tsx @@ -53,7 +53,7 @@ function CollapseButton({ onClick }: { onClick: () => void }) { Collapse panel - Collapse panel + Collapse panel ); } From fe9ffa1413557ce61244135728030ce72eca99ab Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 18:39:35 +0530 Subject: [PATCH 090/113] refactor(editor): improve SourceCodeEditor styling and enhance scrollbar behavior --- .../components/editor-panel/editor-panel.tsx | 5 +---- .../components/editor/source-code-editor.tsx | 13 ++++++++++--- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/surfsense_web/components/editor-panel/editor-panel.tsx b/surfsense_web/components/editor-panel/editor-panel.tsx index 739428df3..30dcdeb2c 100644 --- a/surfsense_web/components/editor-panel/editor-panel.tsx +++ b/surfsense_web/components/editor-panel/editor-panel.tsx @@ -256,12 +256,9 @@ export function EditorPanelContent({ return ( <> -
+

{displayTitle}

- {isEditableType && editedMarkdown !== null && ( -

Unsaved changes

- )}
{!isLocalFileMode && editorDoc?.document_type && documentId && ( diff --git a/surfsense_web/components/editor/source-code-editor.tsx b/surfsense_web/components/editor/source-code-editor.tsx index bd3728721..2c1f52989 100644 --- a/surfsense_web/components/editor/source-code-editor.tsx +++ b/surfsense_web/components/editor/source-code-editor.tsx @@ -74,7 +74,7 @@ export function SourceCodeEditor({ const isManualSaveEnabled = !!onSave && !readOnly && (saveMode === "manual" || saveMode === "both"); return ( -
+
Date: Thu, 23 Apr 2026 19:25:59 +0530 Subject: [PATCH 091/113] refactor(editor): remove auto-save functionality and simplify SourceCodeEditor props --- .../components/editor-panel/editor-panel.tsx | 168 ++++++++++++++++-- .../components/editor/source-code-editor.tsx | 84 +++++---- .../layout/ui/right-panel/RightPanel.tsx | 2 +- 3 files changed, 198 insertions(+), 56 deletions(-) diff --git a/surfsense_web/components/editor-panel/editor-panel.tsx b/surfsense_web/components/editor-panel/editor-panel.tsx index 30dcdeb2c..b83c4b1d7 100644 --- a/surfsense_web/components/editor-panel/editor-panel.tsx +++ b/surfsense_web/components/editor-panel/editor-panel.tsx @@ -1,7 +1,17 @@ "use client"; import { useAtomValue, useSetAtom } from "jotai"; -import { Download, FileQuestionMark, FileText, Loader2, RefreshCw, XIcon } from "lucide-react"; +import { + Check, + Copy, + Download, + FileQuestionMark, + FileText, + Loader2, + Pencil, + RefreshCw, + XIcon, +} from "lucide-react"; import dynamic from "next/dynamic"; import { useCallback, useEffect, useRef, useState } from "react"; import { toast } from "sonner"; @@ -78,10 +88,13 @@ export function EditorPanelContent({ const [error, setError] = useState(null); const [saving, setSaving] = useState(false); const [downloading, setDownloading] = useState(false); + const [isSourceEditing, setIsSourceEditing] = useState(false); const [editedMarkdown, setEditedMarkdown] = useState(null); const [localFileContent, setLocalFileContent] = useState(""); + const [hasCopied, setHasCopied] = useState(false); const markdownRef = useRef(""); + const copyResetTimeoutRef = useRef | null>(null); const initialLoadDone = useRef(false); const changeCountRef = useRef(0); const [displayTitle, setDisplayTitle] = useState(title || "Untitled"); @@ -97,6 +110,8 @@ export function EditorPanelContent({ setEditorDoc(null); setEditedMarkdown(null); setLocalFileContent(""); + setHasCopied(false); + setIsSourceEditing(false); initialLoadDone.current = false; 
changeCountRef.current = 0; @@ -179,6 +194,14 @@ export function EditorPanelContent({ return () => controller.abort(); }, [documentId, electronAPI, isLocalFileMode, localFilePath, searchSpaceId, title]); + useEffect(() => { + return () => { + if (copyResetTimeoutRef.current) { + clearTimeout(copyResetTimeoutRef.current); + } + }; + }, []); + const handleMarkdownChange = useCallback((md: string) => { markdownRef.current = md; if (!initialLoadDone.current) return; @@ -187,6 +210,22 @@ export function EditorPanelContent({ setEditedMarkdown(md); }, []); + const handleCopy = useCallback(async () => { + try { + const textToCopy = markdownRef.current ?? editorDoc?.source_markdown ?? ""; + await navigator.clipboard.writeText(textToCopy); + setHasCopied(true); + if (copyResetTimeoutRef.current) { + clearTimeout(copyResetTimeoutRef.current); + } + copyResetTimeoutRef.current = setTimeout(() => { + setHasCopied(false); + }, 1400); + } catch (err) { + console.error("Error copying content:", err); + } + }, [editorDoc?.source_markdown]); + const handleSave = useCallback(async (options?: { silent?: boolean }) => { setSaving(true); try { @@ -209,7 +248,7 @@ export function EditorPanelContent({ prev ? { ...prev, source_markdown: contentToSave } : prev ); setEditedMarkdown(markdownRef.current === contentToSave ? null : markdownRef.current); - return; + return true; } if (!searchSpaceId || !documentId) { throw new Error("Missing document context"); @@ -239,9 +278,11 @@ export function EditorPanelContent({ setEditorDoc((prev) => (prev ? { ...prev, source_markdown: markdownRef.current } : prev)); setEditedMarkdown(null); toast.success("Document saved! Reindexing in background..."); + return true; } catch (err) { console.error("Error saving document:", err); toast.error(err instanceof Error ? 
err.message : "Failed to save document"); + return false; } finally { setSaving(false); } @@ -252,26 +293,111 @@ export function EditorPanelContent({ EDITABLE_DOCUMENT_TYPES.has(editorDoc.document_type ?? "")) && !isLargeDocument : false; + const hasUnsavedChanges = editedMarkdown !== null; + const showDesktopHeader = !!onClose; + const isSourceCodeMode = editorRenderMode === "source_code"; + const showEditingActions = isSourceCodeMode && isSourceEditing; const localFileLanguage = inferMonacoLanguageFromPath(localFilePath); return ( <> -
-
-

{displayTitle}

+ {showDesktopHeader ? ( +
+
+

File

+
+ +
+
+
+
+

{displayTitle}

+
+
+ {showEditingActions ? ( + <> + + + + ) : ( + <> + + {isSourceCodeMode && ( + + )} + + )} + {!showEditingActions && !isLocalFileMode && editorDoc?.document_type && documentId && ( + + )} +
+
-
- {!isLocalFileMode && editorDoc?.document_type && documentId && ( - - )} - {onClose && ( - - )} + ) : ( +
+
+

{displayTitle}

+
+
+ {!isLocalFileMode && editorDoc?.document_type && documentId && ( + + )} +
-
+ )}
{isLoading ? ( @@ -360,8 +486,10 @@ export function EditorPanelContent({ path={localFilePath ?? "local-file.txt"} language={localFileLanguage} value={localFileContent} - onSave={() => handleSave({ silent: true })} - saveMode="auto" + onSave={() => { + void handleSave({ silent: true }); + }} + readOnly={!isSourceEditing} onChange={(next) => { markdownRef.current = next; setLocalFileContent(next); @@ -379,7 +507,9 @@ export function EditorPanelContent({ readOnly={false} placeholder="Start writing..." editorVariant="default" - onSave={handleSave} + onSave={() => { + void handleSave(); + }} hasUnsavedChanges={editedMarkdown !== null} isSaving={saving} defaultEditing={true} diff --git a/surfsense_web/components/editor/source-code-editor.tsx b/surfsense_web/components/editor/source-code-editor.tsx index 2c1f52989..11f9266b6 100644 --- a/surfsense_web/components/editor/source-code-editor.tsx +++ b/surfsense_web/components/editor/source-code-editor.tsx @@ -17,8 +17,6 @@ interface SourceCodeEditorProps { readOnly?: boolean; fontSize?: number; onSave?: () => Promise | void; - saveMode?: "manual" | "auto" | "both"; - autoSaveDelayMs?: number; } export function SourceCodeEditor({ @@ -29,64 +27,78 @@ export function SourceCodeEditor({ readOnly = false, fontSize = 12, onSave, - saveMode = "manual", - autoSaveDelayMs = 800, }: SourceCodeEditorProps) { const { resolvedTheme } = useTheme(); - const saveTimerRef = useRef | null>(null); const onSaveRef = useRef(onSave); - const skipNextAutoSaveRef = useRef(true); + const monacoRef = useRef(null); useEffect(() => { onSaveRef.current = onSave; }, [onSave]); - useEffect(() => { - skipNextAutoSaveRef.current = true; - }, [path]); + const resolveCssColorToHex = (cssColorValue: string): string | null => { + if (typeof document === "undefined") return null; + const probe = document.createElement("div"); + probe.style.color = cssColorValue; + probe.style.position = "absolute"; + probe.style.pointerEvents = "none"; + probe.style.opacity = 
"0"; + document.body.appendChild(probe); + const computedColor = getComputedStyle(probe).color; + probe.remove(); + const match = computedColor.match(/rgba?\((\d+),\s*(\d+),\s*(\d+)/i); + if (!match) return null; + const toHex = (value: string) => Number(value).toString(16).padStart(2, "0"); + return `#${toHex(match[1])}${toHex(match[2])}${toHex(match[3])}`; + }; + + const applySidebarTheme = (monaco: any) => { + const isDark = resolvedTheme === "dark"; + const themeName = isDark ? "surfsense-dark" : "surfsense-light"; + const fallbackBg = isDark ? "#1e1e1e" : "#ffffff"; + const sidebarBgHex = resolveCssColorToHex("var(--sidebar)") ?? fallbackBg; + monaco.editor.defineTheme(themeName, { + base: isDark ? "vs-dark" : "vs", + inherit: true, + rules: [], + colors: { + "editor.background": sidebarBgHex, + "editorGutter.background": sidebarBgHex, + "minimap.background": sidebarBgHex, + "editorLineNumber.background": sidebarBgHex, + "editor.lineHighlightBackground": "#00000000", + }, + }); + monaco.editor.setTheme(themeName); + }; useEffect(() => { - if (readOnly || !onSaveRef.current) return; - if (saveMode !== "auto" && saveMode !== "both") return; + if (!monacoRef.current) return; + applySidebarTheme(monacoRef.current); + }, [resolvedTheme]); - if (skipNextAutoSaveRef.current) { - skipNextAutoSaveRef.current = false; - return; - } - - if (saveTimerRef.current) { - clearTimeout(saveTimerRef.current); - } - - saveTimerRef.current = setTimeout(() => { - void onSaveRef.current?.(); - saveTimerRef.current = null; - }, autoSaveDelayMs); - - return () => { - if (saveTimerRef.current) { - clearTimeout(saveTimerRef.current); - saveTimerRef.current = null; - } - }; - }, [autoSaveDelayMs, readOnly, saveMode, value]); - - const isManualSaveEnabled = !!onSave && !readOnly && (saveMode === "manual" || saveMode === "both"); + const isManualSaveEnabled = !!onSave && !readOnly; return ( -
+
onChange(next ?? "")} loading={
} + beforeMount={(monaco) => { + monacoRef.current = monaco; + applySidebarTheme(monaco); + }} onMount={(editor, monaco) => { + monacoRef.current = monaco; + applySidebarTheme(monaco); if (!isManualSaveEnabled) return; editor.addCommand(monaco.KeyMod.CtrlCmd | monaco.KeyCode.KeyS, () => { void onSaveRef.current?.(); diff --git a/surfsense_web/components/layout/ui/right-panel/RightPanel.tsx b/surfsense_web/components/layout/ui/right-panel/RightPanel.tsx index 2394480b2..c2422bf34 100644 --- a/surfsense_web/components/layout/ui/right-panel/RightPanel.tsx +++ b/surfsense_web/components/layout/ui/right-panel/RightPanel.tsx @@ -94,7 +94,7 @@ export function RightPanelExpandButton() { Expand panel - Expand panel + Expand panel
); From 06b509213cf506c7ddd6f6eaabd2a648f8d5dbca Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 19:52:55 +0530 Subject: [PATCH 092/113] feat(editor): add mode toggle functionality and improve editor state management --- .../components/editor-panel/editor-panel.tsx | 123 +++++++++++++----- .../components/editor/plate-editor.tsx | 5 +- .../editor/plugins/fixed-toolbar-kit.tsx | 24 +++- 3 files changed, 116 insertions(+), 36 deletions(-) diff --git a/surfsense_web/components/editor-panel/editor-panel.tsx b/surfsense_web/components/editor-panel/editor-panel.tsx index b83c4b1d7..0170d13da 100644 --- a/surfsense_web/components/editor-panel/editor-panel.tsx +++ b/surfsense_web/components/editor-panel/editor-panel.tsx @@ -88,7 +88,7 @@ export function EditorPanelContent({ const [error, setError] = useState(null); const [saving, setSaving] = useState(false); const [downloading, setDownloading] = useState(false); - const [isSourceEditing, setIsSourceEditing] = useState(false); + const [isEditing, setIsEditing] = useState(false); const [editedMarkdown, setEditedMarkdown] = useState(null); const [localFileContent, setLocalFileContent] = useState(""); @@ -111,7 +111,7 @@ export function EditorPanelContent({ setEditedMarkdown(null); setLocalFileContent(""); setHasCopied(false); - setIsSourceEditing(false); + setIsEditing(false); initialLoadDone.current = false; changeCountRef.current = 0; @@ -295,10 +295,18 @@ export function EditorPanelContent({ : false; const hasUnsavedChanges = editedMarkdown !== null; const showDesktopHeader = !!onClose; - const isSourceCodeMode = editorRenderMode === "source_code"; - const showEditingActions = isSourceCodeMode && isSourceEditing; + const showEditingActions = isEditableType && isEditing; const localFileLanguage = inferMonacoLanguageFromPath(localFilePath); + const handleCancelEditing = useCallback(() => { + const savedContent = editorDoc?.source_markdown ?? 
""; + markdownRef.current = savedContent; + setLocalFileContent(savedContent); + setEditedMarkdown(null); + changeCountRef.current = 0; + setIsEditing(false); + }, [editorDoc?.source_markdown]); + return ( <> {showDesktopHeader ? ( @@ -323,13 +331,7 @@ export function EditorPanelContent({ variant="ghost" size="sm" className="h-6 px-2 text-xs" - onClick={() => { - const savedContent = editorDoc?.source_markdown ?? ""; - markdownRef.current = savedContent; - setLocalFileContent(savedContent); - setEditedMarkdown(null); - setIsSourceEditing(false); - }} + onClick={handleCancelEditing} disabled={saving} > Cancel @@ -340,7 +342,7 @@ export function EditorPanelContent({ className="relative h-6 w-[56px] px-0 text-xs" onClick={async () => { const saveSucceeded = await handleSave({ silent: true }); - if (saveSucceeded) setIsSourceEditing(false); + if (saveSucceeded) setIsEditing(false); }} disabled={saving || !hasUnsavedChanges} > @@ -364,15 +366,19 @@ export function EditorPanelContent({ {hasCopied ? "Copied file contents" : "Copy file contents"} - {isSourceCodeMode && ( + {isEditableType && ( )} @@ -389,11 +395,69 @@ export function EditorPanelContent({

{displayTitle}

- {!isLocalFileMode && editorDoc?.document_type && documentId && ( - + {showEditingActions ? ( + <> + + + + ) : ( + <> + + {isEditableType && ( + + )} + {!isLocalFileMode && editorDoc?.document_type && documentId && ( + + )} + )}
@@ -489,7 +553,7 @@ export function EditorPanelContent({ onSave={() => { void handleSave({ silent: true }); }} - readOnly={!isSourceEditing} + readOnly={!isEditing} onChange={(next) => { markdownRef.current = next; setLocalFileContent(next); @@ -500,19 +564,15 @@ export function EditorPanelContent({
) : isEditableType ? ( { - void handleSave(); - }} - hasUnsavedChanges={editedMarkdown !== null} - isSaving={saving} - defaultEditing={true} + allowModeToggle={false} + defaultEditing={isEditing} className="[&_[role=toolbar]]:!bg-sidebar" /> ) : ( @@ -561,6 +621,8 @@ function MobileEditorDrawer() { const panelState = useAtomValue(editorPanelAtom); const closePanel = useSetAtom(closeEditorPanelAtom); + if (panelState.kind === "local_file") return null; + const hasTarget = panelState.kind === "document" ? !!panelState.documentId && !!panelState.searchSpaceId @@ -604,6 +666,7 @@ export function EditorPanel() { : !!panelState.localFilePath; if (!panelState.isOpen || !hasTarget) return null; + if (!isDesktop && panelState.kind === "local_file") return null; if (isDesktop) { return ; @@ -620,7 +683,7 @@ export function MobileEditorPanel() { ? !!panelState.documentId && !!panelState.searchSpaceId : !!panelState.localFilePath; - if (isDesktop || !panelState.isOpen || !hasTarget) return null; + if (isDesktop || !panelState.isOpen || !hasTarget || panelState.kind === "local_file") return null; return ; } diff --git a/surfsense_web/components/editor/plate-editor.tsx b/surfsense_web/components/editor/plate-editor.tsx index 61f84126c..371326bd3 100644 --- a/surfsense_web/components/editor/plate-editor.tsx +++ b/surfsense_web/components/editor/plate-editor.tsx @@ -42,6 +42,8 @@ export interface PlateEditorProps { hasUnsavedChanges?: boolean; /** Whether a save is in progress */ isSaving?: boolean; + /** Whether edit/view mode toggle UI should be available in toolbars. */ + allowModeToggle?: boolean; /** Start the editor in editing mode instead of viewing mode. Ignored when readOnly is true. 
*/ defaultEditing?: boolean; /** @@ -91,6 +93,7 @@ export function PlateEditor({ onSave, hasUnsavedChanges = false, isSaving = false, + allowModeToggle = true, defaultEditing = false, preset = "full", extraPlugins = [], @@ -174,7 +177,7 @@ export function PlateEditor({ }, [html, markdown, editor]); // When not forced read-only, the user can toggle between editing/viewing. - const canToggleMode = !readOnly; + const canToggleMode = !readOnly && allowModeToggle; const contextProviderValue = useMemo( () => ({ diff --git a/surfsense_web/components/editor/plugins/fixed-toolbar-kit.tsx b/surfsense_web/components/editor/plugins/fixed-toolbar-kit.tsx index 85e0a08f2..8b776a456 100644 --- a/surfsense_web/components/editor/plugins/fixed-toolbar-kit.tsx +++ b/surfsense_web/components/editor/plugins/fixed-toolbar-kit.tsx @@ -1,19 +1,33 @@ "use client"; import { createPlatePlugin } from "platejs/react"; +import { useEditorReadOnly } from "platejs/react"; +import { useEditorSave } from "@/components/editor/editor-save-context"; import { FixedToolbar } from "@/components/ui/fixed-toolbar"; import { FixedToolbarButtons } from "@/components/ui/fixed-toolbar-buttons"; +function ConditionalFixedToolbar() { + const readOnly = useEditorReadOnly(); + const { onSave, hasUnsavedChanges, canToggleMode } = useEditorSave(); + + const hasVisibleControls = + !readOnly || canToggleMode || (!!onSave && hasUnsavedChanges && !readOnly); + + if (!hasVisibleControls) return null; + + return ( + + + + ); +} + export const FixedToolbarKit = [ createPlatePlugin({ key: "fixed-toolbar", render: { - beforeEditable: () => ( - - - - ), + beforeEditable: () => , }, }), ]; From 0381632bc2a199bf1a93b7970aedba390b262e30 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 20:03:18 +0530 Subject: [PATCH 093/113] refactor(editor): replace Loader2 with Spinner component and enhance save button visibility --- 
.../components/editor-panel/editor-panel.tsx | 12 +-- .../components/report-panel/report-panel.tsx | 76 +++++++++++++++++-- 2 files changed, 76 insertions(+), 12 deletions(-) diff --git a/surfsense_web/components/editor-panel/editor-panel.tsx b/surfsense_web/components/editor-panel/editor-panel.tsx index 0170d13da..50ee158c4 100644 --- a/surfsense_web/components/editor-panel/editor-panel.tsx +++ b/surfsense_web/components/editor-panel/editor-panel.tsx @@ -7,7 +7,6 @@ import { Download, FileQuestionMark, FileText, - Loader2, Pencil, RefreshCw, XIcon, @@ -22,6 +21,7 @@ import { MarkdownViewer } from "@/components/markdown-viewer"; import { Alert, AlertDescription } from "@/components/ui/alert"; import { Button } from "@/components/ui/button"; import { Drawer, DrawerContent, DrawerHandle, DrawerTitle } from "@/components/ui/drawer"; +import { Spinner } from "@/components/ui/spinner"; import { useMediaQuery } from "@/hooks/use-media-query"; import { useElectronAPI } from "@/hooks/use-platform"; import { authenticatedFetch, getBearerToken, redirectToLogin } from "@/lib/auth-utils"; @@ -346,8 +346,8 @@ export function EditorPanelContent({ }} disabled={saving || !hasUnsavedChanges} > - Save - {saving && } + Save + {saving && } ) : ( @@ -416,8 +416,8 @@ export function EditorPanelContent({ }} disabled={saving || !hasUnsavedChanges} > - Save - {saving && } + Save + {saving && } ) : ( @@ -534,7 +534,7 @@ export function EditorPanelContent({ }} > {downloading ? 
( - + ) : ( )} diff --git a/surfsense_web/components/report-panel/report-panel.tsx b/surfsense_web/components/report-panel/report-panel.tsx index 591155757..709b10467 100644 --- a/surfsense_web/components/report-panel/report-panel.tsx +++ b/surfsense_web/components/report-panel/report-panel.tsx @@ -1,7 +1,7 @@ "use client"; import { useAtomValue, useSetAtom } from "jotai"; -import { ChevronDownIcon, XIcon } from "lucide-react"; +import { ChevronDownIcon, Pencil, XIcon } from "lucide-react"; import dynamic from "next/dynamic"; import { useCallback, useEffect, useRef, useState } from "react"; import { toast } from "sonner"; @@ -125,6 +125,7 @@ export function ReportPanelContent({ // Editor state — tracks the latest markdown from the Plate editor const [editedMarkdown, setEditedMarkdown] = useState(null); + const [isEditing, setIsEditing] = useState(false); // Read-only when public (shareToken) OR shared (SEARCH_SPACE visibility) const currentThreadState = useAtomValue(currentThreadAtom); @@ -188,6 +189,7 @@ export function ReportPanelContent({ // Reset edited markdown when switching versions or reports useEffect(() => { setEditedMarkdown(null); + setIsEditing(false); }, [activeReportId]); // Copy markdown content (uses latest editor content) @@ -257,7 +259,7 @@ export function ReportPanelContent({ // Save edited report content const handleSave = useCallback(async () => { - if (!currentMarkdown || !activeReportId) return; + if (!currentMarkdown || !activeReportId) return false; setSaving(true); try { const response = await authenticatedFetch( @@ -278,9 +280,11 @@ export function ReportPanelContent({ setReportContent((prev) => (prev ? { ...prev, content: currentMarkdown } : prev)); setEditedMarkdown(null); toast.success("Report saved successfully"); + return true; } catch (err) { console.error("Error saving report:", err); toast.error(err instanceof Error ? 
err.message : "Failed to save report"); + return false; } finally { setSaving(false); } @@ -289,6 +293,14 @@ export function ReportPanelContent({ const activeVersionIndex = versions.findIndex((v) => v.id === activeReportId); const isPublic = !!shareToken; const btnBg = isPublic ? "bg-main-panel" : "bg-sidebar"; + const isResume = reportContent?.content_type === "typst"; + const showReportEditingTier = !isResume; + const hasUnsavedChanges = editedMarkdown !== null; + + const handleCancelEditing = useCallback(() => { + setEditedMarkdown(null); + setIsEditing(false); + }, []); return ( <> @@ -383,6 +395,58 @@ export function ReportPanelContent({ )}
+ {showReportEditingTier && ( +
+
+

+ {reportContent?.title || title} +

+
+
+ {!isReadOnly && + (isEditing ? ( + <> + + + + ) : ( + + ))} +
+
+ )} + {/* Report content — skeleton/error/viewer/editor shown only in this area */}
{isLoading ? ( @@ -406,15 +470,15 @@ export function ReportPanelContent({
) : ( ) From a1d3356bf55b8ebb4f70d37e2987f115609afde6 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 20:13:29 +0530 Subject: [PATCH 094/113] feat(editor): add reserveToolbarSpace option to enhance toolbar visibility management --- .../components/editor-panel/editor-panel.tsx | 1 + .../components/editor/editor-save-context.tsx | 3 +++ .../components/editor/plate-editor.tsx | 6 +++++- .../editor/plugins/fixed-toolbar-kit.tsx | 11 +++++++++-- .../components/report-panel/report-panel.tsx | 19 ++++++++++++++++++- 5 files changed, 36 insertions(+), 4 deletions(-) diff --git a/surfsense_web/components/editor-panel/editor-panel.tsx b/surfsense_web/components/editor-panel/editor-panel.tsx index 50ee158c4..d125ec143 100644 --- a/surfsense_web/components/editor-panel/editor-panel.tsx +++ b/surfsense_web/components/editor-panel/editor-panel.tsx @@ -572,6 +572,7 @@ export function EditorPanelContent({ placeholder="Start writing..." 
editorVariant="default" allowModeToggle={false} + reserveToolbarSpace defaultEditing={isEditing} className="[&_[role=toolbar]]:!bg-sidebar" /> diff --git a/surfsense_web/components/editor/editor-save-context.tsx b/surfsense_web/components/editor/editor-save-context.tsx index d53a4adce..b4b3935a4 100644 --- a/surfsense_web/components/editor/editor-save-context.tsx +++ b/surfsense_web/components/editor/editor-save-context.tsx @@ -11,12 +11,15 @@ interface EditorSaveContextValue { isSaving: boolean; /** Whether the user can toggle between editing and viewing modes */ canToggleMode: boolean; + /** Whether fixed-toolbar space should be reserved even when controls are hidden */ + reserveToolbarSpace: boolean; } export const EditorSaveContext = createContext({ hasUnsavedChanges: false, isSaving: false, canToggleMode: false, + reserveToolbarSpace: false, }); export function useEditorSave() { diff --git a/surfsense_web/components/editor/plate-editor.tsx b/surfsense_web/components/editor/plate-editor.tsx index 371326bd3..481a420fb 100644 --- a/surfsense_web/components/editor/plate-editor.tsx +++ b/surfsense_web/components/editor/plate-editor.tsx @@ -44,6 +44,8 @@ export interface PlateEditorProps { isSaving?: boolean; /** Whether edit/view mode toggle UI should be available in toolbars. */ allowModeToggle?: boolean; + /** Reserve fixed-toolbar vertical space even when controls are hidden. */ + reserveToolbarSpace?: boolean; /** Start the editor in editing mode instead of viewing mode. Ignored when readOnly is true. 
*/ defaultEditing?: boolean; /** @@ -94,6 +96,7 @@ export function PlateEditor({ hasUnsavedChanges = false, isSaving = false, allowModeToggle = true, + reserveToolbarSpace = false, defaultEditing = false, preset = "full", extraPlugins = [], @@ -185,8 +188,9 @@ export function PlateEditor({ hasUnsavedChanges, isSaving, canToggleMode, + reserveToolbarSpace, }), - [onSave, hasUnsavedChanges, isSaving, canToggleMode] + [onSave, hasUnsavedChanges, isSaving, canToggleMode, reserveToolbarSpace] ); return ( diff --git a/surfsense_web/components/editor/plugins/fixed-toolbar-kit.tsx b/surfsense_web/components/editor/plugins/fixed-toolbar-kit.tsx index 8b776a456..bdda0263d 100644 --- a/surfsense_web/components/editor/plugins/fixed-toolbar-kit.tsx +++ b/surfsense_web/components/editor/plugins/fixed-toolbar-kit.tsx @@ -9,12 +9,19 @@ import { FixedToolbarButtons } from "@/components/ui/fixed-toolbar-buttons"; function ConditionalFixedToolbar() { const readOnly = useEditorReadOnly(); - const { onSave, hasUnsavedChanges, canToggleMode } = useEditorSave(); + const { onSave, hasUnsavedChanges, canToggleMode, reserveToolbarSpace } = useEditorSave(); const hasVisibleControls = !readOnly || canToggleMode || (!!onSave && hasUnsavedChanges && !readOnly); - if (!hasVisibleControls) return null; + if (!hasVisibleControls) { + if (!reserveToolbarSpace) return null; + return ( + +
+ + ); + } return ( diff --git a/surfsense_web/components/report-panel/report-panel.tsx b/surfsense_web/components/report-panel/report-panel.tsx index 709b10467..0f6614ebf 100644 --- a/surfsense_web/components/report-panel/report-panel.tsx +++ b/surfsense_web/components/report-panel/report-panel.tsx @@ -116,6 +116,7 @@ export function ReportPanelContent({ const [exporting, setExporting] = useState(null); const [saving, setSaving] = useState(false); const copyTimerRef = useRef | undefined>(undefined); + const changeCountRef = useRef(0); useEffect(() => { return () => { @@ -190,8 +191,21 @@ export function ReportPanelContent({ useEffect(() => { setEditedMarkdown(null); setIsEditing(false); + changeCountRef.current = 0; }, [activeReportId]); + const handleReportMarkdownChange = useCallback( + (nextMarkdown: string) => { + if (!isEditing) return; + changeCountRef.current += 1; + // Plate may emit an initial normalize/serialize change on mount. + if (changeCountRef.current <= 1) return; + const savedMarkdown = reportContent?.content ?? ""; + setEditedMarkdown(nextMarkdown === savedMarkdown ? null : nextMarkdown); + }, + [isEditing, reportContent?.content] + ); + // Copy markdown content (uses latest editor content) const handleCopy = useCallback(async () => { if (!currentMarkdown) return; @@ -299,6 +313,7 @@ export function ReportPanelContent({ const handleCancelEditing = useCallback(() => { setEditedMarkdown(null); + changeCountRef.current = 0; setIsEditing(false); }, []); @@ -436,6 +451,7 @@ export function ReportPanelContent({ className="size-6" onClick={() => { setEditedMarkdown(null); + changeCountRef.current = 0; setIsEditing(true); }} > @@ -473,11 +489,12 @@ export function ReportPanelContent({ key={`report-${activeReportId}-${isEditing ? "editing" : "viewing"}`} preset="full" markdown={reportContent.content} - onMarkdownChange={setEditedMarkdown} + onMarkdownChange={handleReportMarkdownChange} readOnly={!isEditing} placeholder="Report content..." 
editorVariant="default" allowModeToggle={false} + reserveToolbarSpace defaultEditing={isEditing} className="[&_[role=toolbar]]:!bg-sidebar" /> From b5921bf1399559c31c3a10afc03ba61af49b5fbf Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 20:47:00 +0530 Subject: [PATCH 095/113] feat(markdown): enhance code block rendering for local web files and improve inline code styling --- .../components/assistant-ui/markdown-text.tsx | 23 +++++++++++++++++-- .../components/editor-panel/editor-panel.tsx | 2 +- 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/surfsense_web/components/assistant-ui/markdown-text.tsx b/surfsense_web/components/assistant-ui/markdown-text.tsx index a2ce30111..8f2184bd3 100644 --- a/surfsense_web/components/assistant-ui/markdown-text.tsx +++ b/surfsense_web/components/assistant-ui/markdown-text.tsx @@ -405,6 +405,14 @@ const defaultComponents = memoizeMarkdownComponents({ const openEditorPanel = useSetAtom(openEditorPanelAtom); const params = useParams(); const electronAPI = useElectronAPI(); + const language = /language-(\w+)/.exec(className || "")?.[1] ?? "text"; + const codeString = String(children).replace(/\n$/, ""); + const isWebLocalFileCodeBlock = + isCodeBlock && + !electronAPI && + isVirtualFilePathToken(codeString.trim()) && + !codeString.trim().startsWith("//") && + !codeString.includes("\n"); if (!isCodeBlock) { const inlineValue = String(children ?? "").trim(); const isLocalPath = @@ -451,8 +459,19 @@ const defaultComponents = memoizeMarkdownComponents({ ); } - const language = /language-(\w+)/.exec(className || "")?.[1] ?? "text"; - const codeString = String(children).replace(/\n$/, ""); + if (isWebLocalFileCodeBlock) { + return ( + + {codeString.trim()} + + ); + } return (
-

File

+

File

diff --git a/surfsense_web/components/layout/ui/sidebar/SidebarCollapseButton.tsx b/surfsense_web/components/layout/ui/sidebar/SidebarCollapseButton.tsx index a01937cd6..0eb409349 100644 --- a/surfsense_web/components/layout/ui/sidebar/SidebarCollapseButton.tsx +++ b/surfsense_web/components/layout/ui/sidebar/SidebarCollapseButton.tsx @@ -1,6 +1,6 @@ "use client"; -import { PanelLeft, PanelLeftClose } from "lucide-react"; +import { PanelLeft } from "lucide-react"; import { useTranslations } from "next-intl"; import { Button } from "@/components/ui/button"; import { ShortcutKbd } from "@/components/ui/shortcut-kbd"; @@ -23,7 +23,7 @@ export function SidebarCollapseButton({ const button = ( ); diff --git a/surfsense_web/components/report-panel/report-panel.tsx b/surfsense_web/components/report-panel/report-panel.tsx index 0f6614ebf..c7a8509ed 100644 --- a/surfsense_web/components/report-panel/report-panel.tsx +++ b/surfsense_web/components/report-panel/report-panel.tsx @@ -1,7 +1,7 @@ "use client"; import { useAtomValue, useSetAtom } from "jotai"; -import { ChevronDownIcon, Pencil, XIcon } from "lucide-react"; +import { Check, ChevronDownIcon, Copy, Pencil, XIcon } from "lucide-react"; import dynamic from "next/dynamic"; import { useCallback, useEffect, useRef, useState } from "react"; import { toast } from "sonner"; @@ -306,7 +306,6 @@ export function ReportPanelContent({ const activeVersionIndex = versions.findIndex((v) => v.id === activeReportId); const isPublic = !!shareToken; - const btnBg = isPublic ? "bg-main-panel" : "bg-sidebar"; const isResume = reportContent?.content_type === "typst"; const showReportEditingTier = !isResume; const hasUnsavedChanges = editedMarkdown !== null; @@ -322,19 +321,6 @@ export function ReportPanelContent({ {/* Action bar — always visible; buttons are disabled while loading */}
- {/* Copy button — hidden for Typst (resume) */} - {reportContent?.content_type !== "typst" && ( - - )} - {/* Export — plain button for resume (typst), dropdown for others */} {reportContent?.content_type === "typst" ? ( @@ -353,7 +339,7 @@ export function ReportPanelContent({ variant="outline" size="sm" disabled={isLoading || !reportContent?.content} - className={`h-8 px-3.5 py-4 text-[15px] gap-1.5 ${btnBg} select-none`} + className={`h-8 px-3.5 py-4 text-[15px] gap-1.5 ${isPublic ? "bg-main-panel" : "bg-sidebar"} select-none`} > Export @@ -379,7 +365,7 @@ export function ReportPanelContent({
+ {!isEditing && ( + + )} {!isReadOnly && (isEditing ? ( <> From 84145566e3e7666a1b0d8cb514dbd70dbf44a948 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 22:27:58 +0530 Subject: [PATCH 097/113] feat(editor): implement local filesystem trust dialog and enhance filesystem mode selection --- .../components/assistant-ui/thread.tsx | 265 +++++++++++++++--- 1 file changed, 222 insertions(+), 43 deletions(-) diff --git a/surfsense_web/components/assistant-ui/thread.tsx b/surfsense_web/components/assistant-ui/thread.tsx index 094d99a29..9df41ee55 100644 --- a/surfsense_web/components/assistant-ui/thread.tsx +++ b/surfsense_web/components/assistant-ui/thread.tsx @@ -12,11 +12,15 @@ import { AlertCircle, ArrowDownIcon, ArrowUpIcon, + Check, ChevronDown, ChevronUp, Clipboard, Dot, + Folder, + FolderPlus, Globe, + Laptop, Plus, Settings2, SquareIcon, @@ -66,6 +70,16 @@ import { } from "@/components/new-chat/document-mention-picker"; import { PromptPicker, type PromptPickerRef } from "@/components/new-chat/prompt-picker"; import { Avatar, AvatarFallback, AvatarGroup } from "@/components/ui/avatar"; +import { + AlertDialog, + AlertDialogAction, + AlertDialogCancel, + AlertDialogContent, + AlertDialogDescription, + AlertDialogFooter, + AlertDialogHeader, + AlertDialogTitle, +} from "@/components/ui/alert-dialog"; import { Button } from "@/components/ui/button"; import { Drawer, DrawerContent, DrawerHandle, DrawerTitle } from "@/components/ui/drawer"; import { @@ -100,6 +114,8 @@ type ComposerFilesystemSettings = { updatedAt: string; }; +const LOCAL_FILESYSTEM_TRUST_KEY = "surfsense.local-filesystem-trust.v1"; + export const Thread: FC = () => { return ; }; @@ -371,6 +387,8 @@ const Composer: FC = () => { const [filesystemSettings, setFilesystemSettings] = useState( null ); + const [localTrustDialogOpen, setLocalTrustDialogOpen] = useState(false); + const [pendingLocalPath, setPendingLocalPath] = useState(null); 
const [clipboardInitialText, setClipboardInitialText] = useState(); const clipboardLoadedRef = useRef(false); useEffect(() => { @@ -388,7 +406,7 @@ const Composer: FC = () => { let mounted = true; electronAPI .getAgentFilesystemSettings() - .then((settings) => { + .then((settings: ComposerFilesystemSettings) => { if (!mounted) return; setFilesystemSettings(settings); }) @@ -405,22 +423,66 @@ const Composer: FC = () => { }; }, [electronAPI]); - const handleFilesystemModeChange = useCallback( - async (mode: "cloud" | "desktop_local_folder") => { + const hasLocalFilesystemTrust = useCallback(() => { + try { + return window.localStorage.getItem(LOCAL_FILESYSTEM_TRUST_KEY) === "true"; + } catch { + return false; + } + }, []); + + const applyLocalRootPath = useCallback( + async (path: string) => { if (!electronAPI?.setAgentFilesystemSettings) return; - const updated = await electronAPI.setAgentFilesystemSettings({ mode }); + const updated = await electronAPI.setAgentFilesystemSettings({ + mode: "desktop_local_folder", + localRootPath: path, + }); setFilesystemSettings(updated); }, [electronAPI] ); - const handlePickFilesystemRoot = useCallback(async () => { - if (!electronAPI?.pickAgentFilesystemRoot || !electronAPI?.setAgentFilesystemSettings) return; + const runSwitchToLocalMode = useCallback(async () => { + if (!electronAPI?.setAgentFilesystemSettings) return; + const updated = await electronAPI.setAgentFilesystemSettings({ mode: "desktop_local_folder" }); + setFilesystemSettings(updated); + }, [electronAPI]); + + const runPickLocalRoot = useCallback(async () => { + if (!electronAPI?.pickAgentFilesystemRoot) return; const picked = await electronAPI.pickAgentFilesystemRoot(); if (!picked) return; + await applyLocalRootPath(picked); + }, [applyLocalRootPath, electronAPI]); + + const handleFilesystemModeChange = useCallback( + async (mode: "cloud" | "desktop_local_folder") => { + if (!electronAPI?.setAgentFilesystemSettings) return; + if (mode === "desktop_local_folder") 
return void runSwitchToLocalMode(); + const updated = await electronAPI.setAgentFilesystemSettings({ mode }); + setFilesystemSettings(updated); + }, + [electronAPI, runSwitchToLocalMode] + ); + + const handlePickFilesystemRoot = useCallback(async () => { + if (hasLocalFilesystemTrust()) { + await runPickLocalRoot(); + return; + } + if (!electronAPI?.pickAgentFilesystemRoot) return; + const picked = await electronAPI.pickAgentFilesystemRoot(); + if (!picked) return; + setPendingLocalPath(picked); + setLocalTrustDialogOpen(true); + }, [electronAPI, hasLocalFilesystemTrust, runPickLocalRoot]); + + const handleClearFilesystemRoot = useCallback(async () => { + if (!electronAPI?.setAgentFilesystemSettings) return; const updated = await electronAPI.setAgentFilesystemSettings({ mode: "desktop_local_folder", - localRootPath: picked, + localRootPath: null, }); setFilesystemSettings(updated); }, [electronAPI]); @@ -720,44 +782,161 @@ const Composer: FC = () => { members={members ?? []} /> {electronAPI && filesystemSettings ? ( -
- - -
- +
+ + + + + + handleFilesystemModeChange("cloud")} + className="flex items-center justify-between" + > + + + Cloud + + {filesystemSettings.mode === "cloud" && } + + handleFilesystemModeChange("desktop_local_folder")} + className="flex items-center justify-between" + > + + + Local + + {filesystemSettings.mode === "desktop_local_folder" && ( + + )} + + + + + {filesystemSettings.mode === "desktop_local_folder" && ( + <> +
+
+ {filesystemSettings.localRootPath ? ( + <> +
+ + + {filesystemSettings.localRootPath.split("/").at(-1) || + filesystemSettings.localRootPath} + + +
+ + + ) : ( + + )} +
+ + )}
) : null} + { + setLocalTrustDialogOpen(open); + if (!open) { + setPendingLocalPath(null); + } + }} + > + + + Trust this workspace? + + Local mode can read and edit files inside the folders you select. Continue only if + you trust this workspace and its contents. + + {(pendingLocalPath || filesystemSettings?.localRootPath) && ( + + Folder path: {pendingLocalPath || filesystemSettings?.localRootPath} + + )} + + + Cancel + { + try { + window.localStorage.setItem(LOCAL_FILESYSTEM_TRUST_KEY, "true"); + } catch {} + setLocalTrustDialogOpen(false); + const path = pendingLocalPath; + setPendingLocalPath(null); + if (path) { + await applyLocalRootPath(path); + } else { + await runPickLocalRoot(); + } + }} + > + I trust this workspace + + + + {showDocumentPopover && (
Date: Thu, 23 Apr 2026 22:49:59 +0530 Subject: [PATCH 098/113] feat(settings): add DesktopShortcutsContent component for managing hotkeys and update user settings dialog --- .../components/DesktopContent.tsx | 81 +------------ .../components/DesktopShortcutsContent.tsx | 108 ++++++++++++++++++ surfsense_web/app/desktop/login/page.tsx | 2 +- .../settings/user-settings-dialog.tsx | 19 ++- 4 files changed, 127 insertions(+), 83 deletions(-) create mode 100644 surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopShortcutsContent.tsx diff --git a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopContent.tsx b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopContent.tsx index 63ca9f5df..3ec14076d 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopContent.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopContent.tsx @@ -1,9 +1,7 @@ "use client"; -import { BrainCog, Power, Rocket, Zap } from "lucide-react"; import { useEffect, useState } from "react"; import { toast } from "sonner"; -import { DEFAULT_SHORTCUTS, ShortcutRecorder } from "@/components/desktop/shortcut-recorder"; import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"; import { Label } from "@/components/ui/label"; import { @@ -24,9 +22,6 @@ export function DesktopContent() { const [loading, setLoading] = useState(true); const [enabled, setEnabled] = useState(true); - const [shortcuts, setShortcuts] = useState(DEFAULT_SHORTCUTS); - const [shortcutsLoaded, setShortcutsLoaded] = useState(false); - const [searchSpaces, setSearchSpaces] = useState([]); const [activeSpaceId, setActiveSpaceId] = useState(null); @@ -37,7 +32,6 @@ export function DesktopContent() { useEffect(() => { if (!api) { setLoading(false); - setShortcutsLoaded(true); return; } @@ -48,15 +42,13 @@ export function DesktopContent() { Promise.all([ 
api.getAutocompleteEnabled(), - api.getShortcuts?.() ?? Promise.resolve(null), api.getActiveSearchSpace?.() ?? Promise.resolve(null), searchSpacesApiService.getSearchSpaces(), hasAutoLaunchApi ? api.getAutoLaunch() : Promise.resolve(null), ]) - .then(([autoEnabled, config, spaceId, spaces, autoLaunch]) => { + .then(([autoEnabled, spaceId, spaces, autoLaunch]) => { if (!mounted) return; setEnabled(autoEnabled); - if (config) setShortcuts(config); setActiveSpaceId(spaceId); if (spaces) setSearchSpaces(spaces); if (autoLaunch) { @@ -65,12 +57,10 @@ export function DesktopContent() { setAutoLaunchSupported(autoLaunch.supported); } setLoading(false); - setShortcutsLoaded(true); }) .catch(() => { if (!mounted) return; setLoading(false); - setShortcutsLoaded(true); }); return () => { @@ -101,24 +91,6 @@ export function DesktopContent() { await api.setAutocompleteEnabled(checked); }; - const updateShortcut = ( - key: "generalAssist" | "quickAsk" | "autocomplete", - accelerator: string - ) => { - setShortcuts((prev) => { - const updated = { ...prev, [key]: accelerator }; - api.setShortcuts?.({ [key]: accelerator }).catch(() => { - toast.error("Failed to update shortcut"); - }); - return updated; - }); - toast.success("Shortcut updated"); - }; - - const resetShortcut = (key: "generalAssist" | "quickAsk" | "autocomplete") => { - updateShortcut(key, DEFAULT_SHORTCUTS[key]); - }; - const handleAutoLaunchToggle = async (checked: boolean) => { if (!autoLaunchSupported || !api.setAutoLaunch) { toast.error("Please update the desktop app to configure launch on startup"); @@ -196,7 +168,6 @@ export function DesktopContent() { - Launch on Startup @@ -245,56 +216,6 @@ export function DesktopContent() { - {/* Keyboard Shortcuts */} - - - Keyboard Shortcuts - - Customize the global keyboard shortcuts for desktop features. - - - - {shortcutsLoaded ? ( -
- updateShortcut("generalAssist", accel)} - onReset={() => resetShortcut("generalAssist")} - defaultValue={DEFAULT_SHORTCUTS.generalAssist} - label="General Assist" - description="Launch SurfSense instantly from any application" - icon={Rocket} - /> - updateShortcut("quickAsk", accel)} - onReset={() => resetShortcut("quickAsk")} - defaultValue={DEFAULT_SHORTCUTS.quickAsk} - label="Quick Assist" - description="Select text anywhere, then ask AI to explain, rewrite, or act on it" - icon={Zap} - /> - updateShortcut("autocomplete", accel)} - onReset={() => resetShortcut("autocomplete")} - defaultValue={DEFAULT_SHORTCUTS.autocomplete} - label="Extreme Assist" - description="AI drafts text using your screen context and knowledge base" - icon={BrainCog} - /> -

- Click a shortcut and press a new key combination to change it. -

-
- ) : ( -
- -
- )} -
-
- {/* Extreme Assist Toggle */} diff --git a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopShortcutsContent.tsx b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopShortcutsContent.tsx new file mode 100644 index 000000000..773665e63 --- /dev/null +++ b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopShortcutsContent.tsx @@ -0,0 +1,108 @@ +"use client"; + +import { BrainCog, Info, Rocket, Zap } from "lucide-react"; +import { useEffect, useState } from "react"; +import { toast } from "sonner"; +import { DEFAULT_SHORTCUTS, ShortcutRecorder } from "@/components/desktop/shortcut-recorder"; +import { Alert, AlertDescription } from "@/components/ui/alert"; +import { Spinner } from "@/components/ui/spinner"; +import { useElectronAPI } from "@/hooks/use-platform"; + +export function DesktopShortcutsContent() { + const api = useElectronAPI(); + const [shortcuts, setShortcuts] = useState(DEFAULT_SHORTCUTS); + const [shortcutsLoaded, setShortcutsLoaded] = useState(false); + + useEffect(() => { + if (!api) { + setShortcutsLoaded(true); + return; + } + + let mounted = true; + (api.getShortcuts?.() ?? Promise.resolve(null)) + .then((config) => { + if (!mounted) return; + if (config) setShortcuts(config); + setShortcutsLoaded(true); + }) + .catch(() => { + if (!mounted) return; + setShortcutsLoaded(true); + }); + + return () => { + mounted = false; + }; + }, [api]); + + if (!api) { + return ( +
+

Hotkeys are only available in the SurfSense desktop app.

+
+ ); + } + + const updateShortcut = ( + key: "generalAssist" | "quickAsk" | "autocomplete", + accelerator: string + ) => { + setShortcuts((prev) => { + const updated = { ...prev, [key]: accelerator }; + api.setShortcuts?.({ [key]: accelerator }).catch(() => { + toast.error("Failed to update shortcut"); + }); + return updated; + }); + toast.success("Shortcut updated"); + }; + + const resetShortcut = (key: "generalAssist" | "quickAsk" | "autocomplete") => { + updateShortcut(key, DEFAULT_SHORTCUTS[key]); + }; + + return ( + shortcutsLoaded ? ( +
+ + + +

Click a shortcut and press a new key combination to change it.

+
+
+ updateShortcut("generalAssist", accel)} + onReset={() => resetShortcut("generalAssist")} + defaultValue={DEFAULT_SHORTCUTS.generalAssist} + label="General Assist" + description="Launch SurfSense instantly from any application" + icon={Rocket} + /> + updateShortcut("quickAsk", accel)} + onReset={() => resetShortcut("quickAsk")} + defaultValue={DEFAULT_SHORTCUTS.quickAsk} + label="Quick Assist" + description="Select text anywhere, then ask AI to explain, rewrite, or act on it" + icon={Zap} + /> + updateShortcut("autocomplete", accel)} + onReset={() => resetShortcut("autocomplete")} + defaultValue={DEFAULT_SHORTCUTS.autocomplete} + label="Extreme Assist" + description="AI drafts text using your screen context and knowledge base" + icon={BrainCog} + /> +
+ ) : ( +
+ +
+ ) + ); +} diff --git a/surfsense_web/app/desktop/login/page.tsx b/surfsense_web/app/desktop/login/page.tsx index 8f68d20c1..1b43f89c0 100644 --- a/surfsense_web/app/desktop/login/page.tsx +++ b/surfsense_web/app/desktop/login/page.tsx @@ -152,7 +152,7 @@ export default function DesktopLoginPage() { {shortcutsLoaded ? (

- Keyboard Shortcuts + Hotkeys

+ import("@/app/dashboard/[search_space_id]/user-settings/components/DesktopShortcutsContent").then( + (m) => ({ default: m.DesktopShortcutsContent }) + ), + { ssr: false } +); const MemoryContent = dynamic( () => import("@/app/dashboard/[search_space_id]/user-settings/components/MemoryContent").then( @@ -93,7 +100,14 @@ export function UserSettingsDialog() { icon: , }, ...(isDesktop - ? [{ value: "desktop", label: "Desktop", icon: }] + ? [ + { value: "desktop", label: "Desktop", icon: }, + { + value: "desktop-shortcuts", + label: "Hotkeys", + icon: , + }, + ] : []), ], [t, isDesktop] @@ -116,6 +130,7 @@ export function UserSettingsDialog() { {state.initialTab === "memory" && } {state.initialTab === "purchases" && } {state.initialTab === "desktop" && } + {state.initialTab === "desktop-shortcuts" && }
); From 46056ee514cdd29e21dfac0b69eeaf63ce266b9a Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Thu, 23 Apr 2026 23:52:49 +0530 Subject: [PATCH 099/113] fix(settings): update user settings dialog labels and enhance DesktopShortcutsContent component for better hotkey management --- .../components/DesktopContent.tsx | 2 +- .../components/DesktopShortcutsContent.tsx | 194 ++++++++++++++---- .../settings/user-settings-dialog.tsx | 6 +- 3 files changed, 161 insertions(+), 41 deletions(-) diff --git a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopContent.tsx b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopContent.tsx index 3ec14076d..9861f5536 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopContent.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopContent.tsx @@ -72,7 +72,7 @@ export function DesktopContent() { return (

- Desktop settings are only available in the SurfSense desktop app. + App preferences are only available in the SurfSense desktop app.

); diff --git a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopShortcutsContent.tsx b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopShortcutsContent.tsx index 773665e63..f4981b8f0 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopShortcutsContent.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopShortcutsContent.tsx @@ -1,17 +1,152 @@ "use client"; -import { BrainCog, Info, Rocket, Zap } from "lucide-react"; -import { useEffect, useState } from "react"; +import { ArrowBigUp, BrainCog, Command, Option, Rocket, RotateCcw, Zap } from "lucide-react"; +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { toast } from "sonner"; -import { DEFAULT_SHORTCUTS, ShortcutRecorder } from "@/components/desktop/shortcut-recorder"; -import { Alert, AlertDescription } from "@/components/ui/alert"; +import { DEFAULT_SHORTCUTS, keyEventToAccelerator } from "@/components/desktop/shortcut-recorder"; +import { Button } from "@/components/ui/button"; import { Spinner } from "@/components/ui/spinner"; import { useElectronAPI } from "@/hooks/use-platform"; +type ShortcutKey = "generalAssist" | "quickAsk" | "autocomplete"; +type ShortcutMap = typeof DEFAULT_SHORTCUTS; + +const HOTKEY_ROWS: Array<{ key: ShortcutKey; label: string; icon: React.ElementType }> = [ + { key: "generalAssist", label: "General Assist", icon: Rocket }, + { key: "quickAsk", label: "Quick Assist", icon: Zap }, + { key: "autocomplete", label: "Extreme Assist", icon: BrainCog }, +]; + +type ShortcutToken = + | { kind: "text"; value: string } + | { kind: "icon"; value: "command" | "option" | "shift" }; + +function acceleratorToTokens(accel: string, isMac: boolean): ShortcutToken[] { + if (!accel) return []; + return accel.split("+").map((part) => { + if (part === "CommandOrControl") { + return isMac ? 
{ kind: "icon", value: "command" as const } : { kind: "text", value: "Ctrl" }; + } + if (part === "Alt") { + return isMac ? { kind: "icon", value: "option" as const } : { kind: "text", value: "Alt" }; + } + if (part === "Shift") { + return isMac ? { kind: "icon", value: "shift" as const } : { kind: "text", value: "Shift" }; + } + if (part === "Space") return { kind: "text", value: "Space" }; + return { kind: "text", value: part.length === 1 ? part.toUpperCase() : part }; + }); +} + +function HotkeyRow({ + label, + value, + defaultValue, + icon: Icon, + isMac, + onChange, + onReset, +}: { + label: string; + value: string; + defaultValue: string; + icon: React.ElementType; + isMac: boolean; + onChange: (accelerator: string) => void; + onReset: () => void; +}) { + const [recording, setRecording] = useState(false); + const inputRef = useRef(null); + const isDefault = value === defaultValue; + const displayTokens = useMemo(() => acceleratorToTokens(value, isMac), [value, isMac]); + + const handleKeyDown = useCallback( + (e: React.KeyboardEvent) => { + if (!recording) return; + e.preventDefault(); + e.stopPropagation(); + + if (e.key === "Escape") { + setRecording(false); + return; + } + + const accel = keyEventToAccelerator(e); + if (accel) { + onChange(accel); + setRecording(false); + } + }, + [onChange, recording] + ); + + return ( +
+
+
+ +
+

{label}

+
+
+ {!isDefault && ( + + )} + +
+
+ ); +} + export function DesktopShortcutsContent() { const api = useElectronAPI(); const [shortcuts, setShortcuts] = useState(DEFAULT_SHORTCUTS); const [shortcutsLoaded, setShortcutsLoaded] = useState(false); + const isMac = api?.versions?.platform === "darwin"; useEffect(() => { if (!api) { @@ -21,7 +156,7 @@ export function DesktopShortcutsContent() { let mounted = true; (api.getShortcuts?.() ?? Promise.resolve(null)) - .then((config) => { + .then((config: ShortcutMap | null) => { if (!mounted) return; if (config) setShortcuts(config); setShortcutsLoaded(true); @@ -58,46 +193,27 @@ export function DesktopShortcutsContent() { toast.success("Shortcut updated"); }; - const resetShortcut = (key: "generalAssist" | "quickAsk" | "autocomplete") => { + const resetShortcut = (key: ShortcutKey) => { updateShortcut(key, DEFAULT_SHORTCUTS[key]); }; return ( shortcutsLoaded ? (
- - - -

Click a shortcut and press a new key combination to change it.

-
-
- updateShortcut("generalAssist", accel)} - onReset={() => resetShortcut("generalAssist")} - defaultValue={DEFAULT_SHORTCUTS.generalAssist} - label="General Assist" - description="Launch SurfSense instantly from any application" - icon={Rocket} - /> - updateShortcut("quickAsk", accel)} - onReset={() => resetShortcut("quickAsk")} - defaultValue={DEFAULT_SHORTCUTS.quickAsk} - label="Quick Assist" - description="Select text anywhere, then ask AI to explain, rewrite, or act on it" - icon={Zap} - /> - updateShortcut("autocomplete", accel)} - onReset={() => resetShortcut("autocomplete")} - defaultValue={DEFAULT_SHORTCUTS.autocomplete} - label="Extreme Assist" - description="AI drafts text using your screen context and knowledge base" - icon={BrainCog} - /> +
+ {HOTKEY_ROWS.map((row) => ( + updateShortcut(row.key, accel)} + onReset={() => resetShortcut(row.key)} + /> + ))} +
) : (
diff --git a/surfsense_web/components/settings/user-settings-dialog.tsx b/surfsense_web/components/settings/user-settings-dialog.tsx index a406f6352..cc36392ae 100644 --- a/surfsense_web/components/settings/user-settings-dialog.tsx +++ b/surfsense_web/components/settings/user-settings-dialog.tsx @@ -101,7 +101,11 @@ export function UserSettingsDialog() { }, ...(isDesktop ? [ - { value: "desktop", label: "Desktop", icon: }, + { + value: "desktop", + label: "App Preferences", + icon: , + }, { value: "desktop-shortcuts", label: "Hotkeys", From daac6b52691844edb530a47548606aeb2238f64e Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Fri, 24 Apr 2026 00:06:38 +0530 Subject: [PATCH 100/113] feat(login): implement customizable hotkey management in the login page with enhanced UI components --- surfsense_web/app/desktop/login/page.tsx | 241 +++++++++++++++++------ 1 file changed, 180 insertions(+), 61 deletions(-) diff --git a/surfsense_web/app/desktop/login/page.tsx b/surfsense_web/app/desktop/login/page.tsx index 1b43f89c0..6d5e2abd4 100644 --- a/surfsense_web/app/desktop/login/page.tsx +++ b/surfsense_web/app/desktop/login/page.tsx @@ -2,13 +2,13 @@ import { IconBrandGoogleFilled } from "@tabler/icons-react"; import { useAtom } from "jotai"; -import { BrainCog, Eye, EyeOff, Rocket, Zap } from "lucide-react"; +import { ArrowBigUp, BrainCog, Command, Eye, EyeOff, Option, Rocket, RotateCcw, Zap } from "lucide-react"; import Image from "next/image"; import { useRouter } from "next/navigation"; -import { useCallback, useEffect, useState } from "react"; +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { toast } from "sonner"; import { loginMutationAtom } from "@/atoms/auth/auth-mutation.atoms"; -import { DEFAULT_SHORTCUTS, ShortcutRecorder } from "@/components/desktop/shortcut-recorder"; +import { DEFAULT_SHORTCUTS, keyEventToAccelerator } from "@/components/desktop/shortcut-recorder"; 
import { Button } from "@/components/ui/button"; import { Input } from "@/components/ui/input"; import { Label } from "@/components/ui/label"; @@ -20,6 +20,157 @@ import { setBearerToken } from "@/lib/auth-utils"; import { AUTH_TYPE, BACKEND_URL } from "@/lib/env-config"; const isGoogleAuth = AUTH_TYPE === "GOOGLE"; +type ShortcutKey = "generalAssist" | "quickAsk" | "autocomplete"; +type ShortcutMap = typeof DEFAULT_SHORTCUTS; + +type ShortcutToken = + | { kind: "text"; value: string } + | { kind: "icon"; value: "command" | "option" | "shift" }; + +const HOTKEY_ROWS: Array<{ key: ShortcutKey; label: string; description: string; icon: React.ElementType }> = [ + { + key: "generalAssist", + label: "General Assist", + description: "Launch SurfSense instantly from any application", + icon: Rocket, + }, + { + key: "quickAsk", + label: "Quick Assist", + description: "Select text anywhere, then ask AI to explain, rewrite, or act on it", + icon: Zap, + }, + { + key: "autocomplete", + label: "Extreme Assist", + description: "AI drafts text using your screen context and knowledge base", + icon: BrainCog, + }, +]; + +function acceleratorToTokens(accel: string, isMac: boolean): ShortcutToken[] { + if (!accel) return []; + return accel.split("+").map((part) => { + if (part === "CommandOrControl") { + return isMac ? { kind: "icon", value: "command" as const } : { kind: "text", value: "Ctrl" }; + } + if (part === "Alt") { + return isMac ? { kind: "icon", value: "option" as const } : { kind: "text", value: "Alt" }; + } + if (part === "Shift") { + return isMac ? { kind: "icon", value: "shift" as const } : { kind: "text", value: "Shift" }; + } + if (part === "Space") return { kind: "text", value: "Space" }; + return { kind: "text", value: part.length === 1 ? 
part.toUpperCase() : part }; + }); +} + +function HotkeyRow({ + label, + description, + value, + defaultValue, + icon: Icon, + isMac, + onChange, + onReset, +}: { + label: string; + description: string; + value: string; + defaultValue: string; + icon: React.ElementType; + isMac: boolean; + onChange: (accelerator: string) => void; + onReset: () => void; +}) { + const [recording, setRecording] = useState(false); + const inputRef = useRef(null); + const isDefault = value === defaultValue; + const displayTokens = useMemo(() => acceleratorToTokens(value, isMac), [value, isMac]); + + const handleKeyDown = useCallback( + (e: React.KeyboardEvent) => { + if (!recording) return; + e.preventDefault(); + e.stopPropagation(); + + if (e.key === "Escape") { + setRecording(false); + return; + } + + const accel = keyEventToAccelerator(e); + if (accel) { + onChange(accel); + setRecording(false); + } + }, + [onChange, recording] + ); + + return ( +
+
+
+ +
+
+

{label}

+

{description}

+
+
+
+ {!isDefault && ( + + )} + +
+
+ ); +} export default function DesktopLoginPage() { const router = useRouter(); @@ -33,6 +184,7 @@ export default function DesktopLoginPage() { const [shortcuts, setShortcuts] = useState(DEFAULT_SHORTCUTS); const [shortcutsLoaded, setShortcutsLoaded] = useState(false); + const isMac = api?.versions?.platform === "darwin"; useEffect(() => { if (!api?.getShortcuts) { @@ -41,7 +193,7 @@ export default function DesktopLoginPage() { } api .getShortcuts() - .then((config) => { + .then((config: ShortcutMap | null) => { if (config) setShortcuts(config); setShortcutsLoaded(true); }) @@ -117,18 +269,8 @@ export default function DesktopLoginPage() { }; return ( -
- {/* Subtle radial glow */} -
-
-
- -
+
+
{/* Header */}

Welcome to SurfSense Desktop

- Configure shortcuts, then sign in to get started. + Configure shortcuts, then sign in to get started

@@ -151,41 +293,24 @@ export default function DesktopLoginPage() { {/* ---- Shortcuts ---- */} {shortcutsLoaded ? (
-

+ {/*

Hotkeys -

-
- updateShortcut("generalAssist", accel)} - onReset={() => resetShortcut("generalAssist")} - defaultValue={DEFAULT_SHORTCUTS.generalAssist} - label="General Assist" - description="Launch SurfSense instantly from any application" - icon={Rocket} - /> - updateShortcut("quickAsk", accel)} - onReset={() => resetShortcut("quickAsk")} - defaultValue={DEFAULT_SHORTCUTS.quickAsk} - label="Quick Assist" - description="Select text anywhere, then ask AI to explain, rewrite, or act on it" - icon={Zap} - /> - updateShortcut("autocomplete", accel)} - onReset={() => resetShortcut("autocomplete")} - defaultValue={DEFAULT_SHORTCUTS.autocomplete} - label="Extreme Assist" - description="AI drafts text using your screen context and knowledge base" - icon={BrainCog} - /> +

*/} +
+ {HOTKEY_ROWS.map((row) => ( + updateShortcut(row.key, accel)} + onReset={() => resetShortcut(row.key)} + /> + ))}
-

- Click a shortcut and press a new key combination to change it. -

) : (
@@ -197,9 +322,9 @@ export default function DesktopLoginPage() { {/* ---- Auth ---- */}
-

+ {/*

Sign In -

+

*/} {isGoogleAuth ? (
- )} From 6721919398241bbc2696b19ea526915d95807f50 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Fri, 24 Apr 2026 01:44:23 +0530 Subject: [PATCH 101/113] feat(filesystem): add multi-root local folder support in backend --- .../agents/new_chat/filesystem_backends.py | 21 +- .../agents/new_chat/filesystem_selection.py | 2 +- .../agents/new_chat/middleware/filesystem.py | 45 ++- .../multi_root_local_folder_backend.py | 328 ++++++++++++++++++ .../app/routes/new_chat_routes.py | 24 +- surfsense_backend/app/schemas/new_chat.py | 6 +- .../middleware/test_filesystem_backends.py | 26 +- 7 files changed, 422 insertions(+), 30 deletions(-) create mode 100644 surfsense_backend/app/agents/new_chat/middleware/multi_root_local_folder_backend.py diff --git a/surfsense_backend/app/agents/new_chat/filesystem_backends.py b/surfsense_backend/app/agents/new_chat/filesystem_backends.py index 8af7e8558..0c32ef845 100644 --- a/surfsense_backend/app/agents/new_chat/filesystem_backends.py +++ b/surfsense_backend/app/agents/new_chat/filesystem_backends.py @@ -9,26 +9,27 @@ from deepagents.backends.state import StateBackend from langgraph.prebuilt.tool_node import ToolRuntime from app.agents.new_chat.filesystem_selection import FilesystemMode, FilesystemSelection -from app.agents.new_chat.middleware.local_folder_backend import LocalFolderBackend +from app.agents.new_chat.middleware.multi_root_local_folder_backend import ( + MultiRootLocalFolderBackend, +) @lru_cache(maxsize=64) -def _cached_local_backend(root_path: str) -> LocalFolderBackend: - return LocalFolderBackend(root_path) +def _cached_multi_root_backend( + root_paths: tuple[str, ...], +) -> MultiRootLocalFolderBackend: + return MultiRootLocalFolderBackend(root_paths) def build_backend_resolver( selection: FilesystemSelection, -) -> Callable[[ToolRuntime], StateBackend | LocalFolderBackend]: +) -> Callable[[ToolRuntime], StateBackend | MultiRootLocalFolderBackend]: """Create 
deepagents backend resolver for the selected filesystem mode.""" - if ( - selection.mode == FilesystemMode.DESKTOP_LOCAL_FOLDER - and selection.local_root_path is not None - ): + if selection.mode == FilesystemMode.DESKTOP_LOCAL_FOLDER and selection.local_root_paths: - def _resolve_local(_runtime: ToolRuntime) -> LocalFolderBackend: - return _cached_local_backend(selection.local_root_path or "") + def _resolve_local(_runtime: ToolRuntime) -> MultiRootLocalFolderBackend: + return _cached_multi_root_backend(selection.local_root_paths) return _resolve_local diff --git a/surfsense_backend/app/agents/new_chat/filesystem_selection.py b/surfsense_backend/app/agents/new_chat/filesystem_selection.py index 3094a0b29..4b8f42847 100644 --- a/surfsense_backend/app/agents/new_chat/filesystem_selection.py +++ b/surfsense_backend/app/agents/new_chat/filesystem_selection.py @@ -26,7 +26,7 @@ class FilesystemSelection: mode: FilesystemMode = FilesystemMode.CLOUD client_platform: ClientPlatform = ClientPlatform.WEB - local_root_path: str | None = None + local_root_paths: tuple[str, ...] 
= () @property def is_local_mode(self) -> bool: diff --git a/surfsense_backend/app/agents/new_chat/middleware/filesystem.py b/surfsense_backend/app/agents/new_chat/middleware/filesystem.py index 0fa2085fc..6c30b20ef 100644 --- a/surfsense_backend/app/agents/new_chat/middleware/filesystem.py +++ b/surfsense_backend/app/agents/new_chat/middleware/filesystem.py @@ -26,13 +26,16 @@ from langchain_core.tools import BaseTool, StructuredTool from langgraph.types import Command from sqlalchemy import delete, select +from app.agents.new_chat.filesystem_selection import FilesystemMode +from app.agents.new_chat.middleware.multi_root_local_folder_backend import ( + MultiRootLocalFolderBackend, +) from app.agents.new_chat.sandbox import ( _evict_sandbox_cache, delete_sandbox, get_or_create_sandbox, is_sandbox_enabled, ) -from app.agents.new_chat.filesystem_selection import FilesystemMode from app.db import Chunk, Document, DocumentType, Folder, shielded_async_session from app.indexing_pipeline.document_chunker import chunk_text from app.utils.document_converters import ( @@ -222,6 +225,8 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): "\n\n## Local Folder Mode" "\n\nThis chat is running in desktop local-folder mode." " Keep all file operations local. Do not use save_document." + " Always use mount-prefixed absolute paths like //file.ext." + " If you are unsure which mounts are available, call ls('/') first." 
) super().__init__( @@ -771,12 +776,30 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): """Only cloud mode persists file content to Document/Chunk tables.""" return self._filesystem_mode == FilesystemMode.CLOUD - @staticmethod - def _get_contract_suggested_path(runtime: ToolRuntime[None, FilesystemState]) -> str: + def _default_mount_prefix(self, runtime: ToolRuntime[None, FilesystemState]) -> str: + backend = self._get_backend(runtime) + if isinstance(backend, MultiRootLocalFolderBackend): + return f"/{backend.default_mount()}" + return "" + + def _get_contract_suggested_path( + self, runtime: ToolRuntime[None, FilesystemState] + ) -> str: contract = runtime.state.get("file_operation_contract") or {} suggested = contract.get("suggested_path") if isinstance(suggested, str) and suggested.strip(): - return suggested.strip() + cleaned = suggested.strip() + if self._filesystem_mode == FilesystemMode.DESKTOP_LOCAL_FOLDER: + mount_prefix = self._default_mount_prefix(runtime) + if mount_prefix and cleaned.startswith("/") and not cleaned.startswith( + f"{mount_prefix}/" + ): + return f"{mount_prefix}{cleaned}" + return cleaned + if self._filesystem_mode == FilesystemMode.DESKTOP_LOCAL_FOLDER: + mount_prefix = self._default_mount_prefix(runtime) + if mount_prefix: + return f"{mount_prefix}/notes.md" return "/notes.md" def _resolve_write_target_path( @@ -787,6 +810,20 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): candidate = file_path.strip() if not candidate: return self._get_contract_suggested_path(runtime) + if self._filesystem_mode == FilesystemMode.DESKTOP_LOCAL_FOLDER: + backend = self._get_backend(runtime) + mount_prefix = self._default_mount_prefix(runtime) + if mount_prefix and not candidate.startswith("/"): + return f"{mount_prefix}/{candidate.lstrip('/')}" + if ( + mount_prefix + and isinstance(backend, MultiRootLocalFolderBackend) + and candidate.startswith("/") + ): + mount_names = backend.list_mounts() + first_segment = 
candidate.lstrip("/").split("/", 1)[0] + if first_segment not in mount_names: + return f"{mount_prefix}{candidate}" if not candidate.startswith("/"): return f"/{candidate.lstrip('/')}" return candidate diff --git a/surfsense_backend/app/agents/new_chat/middleware/multi_root_local_folder_backend.py b/surfsense_backend/app/agents/new_chat/middleware/multi_root_local_folder_backend.py new file mode 100644 index 000000000..2eb4e78dc --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/middleware/multi_root_local_folder_backend.py @@ -0,0 +1,328 @@ +"""Aggregate multiple LocalFolderBackend roots behind mount-prefixed virtual paths.""" + +from __future__ import annotations + +import asyncio +from pathlib import Path +from typing import Any + +from deepagents.backends.protocol import ( + EditResult, + FileDownloadResponse, + FileInfo, + FileUploadResponse, + GrepMatch, + WriteResult, +) + +from app.agents.new_chat.middleware.local_folder_backend import LocalFolderBackend + +_INVALID_PATH = "invalid_path" +_FILE_NOT_FOUND = "file_not_found" +_IS_DIRECTORY = "is_directory" + + +class MultiRootLocalFolderBackend: + """Route filesystem operations to one of several mounted local roots. + + Virtual paths are namespaced as: + - `//...` + where `` is derived from each selected root folder name. 
+ """ + + def __init__(self, root_paths: tuple[str, ...]) -> None: + if not root_paths: + msg = "At least one local root path is required" + raise ValueError(msg) + self._mount_to_backend: dict[str, LocalFolderBackend] = {} + for raw_root in root_paths: + normalized_root = str(Path(raw_root).expanduser().resolve()) + base_mount = Path(normalized_root).name or "root" + mount = base_mount + suffix = 2 + while mount in self._mount_to_backend: + mount = f"{base_mount}-{suffix}" + suffix += 1 + self._mount_to_backend[mount] = LocalFolderBackend(normalized_root) + self._mount_order = tuple(self._mount_to_backend.keys()) + + def list_mounts(self) -> tuple[str, ...]: + return self._mount_order + + def default_mount(self) -> str: + return self._mount_order[0] + + def _mount_error(self) -> str: + mounts = ", ".join(f"/{mount}" for mount in self._mount_order) + return ( + "Path must start with one of the selected folders: " + f"{mounts}. Example: /{self._mount_order[0]}/file.txt" + ) + + def _split_mount_path(self, virtual_path: str) -> tuple[str, str]: + if not virtual_path.startswith("/"): + msg = f"Invalid path (must be absolute): {virtual_path}" + raise ValueError(msg) + rel = virtual_path.lstrip("/") + if not rel: + raise ValueError(self._mount_error()) + mount, _, remainder = rel.partition("/") + backend = self._mount_to_backend.get(mount) + if backend is None: + raise ValueError(self._mount_error()) + local_path = f"/{remainder}" if remainder else "/" + return mount, local_path + + @staticmethod + def _prefix_mount_path(mount: str, local_path: str) -> str: + if local_path == "/": + return f"/{mount}" + return f"/{mount}{local_path}" + + @staticmethod + def _get_value(item: Any, key: str) -> Any: + if isinstance(item, dict): + return item.get(key) + return getattr(item, key, None) + + @classmethod + def _get_str(cls, item: Any, key: str) -> str: + value = cls._get_value(item, key) + return value if isinstance(value, str) else "" + + @classmethod + def _get_int(cls, 
item: Any, key: str) -> int: + value = cls._get_value(item, key) + return int(value) if isinstance(value, int | float) else 0 + + @classmethod + def _get_bool(cls, item: Any, key: str) -> bool: + value = cls._get_value(item, key) + return bool(value) + + def _list_mount_roots(self) -> list[FileInfo]: + return [ + FileInfo(path=f"/{mount}", is_dir=True, size=0, modified_at="0") + for mount in self._mount_order + ] + + def _transform_infos(self, mount: str, infos: list[FileInfo]) -> list[FileInfo]: + transformed: list[FileInfo] = [] + for info in infos: + transformed.append( + FileInfo( + path=self._prefix_mount_path(mount, self._get_str(info, "path")), + is_dir=self._get_bool(info, "is_dir"), + size=self._get_int(info, "size"), + modified_at=self._get_str(info, "modified_at"), + ) + ) + return transformed + + def ls_info(self, path: str) -> list[FileInfo]: + if path == "/": + return self._list_mount_roots() + try: + mount, local_path = self._split_mount_path(path) + except ValueError: + return [] + return self._transform_infos(mount, self._mount_to_backend[mount].ls_info(local_path)) + + async def als_info(self, path: str) -> list[FileInfo]: + return await asyncio.to_thread(self.ls_info, path) + + def read(self, file_path: str, offset: int = 0, limit: int = 2000) -> str: + try: + mount, local_path = self._split_mount_path(file_path) + except ValueError as exc: + return f"Error: {exc}" + return self._mount_to_backend[mount].read(local_path, offset, limit) + + async def aread(self, file_path: str, offset: int = 0, limit: int = 2000) -> str: + return await asyncio.to_thread(self.read, file_path, offset, limit) + + def read_raw(self, file_path: str) -> str: + try: + mount, local_path = self._split_mount_path(file_path) + except ValueError as exc: + return f"Error: {exc}" + return self._mount_to_backend[mount].read_raw(local_path) + + async def aread_raw(self, file_path: str) -> str: + return await asyncio.to_thread(self.read_raw, file_path) + + def write(self, 
file_path: str, content: str) -> WriteResult: + try: + mount, local_path = self._split_mount_path(file_path) + except ValueError as exc: + return WriteResult(error=f"Error: {exc}") + result = self._mount_to_backend[mount].write(local_path, content) + if result.path: + result.path = self._prefix_mount_path(mount, result.path) + return result + + async def awrite(self, file_path: str, content: str) -> WriteResult: + return await asyncio.to_thread(self.write, file_path, content) + + def edit( + self, + file_path: str, + old_string: str, + new_string: str, + replace_all: bool = False, + ) -> EditResult: + try: + mount, local_path = self._split_mount_path(file_path) + except ValueError as exc: + return EditResult(error=f"Error: {exc}") + result = self._mount_to_backend[mount].edit( + local_path, old_string, new_string, replace_all + ) + if result.path: + result.path = self._prefix_mount_path(mount, result.path) + return result + + async def aedit( + self, + file_path: str, + old_string: str, + new_string: str, + replace_all: bool = False, + ) -> EditResult: + return await asyncio.to_thread( + self.edit, file_path, old_string, new_string, replace_all + ) + + def glob_info(self, pattern: str, path: str = "/") -> list[FileInfo]: + if path == "/": + prefixed_results: list[FileInfo] = [] + if pattern.startswith("/"): + mount, _, remainder = pattern.lstrip("/").partition("/") + backend = self._mount_to_backend.get(mount) + if not backend: + return [] + local_pattern = f"/{remainder}" if remainder else "/" + return self._transform_infos( + mount, backend.glob_info(local_pattern, path="/") + ) + for mount, backend in self._mount_to_backend.items(): + prefixed_results.extend( + self._transform_infos(mount, backend.glob_info(pattern, path="/")) + ) + return prefixed_results + + try: + mount, local_path = self._split_mount_path(path) + except ValueError: + return [] + return self._transform_infos( + mount, self._mount_to_backend[mount].glob_info(pattern, path=local_path) + ) + + 
async def aglob_info(self, pattern: str, path: str = "/") -> list[FileInfo]: + return await asyncio.to_thread(self.glob_info, pattern, path) + + def grep_raw( + self, pattern: str, path: str | None = None, glob: str | None = None + ) -> list[GrepMatch] | str: + if not pattern: + return "Error: pattern cannot be empty" + if path is None or path == "/": + all_matches: list[GrepMatch] = [] + for mount, backend in self._mount_to_backend.items(): + result = backend.grep_raw(pattern, path="/", glob=glob) + if isinstance(result, str): + return result + all_matches.extend( + [ + GrepMatch( + path=self._prefix_mount_path(mount, self._get_str(match, "path")), + line=self._get_int(match, "line"), + text=self._get_str(match, "text"), + ) + for match in result + ] + ) + return all_matches + try: + mount, local_path = self._split_mount_path(path) + except ValueError as exc: + return f"Error: {exc}" + + result = self._mount_to_backend[mount].grep_raw( + pattern, path=local_path, glob=glob + ) + if isinstance(result, str): + return result + return [ + GrepMatch( + path=self._prefix_mount_path(mount, self._get_str(match, "path")), + line=self._get_int(match, "line"), + text=self._get_str(match, "text"), + ) + for match in result + ] + + async def agrep_raw( + self, pattern: str, path: str | None = None, glob: str | None = None + ) -> list[GrepMatch] | str: + return await asyncio.to_thread(self.grep_raw, pattern, path, glob) + + def upload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]: + grouped: dict[str, list[tuple[str, bytes]]] = {} + invalid: list[FileUploadResponse] = [] + for virtual_path, content in files: + try: + mount, local_path = self._split_mount_path(virtual_path) + except ValueError: + invalid.append(FileUploadResponse(path=virtual_path, error=_INVALID_PATH)) + continue + grouped.setdefault(mount, []).append((local_path, content)) + + responses = list(invalid) + for mount, mount_files in grouped.items(): + result = 
self._mount_to_backend[mount].upload_files(mount_files) + responses.extend( + [ + FileUploadResponse( + path=self._prefix_mount_path(mount, self._get_str(item, "path")), + error=self._get_str(item, "error") or None, + ) + for item in result + ] + ) + return responses + + async def aupload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadResponse]: + return await asyncio.to_thread(self.upload_files, files) + + def download_files(self, paths: list[str]) -> list[FileDownloadResponse]: + grouped: dict[str, list[str]] = {} + invalid: list[FileDownloadResponse] = [] + for virtual_path in paths: + try: + mount, local_path = self._split_mount_path(virtual_path) + except ValueError: + invalid.append( + FileDownloadResponse(path=virtual_path, content=None, error=_INVALID_PATH) + ) + continue + grouped.setdefault(mount, []).append(local_path) + + responses = list(invalid) + for mount, mount_paths in grouped.items(): + result = self._mount_to_backend[mount].download_files(mount_paths) + responses.extend( + [ + FileDownloadResponse( + path=self._prefix_mount_path(mount, self._get_str(item, "path")), + content=self._get_value(item, "content"), + error=self._get_str(item, "error") or None, + ) + for item in result + ] + ) + return responses + + async def adownload_files(self, paths: list[str]) -> list[FileDownloadResponse]: + return await asyncio.to_thread(self.download_files, paths) diff --git a/surfsense_backend/app/routes/new_chat_routes.py b/surfsense_backend/app/routes/new_chat_routes.py index 548bd1402..e1a26ba04 100644 --- a/surfsense_backend/app/routes/new_chat_routes.py +++ b/surfsense_backend/app/routes/new_chat_routes.py @@ -73,7 +73,7 @@ def _resolve_filesystem_selection( *, mode: str, client_platform: str, - local_root: str | None, + local_roots: list[str] | None, ) -> FilesystemSelection: """Validate and normalize filesystem mode settings from request payload.""" try: @@ -96,21 +96,29 @@ def _resolve_filesystem_selection( status_code=400, 
detail="desktop_local_folder mode is only available on desktop runtime.", ) - if not local_root or not local_root.strip(): + normalized_roots: list[str] = [] + for root in local_roots or []: + trimmed = root.strip() + if trimmed and trimmed not in normalized_roots: + normalized_roots.append(trimmed) + if not normalized_roots: raise HTTPException( status_code=400, - detail="local_filesystem_root is required for desktop_local_folder mode.", + detail=( + "local_filesystem_roots must include at least one root for " + "desktop_local_folder mode." + ), ) return FilesystemSelection( mode=resolved_mode, client_platform=resolved_platform, - local_root_path=local_root.strip(), + local_root_paths=tuple(normalized_roots), ) return FilesystemSelection( mode=FilesystemMode.CLOUD, client_platform=resolved_platform, - local_root_path=None, + local_root_paths=(), ) @@ -1188,7 +1196,7 @@ async def handle_new_chat( filesystem_selection = _resolve_filesystem_selection( mode=request.filesystem_mode, client_platform=request.client_platform, - local_root=request.local_filesystem_root, + local_roots=request.local_filesystem_roots, ) # Get search space to check LLM config preferences @@ -1310,7 +1318,7 @@ async def regenerate_response( filesystem_selection = _resolve_filesystem_selection( mode=request.filesystem_mode, client_platform=request.client_platform, - local_root=request.local_filesystem_root, + local_roots=request.local_filesystem_roots, ) # Get the checkpointer and state history @@ -1569,7 +1577,7 @@ async def resume_chat( filesystem_selection = _resolve_filesystem_selection( mode=request.filesystem_mode, client_platform=request.client_platform, - local_root=request.local_filesystem_root, + local_roots=request.local_filesystem_roots, ) search_space_result = await session.execute( diff --git a/surfsense_backend/app/schemas/new_chat.py b/surfsense_backend/app/schemas/new_chat.py index 593127c7e..38cdf0b28 100644 --- a/surfsense_backend/app/schemas/new_chat.py +++ 
b/surfsense_backend/app/schemas/new_chat.py @@ -186,7 +186,7 @@ class NewChatRequest(BaseModel): ) filesystem_mode: Literal["cloud", "desktop_local_folder"] = "cloud" client_platform: Literal["web", "desktop"] = "web" - local_filesystem_root: str | None = None + local_filesystem_roots: list[str] | None = None class RegenerateRequest(BaseModel): @@ -209,7 +209,7 @@ class RegenerateRequest(BaseModel): disabled_tools: list[str] | None = None filesystem_mode: Literal["cloud", "desktop_local_folder"] = "cloud" client_platform: Literal["web", "desktop"] = "web" - local_filesystem_root: str | None = None + local_filesystem_roots: list[str] | None = None # ============================================================================= @@ -235,7 +235,7 @@ class ResumeRequest(BaseModel): decisions: list[ResumeDecision] filesystem_mode: Literal["cloud", "desktop_local_folder"] = "cloud" client_platform: Literal["web", "desktop"] = "web" - local_filesystem_root: str | None = None + local_filesystem_roots: list[str] | None = None # ============================================================================= diff --git a/surfsense_backend/tests/unit/middleware/test_filesystem_backends.py b/surfsense_backend/tests/unit/middleware/test_filesystem_backends.py index 2377307f8..a1867ff6c 100644 --- a/surfsense_backend/tests/unit/middleware/test_filesystem_backends.py +++ b/surfsense_backend/tests/unit/middleware/test_filesystem_backends.py @@ -8,7 +8,9 @@ from app.agents.new_chat.filesystem_selection import ( FilesystemMode, FilesystemSelection, ) -from app.agents.new_chat.middleware.local_folder_backend import LocalFolderBackend +from app.agents.new_chat.middleware.multi_root_local_folder_backend import ( + MultiRootLocalFolderBackend, +) pytestmark = pytest.mark.unit @@ -17,16 +19,16 @@ class _RuntimeStub: state = {"files": {}} -def test_backend_resolver_returns_local_backend_for_local_mode(tmp_path: Path): +def 
test_backend_resolver_returns_multi_root_backend_for_single_root(tmp_path: Path): selection = FilesystemSelection( mode=FilesystemMode.DESKTOP_LOCAL_FOLDER, client_platform=ClientPlatform.DESKTOP, - local_root_path=str(tmp_path), + local_root_paths=(str(tmp_path),), ) resolver = build_backend_resolver(selection) backend = resolver(_RuntimeStub()) - assert isinstance(backend, LocalFolderBackend) + assert isinstance(backend, MultiRootLocalFolderBackend) def test_backend_resolver_uses_cloud_mode_by_default(): @@ -35,3 +37,19 @@ def test_backend_resolver_uses_cloud_mode_by_default(): # StateBackend class name check keeps this test decoupled # from internal deepagents runtime class identity. assert backend.__class__.__name__ == "StateBackend" + + +def test_backend_resolver_returns_multi_root_backend_for_multiple_roots(tmp_path: Path): + root_one = tmp_path / "resume" + root_two = tmp_path / "notes" + root_one.mkdir() + root_two.mkdir() + selection = FilesystemSelection( + mode=FilesystemMode.DESKTOP_LOCAL_FOLDER, + client_platform=ClientPlatform.DESKTOP, + local_root_paths=(str(root_one), str(root_two)), + ) + resolver = build_backend_resolver(selection) + + backend = resolver(_RuntimeStub()) + assert isinstance(backend, MultiRootLocalFolderBackend) From 3ee2683391fb82c50c74b73a5b0522845e682100 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Fri, 24 Apr 2026 01:45:13 +0530 Subject: [PATCH 102/113] feat(filesystem): propagate localRootPaths across desktop and web API --- surfsense_desktop/src/ipc/handlers.ts | 2 +- .../src/modules/agent-filesystem.ts | 100 ++++++++++++++---- surfsense_desktop/src/preload.ts | 2 +- .../new-chat/[[...chat_id]]/page.tsx | 8 +- surfsense_web/lib/agent-filesystem.ts | 7 +- surfsense_web/types/window.d.ts | 4 +- 6 files changed, 93 insertions(+), 30 deletions(-) diff --git a/surfsense_desktop/src/ipc/handlers.ts b/surfsense_desktop/src/ipc/handlers.ts index cc84a46e0..247d171f5 100644 --- 
a/surfsense_desktop/src/ipc/handlers.ts +++ b/surfsense_desktop/src/ipc/handlers.ts @@ -228,7 +228,7 @@ export function registerIpcHandlers(): void { ipcMain.handle( IPC_CHANNELS.AGENT_FILESYSTEM_SET_SETTINGS, - (_event, settings: { mode?: 'cloud' | 'desktop_local_folder'; localRootPath?: string | null }) => + (_event, settings: { mode?: 'cloud' | 'desktop_local_folder'; localRootPaths?: string[] | null }) => setAgentFilesystemSettings(settings) ); diff --git a/surfsense_desktop/src/modules/agent-filesystem.ts b/surfsense_desktop/src/modules/agent-filesystem.ts index 9dfe79fb0..afad98f24 100644 --- a/surfsense_desktop/src/modules/agent-filesystem.ts +++ b/surfsense_desktop/src/modules/agent-filesystem.ts @@ -1,16 +1,17 @@ import { app, dialog } from "electron"; -import { mkdir, readFile, writeFile } from "node:fs/promises"; +import { access, mkdir, readFile, writeFile } from "node:fs/promises"; import { dirname, isAbsolute, join, relative, resolve } from "node:path"; export type AgentFilesystemMode = "cloud" | "desktop_local_folder"; export interface AgentFilesystemSettings { mode: AgentFilesystemMode; - localRootPath: string | null; + localRootPaths: string[]; updatedAt: string; } const SETTINGS_FILENAME = "agent-filesystem-settings.json"; +const MAX_LOCAL_ROOTS = 5; function getSettingsPath(): string { return join(app.getPath("userData"), SETTINGS_FILENAME); @@ -19,11 +20,28 @@ function getSettingsPath(): string { function getDefaultSettings(): AgentFilesystemSettings { return { mode: "cloud", - localRootPath: null, + localRootPaths: [], updatedAt: new Date().toISOString(), }; } +function normalizeLocalRootPaths(paths: unknown): string[] { + if (!Array.isArray(paths)) { + return []; + } + const uniquePaths = new Set(); + for (const path of paths) { + if (typeof path !== "string") continue; + const trimmed = path.trim(); + if (!trimmed) continue; + uniquePaths.add(trimmed); + if (uniquePaths.size >= MAX_LOCAL_ROOTS) { + break; + } + } + return [...uniquePaths]; +} 
+ export async function getAgentFilesystemSettings(): Promise { try { const raw = await readFile(getSettingsPath(), "utf8"); @@ -33,7 +51,7 @@ export async function getAgentFilesystemSettings(): Promise> + settings: { + mode?: AgentFilesystemMode; + localRootPaths?: string[] | null; + } ): Promise { const current = await getAgentFilesystemSettings(); const nextMode = @@ -51,8 +72,10 @@ export async function setAgentFilesystemSettings( : current.mode; const next: AgentFilesystemSettings = { mode: nextMode, - localRootPath: - settings.localRootPath === undefined ? current.localRootPath : settings.localRootPath, + localRootPaths: + settings.localRootPaths === undefined + ? current.localRootPaths + : normalizeLocalRootPaths(settings.localRootPaths ?? []), updatedAt: new Date().toISOString(), }; @@ -101,20 +124,45 @@ function toVirtualPath(rootPath: string, absolutePath: string): string { async function resolveCurrentRootPath(): Promise { const settings = await getAgentFilesystemSettings(); - if (!settings.localRootPath) { - throw new Error("No local filesystem root selected"); + if (settings.localRootPaths.length === 0) { + throw new Error("No local filesystem roots selected"); } - return settings.localRootPath; + return settings.localRootPaths[0]; +} + +async function resolveCurrentRootPaths(): Promise { + const settings = await getAgentFilesystemSettings(); + if (settings.localRootPaths.length === 0) { + throw new Error("No local filesystem roots selected"); + } + return settings.localRootPaths; } export async function readAgentLocalFileText( virtualPath: string ): Promise<{ path: string; content: string }> { - const rootPath = await resolveCurrentRootPath(); - const absolutePath = resolveVirtualPath(rootPath, virtualPath); - const content = await readFile(absolutePath, "utf8"); + const rootPaths = await resolveCurrentRootPaths(); + for (const rootPath of rootPaths) { + const absolutePath = resolveVirtualPath(rootPath, virtualPath); + try { + const content = await 
readFile(absolutePath, "utf8"); + return { + path: toVirtualPath(rootPath, absolutePath), + content, + }; + } catch (error) { + if ((error as NodeJS.ErrnoException).code === "ENOENT") { + continue; + } + throw error; + } + } + // Keep the same relative virtual path in the error context. + const fallbackRootPath = await resolveCurrentRootPath(); + const fallbackAbsolutePath = resolveVirtualPath(fallbackRootPath, virtualPath); + const content = await readFile(fallbackAbsolutePath, "utf8"); return { - path: toVirtualPath(rootPath, absolutePath), + path: toVirtualPath(fallbackRootPath, fallbackAbsolutePath), content, }; } @@ -123,11 +171,25 @@ export async function writeAgentLocalFileText( virtualPath: string, content: string ): Promise<{ path: string }> { - const rootPath = await resolveCurrentRootPath(); - const absolutePath = resolveVirtualPath(rootPath, virtualPath); - await mkdir(dirname(absolutePath), { recursive: true }); - await writeFile(absolutePath, content, "utf8"); + const rootPaths = await resolveCurrentRootPaths(); + let selectedRootPath = rootPaths[0]; + let selectedAbsolutePath = resolveVirtualPath(selectedRootPath, virtualPath); + + for (const rootPath of rootPaths) { + const absolutePath = resolveVirtualPath(rootPath, virtualPath); + try { + await access(absolutePath); + selectedRootPath = rootPath; + selectedAbsolutePath = absolutePath; + break; + } catch { + // Keep searching for an existing file path across selected roots. 
+ } + } + + await mkdir(dirname(selectedAbsolutePath), { recursive: true }); + await writeFile(selectedAbsolutePath, content, "utf8"); return { - path: toVirtualPath(rootPath, absolutePath), + path: toVirtualPath(selectedRootPath, selectedAbsolutePath), }; } diff --git a/surfsense_desktop/src/preload.ts b/surfsense_desktop/src/preload.ts index 9fc213bfa..f7aaf9633 100644 --- a/surfsense_desktop/src/preload.ts +++ b/surfsense_desktop/src/preload.ts @@ -110,7 +110,7 @@ contextBridge.exposeInMainWorld('electronAPI', { ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_GET_SETTINGS), setAgentFilesystemSettings: (settings: { mode?: "cloud" | "desktop_local_folder"; - localRootPath?: string | null; + localRootPaths?: string[] | null; }) => ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_SET_SETTINGS, settings), pickAgentFilesystemRoot: () => ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_PICK_ROOT), }); diff --git a/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx index bdb77ade2..616637a49 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx @@ -660,7 +660,7 @@ export default function NewChatPage() { const selection = await getAgentFilesystemSelection(); if ( selection.filesystem_mode === "desktop_local_folder" && - !selection.local_filesystem_root + (!selection.local_filesystem_roots || selection.local_filesystem_roots.length === 0) ) { toast.error("Select a local folder before using Local Folder mode."); return; @@ -702,7 +702,7 @@ export default function NewChatPage() { search_space_id: searchSpaceId, filesystem_mode: selection.filesystem_mode, client_platform: selection.client_platform, - local_filesystem_root: selection.local_filesystem_root, + local_filesystem_roots: selection.local_filesystem_roots, messages: messageHistory, 
mentioned_document_ids: hasDocumentIds ? mentionedDocumentIds.document_ids : undefined, mentioned_surfsense_doc_ids: hasSurfsenseDocIds @@ -1098,7 +1098,7 @@ export default function NewChatPage() { decisions, filesystem_mode: selection.filesystem_mode, client_platform: selection.client_platform, - local_filesystem_root: selection.local_filesystem_root, + local_filesystem_roots: selection.local_filesystem_roots, }), signal: controller.signal, }); @@ -1435,7 +1435,7 @@ export default function NewChatPage() { disabled_tools: disabledTools.length > 0 ? disabledTools : undefined, filesystem_mode: selection.filesystem_mode, client_platform: selection.client_platform, - local_filesystem_root: selection.local_filesystem_root, + local_filesystem_roots: selection.local_filesystem_roots, }), signal: controller.signal, }); diff --git a/surfsense_web/lib/agent-filesystem.ts b/surfsense_web/lib/agent-filesystem.ts index 6bfb5d131..c9096a294 100644 --- a/surfsense_web/lib/agent-filesystem.ts +++ b/surfsense_web/lib/agent-filesystem.ts @@ -4,7 +4,7 @@ export type ClientPlatform = "web" | "desktop"; export interface AgentFilesystemSelection { filesystem_mode: AgentFilesystemMode; client_platform: ClientPlatform; - local_filesystem_root?: string; + local_filesystem_roots?: string[]; } const DEFAULT_SELECTION: AgentFilesystemSelection = { @@ -24,11 +24,12 @@ export async function getAgentFilesystemSelection(): Promise Promise; setAgentFilesystemSettings: (settings: { mode?: AgentFilesystemMode; - localRootPath?: string | null; + localRootPaths?: string[] | null; }) => Promise; pickAgentFilesystemRoot: () => Promise; } From a250f971622e5f3a3cd4c32eb7ecf45c3682f186 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Fri, 24 Apr 2026 01:46:32 +0530 Subject: [PATCH 103/113] feat(thread): support selecting and managing multiple local folders --- .../components/assistant-ui/thread.tsx | 128 +++++++++++++++--- 1 file changed, 110 
insertions(+), 18 deletions(-) diff --git a/surfsense_web/components/assistant-ui/thread.tsx b/surfsense_web/components/assistant-ui/thread.tsx index 9df41ee55..6fde33061 100644 --- a/surfsense_web/components/assistant-ui/thread.tsx +++ b/surfsense_web/components/assistant-ui/thread.tsx @@ -110,11 +110,15 @@ const COMPOSER_PLACEHOLDER = "Ask anything, type / for prompts, type @ to mentio type ComposerFilesystemSettings = { mode: "cloud" | "desktop_local_folder"; - localRootPath: string | null; + localRootPaths: string[]; updatedAt: string; }; const LOCAL_FILESYSTEM_TRUST_KEY = "surfsense.local-filesystem-trust.v1"; +const MAX_LOCAL_FILESYSTEM_ROOTS = 5; + +const getFolderDisplayName = (rootPath: string): string => + rootPath.split(/[\\/]/).at(-1) || rootPath; export const Thread: FC = () => { return ; @@ -388,6 +392,7 @@ const Composer: FC = () => { null ); const [localTrustDialogOpen, setLocalTrustDialogOpen] = useState(false); + const [localFoldersOpen, setLocalFoldersOpen] = useState(false); const [pendingLocalPath, setPendingLocalPath] = useState(null); const [clipboardInitialText, setClipboardInitialText] = useState(); const clipboardLoadedRef = useRef(false); @@ -414,7 +419,7 @@ const Composer: FC = () => { if (!mounted) return; setFilesystemSettings({ mode: "cloud", - localRootPath: null, + localRootPaths: [], updatedAt: new Date().toISOString(), }); }); @@ -431,16 +436,27 @@ const Composer: FC = () => { } }, []); + const localRootPaths = filesystemSettings?.localRootPaths ?? []; + const primaryLocalRootPath = localRootPaths[0] ?? 
null; + const extraLocalRootCount = Math.max(0, localRootPaths.length - 1); + const canAddMoreLocalRoots = localRootPaths.length < MAX_LOCAL_FILESYSTEM_ROOTS; + const applyLocalRootPath = useCallback( async (path: string) => { if (!electronAPI?.setAgentFilesystemSettings) return; + const nextLocalRootPaths = [...localRootPaths, path] + .filter((rootPath, index, allPaths) => allPaths.indexOf(rootPath) === index) + .slice(0, MAX_LOCAL_FILESYSTEM_ROOTS); + if (nextLocalRootPaths.length === localRootPaths.length) { + return; + } const updated = await electronAPI.setAgentFilesystemSettings({ mode: "desktop_local_folder", - localRootPath: path, + localRootPaths: nextLocalRootPaths, }); setFilesystemSettings(updated); }, - [electronAPI] + [electronAPI, localRootPaths] ); const runSwitchToLocalMode = useCallback(async () => { @@ -467,6 +483,7 @@ const Composer: FC = () => { ); const handlePickFilesystemRoot = useCallback(async () => { + if (!canAddMoreLocalRoots) return; if (hasLocalFilesystemTrust()) { await runPickLocalRoot(); return; @@ -476,13 +493,25 @@ const Composer: FC = () => { if (!picked) return; setPendingLocalPath(picked); setLocalTrustDialogOpen(true); - }, [electronAPI, hasLocalFilesystemTrust, runPickLocalRoot]); + }, [canAddMoreLocalRoots, electronAPI, hasLocalFilesystemTrust, runPickLocalRoot]); - const handleClearFilesystemRoot = useCallback(async () => { + const handleRemoveFilesystemRoot = useCallback( + async (rootPathToRemove: string) => { + if (!electronAPI?.setAgentFilesystemSettings) return; + const updated = await electronAPI.setAgentFilesystemSettings({ + mode: "desktop_local_folder", + localRootPaths: localRootPaths.filter((rootPath) => rootPath !== rootPathToRemove), + }); + setFilesystemSettings(updated); + }, + [electronAPI, localRootPaths] + ); + + const handleClearFilesystemRoots = useCallback(async () => { if (!electronAPI?.setAgentFilesystemSettings) return; const updated = await electronAPI.setAgentFilesystemSettings({ mode: 
"desktop_local_folder", - localRootPath: null, + localRootPaths: [], }); setFilesystemSettings(updated); }, [electronAPI]); @@ -833,31 +862,89 @@ const Composer: FC = () => { {filesystemSettings.mode === "desktop_local_folder" && ( <>
-
- {filesystemSettings.localRootPath ? ( +
+ {primaryLocalRootPath ? ( <>
- {filesystemSettings.localRootPath.split("/").at(-1) || - filesystemSettings.localRootPath} + {getFolderDisplayName(primaryLocalRootPath)}
+ {extraLocalRootCount > 0 && ( + + + + + +
+ {localRootPaths.map((rootPath) => ( +
+ + + {getFolderDisplayName(rootPath)} + + +
+ ))} +
+ +
+
+
+
+ )} @@ -909,9 +1001,9 @@ const Composer: FC = () => { Local mode can read and edit files inside the folders you select. Continue only if you trust this workspace and its contents. - {(pendingLocalPath || filesystemSettings?.localRootPath) && ( + {(pendingLocalPath || primaryLocalRootPath) && ( - Folder path: {pendingLocalPath || filesystemSettings?.localRootPath} + Folder path: {pendingLocalPath || primaryLocalRootPath} )} From c1a07a093e46c760c370df05c01f5c126d198286 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Fri, 24 Apr 2026 01:46:44 +0530 Subject: [PATCH 104/113] refactor(sidebar): use Monitor icon for system theme option --- .../components/layout/ui/sidebar/SidebarUserProfile.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/surfsense_web/components/layout/ui/sidebar/SidebarUserProfile.tsx b/surfsense_web/components/layout/ui/sidebar/SidebarUserProfile.tsx index 81fbeef91..acece2d5c 100644 --- a/surfsense_web/components/layout/ui/sidebar/SidebarUserProfile.tsx +++ b/surfsense_web/components/layout/ui/sidebar/SidebarUserProfile.tsx @@ -7,8 +7,8 @@ import { ExternalLink, Info, Languages, - Laptop, LogOut, + Monitor, Moon, Sun, UserCog, @@ -49,7 +49,7 @@ const LANGUAGES = [ const THEMES = [ { value: "light" as const, name: "Light", icon: Sun }, { value: "dark" as const, name: "Dark", icon: Moon }, - { value: "system" as const, name: "System", icon: Laptop }, + { value: "system" as const, name: "System", icon: Monitor }, ]; const LEARN_MORE_LINKS = [ From 1e9db6f26f12f399f9b94eed51184782ab7f7ae4 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Fri, 24 Apr 2026 02:12:30 +0530 Subject: [PATCH 105/113] feat(filesystem): enhance local mount path normalization and improve virtual path handling in agent filesystem --- .../agents/new_chat/middleware/filesystem.py | 41 ++++--- .../src/modules/agent-filesystem.ts | 110 ++++++++++++------ 
.../components/editor/source-code-editor.tsx | 2 +- 3 files changed, 96 insertions(+), 57 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/middleware/filesystem.py b/surfsense_backend/app/agents/new_chat/middleware/filesystem.py index 6c30b20ef..a086357af 100644 --- a/surfsense_backend/app/agents/new_chat/middleware/filesystem.py +++ b/surfsense_backend/app/agents/new_chat/middleware/filesystem.py @@ -782,6 +782,27 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): return f"/{backend.default_mount()}" return "" + def _normalize_local_mount_path( + self, candidate: str, runtime: ToolRuntime[None, FilesystemState] + ) -> str: + backend = self._get_backend(runtime) + mount_prefix = self._default_mount_prefix(runtime) + if not mount_prefix or not isinstance(backend, MultiRootLocalFolderBackend): + return candidate if candidate.startswith("/") else f"/{candidate.lstrip('/')}" + + mount_names = set(backend.list_mounts()) + if candidate.startswith("/"): + first_segment = candidate.lstrip("/").split("/", 1)[0] + if first_segment in mount_names: + return candidate + return f"{mount_prefix}{candidate}" + + relative = candidate.lstrip("/") + first_segment = relative.split("/", 1)[0] + if first_segment in mount_names: + return f"/{relative}" + return f"{mount_prefix}/{relative}" + def _get_contract_suggested_path( self, runtime: ToolRuntime[None, FilesystemState] ) -> str: @@ -790,11 +811,7 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): if isinstance(suggested, str) and suggested.strip(): cleaned = suggested.strip() if self._filesystem_mode == FilesystemMode.DESKTOP_LOCAL_FOLDER: - mount_prefix = self._default_mount_prefix(runtime) - if mount_prefix and cleaned.startswith("/") and not cleaned.startswith( - f"{mount_prefix}/" - ): - return f"{mount_prefix}{cleaned}" + return self._normalize_local_mount_path(cleaned, runtime) return cleaned if self._filesystem_mode == FilesystemMode.DESKTOP_LOCAL_FOLDER: mount_prefix = 
self._default_mount_prefix(runtime) @@ -811,19 +828,7 @@ class SurfSenseFilesystemMiddleware(FilesystemMiddleware): if not candidate: return self._get_contract_suggested_path(runtime) if self._filesystem_mode == FilesystemMode.DESKTOP_LOCAL_FOLDER: - backend = self._get_backend(runtime) - mount_prefix = self._default_mount_prefix(runtime) - if mount_prefix and not candidate.startswith("/"): - return f"{mount_prefix}/{candidate.lstrip('/')}" - if ( - mount_prefix - and isinstance(backend, MultiRootLocalFolderBackend) - and candidate.startswith("/") - ): - mount_names = backend.list_mounts() - first_segment = candidate.lstrip("/").split("/", 1)[0] - if first_segment not in mount_names: - return f"{mount_prefix}{candidate}" + return self._normalize_local_mount_path(candidate, runtime) if not candidate.startswith("/"): return f"/{candidate.lstrip('/')}" return candidate diff --git a/surfsense_desktop/src/modules/agent-filesystem.ts b/surfsense_desktop/src/modules/agent-filesystem.ts index afad98f24..2bf0101d6 100644 --- a/surfsense_desktop/src/modules/agent-filesystem.ts +++ b/surfsense_desktop/src/modules/agent-filesystem.ts @@ -122,12 +122,55 @@ function toVirtualPath(rootPath: string, absolutePath: string): string { return `/${rel.replace(/\\/g, "/")}`; } -async function resolveCurrentRootPath(): Promise { - const settings = await getAgentFilesystemSettings(); - if (settings.localRootPaths.length === 0) { - throw new Error("No local filesystem roots selected"); +type LocalRootMount = { + mount: string; + rootPath: string; +}; + +function buildRootMounts(rootPaths: string[]): LocalRootMount[] { + const mounts: LocalRootMount[] = []; + const usedMounts = new Set(); + for (const rawRootPath of rootPaths) { + const normalizedRoot = resolve(rawRootPath); + const baseMount = normalizedRoot.split(/[\\/]/).at(-1) || "root"; + let mount = baseMount; + let suffix = 2; + while (usedMounts.has(mount)) { + mount = `${baseMount}-${suffix}`; + suffix += 1; + } + 
usedMounts.add(mount); + mounts.push({ mount, rootPath: normalizedRoot }); } - return settings.localRootPaths[0]; + return mounts; +} + +function parseMountedVirtualPath(virtualPath: string): { + mount: string; + subPath: string; +} { + if (!virtualPath.startsWith("/")) { + throw new Error("Path must start with '/'"); + } + const trimmed = virtualPath.replace(/^\/+/, ""); + if (!trimmed) { + throw new Error("Path must include a mounted root segment"); + } + const [mount, ...rest] = trimmed.split("/"); + const remainder = rest.join("/"); + if (!remainder) { + throw new Error("Path must include a file path under the mounted root"); + } + return { mount, subPath: `/${remainder}` }; +} + +function findMountByName(mounts: LocalRootMount[], mountName: string): LocalRootMount | undefined { + return mounts.find((entry) => entry.mount === mountName); +} + +function toMountedVirtualPath(mount: string, rootPath: string, absolutePath: string): string { + const relativePath = toVirtualPath(rootPath, absolutePath); + return `/${mount}${relativePath}`; } async function resolveCurrentRootPaths(): Promise { @@ -142,27 +185,18 @@ export async function readAgentLocalFileText( virtualPath: string ): Promise<{ path: string; content: string }> { const rootPaths = await resolveCurrentRootPaths(); - for (const rootPath of rootPaths) { - const absolutePath = resolveVirtualPath(rootPath, virtualPath); - try { - const content = await readFile(absolutePath, "utf8"); - return { - path: toVirtualPath(rootPath, absolutePath), - content, - }; - } catch (error) { - if ((error as NodeJS.ErrnoException).code === "ENOENT") { - continue; - } - throw error; - } + const mounts = buildRootMounts(rootPaths); + const { mount, subPath } = parseMountedVirtualPath(virtualPath); + const rootMount = findMountByName(mounts, mount); + if (!rootMount) { + throw new Error( + `Unknown mounted root '${mount}'. 
Available roots: ${mounts.map((entry) => `/${entry.mount}`).join(", ")}` + ); } - // Keep the same relative virtual path in the error context. - const fallbackRootPath = await resolveCurrentRootPath(); - const fallbackAbsolutePath = resolveVirtualPath(fallbackRootPath, virtualPath); - const content = await readFile(fallbackAbsolutePath, "utf8"); + const absolutePath = resolveVirtualPath(rootMount.rootPath, subPath); + const content = await readFile(absolutePath, "utf8"); return { - path: toVirtualPath(fallbackRootPath, fallbackAbsolutePath), + path: toMountedVirtualPath(rootMount.mount, rootMount.rootPath, absolutePath), content, }; } @@ -172,24 +206,24 @@ export async function writeAgentLocalFileText( content: string ): Promise<{ path: string }> { const rootPaths = await resolveCurrentRootPaths(); - let selectedRootPath = rootPaths[0]; - let selectedAbsolutePath = resolveVirtualPath(selectedRootPath, virtualPath); - - for (const rootPath of rootPaths) { - const absolutePath = resolveVirtualPath(rootPath, virtualPath); - try { - await access(absolutePath); - selectedRootPath = rootPath; - selectedAbsolutePath = absolutePath; - break; - } catch { - // Keep searching for an existing file path across selected roots. - } + const mounts = buildRootMounts(rootPaths); + const { mount, subPath } = parseMountedVirtualPath(virtualPath); + const rootMount = findMountByName(mounts, mount); + if (!rootMount) { + throw new Error( + `Unknown mounted root '${mount}'. Available roots: ${mounts.map((entry) => `/${entry.mount}`).join(", ")}` + ); } + let selectedAbsolutePath = resolveVirtualPath(rootMount.rootPath, subPath); + try { + await access(selectedAbsolutePath); + } catch { + // New files are created under the selected mounted root. 
+ } await mkdir(dirname(selectedAbsolutePath), { recursive: true }); await writeFile(selectedAbsolutePath, content, "utf8"); return { - path: toVirtualPath(selectedRootPath, selectedAbsolutePath), + path: toMountedVirtualPath(rootMount.mount, rootMount.rootPath, selectedAbsolutePath), }; } diff --git a/surfsense_web/components/editor/source-code-editor.tsx b/surfsense_web/components/editor/source-code-editor.tsx index 11f9266b6..c2d77be60 100644 --- a/surfsense_web/components/editor/source-code-editor.tsx +++ b/surfsense_web/components/editor/source-code-editor.tsx @@ -89,7 +89,7 @@ export function SourceCodeEditor({ onChange={(next) => onChange(next ?? "")} loading={
- +
} beforeMount={(monaco) => { From 17f9ee4b592d3ba696333c818dbcf51f6320a59d Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Fri, 24 Apr 2026 02:33:57 +0530 Subject: [PATCH 106/113] refactor(icons): replace 'Pen' icon with 'Pencil' across various components for consistency --- .../user-settings/components/MemoryContent.tsx | 4 ++-- .../user-settings/components/PromptsContent.tsx | 4 ++-- surfsense_web/components/assistant-ui/user-message.tsx | 4 ++-- .../chat-comments/comment-item/comment-actions.tsx | 4 ++-- surfsense_web/components/documents/DocumentNode.tsx | 6 +++--- surfsense_web/components/documents/FolderNode.tsx | 6 +++--- .../components/layout/ui/sidebar/AllPrivateChatsSidebar.tsx | 4 ++-- .../components/layout/ui/sidebar/AllSharedChatsSidebar.tsx | 4 ++-- surfsense_web/components/layout/ui/sidebar/ChatListItem.tsx | 4 ++-- surfsense_web/components/layout/ui/sidebar/Sidebar.tsx | 4 ++-- .../components/layout/ui/tabs/DocumentTabContent.tsx | 4 ++-- surfsense_web/components/settings/team-memory-manager.tsx | 4 ++-- .../tool-ui/confluence/create-confluence-page.tsx | 4 ++-- .../tool-ui/confluence/update-confluence-page.tsx | 4 ++-- surfsense_web/components/tool-ui/dropbox/create-file.tsx | 4 ++-- surfsense_web/components/tool-ui/generic-hitl-approval.tsx | 4 ++-- surfsense_web/components/tool-ui/gmail/create-draft.tsx | 4 ++-- surfsense_web/components/tool-ui/gmail/send-email.tsx | 4 ++-- surfsense_web/components/tool-ui/gmail/update-draft.tsx | 4 ++-- .../components/tool-ui/google-calendar/create-event.tsx | 4 ++-- .../components/tool-ui/google-calendar/update-event.tsx | 4 ++-- .../components/tool-ui/google-drive/create-file.tsx | 4 ++-- surfsense_web/components/tool-ui/jira/create-jira-issue.tsx | 4 ++-- surfsense_web/components/tool-ui/jira/update-jira-issue.tsx | 4 ++-- .../components/tool-ui/linear/create-linear-issue.tsx | 4 ++-- .../components/tool-ui/linear/update-linear-issue.tsx | 4 ++-- 
.../components/tool-ui/notion/create-notion-page.tsx | 4 ++-- .../components/tool-ui/notion/update-notion-page.tsx | 4 ++-- surfsense_web/components/tool-ui/onedrive/create-file.tsx | 4 ++-- surfsense_web/components/ui/mode-toolbar-button.tsx | 4 ++-- 30 files changed, 62 insertions(+), 62 deletions(-) diff --git a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/MemoryContent.tsx b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/MemoryContent.tsx index ef17e5a89..3d0550b6c 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/MemoryContent.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/MemoryContent.tsx @@ -1,7 +1,7 @@ "use client"; import { useAtomValue } from "jotai"; -import { ArrowUp, ChevronDown, ClipboardCopy, Download, Info, Pen } from "lucide-react"; +import { ArrowUp, ChevronDown, ClipboardCopy, Download, Info, Pencil } from "lucide-react"; import { useCallback, useEffect, useRef, useState } from "react"; import { toast } from "sonner"; import { z } from "zod"; @@ -241,7 +241,7 @@ export function MemoryContent() { onClick={openInput} className="absolute bottom-3 right-3 z-10 h-[54px] w-[54px] rounded-full border bg-muted/60 backdrop-blur-sm shadow-sm" > - + )}
diff --git a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/PromptsContent.tsx b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/PromptsContent.tsx index 1e7087afc..c78d4f9f0 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/PromptsContent.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/PromptsContent.tsx @@ -1,7 +1,7 @@ "use client"; import { useAtomValue } from "jotai"; -import { AlertTriangle, Globe, Lock, PenLine, Sparkles, Trash2 } from "lucide-react"; +import { AlertTriangle, Globe, Lock, Pencil, Sparkles, Trash2 } from "lucide-react"; import { useCallback, useState } from "react"; import { toast } from "sonner"; import { @@ -308,7 +308,7 @@ export function PromptsContent() { className="size-7" onClick={() => handleEdit(prompt)} > - + )} diff --git a/surfsense_web/components/settings/team-memory-manager.tsx b/surfsense_web/components/settings/team-memory-manager.tsx index 67369879b..371527530 100644 --- a/surfsense_web/components/settings/team-memory-manager.tsx +++ b/surfsense_web/components/settings/team-memory-manager.tsx @@ -2,7 +2,7 @@ import { useQuery, useQueryClient } from "@tanstack/react-query"; import { useAtomValue } from "jotai"; -import { ArrowUp, ChevronDown, ClipboardCopy, Download, Info, Pen } from "lucide-react"; +import { ArrowUp, ChevronDown, ClipboardCopy, Download, Info, Pencil } from "lucide-react"; import { useEffect, useRef, useState } from "react"; import { toast } from "sonner"; import { z } from "zod"; @@ -247,7 +247,7 @@ export function TeamMemoryManager({ searchSpaceId }: TeamMemoryManagerProps) { onClick={openInput} className="absolute bottom-3 right-3 z-10 h-[54px] w-[54px] rounded-full border bg-muted/60 backdrop-blur-sm shadow-sm" > - + )}
diff --git a/surfsense_web/components/tool-ui/confluence/create-confluence-page.tsx b/surfsense_web/components/tool-ui/confluence/create-confluence-page.tsx index 5344527f9..1bef1f008 100644 --- a/surfsense_web/components/tool-ui/confluence/create-confluence-page.tsx +++ b/surfsense_web/components/tool-ui/confluence/create-confluence-page.tsx @@ -2,7 +2,7 @@ import type { ToolCallMessagePartProps } from "@assistant-ui/react"; import { useSetAtom } from "jotai"; -import { CornerDownLeftIcon, Pen } from "lucide-react"; +import { CornerDownLeftIcon, Pencil } from "lucide-react"; import { useCallback, useEffect, useMemo, useState } from "react"; import { openHitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom"; import { PlateEditor } from "@/components/editor/plate-editor"; @@ -222,7 +222,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git a/surfsense_web/components/tool-ui/confluence/update-confluence-page.tsx b/surfsense_web/components/tool-ui/confluence/update-confluence-page.tsx index 2038f7a0e..c30357fb6 100644 --- a/surfsense_web/components/tool-ui/confluence/update-confluence-page.tsx +++ b/surfsense_web/components/tool-ui/confluence/update-confluence-page.tsx @@ -2,7 +2,7 @@ import type { ToolCallMessagePartProps } from "@assistant-ui/react"; import { useSetAtom } from "jotai"; -import { CornerDownLeftIcon, Pen } from "lucide-react"; +import { CornerDownLeftIcon, Pencil } from "lucide-react"; import { useCallback, useEffect, useState } from "react"; import { openHitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom"; import { PlateEditor } from "@/components/editor/plate-editor"; @@ -241,7 +241,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git a/surfsense_web/components/tool-ui/dropbox/create-file.tsx b/surfsense_web/components/tool-ui/dropbox/create-file.tsx index 02eae2c83..f76a45f62 100644 --- a/surfsense_web/components/tool-ui/dropbox/create-file.tsx +++ b/surfsense_web/components/tool-ui/dropbox/create-file.tsx @@ -2,7 
+2,7 @@ import type { ToolCallMessagePartProps } from "@assistant-ui/react"; import { useSetAtom } from "jotai"; -import { CornerDownLeftIcon, FileIcon, Pen } from "lucide-react"; +import { CornerDownLeftIcon, FileIcon, Pencil } from "lucide-react"; import { useCallback, useEffect, useMemo, useState } from "react"; import { openHitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom"; import { PlateEditor } from "@/components/editor/plate-editor"; @@ -224,7 +224,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git a/surfsense_web/components/tool-ui/generic-hitl-approval.tsx b/surfsense_web/components/tool-ui/generic-hitl-approval.tsx index 809b76c38..d4ee61eeb 100644 --- a/surfsense_web/components/tool-ui/generic-hitl-approval.tsx +++ b/surfsense_web/components/tool-ui/generic-hitl-approval.tsx @@ -1,7 +1,7 @@ "use client"; import type { ToolCallMessagePartComponent } from "@assistant-ui/react"; -import { CornerDownLeftIcon, Pen } from "lucide-react"; +import { CornerDownLeftIcon, Pencil } from "lucide-react"; import { useCallback, useEffect, useMemo, useState } from "react"; import { TextShimmerLoader } from "@/components/prompt-kit/loader"; import { Button } from "@/components/ui/button"; @@ -167,7 +167,7 @@ function GenericApprovalCard({ className="rounded-lg text-muted-foreground -mt-1 -mr-2" onClick={() => setIsEditing(true)} > - + Edit )} diff --git a/surfsense_web/components/tool-ui/gmail/create-draft.tsx b/surfsense_web/components/tool-ui/gmail/create-draft.tsx index cfe61351a..a00760ca3 100644 --- a/surfsense_web/components/tool-ui/gmail/create-draft.tsx +++ b/surfsense_web/components/tool-ui/gmail/create-draft.tsx @@ -2,7 +2,7 @@ import type { ToolCallMessagePartProps } from "@assistant-ui/react"; import { useSetAtom } from "jotai"; -import { CornerDownLeftIcon, Pen, UserIcon, UsersIcon } from "lucide-react"; +import { CornerDownLeftIcon, Pencil, UserIcon, UsersIcon } from "lucide-react"; import { useCallback, useEffect, useMemo, useState } 
from "react"; import type { ExtraField } from "@/atoms/chat/hitl-edit-panel.atom"; import { openHitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom"; @@ -251,7 +251,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git a/surfsense_web/components/tool-ui/gmail/send-email.tsx b/surfsense_web/components/tool-ui/gmail/send-email.tsx index a21ece7b3..c22045fa1 100644 --- a/surfsense_web/components/tool-ui/gmail/send-email.tsx +++ b/surfsense_web/components/tool-ui/gmail/send-email.tsx @@ -2,7 +2,7 @@ import type { ToolCallMessagePartProps } from "@assistant-ui/react"; import { useSetAtom } from "jotai"; -import { CornerDownLeftIcon, MailIcon, Pen, UserIcon, UsersIcon } from "lucide-react"; +import { CornerDownLeftIcon, MailIcon, Pencil, UserIcon, UsersIcon } from "lucide-react"; import { useCallback, useEffect, useMemo, useState } from "react"; import type { ExtraField } from "@/atoms/chat/hitl-edit-panel.atom"; import { openHitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom"; @@ -250,7 +250,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git a/surfsense_web/components/tool-ui/gmail/update-draft.tsx b/surfsense_web/components/tool-ui/gmail/update-draft.tsx index 0cbf338d7..b8c8c10f6 100644 --- a/surfsense_web/components/tool-ui/gmail/update-draft.tsx +++ b/surfsense_web/components/tool-ui/gmail/update-draft.tsx @@ -2,7 +2,7 @@ import type { ToolCallMessagePartProps } from "@assistant-ui/react"; import { useSetAtom } from "jotai"; -import { CornerDownLeftIcon, MailIcon, Pen, UserIcon, UsersIcon } from "lucide-react"; +import { CornerDownLeftIcon, MailIcon, Pencil, UserIcon, UsersIcon } from "lucide-react"; import { useCallback, useEffect, useState } from "react"; import type { ExtraField } from "@/atoms/chat/hitl-edit-panel.atom"; import { openHitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom"; @@ -283,7 +283,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git 
a/surfsense_web/components/tool-ui/google-calendar/create-event.tsx b/surfsense_web/components/tool-ui/google-calendar/create-event.tsx index 40a9f0106..9427c989b 100644 --- a/surfsense_web/components/tool-ui/google-calendar/create-event.tsx +++ b/surfsense_web/components/tool-ui/google-calendar/create-event.tsx @@ -2,7 +2,7 @@ import type { ToolCallMessagePartProps } from "@assistant-ui/react"; import { useSetAtom } from "jotai"; -import { ClockIcon, CornerDownLeftIcon, GlobeIcon, MapPinIcon, Pen, UsersIcon } from "lucide-react"; +import { ClockIcon, CornerDownLeftIcon, GlobeIcon, MapPinIcon, Pencil, UsersIcon } from "lucide-react"; import { useCallback, useEffect, useMemo, useState } from "react"; import type { ExtraField } from "@/atoms/chat/hitl-edit-panel.atom"; import { openHitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom"; @@ -332,7 +332,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git a/surfsense_web/components/tool-ui/google-calendar/update-event.tsx b/surfsense_web/components/tool-ui/google-calendar/update-event.tsx index cd6ec0618..649174245 100644 --- a/surfsense_web/components/tool-ui/google-calendar/update-event.tsx +++ b/surfsense_web/components/tool-ui/google-calendar/update-event.tsx @@ -7,7 +7,7 @@ import { ClockIcon, CornerDownLeftIcon, MapPinIcon, - Pen, + Pencil, UsersIcon, } from "lucide-react"; import { useCallback, useEffect, useState } from "react"; @@ -415,7 +415,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git a/surfsense_web/components/tool-ui/google-drive/create-file.tsx b/surfsense_web/components/tool-ui/google-drive/create-file.tsx index 638db3db9..b13089877 100644 --- a/surfsense_web/components/tool-ui/google-drive/create-file.tsx +++ b/surfsense_web/components/tool-ui/google-drive/create-file.tsx @@ -2,7 +2,7 @@ import type { ToolCallMessagePartProps } from "@assistant-ui/react"; import { useSetAtom } from "jotai"; -import { CornerDownLeftIcon, FileIcon, Pen } from "lucide-react"; +import { 
CornerDownLeftIcon, FileIcon, Pencil } from "lucide-react"; import { useCallback, useEffect, useMemo, useState } from "react"; import { openHitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom"; import { PlateEditor } from "@/components/editor/plate-editor"; @@ -240,7 +240,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git a/surfsense_web/components/tool-ui/jira/create-jira-issue.tsx b/surfsense_web/components/tool-ui/jira/create-jira-issue.tsx index 91041d15e..6916f9fa0 100644 --- a/surfsense_web/components/tool-ui/jira/create-jira-issue.tsx +++ b/surfsense_web/components/tool-ui/jira/create-jira-issue.tsx @@ -2,7 +2,7 @@ import type { ToolCallMessagePartProps } from "@assistant-ui/react"; import { useSetAtom } from "jotai"; -import { CornerDownLeftIcon, Pen } from "lucide-react"; +import { CornerDownLeftIcon, Pencil } from "lucide-react"; import { useCallback, useEffect, useMemo, useState } from "react"; import { openHitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom"; import { PlateEditor } from "@/components/editor/plate-editor"; @@ -257,7 +257,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git a/surfsense_web/components/tool-ui/jira/update-jira-issue.tsx b/surfsense_web/components/tool-ui/jira/update-jira-issue.tsx index f377563da..72e697532 100644 --- a/surfsense_web/components/tool-ui/jira/update-jira-issue.tsx +++ b/surfsense_web/components/tool-ui/jira/update-jira-issue.tsx @@ -2,7 +2,7 @@ import type { ToolCallMessagePartProps } from "@assistant-ui/react"; import { useSetAtom } from "jotai"; -import { CornerDownLeftIcon, Pen } from "lucide-react"; +import { CornerDownLeftIcon, Pencil } from "lucide-react"; import { useCallback, useEffect, useState } from "react"; import { openHitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom"; import { PlateEditor } from "@/components/editor/plate-editor"; @@ -273,7 +273,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git 
a/surfsense_web/components/tool-ui/linear/create-linear-issue.tsx b/surfsense_web/components/tool-ui/linear/create-linear-issue.tsx index 8abc7b50b..7d5098c3e 100644 --- a/surfsense_web/components/tool-ui/linear/create-linear-issue.tsx +++ b/surfsense_web/components/tool-ui/linear/create-linear-issue.tsx @@ -2,7 +2,7 @@ import type { ToolCallMessagePartProps } from "@assistant-ui/react"; import { useSetAtom } from "jotai"; -import { CornerDownLeftIcon, Pen } from "lucide-react"; +import { CornerDownLeftIcon, Pencil } from "lucide-react"; import { useCallback, useEffect, useMemo, useState } from "react"; import { openHitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom"; import { PlateEditor } from "@/components/editor/plate-editor"; @@ -269,7 +269,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git a/surfsense_web/components/tool-ui/linear/update-linear-issue.tsx b/surfsense_web/components/tool-ui/linear/update-linear-issue.tsx index daadfbc63..2d6846cea 100644 --- a/surfsense_web/components/tool-ui/linear/update-linear-issue.tsx +++ b/surfsense_web/components/tool-ui/linear/update-linear-issue.tsx @@ -2,7 +2,7 @@ import type { ToolCallMessagePartProps } from "@assistant-ui/react"; import { useSetAtom } from "jotai"; -import { CornerDownLeftIcon, Pen } from "lucide-react"; +import { CornerDownLeftIcon, Pencil } from "lucide-react"; import { useCallback, useEffect, useState } from "react"; import { openHitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom"; import { PlateEditor } from "@/components/editor/plate-editor"; @@ -332,7 +332,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git a/surfsense_web/components/tool-ui/notion/create-notion-page.tsx b/surfsense_web/components/tool-ui/notion/create-notion-page.tsx index 8c93c7648..b16a1d8cd 100644 --- a/surfsense_web/components/tool-ui/notion/create-notion-page.tsx +++ b/surfsense_web/components/tool-ui/notion/create-notion-page.tsx @@ -2,7 +2,7 @@ import type { ToolCallMessagePartProps 
} from "@assistant-ui/react"; import { useSetAtom } from "jotai"; -import { CornerDownLeftIcon, Pen } from "lucide-react"; +import { CornerDownLeftIcon, Pencil } from "lucide-react"; import { useCallback, useEffect, useMemo, useState } from "react"; import { openHitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom"; import { PlateEditor } from "@/components/editor/plate-editor"; @@ -219,7 +219,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git a/surfsense_web/components/tool-ui/notion/update-notion-page.tsx b/surfsense_web/components/tool-ui/notion/update-notion-page.tsx index cf714b1b4..ef75c5d92 100644 --- a/surfsense_web/components/tool-ui/notion/update-notion-page.tsx +++ b/surfsense_web/components/tool-ui/notion/update-notion-page.tsx @@ -2,7 +2,7 @@ import type { ToolCallMessagePartProps } from "@assistant-ui/react"; import { useSetAtom } from "jotai"; -import { CornerDownLeftIcon, Pen } from "lucide-react"; +import { CornerDownLeftIcon, Pencil } from "lucide-react"; import { useCallback, useEffect, useState } from "react"; import { openHitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom"; import { PlateEditor } from "@/components/editor/plate-editor"; @@ -196,7 +196,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git a/surfsense_web/components/tool-ui/onedrive/create-file.tsx b/surfsense_web/components/tool-ui/onedrive/create-file.tsx index 8a64a6cf8..7621f152f 100644 --- a/surfsense_web/components/tool-ui/onedrive/create-file.tsx +++ b/surfsense_web/components/tool-ui/onedrive/create-file.tsx @@ -2,7 +2,7 @@ import type { ToolCallMessagePartProps } from "@assistant-ui/react"; import { useSetAtom } from "jotai"; -import { CornerDownLeftIcon, FileIcon, Pen } from "lucide-react"; +import { CornerDownLeftIcon, FileIcon, Pencil } from "lucide-react"; import { useCallback, useEffect, useMemo, useState } from "react"; import { openHitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom"; import { PlateEditor } from 
"@/components/editor/plate-editor"; @@ -209,7 +209,7 @@ function ApprovalCard({ }); }} > - + Edit )} diff --git a/surfsense_web/components/ui/mode-toolbar-button.tsx b/surfsense_web/components/ui/mode-toolbar-button.tsx index 37231991f..394eaf97c 100644 --- a/surfsense_web/components/ui/mode-toolbar-button.tsx +++ b/surfsense_web/components/ui/mode-toolbar-button.tsx @@ -1,6 +1,6 @@ "use client"; -import { BookOpenIcon, PenLineIcon } from "lucide-react"; +import { BookOpenIcon, Pencil } from "lucide-react"; import { usePlateState } from "platejs/react"; import { ToolbarButton } from "./toolbar"; @@ -13,7 +13,7 @@ export function ModeToolbarButton() { tooltip={readOnly ? "Click to edit" : "Click to view"} onClick={() => setReadOnly(!readOnly)} > - {readOnly ? : } + {readOnly ? : } ); } From 2618205749ebcbe532561a97868e04164c57bdd8 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Fri, 24 Apr 2026 03:52:39 +0530 Subject: [PATCH 107/113] refactor(thread): remove unused filesystem settings and related logic from Composer component --- .../components/assistant-ui/thread.tsx | 361 ------------------ 1 file changed, 361 deletions(-) diff --git a/surfsense_web/components/assistant-ui/thread.tsx b/surfsense_web/components/assistant-ui/thread.tsx index 6fde33061..2ec422fbf 100644 --- a/surfsense_web/components/assistant-ui/thread.tsx +++ b/surfsense_web/components/assistant-ui/thread.tsx @@ -12,15 +12,11 @@ import { AlertCircle, ArrowDownIcon, ArrowUpIcon, - Check, ChevronDown, ChevronUp, Clipboard, Dot, - Folder, - FolderPlus, Globe, - Laptop, Plus, Settings2, SquareIcon, @@ -70,16 +66,6 @@ import { } from "@/components/new-chat/document-mention-picker"; import { PromptPicker, type PromptPickerRef } from "@/components/new-chat/prompt-picker"; import { Avatar, AvatarFallback, AvatarGroup } from "@/components/ui/avatar"; -import { - AlertDialog, - AlertDialogAction, - AlertDialogCancel, - AlertDialogContent, - 
AlertDialogDescription, - AlertDialogFooter, - AlertDialogHeader, - AlertDialogTitle, -} from "@/components/ui/alert-dialog"; import { Button } from "@/components/ui/button"; import { Drawer, DrawerContent, DrawerHandle, DrawerTitle } from "@/components/ui/drawer"; import { @@ -108,18 +94,6 @@ import { cn } from "@/lib/utils"; const COMPOSER_PLACEHOLDER = "Ask anything, type / for prompts, type @ to mention docs"; -type ComposerFilesystemSettings = { - mode: "cloud" | "desktop_local_folder"; - localRootPaths: string[]; - updatedAt: string; -}; - -const LOCAL_FILESYSTEM_TRUST_KEY = "surfsense.local-filesystem-trust.v1"; -const MAX_LOCAL_FILESYSTEM_ROOTS = 5; - -const getFolderDisplayName = (rootPath: string): string => - rootPath.split(/[\\/]/).at(-1) || rootPath; - export const Thread: FC = () => { return ; }; @@ -388,12 +362,6 @@ const Composer: FC = () => { }, []); const electronAPI = useElectronAPI(); - const [filesystemSettings, setFilesystemSettings] = useState( - null - ); - const [localTrustDialogOpen, setLocalTrustDialogOpen] = useState(false); - const [localFoldersOpen, setLocalFoldersOpen] = useState(false); - const [pendingLocalPath, setPendingLocalPath] = useState(null); const [clipboardInitialText, setClipboardInitialText] = useState(); const clipboardLoadedRef = useRef(false); useEffect(() => { @@ -406,116 +374,6 @@ const Composer: FC = () => { }); }, [electronAPI]); - useEffect(() => { - if (!electronAPI?.getAgentFilesystemSettings) return; - let mounted = true; - electronAPI - .getAgentFilesystemSettings() - .then((settings: ComposerFilesystemSettings) => { - if (!mounted) return; - setFilesystemSettings(settings); - }) - .catch(() => { - if (!mounted) return; - setFilesystemSettings({ - mode: "cloud", - localRootPaths: [], - updatedAt: new Date().toISOString(), - }); - }); - return () => { - mounted = false; - }; - }, [electronAPI]); - - const hasLocalFilesystemTrust = useCallback(() => { - try { - return 
window.localStorage.getItem(LOCAL_FILESYSTEM_TRUST_KEY) === "true"; - } catch { - return false; - } - }, []); - - const localRootPaths = filesystemSettings?.localRootPaths ?? []; - const primaryLocalRootPath = localRootPaths[0] ?? null; - const extraLocalRootCount = Math.max(0, localRootPaths.length - 1); - const canAddMoreLocalRoots = localRootPaths.length < MAX_LOCAL_FILESYSTEM_ROOTS; - - const applyLocalRootPath = useCallback( - async (path: string) => { - if (!electronAPI?.setAgentFilesystemSettings) return; - const nextLocalRootPaths = [...localRootPaths, path] - .filter((rootPath, index, allPaths) => allPaths.indexOf(rootPath) === index) - .slice(0, MAX_LOCAL_FILESYSTEM_ROOTS); - if (nextLocalRootPaths.length === localRootPaths.length) { - return; - } - const updated = await electronAPI.setAgentFilesystemSettings({ - mode: "desktop_local_folder", - localRootPaths: nextLocalRootPaths, - }); - setFilesystemSettings(updated); - }, - [electronAPI, localRootPaths] - ); - - const runSwitchToLocalMode = useCallback(async () => { - if (!electronAPI?.setAgentFilesystemSettings) return; - const updated = await electronAPI.setAgentFilesystemSettings({ mode: "desktop_local_folder" }); - setFilesystemSettings(updated); - }, [electronAPI]); - - const runPickLocalRoot = useCallback(async () => { - if (!electronAPI?.pickAgentFilesystemRoot) return; - const picked = await electronAPI.pickAgentFilesystemRoot(); - if (!picked) return; - await applyLocalRootPath(picked); - }, [applyLocalRootPath, electronAPI]); - - const handleFilesystemModeChange = useCallback( - async (mode: "cloud" | "desktop_local_folder") => { - if (!electronAPI?.setAgentFilesystemSettings) return; - if (mode === "desktop_local_folder") return void runSwitchToLocalMode(); - const updated = await electronAPI.setAgentFilesystemSettings({ mode }); - setFilesystemSettings(updated); - }, - [electronAPI, runSwitchToLocalMode] - ); - - const handlePickFilesystemRoot = useCallback(async () => { - if 
(!canAddMoreLocalRoots) return; - if (hasLocalFilesystemTrust()) { - await runPickLocalRoot(); - return; - } - if (!electronAPI?.pickAgentFilesystemRoot) return; - const picked = await electronAPI.pickAgentFilesystemRoot(); - if (!picked) return; - setPendingLocalPath(picked); - setLocalTrustDialogOpen(true); - }, [canAddMoreLocalRoots, electronAPI, hasLocalFilesystemTrust, runPickLocalRoot]); - - const handleRemoveFilesystemRoot = useCallback( - async (rootPathToRemove: string) => { - if (!electronAPI?.setAgentFilesystemSettings) return; - const updated = await electronAPI.setAgentFilesystemSettings({ - mode: "desktop_local_folder", - localRootPaths: localRootPaths.filter((rootPath) => rootPath !== rootPathToRemove), - }); - setFilesystemSettings(updated); - }, - [electronAPI, localRootPaths] - ); - - const handleClearFilesystemRoots = useCallback(async () => { - if (!electronAPI?.setAgentFilesystemSettings) return; - const updated = await electronAPI.setAgentFilesystemSettings({ - mode: "desktop_local_folder", - localRootPaths: [], - }); - setFilesystemSettings(updated); - }, [electronAPI]); - const isThreadEmpty = useAuiState(({ thread }) => thread.isEmpty); const isThreadRunning = useAuiState(({ thread }) => thread.isRunning); @@ -810,225 +668,6 @@ const Composer: FC = () => { currentUserId={currentUser?.id ?? null} members={members ?? []} /> - {electronAPI && filesystemSettings ? ( -
- - - - - - handleFilesystemModeChange("cloud")} - className="flex items-center justify-between" - > - - - Cloud - - {filesystemSettings.mode === "cloud" && } - - handleFilesystemModeChange("desktop_local_folder")} - className="flex items-center justify-between" - > - - - Local - - {filesystemSettings.mode === "desktop_local_folder" && ( - - )} - - - - - {filesystemSettings.mode === "desktop_local_folder" && ( - <> -
-
- {primaryLocalRootPath ? ( - <> -
- - - {getFolderDisplayName(primaryLocalRootPath)} - - -
- {extraLocalRootCount > 0 && ( - - - - - -
- {localRootPaths.map((rootPath) => ( -
- - - {getFolderDisplayName(rootPath)} - - -
- ))} -
- -
-
-
-
- )} - - - ) : ( - - )} -
- - )} -
- ) : null} - { - setLocalTrustDialogOpen(open); - if (!open) { - setPendingLocalPath(null); - } - }} - > - - - Trust this workspace? - - Local mode can read and edit files inside the folders you select. Continue only if - you trust this workspace and its contents. - - {(pendingLocalPath || primaryLocalRootPath) && ( - - Folder path: {pendingLocalPath || primaryLocalRootPath} - - )} - - - Cancel - { - try { - window.localStorage.setItem(LOCAL_FILESYSTEM_TRUST_KEY, "true"); - } catch {} - setLocalTrustDialogOpen(false); - const path = pendingLocalPath; - setPendingLocalPath(null); - if (path) { - await applyLocalRootPath(path); - } else { - await runPickLocalRoot(); - } - }} - > - I trust this workspace - - - - {showDocumentPopover && (
Date: Fri, 24 Apr 2026 03:55:24 +0530 Subject: [PATCH 108/113] feat(sidebar): implement local filesystem browser and enhance document sidebar with local folder management features --- .../layout/ui/sidebar/DocumentsSidebar.tsx | 466 +++++++++++++++--- .../ui/sidebar/LocalFilesystemBrowser.tsx | 271 ++++++++++ 2 files changed, 675 insertions(+), 62 deletions(-) create mode 100644 surfsense_web/components/layout/ui/sidebar/LocalFilesystemBrowser.tsx diff --git a/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx b/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx index daed8747d..5c955a53e 100644 --- a/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx +++ b/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx @@ -6,9 +6,12 @@ import { ChevronLeft, ChevronRight, FileText, + Folder, FolderClock, + Globe, Lock, Paperclip, + Search, Trash2, Unplug, Upload, @@ -59,7 +62,9 @@ import { import { Avatar, AvatarFallback, AvatarGroup } from "@/components/ui/avatar"; import { Button } from "@/components/ui/button"; import { Drawer, DrawerContent, DrawerHandle, DrawerTitle } from "@/components/ui/drawer"; +import { Input } from "@/components/ui/input"; import { Spinner } from "@/components/ui/spinner"; +import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"; import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; import { useAnonymousMode, useIsAnonymous } from "@/contexts/anonymous-mode"; import { useLoginGate } from "@/contexts/login-gate"; @@ -76,9 +81,31 @@ import { BACKEND_URL } from "@/lib/env-config"; import { uploadFolderScan } from "@/lib/folder-sync-upload"; import { getSupportedExtensionsSet } from "@/lib/supported-extensions"; import { queries } from "@/zero/queries/index"; +import { LocalFilesystemBrowser } from "./LocalFilesystemBrowser"; import { SidebarSlideOutPanel } from "./SidebarSlideOutPanel"; const NON_DELETABLE_DOCUMENT_TYPES: readonly string[] = 
["SURFSENSE_DOCS"]; +const LOCAL_FILESYSTEM_TRUST_KEY = "surfsense.local-filesystem-trust.v1"; +const MAX_LOCAL_FILESYSTEM_ROOTS = 5; + +type FilesystemSettings = { + mode: "cloud" | "desktop_local_folder"; + localRootPaths: string[]; + updatedAt: string; +}; + +interface WatchedFolderEntry { + path: string; + name: string; + excludePatterns: string[]; + fileExtensions: string[] | null; + rootFolderId: number | null; + searchSpaceId: number; + active: boolean; +} + +const getFolderDisplayName = (rootPath: string): string => + rootPath.split(/[\\/]/).at(-1) || rootPath; const SHOWCASE_CONNECTORS = [ { type: "GOOGLE_DRIVE_CONNECTOR", label: "Google Drive" }, @@ -133,12 +160,119 @@ function AuthenticatedDocumentsSidebar({ const [search, setSearch] = useState(""); const debouncedSearch = useDebouncedValue(search, 250); + const [localSearch, setLocalSearch] = useState(""); + const debouncedLocalSearch = useDebouncedValue(localSearch, 250); + const localSearchInputRef = useRef(null); const [activeTypes, setActiveTypes] = useState([]); + const [filesystemSettings, setFilesystemSettings] = useState(null); + const [localTrustDialogOpen, setLocalTrustDialogOpen] = useState(false); + const [pendingLocalPath, setPendingLocalPath] = useState(null); const [watchedFolderIds, setWatchedFolderIds] = useState>(new Set()); const [folderWatchOpen, setFolderWatchOpen] = useAtom(folderWatchDialogOpenAtom); const [watchInitialFolder, setWatchInitialFolder] = useAtom(folderWatchInitialFolderAtom); const isElectron = typeof window !== "undefined" && !!window.electronAPI; + useEffect(() => { + if (!electronAPI?.getAgentFilesystemSettings) return; + let mounted = true; + electronAPI + .getAgentFilesystemSettings() + .then((settings: FilesystemSettings) => { + if (!mounted) return; + setFilesystemSettings(settings); + }) + .catch(() => { + if (!mounted) return; + setFilesystemSettings({ + mode: "cloud", + localRootPaths: [], + updatedAt: new Date().toISOString(), + }); + }); + return () => { 
+ mounted = false; + }; + }, [electronAPI]); + + const hasLocalFilesystemTrust = useCallback(() => { + try { + return window.localStorage.getItem(LOCAL_FILESYSTEM_TRUST_KEY) === "true"; + } catch { + return false; + } + }, []); + + const localRootPaths = filesystemSettings?.localRootPaths ?? []; + const canAddMoreLocalRoots = localRootPaths.length < MAX_LOCAL_FILESYSTEM_ROOTS; + + const applyLocalRootPath = useCallback( + async (path: string) => { + if (!electronAPI?.setAgentFilesystemSettings) return; + const nextLocalRootPaths = [...localRootPaths, path] + .filter((rootPath, index, allPaths) => allPaths.indexOf(rootPath) === index) + .slice(0, MAX_LOCAL_FILESYSTEM_ROOTS); + if (nextLocalRootPaths.length === localRootPaths.length) return; + const updated = await electronAPI.setAgentFilesystemSettings({ + mode: "desktop_local_folder", + localRootPaths: nextLocalRootPaths, + }); + setFilesystemSettings(updated); + }, + [electronAPI, localRootPaths] + ); + + const runPickLocalRoot = useCallback(async () => { + if (!electronAPI?.pickAgentFilesystemRoot) return; + const picked = await electronAPI.pickAgentFilesystemRoot(); + if (!picked) return; + await applyLocalRootPath(picked); + }, [applyLocalRootPath, electronAPI]); + + const handlePickFilesystemRoot = useCallback(async () => { + if (!canAddMoreLocalRoots) return; + if (hasLocalFilesystemTrust()) { + await runPickLocalRoot(); + return; + } + if (!electronAPI?.pickAgentFilesystemRoot) return; + const picked = await electronAPI.pickAgentFilesystemRoot(); + if (!picked) return; + setPendingLocalPath(picked); + setLocalTrustDialogOpen(true); + }, [canAddMoreLocalRoots, electronAPI, hasLocalFilesystemTrust, runPickLocalRoot]); + + const handleRemoveFilesystemRoot = useCallback( + async (rootPathToRemove: string) => { + if (!electronAPI?.setAgentFilesystemSettings) return; + const updated = await electronAPI.setAgentFilesystemSettings({ + mode: "desktop_local_folder", + localRootPaths: localRootPaths.filter((rootPath) 
=> rootPath !== rootPathToRemove), + }); + setFilesystemSettings(updated); + }, + [electronAPI, localRootPaths] + ); + + const handleClearFilesystemRoots = useCallback(async () => { + if (!electronAPI?.setAgentFilesystemSettings) return; + const updated = await electronAPI.setAgentFilesystemSettings({ + mode: "desktop_local_folder", + localRootPaths: [], + }); + setFilesystemSettings(updated); + }, [electronAPI]); + + const handleFilesystemTabChange = useCallback( + async (tab: "cloud" | "local") => { + if (!electronAPI?.setAgentFilesystemSettings) return; + const updated = await electronAPI.setAgentFilesystemSettings({ + mode: tab === "cloud" ? "cloud" : "desktop_local_folder", + }); + setFilesystemSettings(updated); + }, + [electronAPI] + ); + // AI File Sort state const { data: searchSpaces, refetch: refetchSearchSpaces } = useAtomValue(searchSpacesAtom); const activeSearchSpace = useMemo( @@ -196,7 +330,7 @@ function AuthenticatedDocumentsSidebar({ if (!electronAPI?.getWatchedFolders) return; const api = electronAPI; - const folders = await api.getWatchedFolders(); + const folders = (await api.getWatchedFolders()) as WatchedFolderEntry[]; if (folders.length === 0) { try { @@ -214,9 +348,11 @@ function AuthenticatedDocumentsSidebar({ active: true, }); } - const recovered = await api.getWatchedFolders(); + const recovered = (await api.getWatchedFolders()) as WatchedFolderEntry[]; const ids = new Set( - recovered.filter((f) => f.rootFolderId != null).map((f) => f.rootFolderId as number) + recovered + .filter((f: WatchedFolderEntry) => f.rootFolderId != null) + .map((f: WatchedFolderEntry) => f.rootFolderId as number) ); setWatchedFolderIds(ids); return; @@ -226,7 +362,9 @@ function AuthenticatedDocumentsSidebar({ } const ids = new Set( - folders.filter((f) => f.rootFolderId != null).map((f) => f.rootFolderId as number) + folders + .filter((f: WatchedFolderEntry) => f.rootFolderId != null) + .map((f: WatchedFolderEntry) => f.rootFolderId as number) ); 
setWatchedFolderIds(ids); }, [searchSpaceId, electronAPI]); @@ -375,8 +513,8 @@ function AuthenticatedDocumentsSidebar({ async (folder: FolderDisplay) => { if (!electronAPI) return; - const watchedFolders = await electronAPI.getWatchedFolders(); - const matched = watchedFolders.find((wf) => wf.rootFolderId === folder.id); + const watchedFolders = (await electronAPI.getWatchedFolders()) as WatchedFolderEntry[]; + const matched = watchedFolders.find((wf: WatchedFolderEntry) => wf.rootFolderId === folder.id); if (!matched) { toast.error("This folder is not being watched"); return; @@ -405,8 +543,8 @@ function AuthenticatedDocumentsSidebar({ async (folder: FolderDisplay) => { if (!electronAPI) return; - const watchedFolders = await electronAPI.getWatchedFolders(); - const matched = watchedFolders.find((wf) => wf.rootFolderId === folder.id); + const watchedFolders = (await electronAPI.getWatchedFolders()) as WatchedFolderEntry[]; + const matched = watchedFolders.find((wf: WatchedFolderEntry) => wf.rootFolderId === folder.id); if (!matched) { toast.error("This folder is not being watched"); return; @@ -438,8 +576,10 @@ function AuthenticatedDocumentsSidebar({ if (!confirm(`Delete folder "${folder.name}" and all its contents?`)) return; try { if (electronAPI) { - const watchedFolders = await electronAPI.getWatchedFolders(); - const matched = watchedFolders.find((wf) => wf.rootFolderId === folder.id); + const watchedFolders = (await electronAPI.getWatchedFolders()) as WatchedFolderEntry[]; + const matched = watchedFolders.find( + (wf: WatchedFolderEntry) => wf.rootFolderId === folder.id + ); if (matched) { await electronAPI.removeWatchedFolder(matched.path); } @@ -836,59 +976,11 @@ function AuthenticatedDocumentsSidebar({ return () => document.removeEventListener("keydown", handleEscape); }, [open, onOpenChange, isMobile, setRightPanelCollapsed]); - const documentsContent = ( - <> -
-
-
- {isMobile && ( - - )} -

{t("title") || "Documents"}

-
-
- {!isMobile && onDockedChange && ( - - - - - - {isDocked ? "Collapse panel" : "Expand panel"} - - - )} - {headerAction} -
-
-
+ const showFilesystemTabs = !isMobile && !!electronAPI && !!filesystemSettings; + const currentFilesystemTab = filesystemSettings?.mode === "desktop_local_folder" ? "local" : "cloud"; + const cloudContent = ( + <> {/* Connected tools strip */}
+ + ); + + const localContent = ( +
+
+ {localRootPaths.length > 0 ? ( + <> + {localRootPaths.map((rootPath) => ( +
+ + {getFolderDisplayName(rootPath)} + +
+ ))} + + + + ) : ( + + )} +
+
+
+
+
+ setLocalSearch(e.target.value)} + placeholder="Search local files" + type="text" + aria-label="Search local files" + /> + {Boolean(localSearch) && ( + + )} +
+
+ { + openEditorPanel({ + kind: "local_file", + localFilePath, + title: localFilePath.split("/").pop() || localFilePath, + searchSpaceId, + }); + }} + /> +
+ ); + + const documentsContent = ( + <> +
+
+
+ {isMobile && ( + + )} +

{t("title") || "Documents"}

+ {showFilesystemTabs && ( + { + void handleFilesystemTabChange(value === "local" ? "local" : "cloud"); + }} + > + + + + Cloud + + + + Local + + + + )} +
+
+ {!isMobile && onDockedChange && ( + + + + + + {isDocked ? "Collapse panel" : "Expand panel"} + + + )} + {headerAction} +
+
+
+ {showFilesystemTabs ? ( + { + void handleFilesystemTabChange(value === "local" ? "local" : "cloud"); + }} + className="flex min-h-0 flex-1 flex-col" + > + + {cloudContent} + + + {localContent} + + + ) : ( + cloudContent + )} {versionDocId !== null && ( )} + { + setLocalTrustDialogOpen(nextOpen); + if (!nextOpen) setPendingLocalPath(null); + }} + > + + + Trust this workspace? + + Local mode can read and edit files inside the folders you select. Continue only if + you trust this workspace and its contents. + + {pendingLocalPath && ( + + Folder path: {pendingLocalPath} + + )} + + + Cancel + { + try { + window.localStorage.setItem(LOCAL_FILESYSTEM_TRUST_KEY, "true"); + } catch {} + setLocalTrustDialogOpen(false); + const path = pendingLocalPath; + setPendingLocalPath(null); + if (path) { + await applyLocalRootPath(path); + } else { + await runPickLocalRoot(); + } + }} + > + I trust this workspace + + + + void; +} + +interface LocalFolderFileEntry { + relativePath: string; + fullPath: string; + size: number; + mtimeMs: number; +} + +type RootLoadState = { + loading: boolean; + error: string | null; + files: LocalFolderFileEntry[]; +}; + +interface LocalFolderNode { + key: string; + name: string; + folders: Map; + files: LocalFolderFileEntry[]; +} + +const getFolderDisplayName = (rootPath: string): string => + rootPath.split(/[\\/]/).at(-1) || rootPath; + +function createFolderNode(key: string, name: string): LocalFolderNode { + return { + key, + name, + folders: new Map(), + files: [], + }; +} + +function getFileName(pathValue: string): string { + return pathValue.split(/[\\/]/).at(-1) || pathValue; +} + +export function LocalFilesystemBrowser({ + rootPaths, + searchSpaceId, + searchQuery, + onOpenFile, +}: LocalFilesystemBrowserProps) { + const electronAPI = useElectronAPI(); + const [rootStateMap, setRootStateMap] = useState>({}); + const [expandedFolderKeys, setExpandedFolderKeys] = useState>(new Set()); + const supportedExtensions = useMemo(() => 
Array.from(getSupportedExtensionsSet()), []); + + useEffect(() => { + setExpandedFolderKeys((prev) => { + const next = new Set(prev); + for (const rootPath of rootPaths) { + next.add(rootPath); + } + return next; + }); + }, [rootPaths]); + + useEffect(() => { + if (!electronAPI?.listFolderFiles) return; + let cancelled = false; + + for (const rootPath of rootPaths) { + setRootStateMap((prev) => ({ + ...prev, + [rootPath]: { + loading: true, + error: null, + files: prev[rootPath]?.files ?? [], + }, + })); + } + + void Promise.all( + rootPaths.map(async (rootPath) => { + try { + const files = (await electronAPI.listFolderFiles({ + path: rootPath, + name: getFolderDisplayName(rootPath), + excludePatterns: DEFAULT_EXCLUDE_PATTERNS, + fileExtensions: supportedExtensions, + rootFolderId: null, + searchSpaceId, + active: true, + })) as LocalFolderFileEntry[]; + if (cancelled) return; + setRootStateMap((prev) => ({ + ...prev, + [rootPath]: { + loading: false, + error: null, + files, + }, + })); + } catch (error) { + if (cancelled) return; + setRootStateMap((prev) => ({ + ...prev, + [rootPath]: { + loading: false, + error: error instanceof Error ? error.message : "Failed to read folder", + files: [], + }, + })); + } + }) + ); + + return () => { + cancelled = true; + }; + }, [electronAPI, rootPaths, searchSpaceId, supportedExtensions]); + + const treeByRoot = useMemo(() => { + const query = searchQuery?.trim().toLowerCase() ?? ""; + const hasQuery = query.length > 0; + + return rootPaths.map((rootPath) => { + const rootNode = createFolderNode(rootPath, getFolderDisplayName(rootPath)); + const allFiles = rootStateMap[rootPath]?.files ?? []; + const files = hasQuery + ? 
allFiles.filter((file) => { + const relativePath = file.relativePath.toLowerCase(); + const fileName = getFileName(file.relativePath).toLowerCase(); + return relativePath.includes(query) || fileName.includes(query); + }) + : allFiles; + for (const file of files) { + const parts = file.relativePath.split(/[\\/]/).filter(Boolean); + let cursor = rootNode; + for (let i = 0; i < parts.length - 1; i++) { + const part = parts[i]; + const folderKey = `${cursor.key}/${part}`; + if (!cursor.folders.has(part)) { + cursor.folders.set(part, createFolderNode(folderKey, part)); + } + cursor = cursor.folders.get(part) as LocalFolderNode; + } + cursor.files.push(file); + } + return { rootPath, rootNode, matchCount: files.length, totalCount: allFiles.length }; + }); + }, [rootPaths, rootStateMap, searchQuery]); + + const toggleFolder = useCallback((folderKey: string) => { + setExpandedFolderKeys((prev) => { + const next = new Set(prev); + if (next.has(folderKey)) { + next.delete(folderKey); + } else { + next.add(folderKey); + } + return next; + }); + }, []); + + const renderFolder = useCallback( + (folder: LocalFolderNode, depth: number) => { + const isExpanded = expandedFolderKeys.has(folder.key); + const childFolders = Array.from(folder.folders.values()).sort((a, b) => + a.name.localeCompare(b.name) + ); + const files = [...folder.files].sort((a, b) => a.relativePath.localeCompare(b.relativePath)); + return ( +
+ + {isExpanded && ( + <> + {childFolders.map((childFolder) => renderFolder(childFolder, depth + 1))} + {files.map((file) => ( + + ))} + + )} +
+ ); + }, + [expandedFolderKeys, onOpenFile, toggleFolder] + ); + + if (rootPaths.length === 0) { + return ( +
+

No local folder selected

+

+ Add a local folder above to browse files in desktop mode. +

+
+ ); + } + + return ( +
+ {treeByRoot.map(({ rootPath, rootNode, matchCount, totalCount }) => { + const state = rootStateMap[rootPath]; + if (!state || state.loading) { + return ( +
+ + Loading {getFolderDisplayName(rootPath)}... +
+ ); + } + if (state.error) { + return ( +
+

Failed to load local folder

+

{state.error}

+
+ ); + } + const isEmpty = totalCount === 0; + return ( +
+ {renderFolder(rootNode, 0)} + {isEmpty && ( +
+ No supported files found in this folder. +
+ )} + {!isEmpty && matchCount === 0 && searchQuery && ( +
+ No matching files in this folder. +
+ )} +
+ ); + })} +
+ ); +} From d1c14160e3ac2b4025357fb14571b344e27025fd Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Fri, 24 Apr 2026 04:42:24 +0530 Subject: [PATCH 109/113] feat(sidebar): enhance DocumentsSidebar with dropdown menu for local folder management and improve UI interactions --- .../layout/ui/sidebar/DocumentsSidebar.tsx | 150 +++++++++++------- .../ui/sidebar/LocalFilesystemBrowser.tsx | 10 -- 2 files changed, 89 insertions(+), 71 deletions(-) diff --git a/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx b/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx index 5c955a53e..dbe2f16e4 100644 --- a/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx +++ b/surfsense_web/components/layout/ui/sidebar/DocumentsSidebar.tsx @@ -7,11 +7,13 @@ import { ChevronRight, FileText, Folder, + FolderPlus, FolderClock, - Globe, + Laptop, Lock, Paperclip, Search, + Server, Trash2, Unplug, Upload, @@ -61,8 +63,17 @@ import { } from "@/components/ui/alert-dialog"; import { Avatar, AvatarFallback, AvatarGroup } from "@/components/ui/avatar"; import { Button } from "@/components/ui/button"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuLabel, + DropdownMenuSeparator, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; import { Drawer, DrawerContent, DrawerHandle, DrawerTitle } from "@/components/ui/drawer"; import { Input } from "@/components/ui/input"; +import { Separator } from "@/components/ui/separator"; import { Spinner } from "@/components/ui/spinner"; import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"; import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; @@ -1135,76 +1146,93 @@ function AuthenticatedDocumentsSidebar({ ); const localContent = ( -
-
- {localRootPaths.length > 0 ? ( - <> - {localRootPaths.map((rootPath) => ( -
- - {getFolderDisplayName(rootPath)} +
+
+
+ {localRootPaths.length > 0 ? ( + + -
- ))} - - - - ) : ( -
+ )} + + - )} + + +
-
setLocalSearch(e.target.value)} placeholder="Search local files" @@ -1214,14 +1242,14 @@ function AuthenticatedDocumentsSidebar({ {Boolean(localSearch) && ( )}
@@ -1266,21 +1294,21 @@ function AuthenticatedDocumentsSidebar({ void handleFilesystemTabChange(value === "local" ? "local" : "cloud"); }} > - + - + Cloud - + Local diff --git a/surfsense_web/components/layout/ui/sidebar/LocalFilesystemBrowser.tsx b/surfsense_web/components/layout/ui/sidebar/LocalFilesystemBrowser.tsx index 544280116..7aebf4695 100644 --- a/surfsense_web/components/layout/ui/sidebar/LocalFilesystemBrowser.tsx +++ b/surfsense_web/components/layout/ui/sidebar/LocalFilesystemBrowser.tsx @@ -61,16 +61,6 @@ export function LocalFilesystemBrowser({ const [expandedFolderKeys, setExpandedFolderKeys] = useState>(new Set()); const supportedExtensions = useMemo(() => Array.from(getSupportedExtensionsSet()), []); - useEffect(() => { - setExpandedFolderKeys((prev) => { - const next = new Set(prev); - for (const rootPath of rootPaths) { - next.add(rootPath); - } - return next; - }); - }, [rootPaths]); - useEffect(() => { if (!electronAPI?.listFolderFiles) return; let cancelled = false; From ce71897286c4f4772928f9155f033715c2690732 Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Fri, 24 Apr 2026 04:54:48 +0530 Subject: [PATCH 110/113] refactor(hotkeys): simplify hotkey display logic and replace icon representation with text in DesktopShortcutsContent and login page --- .../components/DesktopShortcutsContent.tsx | 45 ++++++------------- surfsense_web/app/desktop/login/page.tsx | 45 ++++++------------- 2 files changed, 26 insertions(+), 64 deletions(-) diff --git a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopShortcutsContent.tsx b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopShortcutsContent.tsx index f4981b8f0..6207457c4 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopShortcutsContent.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/user-settings/components/DesktopShortcutsContent.tsx @@ -1,10 
+1,11 @@ "use client"; -import { ArrowBigUp, BrainCog, Command, Option, Rocket, RotateCcw, Zap } from "lucide-react"; +import { BrainCog, Rocket, RotateCcw, Zap } from "lucide-react"; import { useCallback, useEffect, useMemo, useRef, useState } from "react"; import { toast } from "sonner"; import { DEFAULT_SHORTCUTS, keyEventToAccelerator } from "@/components/desktop/shortcut-recorder"; import { Button } from "@/components/ui/button"; +import { ShortcutKbd } from "@/components/ui/shortcut-kbd"; import { Spinner } from "@/components/ui/spinner"; import { useElectronAPI } from "@/hooks/use-platform"; @@ -17,24 +18,20 @@ const HOTKEY_ROWS: Array<{ key: ShortcutKey; label: string; icon: React.ElementT { key: "autocomplete", label: "Extreme Assist", icon: BrainCog }, ]; -type ShortcutToken = - | { kind: "text"; value: string } - | { kind: "icon"; value: "command" | "option" | "shift" }; - -function acceleratorToTokens(accel: string, isMac: boolean): ShortcutToken[] { +function acceleratorToKeys(accel: string, isMac: boolean): string[] { if (!accel) return []; return accel.split("+").map((part) => { if (part === "CommandOrControl") { - return isMac ? { kind: "icon", value: "command" as const } : { kind: "text", value: "Ctrl" }; + return isMac ? "⌘" : "Ctrl"; } if (part === "Alt") { - return isMac ? { kind: "icon", value: "option" as const } : { kind: "text", value: "Alt" }; + return isMac ? "⌥" : "Alt"; } if (part === "Shift") { - return isMac ? { kind: "icon", value: "shift" as const } : { kind: "text", value: "Shift" }; + return isMac ? "⇧" : "Shift"; } - if (part === "Space") return { kind: "text", value: "Space" }; - return { kind: "text", value: part.length === 1 ? part.toUpperCase() : part }; + if (part === "Space") return "Space"; + return part.length === 1 ? 
part.toUpperCase() : part; }); } @@ -58,7 +55,7 @@ function HotkeyRow({ const [recording, setRecording] = useState(false); const inputRef = useRef(null); const isDefault = value === defaultValue; - const displayTokens = useMemo(() => acceleratorToTokens(value, isMac), [value, isMac]); + const displayKeys = useMemo(() => acceleratorToKeys(value, isMac), [value, isMac]); const handleKeyDown = useCallback( (e: React.KeyboardEvent) => { @@ -103,13 +100,14 @@ function HotkeyRow({
diff --git a/surfsense_web/app/desktop/login/page.tsx b/surfsense_web/app/desktop/login/page.tsx index 6d5e2abd4..451143949 100644 --- a/surfsense_web/app/desktop/login/page.tsx +++ b/surfsense_web/app/desktop/login/page.tsx @@ -2,7 +2,7 @@ import { IconBrandGoogleFilled } from "@tabler/icons-react"; import { useAtom } from "jotai"; -import { ArrowBigUp, BrainCog, Command, Eye, EyeOff, Option, Rocket, RotateCcw, Zap } from "lucide-react"; +import { BrainCog, Eye, EyeOff, Rocket, RotateCcw, Zap } from "lucide-react"; import Image from "next/image"; import { useRouter } from "next/navigation"; import { useCallback, useEffect, useMemo, useRef, useState } from "react"; @@ -13,6 +13,7 @@ import { Button } from "@/components/ui/button"; import { Input } from "@/components/ui/input"; import { Label } from "@/components/ui/label"; import { Separator } from "@/components/ui/separator"; +import { ShortcutKbd } from "@/components/ui/shortcut-kbd"; import { Spinner } from "@/components/ui/spinner"; import { useElectronAPI } from "@/hooks/use-platform"; import { searchSpacesApiService } from "@/lib/apis/search-spaces-api.service"; @@ -23,10 +24,6 @@ const isGoogleAuth = AUTH_TYPE === "GOOGLE"; type ShortcutKey = "generalAssist" | "quickAsk" | "autocomplete"; type ShortcutMap = typeof DEFAULT_SHORTCUTS; -type ShortcutToken = - | { kind: "text"; value: string } - | { kind: "icon"; value: "command" | "option" | "shift" }; - const HOTKEY_ROWS: Array<{ key: ShortcutKey; label: string; description: string; icon: React.ElementType }> = [ { key: "generalAssist", @@ -48,20 +45,20 @@ const HOTKEY_ROWS: Array<{ key: ShortcutKey; label: string; description: string; }, ]; -function acceleratorToTokens(accel: string, isMac: boolean): ShortcutToken[] { +function acceleratorToKeys(accel: string, isMac: boolean): string[] { if (!accel) return []; return accel.split("+").map((part) => { if (part === "CommandOrControl") { - return isMac ? 
{ kind: "icon", value: "command" as const } : { kind: "text", value: "Ctrl" }; + return isMac ? "⌘" : "Ctrl"; } if (part === "Alt") { - return isMac ? { kind: "icon", value: "option" as const } : { kind: "text", value: "Alt" }; + return isMac ? "⌥" : "Alt"; } if (part === "Shift") { - return isMac ? { kind: "icon", value: "shift" as const } : { kind: "text", value: "Shift" }; + return isMac ? "⇧" : "Shift"; } - if (part === "Space") return { kind: "text", value: "Space" }; - return { kind: "text", value: part.length === 1 ? part.toUpperCase() : part }; + if (part === "Space") return "Space"; + return part.length === 1 ? part.toUpperCase() : part; }); } @@ -87,7 +84,7 @@ function HotkeyRow({ const [recording, setRecording] = useState(false); const inputRef = useRef(null); const isDefault = value === defaultValue; - const displayTokens = useMemo(() => acceleratorToTokens(value, isMac), [value, isMac]); + const displayKeys = useMemo(() => acceleratorToKeys(value, isMac), [value, isMac]); const handleKeyDown = useCallback( (e: React.KeyboardEvent) => { @@ -135,36 +132,20 @@ function HotkeyRow({
From a7a758f26edc04be3e3a6ec3a3cde207f8046bef Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Fri, 24 Apr 2026 05:03:23 +0530 Subject: [PATCH 111/113] feat(filesystem): add getAgentFilesystemMounts API and integrate with LocalFilesystemBrowser for improved mount management --- surfsense_desktop/src/ipc/channels.ts | 1 + surfsense_desktop/src/ipc/handlers.ts | 5 ++ .../src/modules/agent-filesystem.ts | 7 ++- surfsense_desktop/src/preload.ts | 2 + .../ui/sidebar/LocalFilesystemBrowser.tsx | 61 +++++++++++++++++-- surfsense_web/types/window.d.ts | 6 ++ 6 files changed, 77 insertions(+), 5 deletions(-) diff --git a/surfsense_desktop/src/ipc/channels.ts b/surfsense_desktop/src/ipc/channels.ts index 5cf6e9001..ccd166899 100644 --- a/surfsense_desktop/src/ipc/channels.ts +++ b/surfsense_desktop/src/ipc/channels.ts @@ -55,6 +55,7 @@ export const IPC_CHANNELS = { ANALYTICS_GET_CONTEXT: 'analytics:get-context', // Agent filesystem mode AGENT_FILESYSTEM_GET_SETTINGS: 'agent-filesystem:get-settings', + AGENT_FILESYSTEM_GET_MOUNTS: 'agent-filesystem:get-mounts', AGENT_FILESYSTEM_SET_SETTINGS: 'agent-filesystem:set-settings', AGENT_FILESYSTEM_PICK_ROOT: 'agent-filesystem:pick-root', } as const; diff --git a/surfsense_desktop/src/ipc/handlers.ts b/surfsense_desktop/src/ipc/handlers.ts index 247d171f5..54882f4ee 100644 --- a/surfsense_desktop/src/ipc/handlers.ts +++ b/surfsense_desktop/src/ipc/handlers.ts @@ -39,6 +39,7 @@ import { import { readAgentLocalFileText, writeAgentLocalFileText, + getAgentFilesystemMounts, getAgentFilesystemSettings, pickAgentFilesystemRoot, setAgentFilesystemSettings, @@ -226,6 +227,10 @@ export function registerIpcHandlers(): void { getAgentFilesystemSettings() ); + ipcMain.handle(IPC_CHANNELS.AGENT_FILESYSTEM_GET_MOUNTS, () => + getAgentFilesystemMounts() + ); + ipcMain.handle( IPC_CHANNELS.AGENT_FILESYSTEM_SET_SETTINGS, (_event, settings: { mode?: 'cloud' | 'desktop_local_folder'; localRootPaths?: 
string[] | null }) => diff --git a/surfsense_desktop/src/modules/agent-filesystem.ts b/surfsense_desktop/src/modules/agent-filesystem.ts index 2bf0101d6..f00c185f8 100644 --- a/surfsense_desktop/src/modules/agent-filesystem.ts +++ b/surfsense_desktop/src/modules/agent-filesystem.ts @@ -122,7 +122,7 @@ function toVirtualPath(rootPath: string, absolutePath: string): string { return `/${rel.replace(/\\/g, "/")}`; } -type LocalRootMount = { +export type LocalRootMount = { mount: string; rootPath: string; }; @@ -145,6 +145,11 @@ function buildRootMounts(rootPaths: string[]): LocalRootMount[] { return mounts; } +export async function getAgentFilesystemMounts(): Promise { + const rootPaths = await resolveCurrentRootPaths(); + return buildRootMounts(rootPaths); +} + function parseMountedVirtualPath(virtualPath: string): { mount: string; subPath: string; diff --git a/surfsense_desktop/src/preload.ts b/surfsense_desktop/src/preload.ts index f7aaf9633..9c538f691 100644 --- a/surfsense_desktop/src/preload.ts +++ b/surfsense_desktop/src/preload.ts @@ -108,6 +108,8 @@ contextBridge.exposeInMainWorld('electronAPI', { // Agent filesystem mode getAgentFilesystemSettings: () => ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_GET_SETTINGS), + getAgentFilesystemMounts: () => + ipcRenderer.invoke(IPC_CHANNELS.AGENT_FILESYSTEM_GET_MOUNTS), setAgentFilesystemSettings: (settings: { mode?: "cloud" | "desktop_local_folder"; localRootPaths?: string[] | null; diff --git a/surfsense_web/components/layout/ui/sidebar/LocalFilesystemBrowser.tsx b/surfsense_web/components/layout/ui/sidebar/LocalFilesystemBrowser.tsx index 7aebf4695..5b08f2e37 100644 --- a/surfsense_web/components/layout/ui/sidebar/LocalFilesystemBrowser.tsx +++ b/surfsense_web/components/layout/ui/sidebar/LocalFilesystemBrowser.tsx @@ -34,6 +34,11 @@ interface LocalFolderNode { files: LocalFolderFileEntry[]; } +type LocalRootMount = { + mount: string; + rootPath: string; +}; + const getFolderDisplayName = (rootPath: string): 
string => rootPath.split(/[\\/]/).at(-1) || rootPath; @@ -50,6 +55,20 @@ function getFileName(pathValue: string): string { return pathValue.split(/[\\/]/).at(-1) || pathValue; } +function toVirtualPath(relativePath: string): string { + const normalized = relativePath.replace(/\\/g, "/").replace(/^\/+/, ""); + return `/${normalized}`; +} + +function normalizeRootPathForLookup(rootPath: string, isWindows: boolean): string { + const normalized = rootPath.replace(/\\/g, "/").replace(/\/+$/, ""); + return isWindows ? normalized.toLowerCase() : normalized; +} + +function toMountedVirtualPath(mount: string, relativePath: string): string { + return `/${mount}${toVirtualPath(relativePath)}`; +} + export function LocalFilesystemBrowser({ rootPaths, searchSpaceId, @@ -59,7 +78,9 @@ export function LocalFilesystemBrowser({ const electronAPI = useElectronAPI(); const [rootStateMap, setRootStateMap] = useState>({}); const [expandedFolderKeys, setExpandedFolderKeys] = useState>(new Set()); + const [mountByRootKey, setMountByRootKey] = useState>(new Map()); const supportedExtensions = useMemo(() => Array.from(getSupportedExtensionsSet()), []); + const isWindowsPlatform = electronAPI?.versions.platform === "win32"; useEffect(() => { if (!electronAPI?.listFolderFiles) return; @@ -116,6 +137,31 @@ export function LocalFilesystemBrowser({ }; }, [electronAPI, rootPaths, searchSpaceId, supportedExtensions]); + useEffect(() => { + if (!electronAPI?.getAgentFilesystemMounts) { + setMountByRootKey(new Map()); + return; + } + let cancelled = false; + void electronAPI + .getAgentFilesystemMounts() + .then((mounts: LocalRootMount[]) => { + if (cancelled) return; + const next = new Map(); + for (const entry of mounts) { + next.set(normalizeRootPathForLookup(entry.rootPath, isWindowsPlatform), entry.mount); + } + setMountByRootKey(next); + }) + .catch(() => { + if (cancelled) return; + setMountByRootKey(new Map()); + }); + return () => { + cancelled = true; + }; + }, [electronAPI, 
isWindowsPlatform, rootPaths]); + const treeByRoot = useMemo(() => { const query = searchQuery?.trim().toLowerCase() ?? ""; const hasQuery = query.length > 0; @@ -160,7 +206,7 @@ export function LocalFilesystemBrowser({ }, []); const renderFolder = useCallback( - (folder: LocalFolderNode, depth: number) => { + (folder: LocalFolderNode, depth: number, mount: string) => { const isExpanded = expandedFolderKeys.has(folder.key); const childFolders = Array.from(folder.folders.values()).sort((a, b) => a.name.localeCompare(b.name) @@ -185,12 +231,12 @@ export function LocalFilesystemBrowser({ {isExpanded && ( <> - {childFolders.map((childFolder) => renderFolder(childFolder, depth + 1))} + {childFolders.map((childFolder) => renderFolder(childFolder, depth + 1, mount))} {files.map((file) => (