From d6e605fd50d6841082602eb08f9449cb6d7abdfc Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Mon, 20 Apr 2026 20:59:17 +0200 Subject: [PATCH 01/57] feat(notion-mcp): add OAuth + PKCE service layer and MCP adapter Implements Notion MCP integration core: - OAuth 2.0 discovery (RFC 9470 + 8414), dynamic client registration, PKCE token exchange, and refresh with rotation - NotionMCPAdapter connecting to mcp.notion.com/mcp with fallback to direct API on known serialization errors - Response parser translating MCP text responses into dicts matching NotionHistoryConnector output format - has_mcp_notion_connector() helper for connector gating --- .../app/services/notion_mcp/__init__.py | 27 ++ .../app/services/notion_mcp/adapter.py | 253 +++++++++++++++ .../app/services/notion_mcp/oauth.py | 298 ++++++++++++++++++ .../services/notion_mcp/response_parser.py | 212 +++++++++++++ 4 files changed, 790 insertions(+) create mode 100644 surfsense_backend/app/services/notion_mcp/__init__.py create mode 100644 surfsense_backend/app/services/notion_mcp/adapter.py create mode 100644 surfsense_backend/app/services/notion_mcp/oauth.py create mode 100644 surfsense_backend/app/services/notion_mcp/response_parser.py diff --git a/surfsense_backend/app/services/notion_mcp/__init__.py b/surfsense_backend/app/services/notion_mcp/__init__.py new file mode 100644 index 000000000..6a57500b6 --- /dev/null +++ b/surfsense_backend/app/services/notion_mcp/__init__.py @@ -0,0 +1,27 @@ +"""Notion MCP integration. + +Routes Notion operations through Notion's hosted MCP server +at https://mcp.notion.com/mcp instead of direct API calls. 
+""" + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.db import SearchSourceConnector, SearchSourceConnectorType + + +async def has_mcp_notion_connector( + session: AsyncSession, + search_space_id: int, +) -> bool: + """Check whether the search space has at least one MCP-mode Notion connector.""" + result = await session.execute( + select(SearchSourceConnector.id, SearchSourceConnector.config).filter( + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, + ) + ) + for _, config in result.all(): + if isinstance(config, dict) and config.get("mcp_mode"): + return True + return False diff --git a/surfsense_backend/app/services/notion_mcp/adapter.py b/surfsense_backend/app/services/notion_mcp/adapter.py new file mode 100644 index 000000000..76eac6305 --- /dev/null +++ b/surfsense_backend/app/services/notion_mcp/adapter.py @@ -0,0 +1,253 @@ +"""Notion MCP Adapter. + +Connects to Notion's hosted MCP server at ``https://mcp.notion.com/mcp`` +and exposes the same method signatures as ``NotionHistoryConnector``'s +write operations so that tool factories can swap with a one-line change. + +Includes an optional fallback to ``NotionHistoryConnector`` when the MCP +server returns known serialization errors (GitHub issues #215, #216). 
+""" + +import logging +from datetime import UTC, datetime +from typing import Any + +from mcp import ClientSession +from mcp.client.streamable_http import streamablehttp_client +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.config import config +from app.db import SearchSourceConnector +from app.schemas.notion_auth_credentials import NotionAuthCredentialsBase +from app.utils.oauth_security import TokenEncryption + +from .response_parser import ( + extract_text_from_mcp_response, + is_mcp_serialization_error, + parse_create_page_response, + parse_delete_page_response, + parse_fetch_page_response, + parse_health_check_response, + parse_update_page_response, +) + +logger = logging.getLogger(__name__) + +NOTION_MCP_URL = "https://mcp.notion.com/mcp" + + +class NotionMCPAdapter: + """Routes Notion operations through the hosted MCP server. + + Drop-in replacement for ``NotionHistoryConnector`` write methods. + Returns the same dict structure so KB sync works unchanged. 
+ """ + + def __init__(self, session: AsyncSession, connector_id: int): + self._session = session + self._connector_id = connector_id + self._access_token: str | None = None + + async def _get_valid_token(self) -> str: + """Get a valid MCP access token, refreshing if expired.""" + result = await self._session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == self._connector_id + ) + ) + connector = result.scalars().first() + if not connector: + raise ValueError(f"Connector {self._connector_id} not found") + + cfg = connector.config or {} + + if not cfg.get("mcp_mode"): + raise ValueError( + f"Connector {self._connector_id} is not an MCP connector" + ) + + access_token = cfg.get("access_token") + if not access_token: + raise ValueError("No access token in MCP connector config") + + is_encrypted = cfg.get("_token_encrypted", False) + if is_encrypted and config.SECRET_KEY: + token_encryption = TokenEncryption(config.SECRET_KEY) + access_token = token_encryption.decrypt_token(access_token) + + expires_at_str = cfg.get("expires_at") + if expires_at_str: + expires_at = datetime.fromisoformat(expires_at_str) + if expires_at.tzinfo is None: + expires_at = expires_at.replace(tzinfo=UTC) + if expires_at <= datetime.now(UTC): + from app.routes.notion_mcp_connector_route import refresh_notion_mcp_token + + connector = await refresh_notion_mcp_token(self._session, connector) + cfg = connector.config or {} + access_token = cfg.get("access_token", "") + if is_encrypted and config.SECRET_KEY: + token_encryption = TokenEncryption(config.SECRET_KEY) + access_token = token_encryption.decrypt_token(access_token) + + self._access_token = access_token + return access_token + + async def _call_mcp_tool( + self, tool_name: str, arguments: dict[str, Any] + ) -> str: + """Connect to Notion MCP server and call a tool. 
Returns raw text.""" + token = await self._get_valid_token() + headers = {"Authorization": f"Bearer {token}"} + + async with ( + streamablehttp_client(NOTION_MCP_URL, headers=headers) as (read, write, _), + ClientSession(read, write) as session, + ): + await session.initialize() + response = await session.call_tool(tool_name, arguments=arguments) + return extract_text_from_mcp_response(response) + + async def _call_with_fallback( + self, + tool_name: str, + arguments: dict[str, Any], + parser, + fallback_method: str | None = None, + fallback_kwargs: dict[str, Any] | None = None, + ) -> dict[str, Any]: + """Call MCP tool, parse response, and fall back on serialization errors.""" + try: + raw_text = await self._call_mcp_tool(tool_name, arguments) + result = parser(raw_text) + + if result.get("mcp_serialization_error") and fallback_method: + logger.warning( + "MCP tool '%s' hit serialization bug, falling back to direct API", + tool_name, + ) + return await self._fallback(fallback_method, fallback_kwargs or {}) + + return result + + except Exception as e: + error_str = str(e) + if is_mcp_serialization_error(error_str) and fallback_method: + logger.warning( + "MCP tool '%s' raised serialization error, falling back: %s", + tool_name, + error_str, + ) + return await self._fallback(fallback_method, fallback_kwargs or {}) + + logger.error("MCP tool '%s' failed: %s", tool_name, e, exc_info=True) + return {"status": "error", "message": f"MCP call failed: {e!s}"} + + async def _fallback( + self, method_name: str, kwargs: dict[str, Any] + ) -> dict[str, Any]: + """Fall back to NotionHistoryConnector for the given method. + + Uses the already-refreshed MCP access token directly with the + Notion SDK, bypassing the connector's config-based token loading. 
+ """ + from app.connectors.notion_history import NotionHistoryConnector + from app.schemas.notion_auth_credentials import NotionAuthCredentialsBase + + token = self._access_token + if not token: + token = await self._get_valid_token() + + connector = NotionHistoryConnector( + session=self._session, + connector_id=self._connector_id, + ) + connector._credentials = NotionAuthCredentialsBase(access_token=token) + connector._using_legacy_token = True + + method = getattr(connector, method_name) + return await method(**kwargs) + + # ------------------------------------------------------------------ + # Public API — same signatures as NotionHistoryConnector + # ------------------------------------------------------------------ + + async def create_page( + self, + title: str, + content: str, + parent_page_id: str | None = None, + ) -> dict[str, Any]: + arguments: dict[str, Any] = { + "pages": [ + { + "title": title, + "content": content, + } + ] + } + if parent_page_id: + arguments["pages"][0]["parent_page_url"] = parent_page_id + + return await self._call_with_fallback( + tool_name="notion-create-pages", + arguments=arguments, + parser=parse_create_page_response, + fallback_method="create_page", + fallback_kwargs={ + "title": title, + "content": content, + "parent_page_id": parent_page_id, + }, + ) + + async def update_page( + self, + page_id: str, + content: str | None = None, + ) -> dict[str, Any]: + arguments: dict[str, Any] = { + "page_id": page_id, + "command": "replace_content", + } + if content: + arguments["new_str"] = content + + return await self._call_with_fallback( + tool_name="notion-update-page", + arguments=arguments, + parser=parse_update_page_response, + fallback_method="update_page", + fallback_kwargs={"page_id": page_id, "content": content}, + ) + + async def delete_page(self, page_id: str) -> dict[str, Any]: + arguments: dict[str, Any] = { + "page_id": page_id, + "command": "update_properties", + "archived": True, + } + + return await 
self._call_with_fallback( + tool_name="notion-update-page", + arguments=arguments, + parser=parse_delete_page_response, + fallback_method="delete_page", + fallback_kwargs={"page_id": page_id}, + ) + + async def fetch_page(self, page_url_or_id: str) -> dict[str, Any]: + """Fetch page content via ``notion-fetch``.""" + raw_text = await self._call_mcp_tool( + "notion-fetch", {"url": page_url_or_id} + ) + return parse_fetch_page_response(raw_text) + + async def health_check(self) -> dict[str, Any]: + """Check MCP connection via ``notion-get-self``.""" + try: + raw_text = await self._call_mcp_tool("notion-get-self", {}) + return parse_health_check_response(raw_text) + except Exception as e: + return {"status": "error", "message": str(e)} diff --git a/surfsense_backend/app/services/notion_mcp/oauth.py b/surfsense_backend/app/services/notion_mcp/oauth.py new file mode 100644 index 000000000..cfa6ad3e0 --- /dev/null +++ b/surfsense_backend/app/services/notion_mcp/oauth.py @@ -0,0 +1,298 @@ +"""OAuth 2.0 + PKCE utilities for Notion's remote MCP server. + +Implements the flow described in the official guide: +https://developers.notion.com/guides/mcp/build-mcp-client + +Steps: + 1. Discover OAuth metadata (RFC 9470 → RFC 8414) + 2. Dynamic client registration (RFC 7591) + 3. Build authorization URL with PKCE code_challenge + 4. Exchange authorization code + code_verifier for tokens + 5. Refresh access tokens (with refresh-token rotation) + +All functions are stateless — callers (route handlers) manage storage. 
+""" + +import logging +from dataclasses import dataclass +from datetime import UTC, datetime, timedelta +from typing import Any + +import httpx + +logger = logging.getLogger(__name__) + +NOTION_MCP_SERVER_URL = "https://mcp.notion.com/mcp" +_HTTP_TIMEOUT = 30.0 + + +@dataclass(frozen=True) +class OAuthMetadata: + issuer: str + authorization_endpoint: str + token_endpoint: str + registration_endpoint: str | None + code_challenge_methods_supported: list[str] + + +@dataclass(frozen=True) +class ClientCredentials: + client_id: str + client_secret: str | None = None + client_id_issued_at: int | None = None + client_secret_expires_at: int | None = None + + +@dataclass(frozen=True) +class TokenSet: + access_token: str + refresh_token: str | None + token_type: str + expires_in: int | None + expires_at: datetime | None + scope: str | None + + +# --------------------------------------------------------------------------- +# Step 1 — OAuth discovery +# --------------------------------------------------------------------------- + + +async def discover_oauth_metadata( + mcp_server_url: str = NOTION_MCP_SERVER_URL, +) -> OAuthMetadata: + """Discover OAuth endpoints via RFC 9470 + RFC 8414. + + 1. Fetch protected-resource metadata to find the authorization server. + 2. Fetch authorization-server metadata to get OAuth endpoints. 
+ """ + from urllib.parse import urlparse + + parsed = urlparse(mcp_server_url) + origin = f"{parsed.scheme}://{parsed.netloc}" + path = parsed.path.rstrip("/") + + async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: + # RFC 9470 — Protected Resource Metadata + # URL format: {origin}/.well-known/oauth-protected-resource{path} + pr_url = f"{origin}/.well-known/oauth-protected-resource{path}" + pr_resp = await client.get(pr_url) + pr_resp.raise_for_status() + pr_data = pr_resp.json() + + auth_servers = pr_data.get("authorization_servers", []) + if not auth_servers: + raise ValueError("No authorization_servers in protected resource metadata") + auth_server_url = auth_servers[0] + + # RFC 8414 — Authorization Server Metadata + as_url = f"{auth_server_url}/.well-known/oauth-authorization-server" + as_resp = await client.get(as_url) + as_resp.raise_for_status() + as_data = as_resp.json() + + if not as_data.get("authorization_endpoint") or not as_data.get("token_endpoint"): + raise ValueError("Missing required OAuth endpoints in server metadata") + + return OAuthMetadata( + issuer=as_data.get("issuer", auth_server_url), + authorization_endpoint=as_data["authorization_endpoint"], + token_endpoint=as_data["token_endpoint"], + registration_endpoint=as_data.get("registration_endpoint"), + code_challenge_methods_supported=as_data.get( + "code_challenge_methods_supported", [] + ), + ) + + +# --------------------------------------------------------------------------- +# Step 2 — Dynamic client registration (RFC 7591) +# --------------------------------------------------------------------------- + + +async def register_client( + metadata: OAuthMetadata, + redirect_uri: str, + client_name: str = "SurfSense", +) -> ClientCredentials: + """Dynamically register an OAuth client with the Notion MCP server.""" + if not metadata.registration_endpoint: + raise ValueError("Server does not support dynamic client registration") + + payload = { + "client_name": client_name, + 
"redirect_uris": [redirect_uri], + "grant_types": ["authorization_code", "refresh_token"], + "response_types": ["code"], + "token_endpoint_auth_method": "none", + } + + async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: + resp = await client.post( + metadata.registration_endpoint, + json=payload, + headers={"Content-Type": "application/json", "Accept": "application/json"}, + ) + if not resp.is_success: + logger.error( + "Dynamic client registration failed (%s): %s", + resp.status_code, + resp.text, + ) + resp.raise_for_status() + data = resp.json() + + return ClientCredentials( + client_id=data["client_id"], + client_secret=data.get("client_secret"), + client_id_issued_at=data.get("client_id_issued_at"), + client_secret_expires_at=data.get("client_secret_expires_at"), + ) + + +# --------------------------------------------------------------------------- +# Step 3 — Build authorization URL +# --------------------------------------------------------------------------- + + +def build_authorization_url( + metadata: OAuthMetadata, + client_id: str, + redirect_uri: str, + code_challenge: str, + state: str, +) -> str: + """Build the OAuth authorization URL with PKCE parameters.""" + from urllib.parse import urlencode + + params = { + "response_type": "code", + "client_id": client_id, + "redirect_uri": redirect_uri, + "code_challenge": code_challenge, + "code_challenge_method": "S256", + "state": state, + "prompt": "consent", + } + return f"{metadata.authorization_endpoint}?{urlencode(params)}" + + +# --------------------------------------------------------------------------- +# Step 4 — Exchange authorization code for tokens +# --------------------------------------------------------------------------- + + +async def exchange_code_for_tokens( + code: str, + code_verifier: str, + metadata: OAuthMetadata, + client_id: str, + redirect_uri: str, + client_secret: str | None = None, +) -> TokenSet: + """Exchange an authorization code + PKCE verifier for tokens.""" + 
form_data: dict[str, Any] = { + "grant_type": "authorization_code", + "code": code, + "client_id": client_id, + "redirect_uri": redirect_uri, + "code_verifier": code_verifier, + } + if client_secret: + form_data["client_secret"] = client_secret + + async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: + resp = await client.post( + metadata.token_endpoint, + data=form_data, + headers={ + "Content-Type": "application/x-www-form-urlencoded", + "Accept": "application/json", + }, + ) + if not resp.is_success: + body = resp.text + raise ValueError(f"Token exchange failed ({resp.status_code}): {body}") + tokens = resp.json() + + if not tokens.get("access_token"): + raise ValueError("No access_token in token response") + + expires_at = None + if tokens.get("expires_in"): + expires_at = datetime.now(UTC) + timedelta(seconds=int(tokens["expires_in"])) + + return TokenSet( + access_token=tokens["access_token"], + refresh_token=tokens.get("refresh_token"), + token_type=tokens.get("token_type", "Bearer"), + expires_in=tokens.get("expires_in"), + expires_at=expires_at, + scope=tokens.get("scope"), + ) + + +# --------------------------------------------------------------------------- +# Step 5 — Refresh access token +# --------------------------------------------------------------------------- + + +async def refresh_access_token( + refresh_token: str, + metadata: OAuthMetadata, + client_id: str, + client_secret: str | None = None, +) -> TokenSet: + """Refresh an access token. + + Notion MCP uses refresh-token rotation: each refresh returns a new + refresh_token and invalidates the old one. Callers MUST persist the + new refresh_token atomically with the new access_token. 
+ """ + form_data: dict[str, Any] = { + "grant_type": "refresh_token", + "refresh_token": refresh_token, + "client_id": client_id, + } + if client_secret: + form_data["client_secret"] = client_secret + + async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: + resp = await client.post( + metadata.token_endpoint, + data=form_data, + headers={ + "Content-Type": "application/x-www-form-urlencoded", + "Accept": "application/json", + }, + ) + + if not resp.is_success: + body = resp.text + try: + error_data = resp.json() + error_code = error_data.get("error", "") + if error_code == "invalid_grant": + raise ValueError("REAUTH_REQUIRED") + except ValueError: + if "REAUTH_REQUIRED" in str(resp.text) or resp.status_code == 401: + raise + raise ValueError(f"Token refresh failed ({resp.status_code}): {body}") + + tokens = resp.json() + + if not tokens.get("access_token"): + raise ValueError("No access_token in refresh response") + + expires_at = None + if tokens.get("expires_in"): + expires_at = datetime.now(UTC) + timedelta(seconds=int(tokens["expires_in"])) + + return TokenSet( + access_token=tokens["access_token"], + refresh_token=tokens.get("refresh_token"), + token_type=tokens.get("token_type", "Bearer"), + expires_in=tokens.get("expires_in"), + expires_at=expires_at, + scope=tokens.get("scope"), + ) diff --git a/surfsense_backend/app/services/notion_mcp/response_parser.py b/surfsense_backend/app/services/notion_mcp/response_parser.py new file mode 100644 index 000000000..34d5ef332 --- /dev/null +++ b/surfsense_backend/app/services/notion_mcp/response_parser.py @@ -0,0 +1,212 @@ +"""Parse Notion MCP tool responses into structured dicts. + +The Notion MCP server returns responses as MCP TextContent where the +``text`` field contains JSON-stringified Notion API response data. 
+See: https://deepwiki.com/makenotion/notion-mcp-server/4.3-request-and-response-handling + +This module extracts that JSON and normalises it into the same dict +format that ``NotionHistoryConnector`` methods return, so downstream +code (KB sync, tool factories) works unchanged. +""" + +import json +import logging +from typing import Any + +logger = logging.getLogger(__name__) + +MCP_SERIALIZATION_ERROR_MARKERS = [ + "Expected array, received string", + "Expected object, received string", + "should be defined, instead was `undefined`", +] + + +def is_mcp_serialization_error(text: str) -> bool: + """Return True if the MCP error text matches a known serialization bug.""" + return any(marker in text for marker in MCP_SERIALIZATION_ERROR_MARKERS) + + +def extract_text_from_mcp_response(response) -> str: + """Pull the concatenated text out of an MCP ``CallToolResult``. + + Args: + response: The ``CallToolResult`` returned by ``session.call_tool()``. + + Returns: + Concatenated text content from the response. 
+ """ + parts: list[str] = [] + for content in response.content: + if hasattr(content, "text"): + parts.append(content.text) + elif hasattr(content, "data"): + parts.append(str(content.data)) + else: + parts.append(str(content)) + return "\n".join(parts) if parts else "" + + +def _try_parse_json(text: str) -> dict[str, Any] | None: + """Attempt to parse *text* as JSON, returning None on failure.""" + try: + parsed = json.loads(text) + if isinstance(parsed, dict): + return parsed + except (json.JSONDecodeError, TypeError): + pass + return None + + +def _extract_page_title(page_data: dict[str, Any]) -> str: + """Best-effort extraction of the page title from a Notion page object.""" + props = page_data.get("properties", {}) + for prop in props.values(): + if prop.get("type") == "title": + title_parts = prop.get("title", []) + if title_parts: + return " ".join(t.get("plain_text", "") for t in title_parts) + return page_data.get("id", "Untitled") + + +def parse_create_page_response(raw_text: str) -> dict[str, Any]: + """Parse a ``notion-create-pages`` MCP response. 
+ + Returns a dict compatible with ``NotionHistoryConnector.create_page()``: + ``{status, page_id, url, title, message}`` + """ + data = _try_parse_json(raw_text) + + if data is None: + if is_mcp_serialization_error(raw_text): + return { + "status": "mcp_error", + "message": raw_text, + "mcp_serialization_error": True, + } + return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} + + if data.get("status") == "error" or "error" in data: + return { + "status": "error", + "message": data.get("message", data.get("error", str(data))), + } + + page_id = data.get("id", "") + url = data.get("url", "") + title = _extract_page_title(data) + + return { + "status": "success", + "page_id": page_id, + "url": url, + "title": title, + "message": f"Created Notion page '{title}'", + } + + +def parse_update_page_response(raw_text: str) -> dict[str, Any]: + """Parse a ``notion-update-page`` MCP response. + + Returns a dict compatible with ``NotionHistoryConnector.update_page()``: + ``{status, page_id, url, title, message}`` + """ + data = _try_parse_json(raw_text) + + if data is None: + if is_mcp_serialization_error(raw_text): + return { + "status": "mcp_error", + "message": raw_text, + "mcp_serialization_error": True, + } + return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} + + if data.get("status") == "error" or "error" in data: + return { + "status": "error", + "message": data.get("message", data.get("error", str(data))), + } + + page_id = data.get("id", "") + url = data.get("url", "") + title = _extract_page_title(data) + + return { + "status": "success", + "page_id": page_id, + "url": url, + "title": title, + "message": f"Updated Notion page '{title}' (content appended)", + } + + +def parse_delete_page_response(raw_text: str) -> dict[str, Any]: + """Parse an archive (delete) MCP response. + + The Notion API responds to ``pages.update(archived=True)`` with + the archived page object. 
+ + Returns a dict compatible with ``NotionHistoryConnector.delete_page()``: + ``{status, page_id, message}`` + """ + data = _try_parse_json(raw_text) + + if data is None: + if is_mcp_serialization_error(raw_text): + return { + "status": "mcp_error", + "message": raw_text, + "mcp_serialization_error": True, + } + return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} + + if data.get("status") == "error" or "error" in data: + return { + "status": "error", + "message": data.get("message", data.get("error", str(data))), + } + + page_id = data.get("id", "") + title = _extract_page_title(data) + + return { + "status": "success", + "page_id": page_id, + "message": f"Deleted Notion page '{title}'", + } + + +def parse_fetch_page_response(raw_text: str) -> dict[str, Any]: + """Parse a ``notion-fetch`` MCP response. + + Returns the raw parsed dict (Notion page/block data) or an error dict. + """ + data = _try_parse_json(raw_text) + + if data is None: + return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} + + if data.get("status") == "error" or "error" in data: + return { + "status": "error", + "message": data.get("message", data.get("error", str(data))), + } + + return {"status": "success", "data": data} + + +def parse_health_check_response(raw_text: str) -> dict[str, Any]: + """Parse a ``notion-get-self`` MCP response for health checking.""" + data = _try_parse_json(raw_text) + + if data is None: + return {"status": "error", "message": raw_text[:500]} + + if data.get("status") == "error" or "error" in data: + return { + "status": "error", + "message": data.get("message", data.get("error", str(data))), + } + + return {"status": "success", "data": data} From 41d547934dc41ac8d965f7a9ce4767fd4f9d249a Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Mon, 20 Apr 2026 21:02:05 +0200 Subject: [PATCH 02/57] feat(notion-mcp): add MCP connector OAuth routes --- surfsense_backend/app/routes/__init__.py | 2 + 
.../app/routes/notion_mcp_connector_route.py | 486 ++++++++++++++++++ 2 files changed, 488 insertions(+) create mode 100644 surfsense_backend/app/routes/notion_mcp_connector_route.py diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py index ad40666cd..faec7fe09 100644 --- a/surfsense_backend/app/routes/__init__.py +++ b/surfsense_backend/app/routes/__init__.py @@ -37,6 +37,7 @@ from .new_llm_config_routes import router as new_llm_config_router from .notes_routes import router as notes_router from .notifications_routes import router as notifications_router from .notion_add_connector_route import router as notion_add_connector_router +from .notion_mcp_connector_route import router as notion_mcp_connector_router from .onedrive_add_connector_route import router as onedrive_add_connector_router from .podcasts_routes import router as podcasts_router from .prompts_routes import router as prompts_router @@ -81,6 +82,7 @@ router.include_router(airtable_add_connector_router) router.include_router(linear_add_connector_router) router.include_router(luma_add_connector_router) router.include_router(notion_add_connector_router) +router.include_router(notion_mcp_connector_router) router.include_router(slack_add_connector_router) router.include_router(teams_add_connector_router) router.include_router(onedrive_add_connector_router) diff --git a/surfsense_backend/app/routes/notion_mcp_connector_route.py b/surfsense_backend/app/routes/notion_mcp_connector_route.py new file mode 100644 index 000000000..b9305cd74 --- /dev/null +++ b/surfsense_backend/app/routes/notion_mcp_connector_route.py @@ -0,0 +1,486 @@ +"""Notion MCP Connector OAuth Routes. + +Handles OAuth 2.0 + PKCE authentication for Notion's hosted MCP server. 
+Based on: https://developers.notion.com/guides/mcp/build-mcp-client + +This creates connectors with the same ``NOTION_CONNECTOR`` type as the +existing direct-API connector, but with ``mcp_mode: True`` in the config +so the adapter layer knows to route through MCP. +""" + +import logging +from uuid import UUID + +from fastapi import APIRouter, Depends, HTTPException, Request +from fastapi.responses import RedirectResponse +from sqlalchemy import select +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm.attributes import flag_modified + +from app.config import config +from app.db import ( + SearchSourceConnector, + SearchSourceConnectorType, + User, + get_async_session, +) +from app.services.notion_mcp.oauth import ( + ClientCredentials, + OAuthMetadata, + build_authorization_url, + discover_oauth_metadata, + exchange_code_for_tokens, + refresh_access_token, + register_client, +) +from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + extract_identifier_from_credentials, + generate_unique_connector_name, +) +from app.utils.oauth_security import OAuthStateManager, TokenEncryption, generate_pkce_pair + +logger = logging.getLogger(__name__) + +router = APIRouter() + +_state_manager: OAuthStateManager | None = None +_token_encryption: TokenEncryption | None = None +_oauth_metadata: OAuthMetadata | None = None + + +def _get_state_manager() -> OAuthStateManager: + global _state_manager + if _state_manager is None: + if not config.SECRET_KEY: + raise ValueError("SECRET_KEY must be set for OAuth security") + _state_manager = OAuthStateManager(config.SECRET_KEY) + return _state_manager + + +def _get_token_encryption() -> TokenEncryption: + global _token_encryption + if _token_encryption is None: + if not config.SECRET_KEY: + raise ValueError("SECRET_KEY must be set for token encryption") + _token_encryption = TokenEncryption(config.SECRET_KEY) + return 
_token_encryption + + +async def _get_oauth_metadata() -> OAuthMetadata: + global _oauth_metadata + if _oauth_metadata is None: + _oauth_metadata = await discover_oauth_metadata() + return _oauth_metadata + + +async def _fetch_workspace_info(access_token: str) -> dict: + """Fetch workspace metadata using the Notion API with the fresh token. + + The ``/v1/users/me`` endpoint returns bot info including workspace_name. + This populates connector config fields so naming and metadata services + work correctly. + """ + try: + import httpx + + async with httpx.AsyncClient(timeout=15.0) as client: + resp = await client.get( + "https://api.notion.com/v1/users/me", + headers={ + "Authorization": f"Bearer {access_token}", + "Notion-Version": "2022-06-28", + }, + ) + if resp.is_success: + data = resp.json() + bot_info = data.get("bot", {}) + return { + "bot_id": data.get("id"), + "workspace_name": bot_info.get("workspace_name", "Notion Workspace"), + "workspace_icon": data.get("avatar_url") or "📄", + } + except Exception as e: + logger.warning("Failed to fetch workspace info: %s", e) + return {} + + +NOTION_MCP_REDIRECT_URI = None + + +def _get_redirect_uri() -> str: + global NOTION_MCP_REDIRECT_URI + if NOTION_MCP_REDIRECT_URI is None: + backend = config.BACKEND_URL or "http://localhost:8000" + NOTION_MCP_REDIRECT_URI = f"{backend}/api/v1/auth/notion-mcp/connector/callback" + return NOTION_MCP_REDIRECT_URI + + +# --------------------------------------------------------------------------- +# Route: initiate OAuth +# --------------------------------------------------------------------------- + + +@router.get("/auth/notion-mcp/connector/add") +async def connect_notion_mcp( + space_id: int, + user: User = Depends(current_active_user), +): + """Initiate Notion MCP OAuth + PKCE flow.""" + if not config.SECRET_KEY: + raise HTTPException(status_code=500, detail="SECRET_KEY not configured.") + + try: + metadata = await _get_oauth_metadata() + + redirect_uri = _get_redirect_uri() + 
credentials = await register_client(metadata, redirect_uri) + + code_verifier, code_challenge = generate_pkce_pair() + + state_manager = _get_state_manager() + state_encoded = state_manager.generate_secure_state( + space_id, + user.id, + code_verifier=code_verifier, + mcp_client_id=credentials.client_id, + mcp_client_secret=credentials.client_secret or "", + ) + + auth_url = build_authorization_url( + metadata=metadata, + client_id=credentials.client_id, + redirect_uri=redirect_uri, + code_challenge=code_challenge, + state=state_encoded, + ) + + logger.info("Generated Notion MCP OAuth URL for user %s, space %s", user.id, space_id) + return {"auth_url": auth_url} + + except Exception as e: + logger.error("Failed to initiate Notion MCP OAuth: %s", e, exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to initiate Notion MCP OAuth: {e!s}" + ) from e + + +# --------------------------------------------------------------------------- +# Route: re-authenticate existing connector +# --------------------------------------------------------------------------- + + +@router.get("/auth/notion-mcp/connector/reauth") +async def reauth_notion_mcp( + space_id: int, + connector_id: int, + return_url: str | None = None, + user: User = Depends(current_active_user), + session: AsyncSession = Depends(get_async_session), +): + """Initiate re-authentication for an existing Notion MCP connector.""" + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == connector_id, + SearchSourceConnector.user_id == user.id, + SearchSourceConnector.search_space_id == space_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, + ) + ) + connector = result.scalars().first() + if not connector: + raise HTTPException(status_code=404, detail="Connector not found or access denied") + + if not config.SECRET_KEY: + raise HTTPException(status_code=500, detail="SECRET_KEY not configured.") + + try: + metadata = 
await _get_oauth_metadata() + redirect_uri = _get_redirect_uri() + credentials = await register_client(metadata, redirect_uri) + + code_verifier, code_challenge = generate_pkce_pair() + + extra: dict = { + "connector_id": connector_id, + "code_verifier": code_verifier, + "mcp_client_id": credentials.client_id, + "mcp_client_secret": credentials.client_secret or "", + } + if return_url and return_url.startswith("/"): + extra["return_url"] = return_url + + state_manager = _get_state_manager() + state_encoded = state_manager.generate_secure_state(space_id, user.id, **extra) + + auth_url = build_authorization_url( + metadata=metadata, + client_id=credentials.client_id, + redirect_uri=redirect_uri, + code_challenge=code_challenge, + state=state_encoded, + ) + + logger.info("Initiating Notion MCP re-auth for user %s, connector %s", user.id, connector_id) + return {"auth_url": auth_url} + + except HTTPException: + raise + except Exception as e: + logger.error("Failed to initiate Notion MCP re-auth: %s", e, exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to initiate Notion MCP re-auth: {e!s}" + ) from e + + +# --------------------------------------------------------------------------- +# Route: OAuth callback +# --------------------------------------------------------------------------- + + +@router.get("/auth/notion-mcp/connector/callback") +async def notion_mcp_callback( + request: Request, + code: str | None = None, + error: str | None = None, + state: str | None = None, + session: AsyncSession = Depends(get_async_session), +): + """Handle the OAuth callback from Notion's MCP authorization server.""" + if error: + logger.warning("Notion MCP OAuth error: %s", error) + space_id = None + if state: + try: + data = _get_state_manager().validate_state(state) + space_id = data.get("space_id") + except Exception: + pass + if space_id: + return RedirectResponse( + 
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?error=notion_mcp_oauth_denied" + ) + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=notion_mcp_oauth_denied" + ) + + if not code: + raise HTTPException(status_code=400, detail="Missing authorization code") + if not state: + raise HTTPException(status_code=400, detail="Missing state parameter") + + state_manager = _get_state_manager() + try: + data = state_manager.validate_state(state) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=400, detail=f"Invalid state: {e!s}") from e + + user_id = UUID(data["user_id"]) + space_id = data["space_id"] + code_verifier = data.get("code_verifier") + mcp_client_id = data.get("mcp_client_id") + mcp_client_secret = data.get("mcp_client_secret") or None + + if not code_verifier or not mcp_client_id: + raise HTTPException(status_code=400, detail="Missing PKCE or client data in state") + + try: + metadata = await _get_oauth_metadata() + redirect_uri = _get_redirect_uri() + + token_set = await exchange_code_for_tokens( + code=code, + code_verifier=code_verifier, + metadata=metadata, + client_id=mcp_client_id, + redirect_uri=redirect_uri, + client_secret=mcp_client_secret, + ) + except Exception as e: + logger.error("Notion MCP token exchange failed: %s", e, exc_info=True) + raise HTTPException(status_code=400, detail=f"Token exchange failed: {e!s}") from e + + token_encryption = _get_token_encryption() + + workspace_info = await _fetch_workspace_info(token_set.access_token) + + connector_config = { + "access_token": token_encryption.encrypt_token(token_set.access_token), + "refresh_token": token_encryption.encrypt_token(token_set.refresh_token) + if token_set.refresh_token + else None, + "expires_in": token_set.expires_in, + "expires_at": token_set.expires_at.isoformat() if token_set.expires_at else None, + "workspace_id": workspace_info.get("workspace_id"), + "workspace_name": 
workspace_info.get("workspace_name", "Notion Workspace"), + "workspace_icon": workspace_info.get("workspace_icon", "📄"), + "bot_id": workspace_info.get("bot_id"), + "mcp_mode": True, + "mcp_client_id": mcp_client_id, + "mcp_client_secret": token_encryption.encrypt_token(mcp_client_secret) + if mcp_client_secret + else None, + "_token_encrypted": True, + } + + reauth_connector_id = data.get("connector_id") + reauth_return_url = data.get("return_url") + + # --- Re-auth path --- + if reauth_connector_id: + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == reauth_connector_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.search_space_id == space_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, + ) + ) + db_connector = result.scalars().first() + if not db_connector: + raise HTTPException(status_code=404, detail="Connector not found during re-auth") + + db_connector.config = connector_config + flag_modified(db_connector, "config") + await session.commit() + await session.refresh(db_connector) + + logger.info("Re-authenticated Notion MCP connector %s for user %s", db_connector.id, user_id) + if reauth_return_url and reauth_return_url.startswith("/"): + return RedirectResponse(url=f"{config.NEXT_FRONTEND_URL}{reauth_return_url}") + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?success=true&connector=notion-connector&connectorId={db_connector.id}" + ) + + # --- New connector path --- + connector_identifier = extract_identifier_from_credentials( + SearchSourceConnectorType.NOTION_CONNECTOR, connector_config + ) + + is_duplicate = await check_duplicate_connector( + session, + SearchSourceConnectorType.NOTION_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + if is_duplicate: + logger.warning("Duplicate Notion MCP connector for user %s", user_id) + return RedirectResponse( + 
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?error=duplicate_account&connector=notion-connector" + ) + + connector_name = await generate_unique_connector_name( + session, + SearchSourceConnectorType.NOTION_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + + new_connector = SearchSourceConnector( + name=connector_name, + connector_type=SearchSourceConnectorType.NOTION_CONNECTOR, + is_indexable=True, + config=connector_config, + search_space_id=space_id, + user_id=user_id, + ) + session.add(new_connector) + + try: + await session.commit() + logger.info("Created Notion MCP connector for user %s in space %s", user_id, space_id) + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?success=true&connector=notion-connector&connectorId={new_connector.id}" + ) + except IntegrityError as e: + await session.rollback() + raise HTTPException(status_code=409, detail=f"Database integrity error: {e!s}") from e + except Exception as e: + await session.rollback() + raise HTTPException( + status_code=500, detail=f"Failed to create connector: {e!s}" + ) from e + + +# --------------------------------------------------------------------------- +# Token refresh helper (used by the adapter) +# --------------------------------------------------------------------------- + + +async def refresh_notion_mcp_token( + session: AsyncSession, + connector: SearchSourceConnector, +) -> SearchSourceConnector: + """Refresh the MCP access token for a connector. + + Handles refresh-token rotation: persists both new access_token + and new refresh_token atomically. + """ + token_encryption = _get_token_encryption() + + cfg = connector.config or {} + encrypted_refresh = cfg.get("refresh_token") + if not encrypted_refresh: + raise HTTPException(status_code=400, detail="No refresh token available. 
Please re-authenticate.") + + try: + refresh_token = token_encryption.decrypt_token(encrypted_refresh) + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to decrypt refresh token: {e!s}") from e + + mcp_client_id = cfg.get("mcp_client_id") + mcp_client_secret_encrypted = cfg.get("mcp_client_secret") + mcp_client_secret = ( + token_encryption.decrypt_token(mcp_client_secret_encrypted) + if mcp_client_secret_encrypted + else None + ) + + if not mcp_client_id: + raise HTTPException(status_code=400, detail="Missing MCP client_id. Please re-authenticate.") + + metadata = await _get_oauth_metadata() + + try: + token_set = await refresh_access_token( + refresh_token=refresh_token, + metadata=metadata, + client_id=mcp_client_id, + client_secret=mcp_client_secret, + ) + except ValueError as e: + if "REAUTH_REQUIRED" in str(e): + connector.config = {**connector.config, "auth_expired": True} + flag_modified(connector, "config") + await session.commit() + await session.refresh(connector) + raise HTTPException( + status_code=401, detail="Notion MCP authentication expired. Please re-authenticate." 
+ ) from e + raise HTTPException(status_code=400, detail=f"Token refresh failed: {e!s}") from e + + updated_config = { + **connector.config, + "access_token": token_encryption.encrypt_token(token_set.access_token), + "refresh_token": token_encryption.encrypt_token(token_set.refresh_token) + if token_set.refresh_token + else connector.config.get("refresh_token"), + "expires_in": token_set.expires_in, + "expires_at": token_set.expires_at.isoformat() if token_set.expires_at else None, + "_token_encrypted": True, + } + updated_config.pop("auth_expired", None) + + connector.config = updated_config + flag_modified(connector, "config") + await session.commit() + await session.refresh(connector) + + logger.info("Refreshed Notion MCP token for connector %s", connector.id) + return connector From 8d438f52f56ec7db3c64c29b0de5437926ba1a93 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Mon, 20 Apr 2026 21:02:10 +0200 Subject: [PATCH 03/57] feat(notion-mcp): add MCP agent tool factories and registry wiring --- .../new_chat/tools/notion_mcp/__init__.py | 5 + .../new_chat/tools/notion_mcp/create_page.py | 205 ++++++++++++++++++ .../new_chat/tools/notion_mcp/delete_page.py | 173 +++++++++++++++ .../new_chat/tools/notion_mcp/update_page.py | 179 +++++++++++++++ .../app/agents/new_chat/tools/registry.py | 39 ++++ 5 files changed, 601 insertions(+) create mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py new file mode 100644 index 000000000..1e1515bfb --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py @@ -0,0 +1,5 
@@ +"""MCP-backed Notion tool factories. + +Drop-in replacements for ``tools/notion/`` that route through +Notion's hosted MCP server instead of direct API calls. +""" diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py new file mode 100644 index 000000000..a73363a65 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py @@ -0,0 +1,205 @@ +import logging +from typing import Any + +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession + +from app.agents.new_chat.tools.hitl import request_approval +from app.services.notion import NotionToolMetadataService + +logger = logging.getLogger(__name__) + + +def _find_mcp_connector(connectors): + """Return the first connector with mcp_mode enabled, or None.""" + for c in connectors: + if (c.config or {}).get("mcp_mode"): + return c + return None + + +def create_create_notion_page_mcp_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + user_id: str | None = None, + connector_id: int | None = None, +): + @tool + async def create_notion_page( + title: str, + content: str | None = None, + ) -> dict[str, Any]: + """Create a new page in Notion with the given title and content. + + Use this tool when the user asks you to create, save, or publish + something to Notion. The page will be created in the user's + configured Notion workspace. The user MUST specify a topic before you + call this tool. If the request does not contain a topic (e.g. "create a + notion page"), ask what the page should be about. Never call this tool + without a clear topic from the user. + + Args: + title: The title of the Notion page. + content: Optional markdown content for the page body (supports headings, lists, paragraphs). + Generate this yourself based on the user's topic. 
+ + Returns: + Dictionary with: + - status: "success", "rejected", or "error" + - page_id: Created page ID (if success) + - url: URL to the created page (if success) + - title: Page title (if success) + - message: Result message + + IMPORTANT: If status is "rejected", the user explicitly declined the action. + Respond with a brief acknowledgment (e.g., "Understood, I didn't create the page.") + and move on. Do NOT troubleshoot or suggest alternatives. + + Examples: + - "Create a Notion page about our Q2 roadmap" + - "Save a summary of today's discussion to Notion" + """ + logger.info("create_notion_page (MCP) called: title='%s'", title) + + if db_session is None or search_space_id is None or user_id is None: + logger.error("Notion MCP tool not properly configured - missing required parameters") + return { + "status": "error", + "message": "Notion tool not properly configured. Please contact support.", + } + + try: + metadata_service = NotionToolMetadataService(db_session) + context = await metadata_service.get_creation_context(search_space_id, user_id) + + if "error" in context: + logger.error("Failed to fetch creation context: %s", context["error"]) + return {"status": "error", "message": context["error"]} + + accounts = context.get("accounts", []) + if accounts and all(a.get("auth_expired") for a in accounts): + return { + "status": "auth_error", + "message": "All connected Notion accounts need re-authentication. Please re-authenticate in your connector settings.", + "connector_type": "notion", + } + + result = request_approval( + action_type="notion_page_creation", + tool_name="create_notion_page", + params={ + "title": title, + "content": content, + "parent_page_id": None, + "connector_id": connector_id, + }, + context=context, + ) + + if result.rejected: + logger.info("Notion page creation rejected by user") + return { + "status": "rejected", + "message": "User declined. 
Do not retry or suggest alternatives.", + } + + final_title = result.params.get("title", title) + final_content = result.params.get("content", content) + final_parent_page_id = result.params.get("parent_page_id") + final_connector_id = result.params.get("connector_id", connector_id) + + if not final_title or not final_title.strip(): + return { + "status": "error", + "message": "Page title cannot be empty. Please provide a valid title.", + } + + from sqlalchemy.future import select + + from app.db import SearchSourceConnector, SearchSourceConnectorType + + actual_connector_id = final_connector_id + if actual_connector_id is None: + query_result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, + ) + ) + connectors = query_result.scalars().all() + connector = _find_mcp_connector(connectors) + + if not connector: + return { + "status": "error", + "message": "No Notion MCP connector found. 
Please connect Notion (MCP) in your workspace settings.", + } + actual_connector_id = connector.id + else: + query_result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == actual_connector_id, + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, + ) + ) + connector = query_result.scalars().first() + if not connector: + return { + "status": "error", + "message": "Selected Notion account is invalid or has been disconnected.", + } + + from app.services.notion_mcp.adapter import NotionMCPAdapter + + adapter = NotionMCPAdapter(session=db_session, connector_id=actual_connector_id) + result = await adapter.create_page( + title=final_title, + content=final_content, + parent_page_id=final_parent_page_id, + ) + logger.info("create_page (MCP) result: %s - %s", result.get("status"), result.get("message", "")) + + if result.get("status") == "success": + kb_message_suffix = "" + try: + from app.services.notion import NotionKBSyncService + + kb_service = NotionKBSyncService(db_session) + kb_result = await kb_service.sync_after_create( + page_id=result.get("page_id"), + page_title=result.get("title", final_title), + page_url=result.get("url"), + content=final_content, + connector_id=actual_connector_id, + search_space_id=search_space_id, + user_id=user_id, + ) + if kb_result["status"] == "success": + kb_message_suffix = " Your knowledge base has also been updated." + else: + kb_message_suffix = " This page will be added to your knowledge base in the next scheduled sync." + except Exception as kb_err: + logger.warning("KB sync after create failed: %s", kb_err) + kb_message_suffix = " This page will be added to your knowledge base in the next scheduled sync." 
+ + result["message"] = result.get("message", "") + kb_message_suffix + + return result + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + + logger.error("Error creating Notion page (MCP): %s", e, exc_info=True) + if isinstance(e, ValueError): + message = str(e) + else: + message = "Something went wrong while creating the page. Please try again." + return {"status": "error", "message": message} + + return create_notion_page diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py new file mode 100644 index 000000000..c0cf7642b --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py @@ -0,0 +1,173 @@ +import logging +from typing import Any + +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession + +from app.agents.new_chat.tools.hitl import request_approval +from app.services.notion.tool_metadata_service import NotionToolMetadataService + +logger = logging.getLogger(__name__) + + +def create_delete_notion_page_mcp_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + user_id: str | None = None, + connector_id: int | None = None, +): + @tool + async def delete_notion_page( + page_title: str, + delete_from_kb: bool = False, + ) -> dict[str, Any]: + """Delete (archive) a Notion page. + + Use this tool when the user asks you to delete, remove, or archive + a Notion page. Note that Notion doesn't permanently delete pages, + it archives them (they can be restored from trash). + + Args: + page_title: The title of the Notion page to delete. + delete_from_kb: Whether to also remove the page from the knowledge base. + Default is False. 
+ + Returns: + Dictionary with: + - status: "success", "rejected", "not_found", or "error" + - page_id: Deleted page ID (if success) + - message: Success or error message + - deleted_from_kb: Whether the page was also removed from knowledge base (if success) + + Examples: + - "Delete the 'Meeting Notes' Notion page" + - "Remove the 'Old Project Plan' Notion page" + """ + logger.info( + "delete_notion_page (MCP) called: page_title='%s', delete_from_kb=%s", + page_title, + delete_from_kb, + ) + + if db_session is None or search_space_id is None or user_id is None: + logger.error("Notion MCP tool not properly configured - missing required parameters") + return { + "status": "error", + "message": "Notion tool not properly configured. Please contact support.", + } + + try: + metadata_service = NotionToolMetadataService(db_session) + context = await metadata_service.get_delete_context(search_space_id, user_id, page_title) + + if "error" in context: + error_msg = context["error"] + if "not found" in error_msg.lower(): + return {"status": "not_found", "message": error_msg} + return {"status": "error", "message": error_msg} + + account = context.get("account", {}) + if account.get("auth_expired"): + return { + "status": "auth_error", + "message": "The Notion account for this page needs re-authentication. Please re-authenticate in your connector settings.", + } + + page_id = context.get("page_id") + connector_id_from_context = account.get("id") + document_id = context.get("document_id") + + result = request_approval( + action_type="notion_page_deletion", + tool_name="delete_notion_page", + params={ + "page_id": page_id, + "connector_id": connector_id_from_context, + "delete_from_kb": delete_from_kb, + }, + context=context, + ) + + if result.rejected: + logger.info("Notion page deletion rejected by user") + return { + "status": "rejected", + "message": "User declined. 
Do not retry or suggest alternatives.", + } + + final_page_id = result.params.get("page_id", page_id) + final_connector_id = result.params.get("connector_id", connector_id_from_context) + final_delete_from_kb = result.params.get("delete_from_kb", delete_from_kb) + + from sqlalchemy.future import select + + from app.db import SearchSourceConnector, SearchSourceConnectorType + + if final_connector_id: + query_result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == final_connector_id, + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, + ) + ) + connector = query_result.scalars().first() + if not connector: + return { + "status": "error", + "message": "Selected Notion account is invalid or has been disconnected.", + } + actual_connector_id = connector.id + else: + return {"status": "error", "message": "No connector found for this page."} + + from app.services.notion_mcp.adapter import NotionMCPAdapter + + adapter = NotionMCPAdapter(session=db_session, connector_id=actual_connector_id) + result = await adapter.delete_page(page_id=final_page_id) + logger.info("delete_page (MCP) result: %s - %s", result.get("status"), result.get("message", "")) + + deleted_from_kb = False + if result.get("status") == "success" and final_delete_from_kb and document_id: + try: + from sqlalchemy.future import select + + from app.db import Document + + doc_result = await db_session.execute( + select(Document).filter(Document.id == document_id) + ) + document = doc_result.scalars().first() + + if document: + await db_session.delete(document) + await db_session.commit() + deleted_from_kb = True + logger.info("Deleted document %s from knowledge base", document_id) + except Exception as e: + logger.error("Failed to delete document from KB: %s", e) + await db_session.rollback() + result["warning"] = f"Page 
deleted from Notion, but failed to remove from knowledge base: {e!s}" + + if result.get("status") == "success": + result["deleted_from_kb"] = deleted_from_kb + if deleted_from_kb: + result["message"] = f"{result.get('message', '')} (also removed from knowledge base)" + + return result + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + + logger.error("Error deleting Notion page (MCP): %s", e, exc_info=True) + if isinstance(e, ValueError): + message = str(e) + else: + message = "Something went wrong while deleting the page. Please try again." + return {"status": "error", "message": message} + + return delete_notion_page diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py new file mode 100644 index 000000000..28599cbae --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py @@ -0,0 +1,179 @@ +import logging +from typing import Any + +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession + +from app.agents.new_chat.tools.hitl import request_approval +from app.services.notion import NotionToolMetadataService + +logger = logging.getLogger(__name__) + + +def create_update_notion_page_mcp_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + user_id: str | None = None, + connector_id: int | None = None, +): + @tool + async def update_notion_page( + page_title: str, + content: str | None = None, + ) -> dict[str, Any]: + """Update an existing Notion page by appending new content. + + Use this tool when the user asks you to add content to, modify, or update + a Notion page. The new content will be appended to the existing page content. + The user MUST specify what to add before you call this tool. If the + request is vague, ask what content they want added. 
+ + Args: + page_title: The title of the Notion page to update. + content: Optional markdown content to append to the page body (supports headings, lists, paragraphs). + Generate this yourself based on the user's request. + + Returns: + Dictionary with: + - status: "success", "rejected", "not_found", or "error" + - page_id: Updated page ID (if success) + - url: URL to the updated page (if success) + - title: Current page title (if success) + - message: Result message + + IMPORTANT: + - If status is "rejected", the user explicitly declined the action. + Respond with a brief acknowledgment (e.g., "Understood, I didn't update the page.") + and move on. Do NOT ask for alternatives or troubleshoot. + - If status is "not_found", inform the user conversationally using the exact message provided. + + Examples: + - "Add today's meeting notes to the 'Meeting Notes' Notion page" + - "Update the 'Project Plan' page with a status update on phase 1" + """ + logger.info( + "update_notion_page (MCP) called: page_title='%s', content_length=%d", + page_title, + len(content) if content else 0, + ) + + if db_session is None or search_space_id is None or user_id is None: + logger.error("Notion MCP tool not properly configured - missing required parameters") + return { + "status": "error", + "message": "Notion tool not properly configured. Please contact support.", + } + + if not content or not content.strip(): + return { + "status": "error", + "message": "Content is required to update the page. 
Please provide the actual content you want to add.", + } + + try: + metadata_service = NotionToolMetadataService(db_session) + context = await metadata_service.get_update_context(search_space_id, user_id, page_title) + + if "error" in context: + error_msg = context["error"] + if "not found" in error_msg.lower(): + return {"status": "not_found", "message": error_msg} + return {"status": "error", "message": error_msg} + + account = context.get("account", {}) + if account.get("auth_expired"): + return { + "status": "auth_error", + "message": "The Notion account for this page needs re-authentication. Please re-authenticate in your connector settings.", + } + + page_id = context.get("page_id") + document_id = context.get("document_id") + connector_id_from_context = account.get("id") + + result = request_approval( + action_type="notion_page_update", + tool_name="update_notion_page", + params={ + "page_id": page_id, + "content": content, + "connector_id": connector_id_from_context, + }, + context=context, + ) + + if result.rejected: + logger.info("Notion page update rejected by user") + return { + "status": "rejected", + "message": "User declined. 
Do not retry or suggest alternatives.", + } + + final_page_id = result.params.get("page_id", page_id) + final_content = result.params.get("content", content) + final_connector_id = result.params.get("connector_id", connector_id_from_context) + + from sqlalchemy.future import select + + from app.db import SearchSourceConnector, SearchSourceConnectorType + + if final_connector_id: + query_result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == final_connector_id, + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, + ) + ) + connector = query_result.scalars().first() + if not connector: + return { + "status": "error", + "message": "Selected Notion account is invalid or has been disconnected.", + } + actual_connector_id = connector.id + else: + return {"status": "error", "message": "No connector found for this page."} + + from app.services.notion_mcp.adapter import NotionMCPAdapter + + adapter = NotionMCPAdapter(session=db_session, connector_id=actual_connector_id) + result = await adapter.update_page(page_id=final_page_id, content=final_content) + logger.info("update_page (MCP) result: %s - %s", result.get("status"), result.get("message", "")) + + if result.get("status") == "success" and document_id is not None: + from app.services.notion import NotionKBSyncService + + kb_service = NotionKBSyncService(db_session) + kb_result = await kb_service.sync_after_update( + document_id=document_id, + appended_content=final_content, + user_id=user_id, + search_space_id=search_space_id, + appended_block_ids=result.get("appended_block_ids"), + ) + + if kb_result["status"] == "success": + result["message"] = f"{result['message']}. Your knowledge base has also been updated." + elif kb_result["status"] == "not_indexed": + result["message"] = f"{result['message']}. 
This page will be added to your knowledge base in the next scheduled sync." + else: + result["message"] = f"{result['message']}. Your knowledge base will be updated in the next scheduled sync." + + return result + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + + logger.error("Error updating Notion page (MCP): %s", e, exc_info=True) + if isinstance(e, ValueError): + message = str(e) + else: + message = "Something went wrong while updating the page. Please try again." + return {"status": "error", "message": message} + + return update_notion_page diff --git a/surfsense_backend/app/agents/new_chat/tools/registry.py b/surfsense_backend/app/agents/new_chat/tools/registry.py index 265aabbbf..e7378653b 100644 --- a/surfsense_backend/app/agents/new_chat/tools/registry.py +++ b/surfsense_backend/app/agents/new_chat/tools/registry.py @@ -86,6 +86,11 @@ from .notion import ( create_delete_notion_page_tool, create_update_notion_page_tool, ) +from .notion_mcp import ( + create_page as notion_mcp_create_page_mod, + delete_page as notion_mcp_delete_page_mod, + update_page as notion_mcp_update_page_mod, +) from .onedrive import ( create_create_onedrive_file_tool, create_delete_onedrive_file_tool, @@ -316,6 +321,40 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ requires=["db_session", "search_space_id", "user_id"], ), # ========================================================================= + # NOTION MCP TOOLS - MCP-backed variants (disabled until swap) + # These route through Notion's hosted MCP server instead of direct API. 
+ # ========================================================================= + ToolDefinition( + name="create_notion_page_mcp", + description="Create a new page in Notion via MCP server", + factory=lambda deps: notion_mcp_create_page_mod.create_create_notion_page_mcp_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + ), + ToolDefinition( + name="update_notion_page_mcp", + description="Append new content to an existing Notion page via MCP server", + factory=lambda deps: notion_mcp_update_page_mod.create_update_notion_page_mcp_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + ), + ToolDefinition( + name="delete_notion_page_mcp", + description="Delete an existing Notion page via MCP server", + factory=lambda deps: notion_mcp_delete_page_mod.create_delete_notion_page_mcp_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + ), + # ========================================================================= # GOOGLE DRIVE TOOLS - create files, delete files # Auto-disabled when no Google Drive connector is configured (see chat_deepagent.py) # ========================================================================= From 5a8ec704fe45c51c1211f19b5333040a69fff854 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Mon, 20 Apr 2026 21:02:21 +0200 Subject: [PATCH 04/57] feat(notion-mcp): add MCP gating in agent, indexer, and health check --- .../app/agents/new_chat/chat_deepagent.py | 29 ++++++++++++++----- .../services/notion/tool_metadata_service.py | 21 +++++++++++++- .../connector_indexers/notion_indexer.py | 12 ++++++++ 3 files changed, 54 insertions(+), 8 deletions(-) diff --git 
a/surfsense_backend/app/agents/new_chat/chat_deepagent.py b/surfsense_backend/app/agents/new_chat/chat_deepagent.py index ab47b49ce..6709715bd 100644 --- a/surfsense_backend/app/agents/new_chat/chat_deepagent.py +++ b/surfsense_backend/app/agents/new_chat/chat_deepagent.py @@ -285,18 +285,33 @@ async def create_surfsense_deep_agent( "llm": llm, } - # Disable Notion action tools if no Notion connector is configured + # Disable Notion action tools if no Notion connector is configured. + # When an MCP-mode connector exists, use MCP tools; otherwise use direct-API tools. modified_disabled_tools = list(disabled_tools) if disabled_tools else [] has_notion_connector = ( available_connectors is not None and "NOTION_CONNECTOR" in available_connectors ) + _notion_direct_tools = [ + "create_notion_page", + "update_notion_page", + "delete_notion_page", + ] + _notion_mcp_tools = [ + "create_notion_page_mcp", + "update_notion_page_mcp", + "delete_notion_page_mcp", + ] if not has_notion_connector: - notion_tools = [ - "create_notion_page", - "update_notion_page", - "delete_notion_page", - ] - modified_disabled_tools.extend(notion_tools) + modified_disabled_tools.extend(_notion_direct_tools) + modified_disabled_tools.extend(_notion_mcp_tools) + else: + from app.services.notion_mcp import has_mcp_notion_connector + + _use_mcp = await has_mcp_notion_connector(db_session, search_space_id) + if _use_mcp: + modified_disabled_tools.extend(_notion_direct_tools) + else: + modified_disabled_tools.extend(_notion_mcp_tools) # Disable Linear action tools if no Linear connector is configured has_linear_connector = ( diff --git a/surfsense_backend/app/services/notion/tool_metadata_service.py b/surfsense_backend/app/services/notion/tool_metadata_service.py index 097ef3461..8a58d5e62 100644 --- a/surfsense_backend/app/services/notion/tool_metadata_service.py +++ b/surfsense_backend/app/services/notion/tool_metadata_service.py @@ -227,11 +227,30 @@ class NotionToolMetadataService: async def 
_check_account_health(self, connector_id: int) -> bool: """Check if a Notion connector's token is still valid. - Uses a lightweight ``users.me()`` call to verify the token. + For regular connectors: uses ``users.me()`` via the Notion SDK. + For MCP-mode connectors: uses ``notion-get-self`` via the MCP adapter. Returns True if the token is expired/invalid, False if healthy. """ try: + result = await self._db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == connector_id + ) + ) + db_connector = result.scalars().first() + if not db_connector: + return True + + if (db_connector.config or {}).get("mcp_mode"): + from app.services.notion_mcp.adapter import NotionMCPAdapter + + adapter = NotionMCPAdapter( + session=self._db_session, connector_id=connector_id + ) + health = await adapter.health_check() + return health.get("status") != "success" + connector = NotionHistoryConnector( session=self._db_session, connector_id=connector_id ) diff --git a/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py b/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py index 77aac795a..6a3a99b5c 100644 --- a/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py +++ b/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py @@ -129,6 +129,18 @@ async def index_notion_pages( f"Connector with ID {connector_id} not found or is not a Notion connector", ) + if (connector.config or {}).get("mcp_mode"): + msg = ( + f"Connector {connector_id} is an MCP-mode connector. " + "Background indexing is not supported for MCP connectors — " + "use a regular Notion connector for indexing." 
+ ) + logger.info(msg) + await task_logger.log_task_completion( + log_entry, msg, {"skipped": True, "reason": "mcp_mode"} + ) + return 0, 0, None + if not connector.config.get("access_token") and not connector.config.get( "NOTION_INTEGRATION_TOKEN" ): From 30944c0fec25055bd470b5488e0f08d31104e2f1 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Mon, 20 Apr 2026 21:02:34 +0200 Subject: [PATCH 05/57] feat(notion-mcp): wire frontend to MCP OAuth endpoints --- .../connector-configs/views/connector-edit-view.tsx | 2 +- .../connector-popup/constants/connector-constants.ts | 2 +- .../connector-popup/views/connector-accounts-list-view.tsx | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx index e19600ab2..274fc0fc7 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx @@ -21,7 +21,7 @@ import { getConnectorConfigComponent } from "../index"; const REAUTH_ENDPOINTS: Partial> = { [EnumConnectorName.LINEAR_CONNECTOR]: "/api/v1/auth/linear/connector/reauth", - [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion/connector/reauth", + [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion-mcp/connector/reauth", [EnumConnectorName.GOOGLE_DRIVE_CONNECTOR]: "/api/v1/auth/google/drive/connector/reauth", [EnumConnectorName.GOOGLE_GMAIL_CONNECTOR]: "/api/v1/auth/google/gmail/connector/reauth", [EnumConnectorName.GOOGLE_CALENDAR_CONNECTOR]: "/api/v1/auth/google/calendar/connector/reauth", diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 
da6885ffe..0e517b38e 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -38,7 +38,7 @@ export const OAUTH_CONNECTORS = [ title: "Notion", description: "Search your Notion pages", connectorType: EnumConnectorName.NOTION_CONNECTOR, - authEndpoint: "/api/v1/auth/notion/connector/add/", + authEndpoint: "/api/v1/auth/notion-mcp/connector/add", }, { id: "linear-connector", diff --git a/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx index b4c049c5c..6cdd535db 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx @@ -18,7 +18,7 @@ import { getConnectorDisplayName } from "../tabs/all-connectors-tab"; const REAUTH_ENDPOINTS: Partial> = { [EnumConnectorName.LINEAR_CONNECTOR]: "/api/v1/auth/linear/connector/reauth", - [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion/connector/reauth", + [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion-mcp/connector/reauth", [EnumConnectorName.GOOGLE_DRIVE_CONNECTOR]: "/api/v1/auth/google/drive/connector/reauth", [EnumConnectorName.GOOGLE_GMAIL_CONNECTOR]: "/api/v1/auth/google/gmail/connector/reauth", [EnumConnectorName.GOOGLE_CALENDAR_CONNECTOR]: "/api/v1/auth/google/calendar/connector/reauth", From 875c4c3cf45122937cf0967ada80c1843ef30301 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:28:30 +0200 Subject: [PATCH 06/57] add connector exception hierarchy --- .../app/connectors/exceptions.py | 98 +++++++++++++++++++ 1 file changed, 98 insertions(+) create mode 100644 surfsense_backend/app/connectors/exceptions.py diff --git a/surfsense_backend/app/connectors/exceptions.py 
b/surfsense_backend/app/connectors/exceptions.py new file mode 100644 index 000000000..32a1e7bdc --- /dev/null +++ b/surfsense_backend/app/connectors/exceptions.py @@ -0,0 +1,98 @@ +"""Standard exception hierarchy for all connectors. + +ConnectorError +├── ConnectorAuthError (401/403 — non-retryable) +├── ConnectorRateLimitError (429 — retryable, carries ``retry_after``) +├── ConnectorTimeoutError (timeout/504 — retryable) +└── ConnectorAPIError (5xx or unexpected — retryable when >= 500) +""" + +from __future__ import annotations + +from typing import Any + + +class ConnectorError(Exception): + + def __init__( + self, + message: str, + *, + service: str = "", + status_code: int | None = None, + response_body: Any = None, + ) -> None: + super().__init__(message) + self.service = service + self.status_code = status_code + self.response_body = response_body + + @property + def retryable(self) -> bool: + return False + + +class ConnectorAuthError(ConnectorError): + """Token expired, revoked, insufficient scopes, or needs re-auth (401/403).""" + + @property + def retryable(self) -> bool: + return False + + +class ConnectorRateLimitError(ConnectorError): + """429 Too Many Requests.""" + + def __init__( + self, + message: str = "Rate limited", + *, + service: str = "", + retry_after: float | None = None, + status_code: int = 429, + response_body: Any = None, + ) -> None: + super().__init__( + message, + service=service, + status_code=status_code, + response_body=response_body, + ) + self.retry_after = retry_after + + @property + def retryable(self) -> bool: + return True + + +class ConnectorTimeoutError(ConnectorError): + """Request timeout or gateway timeout (504).""" + + def __init__( + self, + message: str = "Request timed out", + *, + service: str = "", + status_code: int | None = None, + response_body: Any = None, + ) -> None: + super().__init__( + message, + service=service, + status_code=status_code, + response_body=response_body, + ) + + @property + def 
retryable(self) -> bool: + return True + + +class ConnectorAPIError(ConnectorError): + """Generic API error (5xx or unexpected status codes).""" + + @property + def retryable(self) -> bool: + if self.status_code is not None: + return self.status_code >= 500 + return False From 45acf9de15a9edf2e28e5746a71bf07e59ed9532 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:28:36 +0200 Subject: [PATCH 07/57] add async retry utility with tenacity --- surfsense_backend/app/utils/async_retry.py | 129 +++++++++++++++++++++ 1 file changed, 129 insertions(+) create mode 100644 surfsense_backend/app/utils/async_retry.py diff --git a/surfsense_backend/app/utils/async_retry.py b/surfsense_backend/app/utils/async_retry.py new file mode 100644 index 000000000..c3bdd5386 --- /dev/null +++ b/surfsense_backend/app/utils/async_retry.py @@ -0,0 +1,129 @@ +"""Async retry decorators for connector API calls, built on tenacity.""" + +from __future__ import annotations + +import logging +from collections.abc import Callable +from typing import TypeVar + +import httpx +from tenacity import ( + before_sleep_log, + retry, + retry_if_exception, + stop_after_attempt, + stop_after_delay, + wait_exponential_jitter, +) + +from app.connectors.exceptions import ( + ConnectorAPIError, + ConnectorAuthError, + ConnectorError, + ConnectorRateLimitError, + ConnectorTimeoutError, +) + +logger = logging.getLogger(__name__) + +F = TypeVar("F", bound=Callable) + + +def _is_retryable(exc: BaseException) -> bool: + if isinstance(exc, ConnectorError): + return exc.retryable + if isinstance(exc, (httpx.TimeoutException, httpx.ConnectError)): + return True + return False + + +def build_retry( + *, + max_attempts: int = 4, + max_delay: float = 60.0, + initial_delay: float = 1.0, + total_timeout: float = 180.0, + service: str = "", +) -> Callable: + """Configurable tenacity ``@retry`` decorator with exponential backoff + jitter.""" + _logger = logging.getLogger(f"connector.retry.{service}") if service 
else logger + + return retry( + retry=retry_if_exception(_is_retryable), + stop=(stop_after_attempt(max_attempts) | stop_after_delay(total_timeout)), + wait=wait_exponential_jitter(initial=initial_delay, max=max_delay), + reraise=True, + before_sleep=before_sleep_log(_logger, logging.WARNING), + ) + + +def retry_on_transient( + *, + service: str = "", + max_attempts: int = 4, +) -> Callable: + """Shorthand: retry up to *max_attempts* on rate-limits, timeouts, and 5xx.""" + return build_retry(max_attempts=max_attempts, service=service) + + +def raise_for_status( + response: httpx.Response, + *, + service: str = "", +) -> None: + """Map non-2xx httpx responses to the appropriate ``ConnectorError``.""" + if response.is_success: + return + + status = response.status_code + + try: + body = response.json() + except Exception: + body = response.text[:500] if response.text else None + + if status == 429: + retry_after_raw = response.headers.get("Retry-After") + retry_after: float | None = None + if retry_after_raw: + try: + retry_after = float(retry_after_raw) + except (ValueError, TypeError): + pass + raise ConnectorRateLimitError( + f"{service} rate limited (429)", + service=service, + retry_after=retry_after, + response_body=body, + ) + + if status in (401, 403): + raise ConnectorAuthError( + f"{service} authentication failed ({status})", + service=service, + status_code=status, + response_body=body, + ) + + if status == 504: + raise ConnectorTimeoutError( + f"{service} gateway timeout (504)", + service=service, + status_code=status, + response_body=body, + ) + + if status >= 500: + raise ConnectorAPIError( + f"{service} server error ({status})", + service=service, + status_code=status, + response_body=body, + ) + + raise ConnectorAPIError( + f"{service} request failed ({status})", + service=service, + status_code=status, + response_body=body, + ) From 474c35fb2a760a2a49b2435cf321de816540dfde Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:28:41 +0200 
Subject: [PATCH 08/57] add standardized tool response helper --- .../agents/new_chat/tools/tool_response.py | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 surfsense_backend/app/agents/new_chat/tools/tool_response.py diff --git a/surfsense_backend/app/agents/new_chat/tools/tool_response.py b/surfsense_backend/app/agents/new_chat/tools/tool_response.py new file mode 100644 index 000000000..5fb1864b7 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/tool_response.py @@ -0,0 +1,41 @@ +"""Standardised response dict factories for LangChain agent tools.""" + +from __future__ import annotations + +from typing import Any + + +class ToolResponse: + + @staticmethod + def success(message: str, **data: Any) -> dict[str, Any]: + return {"status": "success", "message": message, **data} + + @staticmethod + def error(error: str, **data: Any) -> dict[str, Any]: + return {"status": "error", "error": error, **data} + + @staticmethod + def auth_error(service: str, **data: Any) -> dict[str, Any]: + return { + "status": "auth_error", + "error": ( + f"{service} authentication has expired or been revoked. " + "Please re-connect the integration in Settings → Connectors." 
+ ), + **data, + } + + @staticmethod + def rejected(message: str = "Action was declined by the user.") -> dict[str, Any]: + return {"status": "rejected", "message": message} + + @staticmethod + def not_found( + resource: str, identifier: str, **data: Any + ) -> dict[str, Any]: + return { + "status": "not_found", + "error": f"{resource} '{identifier}' was not found.", + **data, + } From 6529889e7359b83b1ba171355a7ce46af74af446 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:28:45 +0200 Subject: [PATCH 09/57] add declarative connector gating to tool registry --- .../app/agents/new_chat/tools/registry.py | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/surfsense_backend/app/agents/new_chat/tools/registry.py b/surfsense_backend/app/agents/new_chat/tools/registry.py index e7378653b..f9b9287de 100644 --- a/surfsense_backend/app/agents/new_chat/tools/registry.py +++ b/surfsense_backend/app/agents/new_chat/tools/registry.py @@ -119,6 +119,8 @@ class ToolDefinition: factory: Callable that creates the tool. Receives a dict of dependencies. requires: List of dependency names this tool needs (e.g., "search_space_id", "db_session") enabled_by_default: Whether the tool is enabled when no explicit config is provided + required_connector: Searchable type string (e.g. ``"LINEAR_CONNECTOR"``) + that must be in ``available_connectors`` for the tool to be enabled. 
""" @@ -128,6 +130,7 @@ class ToolDefinition: requires: list[str] = field(default_factory=list) enabled_by_default: bool = True hidden: bool = False + required_connector: str | None = None # ============================================================================= @@ -265,6 +268,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="LINEAR_CONNECTOR", ), ToolDefinition( name="update_linear_issue", @@ -275,6 +279,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="LINEAR_CONNECTOR", ), ToolDefinition( name="delete_linear_issue", @@ -285,6 +290,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="LINEAR_CONNECTOR", ), # ========================================================================= # NOTION TOOLS - create, update, delete pages @@ -299,6 +305,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="NOTION_CONNECTOR", ), ToolDefinition( name="update_notion_page", @@ -309,6 +316,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="NOTION_CONNECTOR", ), ToolDefinition( name="delete_notion_page", @@ -319,6 +327,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="NOTION_CONNECTOR", ), # ========================================================================= # NOTION MCP TOOLS - MCP-backed variants (disabled until swap) @@ -333,6 +342,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + 
required_connector="NOTION_CONNECTOR", ), ToolDefinition( name="update_notion_page_mcp", @@ -343,6 +353,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="NOTION_CONNECTOR", ), ToolDefinition( name="delete_notion_page_mcp", @@ -353,6 +364,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="NOTION_CONNECTOR", ), # ========================================================================= # GOOGLE DRIVE TOOLS - create files, delete files @@ -367,6 +379,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_DRIVE_FILE", ), ToolDefinition( name="delete_google_drive_file", @@ -377,6 +390,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_DRIVE_FILE", ), # ========================================================================= # DROPBOX TOOLS - create and trash files @@ -391,6 +405,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="DROPBOX_FILE", ), ToolDefinition( name="delete_dropbox_file", @@ -401,6 +416,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="DROPBOX_FILE", ), # ========================================================================= # ONEDRIVE TOOLS - create and trash files @@ -415,6 +431,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="ONEDRIVE_FILE", ), ToolDefinition( name="delete_onedrive_file", @@ -425,6 +442,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ 
user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="ONEDRIVE_FILE", ), # ========================================================================= # GOOGLE CALENDAR TOOLS - create, update, delete events @@ -439,6 +457,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_CALENDAR_CONNECTOR", ), ToolDefinition( name="update_calendar_event", @@ -449,6 +468,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_CALENDAR_CONNECTOR", ), ToolDefinition( name="delete_calendar_event", @@ -459,6 +479,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_CALENDAR_CONNECTOR", ), # ========================================================================= # GMAIL TOOLS - create drafts, update drafts, send emails, trash emails @@ -473,6 +494,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_GMAIL_CONNECTOR", ), ToolDefinition( name="send_gmail_email", @@ -483,6 +505,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_GMAIL_CONNECTOR", ), ToolDefinition( name="trash_gmail_email", @@ -493,6 +516,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_GMAIL_CONNECTOR", ), ToolDefinition( name="update_gmail_draft", @@ -503,6 +527,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_GMAIL_CONNECTOR", ), # 
========================================================================= # JIRA TOOLS - create, update, delete issues @@ -517,6 +542,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="JIRA_CONNECTOR", ), ToolDefinition( name="update_jira_issue", @@ -527,6 +553,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="JIRA_CONNECTOR", ), ToolDefinition( name="delete_jira_issue", @@ -537,6 +564,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="JIRA_CONNECTOR", ), # ========================================================================= # CONFLUENCE TOOLS - create, update, delete pages @@ -551,6 +579,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="CONFLUENCE_CONNECTOR", ), ToolDefinition( name="update_confluence_page", @@ -561,6 +590,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="CONFLUENCE_CONNECTOR", ), ToolDefinition( name="delete_confluence_page", @@ -571,6 +601,7 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ user_id=deps["user_id"], ), requires=["db_session", "search_space_id", "user_id"], + required_connector="CONFLUENCE_CONNECTOR", ), ] @@ -588,6 +619,22 @@ def get_tool_by_name(name: str) -> ToolDefinition | None: return None +def get_connector_gated_tools( + available_connectors: list[str] | None, +) -> list[str]: + """Return tool names to disable""" + if available_connectors is None: + available = set() + else: + available = set(available_connectors) + + disabled: list[str] = [] + for tool_def in BUILTIN_TOOLS: + if tool_def.required_connector and 
tool_def.required_connector not in available: + disabled.append(tool_def.name) + return disabled + + def get_all_tool_names() -> list[str]: """Get names of all registered tools.""" return [tool_def.name for tool_def in BUILTIN_TOOLS] From a1804265b88b35ec88ec9c1abb5f046be35e45f2 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:28:53 +0200 Subject: [PATCH 10/57] replace manual connector checks with declarative gating --- .../app/agents/new_chat/chat_deepagent.py | 123 +++--------------- 1 file changed, 17 insertions(+), 106 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/chat_deepagent.py b/surfsense_backend/app/agents/new_chat/chat_deepagent.py index 6709715bd..480cae8c9 100644 --- a/surfsense_backend/app/agents/new_chat/chat_deepagent.py +++ b/surfsense_backend/app/agents/new_chat/chat_deepagent.py @@ -45,7 +45,7 @@ from app.agents.new_chat.system_prompt import ( build_configurable_system_prompt, build_surfsense_system_prompt, ) -from app.agents.new_chat.tools.registry import build_tools_async +from app.agents.new_chat.tools.registry import build_tools_async, get_connector_gated_tools from app.db import ChatVisibility from app.services.connector_service import ConnectorService from app.utils.perf import get_perf_logger @@ -285,120 +285,31 @@ async def create_surfsense_deep_agent( "llm": llm, } - # Disable Notion action tools if no Notion connector is configured. - # When an MCP-mode connector exists, use MCP tools; otherwise use direct-API tools. modified_disabled_tools = list(disabled_tools) if disabled_tools else [] + modified_disabled_tools.extend( + get_connector_gated_tools(available_connectors) + ) + + # TODO(phase-1): Remove Notion MCP gating after revert. 
has_notion_connector = ( available_connectors is not None and "NOTION_CONNECTOR" in available_connectors ) - _notion_direct_tools = [ - "create_notion_page", - "update_notion_page", - "delete_notion_page", - ] - _notion_mcp_tools = [ - "create_notion_page_mcp", - "update_notion_page_mcp", - "delete_notion_page_mcp", - ] - if not has_notion_connector: - modified_disabled_tools.extend(_notion_direct_tools) - modified_disabled_tools.extend(_notion_mcp_tools) - else: + if has_notion_connector: from app.services.notion_mcp import has_mcp_notion_connector _use_mcp = await has_mcp_notion_connector(db_session, search_space_id) if _use_mcp: - modified_disabled_tools.extend(_notion_direct_tools) + modified_disabled_tools.extend([ + "create_notion_page", + "update_notion_page", + "delete_notion_page", + ]) else: - modified_disabled_tools.extend(_notion_mcp_tools) - - # Disable Linear action tools if no Linear connector is configured - has_linear_connector = ( - available_connectors is not None and "LINEAR_CONNECTOR" in available_connectors - ) - if not has_linear_connector: - linear_tools = [ - "create_linear_issue", - "update_linear_issue", - "delete_linear_issue", - ] - modified_disabled_tools.extend(linear_tools) - - # Disable Google Drive action tools if no Google Drive connector is configured - has_google_drive_connector = ( - available_connectors is not None and "GOOGLE_DRIVE_FILE" in available_connectors - ) - if not has_google_drive_connector: - google_drive_tools = [ - "create_google_drive_file", - "delete_google_drive_file", - ] - modified_disabled_tools.extend(google_drive_tools) - - has_dropbox_connector = ( - available_connectors is not None and "DROPBOX_FILE" in available_connectors - ) - if not has_dropbox_connector: - modified_disabled_tools.extend(["create_dropbox_file", "delete_dropbox_file"]) - - has_onedrive_connector = ( - available_connectors is not None and "ONEDRIVE_FILE" in available_connectors - ) - if not has_onedrive_connector: - 
modified_disabled_tools.extend(["create_onedrive_file", "delete_onedrive_file"]) - - # Disable Google Calendar action tools if no Google Calendar connector is configured - has_google_calendar_connector = ( - available_connectors is not None - and "GOOGLE_CALENDAR_CONNECTOR" in available_connectors - ) - if not has_google_calendar_connector: - calendar_tools = [ - "create_calendar_event", - "update_calendar_event", - "delete_calendar_event", - ] - modified_disabled_tools.extend(calendar_tools) - - # Disable Gmail action tools if no Gmail connector is configured - has_gmail_connector = ( - available_connectors is not None - and "GOOGLE_GMAIL_CONNECTOR" in available_connectors - ) - if not has_gmail_connector: - gmail_tools = [ - "create_gmail_draft", - "update_gmail_draft", - "send_gmail_email", - "trash_gmail_email", - ] - modified_disabled_tools.extend(gmail_tools) - - # Disable Jira action tools if no Jira connector is configured - has_jira_connector = ( - available_connectors is not None and "JIRA_CONNECTOR" in available_connectors - ) - if not has_jira_connector: - jira_tools = [ - "create_jira_issue", - "update_jira_issue", - "delete_jira_issue", - ] - modified_disabled_tools.extend(jira_tools) - - # Disable Confluence action tools if no Confluence connector is configured - has_confluence_connector = ( - available_connectors is not None - and "CONFLUENCE_CONNECTOR" in available_connectors - ) - if not has_confluence_connector: - confluence_tools = [ - "create_confluence_page", - "update_confluence_page", - "delete_confluence_page", - ] - modified_disabled_tools.extend(confluence_tools) + modified_disabled_tools.extend([ + "create_notion_page_mcp", + "update_notion_page_mcp", + "delete_notion_page_mcp", + ]) # Remove direct KB search tool; we now pre-seed a scoped filesystem via middleware. 
if "search_knowledge_base" not in modified_disabled_tools: From 2dfe03b9b2771c7c2398209645c585397aaaf07e Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:29:03 +0200 Subject: [PATCH 11/57] add reusable OAuth connector route base class --- .../app/routes/oauth_connector_base.py | 620 ++++++++++++++++++ 1 file changed, 620 insertions(+) create mode 100644 surfsense_backend/app/routes/oauth_connector_base.py diff --git a/surfsense_backend/app/routes/oauth_connector_base.py b/surfsense_backend/app/routes/oauth_connector_base.py new file mode 100644 index 000000000..0483d2540 --- /dev/null +++ b/surfsense_backend/app/routes/oauth_connector_base.py @@ -0,0 +1,620 @@ +"""Reusable base for OAuth 2.0 connector routes. + +Subclasses override ``fetch_account_info``, ``build_connector_config``, +and ``get_connector_display_name`` to customise provider-specific behaviour. +Call ``build_router()`` to get a FastAPI ``APIRouter`` with ``/connector/add``, +``/connector/callback``, and ``/connector/reauth`` endpoints. 
+""" + +from __future__ import annotations + +import base64 +import logging +from datetime import UTC, datetime, timedelta +from typing import Any +from urllib.parse import urlencode +from uuid import UUID + +import httpx +from fastapi import APIRouter, Depends, HTTPException +from fastapi.responses import RedirectResponse +from sqlalchemy import select +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm.attributes import flag_modified + +from app.config import config +from app.db import ( + SearchSourceConnector, + SearchSourceConnectorType, + User, + get_async_session, +) +from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + generate_unique_connector_name, +) +from app.utils.oauth_security import OAuthStateManager, TokenEncryption + +logger = logging.getLogger(__name__) + + +class OAuthConnectorRoute: + + def __init__( + self, + *, + provider_name: str, + connector_type: SearchSourceConnectorType, + authorize_url: str, + token_url: str, + client_id_env: str, + client_secret_env: str, + redirect_uri_env: str, + scopes: list[str], + auth_prefix: str, + use_pkce: bool = False, + token_auth_method: str = "body", + is_indexable: bool = True, + extra_auth_params: dict[str, str] | None = None, + ) -> None: + self.provider_name = provider_name + self.connector_type = connector_type + self.authorize_url = authorize_url + self.token_url = token_url + self.client_id_env = client_id_env + self.client_secret_env = client_secret_env + self.redirect_uri_env = redirect_uri_env + self.scopes = scopes + self.auth_prefix = auth_prefix.rstrip("/") + self.use_pkce = use_pkce + self.token_auth_method = token_auth_method + self.is_indexable = is_indexable + self.extra_auth_params = extra_auth_params or {} + + self._state_manager: OAuthStateManager | None = None + self._token_encryption: TokenEncryption | None = None + + def _get_client_id(self) -> str: + value = 
getattr(config, self.client_id_env, None) + if not value: + raise HTTPException( + status_code=500, + detail=f"{self.provider_name.title()} OAuth not configured " + f"({self.client_id_env} missing).", + ) + return value + + def _get_client_secret(self) -> str: + value = getattr(config, self.client_secret_env, None) + if not value: + raise HTTPException( + status_code=500, + detail=f"{self.provider_name.title()} OAuth not configured " + f"({self.client_secret_env} missing).", + ) + return value + + def _get_redirect_uri(self) -> str: + value = getattr(config, self.redirect_uri_env, None) + if not value: + raise HTTPException( + status_code=500, + detail=f"{self.redirect_uri_env} not configured.", + ) + return value + + def _get_state_manager(self) -> OAuthStateManager: + if self._state_manager is None: + if not config.SECRET_KEY: + raise HTTPException( + status_code=500, + detail="SECRET_KEY not configured for OAuth security.", + ) + self._state_manager = OAuthStateManager(config.SECRET_KEY) + return self._state_manager + + def _get_token_encryption(self) -> TokenEncryption: + if self._token_encryption is None: + if not config.SECRET_KEY: + raise HTTPException( + status_code=500, + detail="SECRET_KEY not configured for token encryption.", + ) + self._token_encryption = TokenEncryption(config.SECRET_KEY) + return self._token_encryption + + def _frontend_redirect( + self, + space_id: int | None, + *, + success: bool = False, + connector_id: int | None = None, + error: str | None = None, + ) -> RedirectResponse: + if success and space_id: + connector_slug = f"{self.provider_name}-connector" + qs = f"success=true&connector={connector_slug}" + if connector_id: + qs += f"&connectorId={connector_id}" + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?{qs}" + ) + if error and space_id: + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?error={error}" + ) + if error: + 
return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard?error={error}" + ) + return RedirectResponse(url=f"{config.NEXT_FRONTEND_URL}/dashboard") + + async def fetch_account_info(self, access_token: str) -> dict[str, Any]: + """Override to fetch account/workspace info after token exchange. + + Return dict is merged into connector config; key ``"name"`` is used + for the display name and dedup. + """ + return {} + + def build_connector_config( + self, + token_json: dict[str, Any], + account_info: dict[str, Any], + encryption: TokenEncryption, + ) -> dict[str, Any]: + """Override for custom config shapes. Default: standard encrypted OAuth fields.""" + access_token = token_json.get("access_token", "") + refresh_token = token_json.get("refresh_token") + + expires_at = None + if token_json.get("expires_in"): + expires_at = datetime.now(UTC) + timedelta( + seconds=int(token_json["expires_in"]) + ) + + cfg: dict[str, Any] = { + "access_token": encryption.encrypt_token(access_token), + "refresh_token": ( + encryption.encrypt_token(refresh_token) if refresh_token else None + ), + "token_type": token_json.get("token_type", "Bearer"), + "expires_in": token_json.get("expires_in"), + "expires_at": expires_at.isoformat() if expires_at else None, + "scope": token_json.get("scope"), + "_token_encrypted": True, + } + cfg.update(account_info) + return cfg + + def get_connector_display_name(self, account_info: dict[str, Any]) -> str: + return str(account_info.get("name", self.provider_name.title())) + + async def on_token_refresh_failure( + self, + session: AsyncSession, + connector: SearchSourceConnector, + ) -> None: + try: + connector.config = {**connector.config, "auth_expired": True} + flag_modified(connector, "config") + await session.commit() + await session.refresh(connector) + except Exception: + logger.warning( + "Failed to persist auth_expired flag for connector %s", + connector.id, + exc_info=True, + ) + + async def _exchange_code( + self, code: str, 
extra_state: dict[str, Any] + ) -> dict[str, Any]: + client_id = self._get_client_id() + client_secret = self._get_client_secret() + redirect_uri = self._get_redirect_uri() + + headers: dict[str, str] = { + "Content-Type": "application/x-www-form-urlencoded", + } + body: dict[str, str] = { + "grant_type": "authorization_code", + "code": code, + "redirect_uri": redirect_uri, + } + + if self.token_auth_method == "basic": + creds = base64.b64encode(f"{client_id}:{client_secret}".encode()).decode() + headers["Authorization"] = f"Basic {creds}" + else: + body["client_id"] = client_id + body["client_secret"] = client_secret + + if self.use_pkce: + verifier = extra_state.get("code_verifier") + if verifier: + body["code_verifier"] = verifier + + async with httpx.AsyncClient() as client: + resp = await client.post( + self.token_url, data=body, headers=headers, timeout=30.0 + ) + + if resp.status_code != 200: + detail = resp.text + try: + detail = resp.json().get("error_description", detail) + except Exception: + pass + raise HTTPException( + status_code=400, detail=f"Token exchange failed: {detail}" + ) + + return resp.json() + + async def refresh_token( + self, session: AsyncSession, connector: SearchSourceConnector + ) -> SearchSourceConnector: + encryption = self._get_token_encryption() + is_encrypted = connector.config.get("_token_encrypted", False) + + refresh_tok = connector.config.get("refresh_token") + if is_encrypted and refresh_tok: + try: + refresh_tok = encryption.decrypt_token(refresh_tok) + except Exception as e: + logger.error("Failed to decrypt refresh token: %s", e) + raise HTTPException( + status_code=500, detail="Failed to decrypt stored refresh token" + ) from e + + if not refresh_tok: + await self.on_token_refresh_failure(session, connector) + raise HTTPException( + status_code=400, + detail="No refresh token available. 
Please re-authenticate.", + ) + + client_id = self._get_client_id() + client_secret = self._get_client_secret() + + headers: dict[str, str] = { + "Content-Type": "application/x-www-form-urlencoded", + } + body: dict[str, str] = { + "grant_type": "refresh_token", + "refresh_token": refresh_tok, + } + + if self.token_auth_method == "basic": + creds = base64.b64encode(f"{client_id}:{client_secret}".encode()).decode() + headers["Authorization"] = f"Basic {creds}" + else: + body["client_id"] = client_id + body["client_secret"] = client_secret + + async with httpx.AsyncClient() as client: + resp = await client.post( + self.token_url, data=body, headers=headers, timeout=30.0 + ) + + if resp.status_code != 200: + error_detail = resp.text + try: + ej = resp.json() + error_detail = ej.get("error_description", error_detail) + error_code = ej.get("error", "") + except Exception: + error_code = "" + combined = (error_detail + error_code).lower() + if any(kw in combined for kw in ("invalid_grant", "expired", "revoked")): + await self.on_token_refresh_failure(session, connector) + raise HTTPException( + status_code=401, + detail=f"{self.provider_name.title()} authentication failed. 
" + "Please re-authenticate.", + ) + raise HTTPException( + status_code=400, detail=f"Token refresh failed: {error_detail}" + ) + + token_json = resp.json() + new_access = token_json.get("access_token") + if not new_access: + raise HTTPException( + status_code=400, detail="No access token received from refresh" + ) + + expires_at = None + if token_json.get("expires_in"): + expires_at = datetime.now(UTC) + timedelta( + seconds=int(token_json["expires_in"]) + ) + + updated_config = dict(connector.config) + updated_config["access_token"] = encryption.encrypt_token(new_access) + new_refresh = token_json.get("refresh_token") + if new_refresh: + updated_config["refresh_token"] = encryption.encrypt_token(new_refresh) + updated_config["expires_in"] = token_json.get("expires_in") + updated_config["expires_at"] = expires_at.isoformat() if expires_at else None + updated_config["scope"] = token_json.get("scope", updated_config.get("scope")) + updated_config["_token_encrypted"] = True + updated_config.pop("auth_expired", None) + + connector.config = updated_config + flag_modified(connector, "config") + await session.commit() + await session.refresh(connector) + + logger.info( + "Refreshed %s token for connector %s", + self.provider_name, + connector.id, + ) + return connector + + def build_router(self) -> APIRouter: + router = APIRouter() + oauth = self + + @router.get(f"{oauth.auth_prefix}/connector/add") + async def connect( + space_id: int, + user: User = Depends(current_active_user), + ): + if not space_id: + raise HTTPException(status_code=400, detail="space_id is required") + + client_id = oauth._get_client_id() + state_mgr = oauth._get_state_manager() + + extra_state: dict[str, Any] = {} + auth_params: dict[str, str] = { + "client_id": client_id, + "response_type": "code", + "redirect_uri": oauth._get_redirect_uri(), + "scope": " ".join(oauth.scopes), + } + + if oauth.use_pkce: + from app.utils.oauth_security import generate_pkce_pair + + verifier, challenge = 
generate_pkce_pair() + extra_state["code_verifier"] = verifier + auth_params["code_challenge"] = challenge + auth_params["code_challenge_method"] = "S256" + + auth_params.update(oauth.extra_auth_params) + + state_encoded = state_mgr.generate_secure_state( + space_id, user.id, **extra_state + ) + auth_params["state"] = state_encoded + auth_url = f"{oauth.authorize_url}?{urlencode(auth_params)}" + + logger.info( + "Generated %s OAuth URL for user %s, space %s", + oauth.provider_name, + user.id, + space_id, + ) + return {"auth_url": auth_url} + + @router.get(f"{oauth.auth_prefix}/connector/reauth") + async def reauth( + space_id: int, + connector_id: int, + return_url: str | None = None, + user: User = Depends(current_active_user), + session: AsyncSession = Depends(get_async_session), + ): + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == connector_id, + SearchSourceConnector.user_id == user.id, + SearchSourceConnector.search_space_id == space_id, + SearchSourceConnector.connector_type == oauth.connector_type, + ) + ) + if not result.scalars().first(): + raise HTTPException( + status_code=404, + detail=f"{oauth.provider_name.title()} connector not found " + "or access denied", + ) + + client_id = oauth._get_client_id() + state_mgr = oauth._get_state_manager() + + extra: dict[str, Any] = {"connector_id": connector_id} + if return_url and return_url.startswith("/"): + extra["return_url"] = return_url + + auth_params: dict[str, str] = { + "client_id": client_id, + "response_type": "code", + "redirect_uri": oauth._get_redirect_uri(), + "scope": " ".join(oauth.scopes), + } + + if oauth.use_pkce: + from app.utils.oauth_security import generate_pkce_pair + + verifier, challenge = generate_pkce_pair() + extra["code_verifier"] = verifier + auth_params["code_challenge"] = challenge + auth_params["code_challenge_method"] = "S256" + + auth_params.update(oauth.extra_auth_params) + + state_encoded = 
state_mgr.generate_secure_state( + space_id, user.id, **extra + ) + auth_params["state"] = state_encoded + auth_url = f"{oauth.authorize_url}?{urlencode(auth_params)}" + + logger.info( + "Initiating %s re-auth for user %s, connector %s", + oauth.provider_name, + user.id, + connector_id, + ) + return {"auth_url": auth_url} + + @router.get(f"{oauth.auth_prefix}/connector/callback") + async def callback( + code: str | None = None, + error: str | None = None, + state: str | None = None, + session: AsyncSession = Depends(get_async_session), + ): + error_label = f"{oauth.provider_name}_oauth_denied" + + if error: + logger.warning("%s OAuth error: %s", oauth.provider_name, error) + space_id = None + if state: + try: + data = oauth._get_state_manager().validate_state(state) + space_id = data.get("space_id") + except Exception: + pass + return oauth._frontend_redirect(space_id, error=error_label) + + if not code: + raise HTTPException( + status_code=400, detail="Missing authorization code" + ) + if not state: + raise HTTPException( + status_code=400, detail="Missing state parameter" + ) + + state_mgr = oauth._get_state_manager() + try: + data = state_mgr.validate_state(state) + except Exception as e: + raise HTTPException( + status_code=400, detail=f"Invalid state parameter: {e!s}" + ) from e + + user_id = UUID(data["user_id"]) + space_id = data["space_id"] + + token_json = await oauth._exchange_code(code, data) + + access_token = token_json.get("access_token", "") + if not access_token: + raise HTTPException( + status_code=400, + detail=f"No access token received from {oauth.provider_name.title()}", + ) + + account_info = await oauth.fetch_account_info(access_token) + encryption = oauth._get_token_encryption() + connector_config = oauth.build_connector_config( + token_json, account_info, encryption + ) + + display_name = oauth.get_connector_display_name(account_info) + + # --- Re-auth path --- + reauth_connector_id = data.get("connector_id") + reauth_return_url = 
data.get("return_url") + + if reauth_connector_id: + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == reauth_connector_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.search_space_id == space_id, + SearchSourceConnector.connector_type == oauth.connector_type, + ) + ) + db_connector = result.scalars().first() + if not db_connector: + raise HTTPException( + status_code=404, + detail="Connector not found or access denied during re-auth", + ) + + db_connector.config = connector_config + flag_modified(db_connector, "config") + await session.commit() + await session.refresh(db_connector) + + logger.info( + "Re-authenticated %s connector %s for user %s", + oauth.provider_name, + db_connector.id, + user_id, + ) + if reauth_return_url and reauth_return_url.startswith("/"): + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}{reauth_return_url}" + ) + return oauth._frontend_redirect( + space_id, success=True, connector_id=db_connector.id + ) + + # --- New connector path --- + is_dup = await check_duplicate_connector( + session, + oauth.connector_type, + space_id, + user_id, + display_name, + ) + if is_dup: + logger.warning( + "Duplicate %s connector for user %s (%s)", + oauth.provider_name, + user_id, + display_name, + ) + return oauth._frontend_redirect( + space_id, + error=f"duplicate_account&connector={oauth.provider_name}-connector", + ) + + connector_name = await generate_unique_connector_name( + session, + oauth.connector_type, + space_id, + user_id, + display_name, + ) + + new_connector = SearchSourceConnector( + name=connector_name, + connector_type=oauth.connector_type, + is_indexable=oauth.is_indexable, + config=connector_config, + search_space_id=space_id, + user_id=user_id, + ) + session.add(new_connector) + + try: + await session.commit() + except IntegrityError as e: + await session.rollback() + raise HTTPException( + status_code=409, detail=f"Database integrity error: {e!s}" 
+ ) from e + + logger.info( + "Created %s connector %s for user %s in space %s", + oauth.provider_name, + new_connector.id, + user_id, + space_id, + ) + return oauth._frontend_redirect( + space_id, success=True, connector_id=new_connector.id + ) + + return router From ea3bda9ec39120ba223a6ab7b783f24ca7889b69 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:33:10 +0200 Subject: [PATCH 12/57] delete Notion MCP services, tools, and route --- .../new_chat/tools/notion_mcp/__init__.py | 5 - .../new_chat/tools/notion_mcp/create_page.py | 205 -------- .../new_chat/tools/notion_mcp/delete_page.py | 173 ------- .../new_chat/tools/notion_mcp/update_page.py | 179 ------- .../app/routes/notion_mcp_connector_route.py | 486 ------------------ .../app/services/notion_mcp/__init__.py | 27 - .../app/services/notion_mcp/adapter.py | 253 --------- .../app/services/notion_mcp/oauth.py | 298 ----------- .../services/notion_mcp/response_parser.py | 212 -------- 9 files changed, 1838 deletions(-) delete mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py delete mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py delete mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py delete mode 100644 surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py delete mode 100644 surfsense_backend/app/routes/notion_mcp_connector_route.py delete mode 100644 surfsense_backend/app/services/notion_mcp/__init__.py delete mode 100644 surfsense_backend/app/services/notion_mcp/adapter.py delete mode 100644 surfsense_backend/app/services/notion_mcp/oauth.py delete mode 100644 surfsense_backend/app/services/notion_mcp/response_parser.py diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py deleted file mode 100644 index 1e1515bfb..000000000 --- 
a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -"""MCP-backed Notion tool factories. - -Drop-in replacements for ``tools/notion/`` that route through -Notion's hosted MCP server instead of direct API calls. -""" diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py deleted file mode 100644 index a73363a65..000000000 --- a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/create_page.py +++ /dev/null @@ -1,205 +0,0 @@ -import logging -from typing import Any - -from langchain_core.tools import tool -from sqlalchemy.ext.asyncio import AsyncSession - -from app.agents.new_chat.tools.hitl import request_approval -from app.services.notion import NotionToolMetadataService - -logger = logging.getLogger(__name__) - - -def _find_mcp_connector(connectors): - """Return the first connector with mcp_mode enabled, or None.""" - for c in connectors: - if (c.config or {}).get("mcp_mode"): - return c - return None - - -def create_create_notion_page_mcp_tool( - db_session: AsyncSession | None = None, - search_space_id: int | None = None, - user_id: str | None = None, - connector_id: int | None = None, -): - @tool - async def create_notion_page( - title: str, - content: str | None = None, - ) -> dict[str, Any]: - """Create a new page in Notion with the given title and content. - - Use this tool when the user asks you to create, save, or publish - something to Notion. The page will be created in the user's - configured Notion workspace. The user MUST specify a topic before you - call this tool. If the request does not contain a topic (e.g. "create a - notion page"), ask what the page should be about. Never call this tool - without a clear topic from the user. - - Args: - title: The title of the Notion page. - content: Optional markdown content for the page body (supports headings, lists, paragraphs). 
- Generate this yourself based on the user's topic. - - Returns: - Dictionary with: - - status: "success", "rejected", or "error" - - page_id: Created page ID (if success) - - url: URL to the created page (if success) - - title: Page title (if success) - - message: Result message - - IMPORTANT: If status is "rejected", the user explicitly declined the action. - Respond with a brief acknowledgment (e.g., "Understood, I didn't create the page.") - and move on. Do NOT troubleshoot or suggest alternatives. - - Examples: - - "Create a Notion page about our Q2 roadmap" - - "Save a summary of today's discussion to Notion" - """ - logger.info("create_notion_page (MCP) called: title='%s'", title) - - if db_session is None or search_space_id is None or user_id is None: - logger.error("Notion MCP tool not properly configured - missing required parameters") - return { - "status": "error", - "message": "Notion tool not properly configured. Please contact support.", - } - - try: - metadata_service = NotionToolMetadataService(db_session) - context = await metadata_service.get_creation_context(search_space_id, user_id) - - if "error" in context: - logger.error("Failed to fetch creation context: %s", context["error"]) - return {"status": "error", "message": context["error"]} - - accounts = context.get("accounts", []) - if accounts and all(a.get("auth_expired") for a in accounts): - return { - "status": "auth_error", - "message": "All connected Notion accounts need re-authentication. Please re-authenticate in your connector settings.", - "connector_type": "notion", - } - - result = request_approval( - action_type="notion_page_creation", - tool_name="create_notion_page", - params={ - "title": title, - "content": content, - "parent_page_id": None, - "connector_id": connector_id, - }, - context=context, - ) - - if result.rejected: - logger.info("Notion page creation rejected by user") - return { - "status": "rejected", - "message": "User declined. 
Do not retry or suggest alternatives.", - } - - final_title = result.params.get("title", title) - final_content = result.params.get("content", content) - final_parent_page_id = result.params.get("parent_page_id") - final_connector_id = result.params.get("connector_id", connector_id) - - if not final_title or not final_title.strip(): - return { - "status": "error", - "message": "Page title cannot be empty. Please provide a valid title.", - } - - from sqlalchemy.future import select - - from app.db import SearchSourceConnector, SearchSourceConnectorType - - actual_connector_id = final_connector_id - if actual_connector_id is None: - query_result = await db_session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.search_space_id == search_space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, - ) - ) - connectors = query_result.scalars().all() - connector = _find_mcp_connector(connectors) - - if not connector: - return { - "status": "error", - "message": "No Notion MCP connector found. 
Please connect Notion (MCP) in your workspace settings.", - } - actual_connector_id = connector.id - else: - query_result = await db_session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == actual_connector_id, - SearchSourceConnector.search_space_id == search_space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, - ) - ) - connector = query_result.scalars().first() - if not connector: - return { - "status": "error", - "message": "Selected Notion account is invalid or has been disconnected.", - } - - from app.services.notion_mcp.adapter import NotionMCPAdapter - - adapter = NotionMCPAdapter(session=db_session, connector_id=actual_connector_id) - result = await adapter.create_page( - title=final_title, - content=final_content, - parent_page_id=final_parent_page_id, - ) - logger.info("create_page (MCP) result: %s - %s", result.get("status"), result.get("message", "")) - - if result.get("status") == "success": - kb_message_suffix = "" - try: - from app.services.notion import NotionKBSyncService - - kb_service = NotionKBSyncService(db_session) - kb_result = await kb_service.sync_after_create( - page_id=result.get("page_id"), - page_title=result.get("title", final_title), - page_url=result.get("url"), - content=final_content, - connector_id=actual_connector_id, - search_space_id=search_space_id, - user_id=user_id, - ) - if kb_result["status"] == "success": - kb_message_suffix = " Your knowledge base has also been updated." - else: - kb_message_suffix = " This page will be added to your knowledge base in the next scheduled sync." - except Exception as kb_err: - logger.warning("KB sync after create failed: %s", kb_err) - kb_message_suffix = " This page will be added to your knowledge base in the next scheduled sync." 
- - result["message"] = result.get("message", "") + kb_message_suffix - - return result - - except Exception as e: - from langgraph.errors import GraphInterrupt - - if isinstance(e, GraphInterrupt): - raise - - logger.error("Error creating Notion page (MCP): %s", e, exc_info=True) - if isinstance(e, ValueError): - message = str(e) - else: - message = "Something went wrong while creating the page. Please try again." - return {"status": "error", "message": message} - - return create_notion_page diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py deleted file mode 100644 index c0cf7642b..000000000 --- a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/delete_page.py +++ /dev/null @@ -1,173 +0,0 @@ -import logging -from typing import Any - -from langchain_core.tools import tool -from sqlalchemy.ext.asyncio import AsyncSession - -from app.agents.new_chat.tools.hitl import request_approval -from app.services.notion.tool_metadata_service import NotionToolMetadataService - -logger = logging.getLogger(__name__) - - -def create_delete_notion_page_mcp_tool( - db_session: AsyncSession | None = None, - search_space_id: int | None = None, - user_id: str | None = None, - connector_id: int | None = None, -): - @tool - async def delete_notion_page( - page_title: str, - delete_from_kb: bool = False, - ) -> dict[str, Any]: - """Delete (archive) a Notion page. - - Use this tool when the user asks you to delete, remove, or archive - a Notion page. Note that Notion doesn't permanently delete pages, - it archives them (they can be restored from trash). - - Args: - page_title: The title of the Notion page to delete. - delete_from_kb: Whether to also remove the page from the knowledge base. - Default is False. 
- - Returns: - Dictionary with: - - status: "success", "rejected", "not_found", or "error" - - page_id: Deleted page ID (if success) - - message: Success or error message - - deleted_from_kb: Whether the page was also removed from knowledge base (if success) - - Examples: - - "Delete the 'Meeting Notes' Notion page" - - "Remove the 'Old Project Plan' Notion page" - """ - logger.info( - "delete_notion_page (MCP) called: page_title='%s', delete_from_kb=%s", - page_title, - delete_from_kb, - ) - - if db_session is None or search_space_id is None or user_id is None: - logger.error("Notion MCP tool not properly configured - missing required parameters") - return { - "status": "error", - "message": "Notion tool not properly configured. Please contact support.", - } - - try: - metadata_service = NotionToolMetadataService(db_session) - context = await metadata_service.get_delete_context(search_space_id, user_id, page_title) - - if "error" in context: - error_msg = context["error"] - if "not found" in error_msg.lower(): - return {"status": "not_found", "message": error_msg} - return {"status": "error", "message": error_msg} - - account = context.get("account", {}) - if account.get("auth_expired"): - return { - "status": "auth_error", - "message": "The Notion account for this page needs re-authentication. Please re-authenticate in your connector settings.", - } - - page_id = context.get("page_id") - connector_id_from_context = account.get("id") - document_id = context.get("document_id") - - result = request_approval( - action_type="notion_page_deletion", - tool_name="delete_notion_page", - params={ - "page_id": page_id, - "connector_id": connector_id_from_context, - "delete_from_kb": delete_from_kb, - }, - context=context, - ) - - if result.rejected: - logger.info("Notion page deletion rejected by user") - return { - "status": "rejected", - "message": "User declined. 
Do not retry or suggest alternatives.", - } - - final_page_id = result.params.get("page_id", page_id) - final_connector_id = result.params.get("connector_id", connector_id_from_context) - final_delete_from_kb = result.params.get("delete_from_kb", delete_from_kb) - - from sqlalchemy.future import select - - from app.db import SearchSourceConnector, SearchSourceConnectorType - - if final_connector_id: - query_result = await db_session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == final_connector_id, - SearchSourceConnector.search_space_id == search_space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, - ) - ) - connector = query_result.scalars().first() - if not connector: - return { - "status": "error", - "message": "Selected Notion account is invalid or has been disconnected.", - } - actual_connector_id = connector.id - else: - return {"status": "error", "message": "No connector found for this page."} - - from app.services.notion_mcp.adapter import NotionMCPAdapter - - adapter = NotionMCPAdapter(session=db_session, connector_id=actual_connector_id) - result = await adapter.delete_page(page_id=final_page_id) - logger.info("delete_page (MCP) result: %s - %s", result.get("status"), result.get("message", "")) - - deleted_from_kb = False - if result.get("status") == "success" and final_delete_from_kb and document_id: - try: - from sqlalchemy.future import select - - from app.db import Document - - doc_result = await db_session.execute( - select(Document).filter(Document.id == document_id) - ) - document = doc_result.scalars().first() - - if document: - await db_session.delete(document) - await db_session.commit() - deleted_from_kb = True - logger.info("Deleted document %s from knowledge base", document_id) - except Exception as e: - logger.error("Failed to delete document from KB: %s", e) - await db_session.rollback() - result["warning"] = f"Page 
deleted from Notion, but failed to remove from knowledge base: {e!s}" - - if result.get("status") == "success": - result["deleted_from_kb"] = deleted_from_kb - if deleted_from_kb: - result["message"] = f"{result.get('message', '')} (also removed from knowledge base)" - - return result - - except Exception as e: - from langgraph.errors import GraphInterrupt - - if isinstance(e, GraphInterrupt): - raise - - logger.error("Error deleting Notion page (MCP): %s", e, exc_info=True) - if isinstance(e, ValueError): - message = str(e) - else: - message = "Something went wrong while deleting the page. Please try again." - return {"status": "error", "message": message} - - return delete_notion_page diff --git a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py b/surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py deleted file mode 100644 index 28599cbae..000000000 --- a/surfsense_backend/app/agents/new_chat/tools/notion_mcp/update_page.py +++ /dev/null @@ -1,179 +0,0 @@ -import logging -from typing import Any - -from langchain_core.tools import tool -from sqlalchemy.ext.asyncio import AsyncSession - -from app.agents.new_chat.tools.hitl import request_approval -from app.services.notion import NotionToolMetadataService - -logger = logging.getLogger(__name__) - - -def create_update_notion_page_mcp_tool( - db_session: AsyncSession | None = None, - search_space_id: int | None = None, - user_id: str | None = None, - connector_id: int | None = None, -): - @tool - async def update_notion_page( - page_title: str, - content: str | None = None, - ) -> dict[str, Any]: - """Update an existing Notion page by appending new content. - - Use this tool when the user asks you to add content to, modify, or update - a Notion page. The new content will be appended to the existing page content. - The user MUST specify what to add before you call this tool. If the - request is vague, ask what content they want added. 
- - Args: - page_title: The title of the Notion page to update. - content: Optional markdown content to append to the page body (supports headings, lists, paragraphs). - Generate this yourself based on the user's request. - - Returns: - Dictionary with: - - status: "success", "rejected", "not_found", or "error" - - page_id: Updated page ID (if success) - - url: URL to the updated page (if success) - - title: Current page title (if success) - - message: Result message - - IMPORTANT: - - If status is "rejected", the user explicitly declined the action. - Respond with a brief acknowledgment (e.g., "Understood, I didn't update the page.") - and move on. Do NOT ask for alternatives or troubleshoot. - - If status is "not_found", inform the user conversationally using the exact message provided. - - Examples: - - "Add today's meeting notes to the 'Meeting Notes' Notion page" - - "Update the 'Project Plan' page with a status update on phase 1" - """ - logger.info( - "update_notion_page (MCP) called: page_title='%s', content_length=%d", - page_title, - len(content) if content else 0, - ) - - if db_session is None or search_space_id is None or user_id is None: - logger.error("Notion MCP tool not properly configured - missing required parameters") - return { - "status": "error", - "message": "Notion tool not properly configured. Please contact support.", - } - - if not content or not content.strip(): - return { - "status": "error", - "message": "Content is required to update the page. 
Please provide the actual content you want to add.", - } - - try: - metadata_service = NotionToolMetadataService(db_session) - context = await metadata_service.get_update_context(search_space_id, user_id, page_title) - - if "error" in context: - error_msg = context["error"] - if "not found" in error_msg.lower(): - return {"status": "not_found", "message": error_msg} - return {"status": "error", "message": error_msg} - - account = context.get("account", {}) - if account.get("auth_expired"): - return { - "status": "auth_error", - "message": "The Notion account for this page needs re-authentication. Please re-authenticate in your connector settings.", - } - - page_id = context.get("page_id") - document_id = context.get("document_id") - connector_id_from_context = account.get("id") - - result = request_approval( - action_type="notion_page_update", - tool_name="update_notion_page", - params={ - "page_id": page_id, - "content": content, - "connector_id": connector_id_from_context, - }, - context=context, - ) - - if result.rejected: - logger.info("Notion page update rejected by user") - return { - "status": "rejected", - "message": "User declined. 
Do not retry or suggest alternatives.", - } - - final_page_id = result.params.get("page_id", page_id) - final_content = result.params.get("content", content) - final_connector_id = result.params.get("connector_id", connector_id_from_context) - - from sqlalchemy.future import select - - from app.db import SearchSourceConnector, SearchSourceConnectorType - - if final_connector_id: - query_result = await db_session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == final_connector_id, - SearchSourceConnector.search_space_id == search_space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, - ) - ) - connector = query_result.scalars().first() - if not connector: - return { - "status": "error", - "message": "Selected Notion account is invalid or has been disconnected.", - } - actual_connector_id = connector.id - else: - return {"status": "error", "message": "No connector found for this page."} - - from app.services.notion_mcp.adapter import NotionMCPAdapter - - adapter = NotionMCPAdapter(session=db_session, connector_id=actual_connector_id) - result = await adapter.update_page(page_id=final_page_id, content=final_content) - logger.info("update_page (MCP) result: %s - %s", result.get("status"), result.get("message", "")) - - if result.get("status") == "success" and document_id is not None: - from app.services.notion import NotionKBSyncService - - kb_service = NotionKBSyncService(db_session) - kb_result = await kb_service.sync_after_update( - document_id=document_id, - appended_content=final_content, - user_id=user_id, - search_space_id=search_space_id, - appended_block_ids=result.get("appended_block_ids"), - ) - - if kb_result["status"] == "success": - result["message"] = f"{result['message']}. Your knowledge base has also been updated." - elif kb_result["status"] == "not_indexed": - result["message"] = f"{result['message']}. 
This page will be added to your knowledge base in the next scheduled sync." - else: - result["message"] = f"{result['message']}. Your knowledge base will be updated in the next scheduled sync." - - return result - - except Exception as e: - from langgraph.errors import GraphInterrupt - - if isinstance(e, GraphInterrupt): - raise - - logger.error("Error updating Notion page (MCP): %s", e, exc_info=True) - if isinstance(e, ValueError): - message = str(e) - else: - message = "Something went wrong while updating the page. Please try again." - return {"status": "error", "message": message} - - return update_notion_page diff --git a/surfsense_backend/app/routes/notion_mcp_connector_route.py b/surfsense_backend/app/routes/notion_mcp_connector_route.py deleted file mode 100644 index b9305cd74..000000000 --- a/surfsense_backend/app/routes/notion_mcp_connector_route.py +++ /dev/null @@ -1,486 +0,0 @@ -"""Notion MCP Connector OAuth Routes. - -Handles OAuth 2.0 + PKCE authentication for Notion's hosted MCP server. -Based on: https://developers.notion.com/guides/mcp/build-mcp-client - -This creates connectors with the same ``NOTION_CONNECTOR`` type as the -existing direct-API connector, but with ``mcp_mode: True`` in the config -so the adapter layer knows to route through MCP. 
-""" - -import logging -from uuid import UUID - -from fastapi import APIRouter, Depends, HTTPException, Request -from fastapi.responses import RedirectResponse -from sqlalchemy import select -from sqlalchemy.exc import IntegrityError -from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.orm.attributes import flag_modified - -from app.config import config -from app.db import ( - SearchSourceConnector, - SearchSourceConnectorType, - User, - get_async_session, -) -from app.services.notion_mcp.oauth import ( - ClientCredentials, - OAuthMetadata, - build_authorization_url, - discover_oauth_metadata, - exchange_code_for_tokens, - refresh_access_token, - register_client, -) -from app.users import current_active_user -from app.utils.connector_naming import ( - check_duplicate_connector, - extract_identifier_from_credentials, - generate_unique_connector_name, -) -from app.utils.oauth_security import OAuthStateManager, TokenEncryption, generate_pkce_pair - -logger = logging.getLogger(__name__) - -router = APIRouter() - -_state_manager: OAuthStateManager | None = None -_token_encryption: TokenEncryption | None = None -_oauth_metadata: OAuthMetadata | None = None - - -def _get_state_manager() -> OAuthStateManager: - global _state_manager - if _state_manager is None: - if not config.SECRET_KEY: - raise ValueError("SECRET_KEY must be set for OAuth security") - _state_manager = OAuthStateManager(config.SECRET_KEY) - return _state_manager - - -def _get_token_encryption() -> TokenEncryption: - global _token_encryption - if _token_encryption is None: - if not config.SECRET_KEY: - raise ValueError("SECRET_KEY must be set for token encryption") - _token_encryption = TokenEncryption(config.SECRET_KEY) - return _token_encryption - - -async def _get_oauth_metadata() -> OAuthMetadata: - global _oauth_metadata - if _oauth_metadata is None: - _oauth_metadata = await discover_oauth_metadata() - return _oauth_metadata - - -async def _fetch_workspace_info(access_token: str) -> 
dict: - """Fetch workspace metadata using the Notion API with the fresh token. - - The ``/v1/users/me`` endpoint returns bot info including workspace_name. - This populates connector config fields so naming and metadata services - work correctly. - """ - try: - import httpx - - async with httpx.AsyncClient(timeout=15.0) as client: - resp = await client.get( - "https://api.notion.com/v1/users/me", - headers={ - "Authorization": f"Bearer {access_token}", - "Notion-Version": "2022-06-28", - }, - ) - if resp.is_success: - data = resp.json() - bot_info = data.get("bot", {}) - return { - "bot_id": data.get("id"), - "workspace_name": bot_info.get("workspace_name", "Notion Workspace"), - "workspace_icon": data.get("avatar_url") or "📄", - } - except Exception as e: - logger.warning("Failed to fetch workspace info: %s", e) - return {} - - -NOTION_MCP_REDIRECT_URI = None - - -def _get_redirect_uri() -> str: - global NOTION_MCP_REDIRECT_URI - if NOTION_MCP_REDIRECT_URI is None: - backend = config.BACKEND_URL or "http://localhost:8000" - NOTION_MCP_REDIRECT_URI = f"{backend}/api/v1/auth/notion-mcp/connector/callback" - return NOTION_MCP_REDIRECT_URI - - -# --------------------------------------------------------------------------- -# Route: initiate OAuth -# --------------------------------------------------------------------------- - - -@router.get("/auth/notion-mcp/connector/add") -async def connect_notion_mcp( - space_id: int, - user: User = Depends(current_active_user), -): - """Initiate Notion MCP OAuth + PKCE flow.""" - if not config.SECRET_KEY: - raise HTTPException(status_code=500, detail="SECRET_KEY not configured.") - - try: - metadata = await _get_oauth_metadata() - - redirect_uri = _get_redirect_uri() - credentials = await register_client(metadata, redirect_uri) - - code_verifier, code_challenge = generate_pkce_pair() - - state_manager = _get_state_manager() - state_encoded = state_manager.generate_secure_state( - space_id, - user.id, - code_verifier=code_verifier, 
- mcp_client_id=credentials.client_id, - mcp_client_secret=credentials.client_secret or "", - ) - - auth_url = build_authorization_url( - metadata=metadata, - client_id=credentials.client_id, - redirect_uri=redirect_uri, - code_challenge=code_challenge, - state=state_encoded, - ) - - logger.info("Generated Notion MCP OAuth URL for user %s, space %s", user.id, space_id) - return {"auth_url": auth_url} - - except Exception as e: - logger.error("Failed to initiate Notion MCP OAuth: %s", e, exc_info=True) - raise HTTPException( - status_code=500, detail=f"Failed to initiate Notion MCP OAuth: {e!s}" - ) from e - - -# --------------------------------------------------------------------------- -# Route: re-authenticate existing connector -# --------------------------------------------------------------------------- - - -@router.get("/auth/notion-mcp/connector/reauth") -async def reauth_notion_mcp( - space_id: int, - connector_id: int, - return_url: str | None = None, - user: User = Depends(current_active_user), - session: AsyncSession = Depends(get_async_session), -): - """Initiate re-authentication for an existing Notion MCP connector.""" - result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == connector_id, - SearchSourceConnector.user_id == user.id, - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, - ) - ) - connector = result.scalars().first() - if not connector: - raise HTTPException(status_code=404, detail="Connector not found or access denied") - - if not config.SECRET_KEY: - raise HTTPException(status_code=500, detail="SECRET_KEY not configured.") - - try: - metadata = await _get_oauth_metadata() - redirect_uri = _get_redirect_uri() - credentials = await register_client(metadata, redirect_uri) - - code_verifier, code_challenge = generate_pkce_pair() - - extra: dict = { - "connector_id": connector_id, - "code_verifier": 
code_verifier, - "mcp_client_id": credentials.client_id, - "mcp_client_secret": credentials.client_secret or "", - } - if return_url and return_url.startswith("/"): - extra["return_url"] = return_url - - state_manager = _get_state_manager() - state_encoded = state_manager.generate_secure_state(space_id, user.id, **extra) - - auth_url = build_authorization_url( - metadata=metadata, - client_id=credentials.client_id, - redirect_uri=redirect_uri, - code_challenge=code_challenge, - state=state_encoded, - ) - - logger.info("Initiating Notion MCP re-auth for user %s, connector %s", user.id, connector_id) - return {"auth_url": auth_url} - - except HTTPException: - raise - except Exception as e: - logger.error("Failed to initiate Notion MCP re-auth: %s", e, exc_info=True) - raise HTTPException( - status_code=500, detail=f"Failed to initiate Notion MCP re-auth: {e!s}" - ) from e - - -# --------------------------------------------------------------------------- -# Route: OAuth callback -# --------------------------------------------------------------------------- - - -@router.get("/auth/notion-mcp/connector/callback") -async def notion_mcp_callback( - request: Request, - code: str | None = None, - error: str | None = None, - state: str | None = None, - session: AsyncSession = Depends(get_async_session), -): - """Handle the OAuth callback from Notion's MCP authorization server.""" - if error: - logger.warning("Notion MCP OAuth error: %s", error) - space_id = None - if state: - try: - data = _get_state_manager().validate_state(state) - space_id = data.get("space_id") - except Exception: - pass - if space_id: - return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?error=notion_mcp_oauth_denied" - ) - return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=notion_mcp_oauth_denied" - ) - - if not code: - raise HTTPException(status_code=400, detail="Missing authorization code") - if not state: - raise 
HTTPException(status_code=400, detail="Missing state parameter") - - state_manager = _get_state_manager() - try: - data = state_manager.validate_state(state) - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=400, detail=f"Invalid state: {e!s}") from e - - user_id = UUID(data["user_id"]) - space_id = data["space_id"] - code_verifier = data.get("code_verifier") - mcp_client_id = data.get("mcp_client_id") - mcp_client_secret = data.get("mcp_client_secret") or None - - if not code_verifier or not mcp_client_id: - raise HTTPException(status_code=400, detail="Missing PKCE or client data in state") - - try: - metadata = await _get_oauth_metadata() - redirect_uri = _get_redirect_uri() - - token_set = await exchange_code_for_tokens( - code=code, - code_verifier=code_verifier, - metadata=metadata, - client_id=mcp_client_id, - redirect_uri=redirect_uri, - client_secret=mcp_client_secret, - ) - except Exception as e: - logger.error("Notion MCP token exchange failed: %s", e, exc_info=True) - raise HTTPException(status_code=400, detail=f"Token exchange failed: {e!s}") from e - - token_encryption = _get_token_encryption() - - workspace_info = await _fetch_workspace_info(token_set.access_token) - - connector_config = { - "access_token": token_encryption.encrypt_token(token_set.access_token), - "refresh_token": token_encryption.encrypt_token(token_set.refresh_token) - if token_set.refresh_token - else None, - "expires_in": token_set.expires_in, - "expires_at": token_set.expires_at.isoformat() if token_set.expires_at else None, - "workspace_id": workspace_info.get("workspace_id"), - "workspace_name": workspace_info.get("workspace_name", "Notion Workspace"), - "workspace_icon": workspace_info.get("workspace_icon", "📄"), - "bot_id": workspace_info.get("bot_id"), - "mcp_mode": True, - "mcp_client_id": mcp_client_id, - "mcp_client_secret": token_encryption.encrypt_token(mcp_client_secret) - if mcp_client_secret - else None, - "_token_encrypted": 
True, - } - - reauth_connector_id = data.get("connector_id") - reauth_return_url = data.get("return_url") - - # --- Re-auth path --- - if reauth_connector_id: - result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == reauth_connector_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, - ) - ) - db_connector = result.scalars().first() - if not db_connector: - raise HTTPException(status_code=404, detail="Connector not found during re-auth") - - db_connector.config = connector_config - flag_modified(db_connector, "config") - await session.commit() - await session.refresh(db_connector) - - logger.info("Re-authenticated Notion MCP connector %s for user %s", db_connector.id, user_id) - if reauth_return_url and reauth_return_url.startswith("/"): - return RedirectResponse(url=f"{config.NEXT_FRONTEND_URL}{reauth_return_url}") - return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?success=true&connector=notion-connector&connectorId={db_connector.id}" - ) - - # --- New connector path --- - connector_identifier = extract_identifier_from_credentials( - SearchSourceConnectorType.NOTION_CONNECTOR, connector_config - ) - - is_duplicate = await check_duplicate_connector( - session, - SearchSourceConnectorType.NOTION_CONNECTOR, - space_id, - user_id, - connector_identifier, - ) - if is_duplicate: - logger.warning("Duplicate Notion MCP connector for user %s", user_id) - return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?error=duplicate_account&connector=notion-connector" - ) - - connector_name = await generate_unique_connector_name( - session, - SearchSourceConnectorType.NOTION_CONNECTOR, - space_id, - user_id, - connector_identifier, - ) - - new_connector = SearchSourceConnector( - name=connector_name, - 
connector_type=SearchSourceConnectorType.NOTION_CONNECTOR, - is_indexable=True, - config=connector_config, - search_space_id=space_id, - user_id=user_id, - ) - session.add(new_connector) - - try: - await session.commit() - logger.info("Created Notion MCP connector for user %s in space %s", user_id, space_id) - return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?success=true&connector=notion-connector&connectorId={new_connector.id}" - ) - except IntegrityError as e: - await session.rollback() - raise HTTPException(status_code=409, detail=f"Database integrity error: {e!s}") from e - except Exception as e: - await session.rollback() - raise HTTPException( - status_code=500, detail=f"Failed to create connector: {e!s}" - ) from e - - -# --------------------------------------------------------------------------- -# Token refresh helper (used by the adapter) -# --------------------------------------------------------------------------- - - -async def refresh_notion_mcp_token( - session: AsyncSession, - connector: SearchSourceConnector, -) -> SearchSourceConnector: - """Refresh the MCP access token for a connector. - - Handles refresh-token rotation: persists both new access_token - and new refresh_token atomically. - """ - token_encryption = _get_token_encryption() - - cfg = connector.config or {} - encrypted_refresh = cfg.get("refresh_token") - if not encrypted_refresh: - raise HTTPException(status_code=400, detail="No refresh token available. 
Please re-authenticate.") - - try: - refresh_token = token_encryption.decrypt_token(encrypted_refresh) - except Exception as e: - raise HTTPException(status_code=500, detail=f"Failed to decrypt refresh token: {e!s}") from e - - mcp_client_id = cfg.get("mcp_client_id") - mcp_client_secret_encrypted = cfg.get("mcp_client_secret") - mcp_client_secret = ( - token_encryption.decrypt_token(mcp_client_secret_encrypted) - if mcp_client_secret_encrypted - else None - ) - - if not mcp_client_id: - raise HTTPException(status_code=400, detail="Missing MCP client_id. Please re-authenticate.") - - metadata = await _get_oauth_metadata() - - try: - token_set = await refresh_access_token( - refresh_token=refresh_token, - metadata=metadata, - client_id=mcp_client_id, - client_secret=mcp_client_secret, - ) - except ValueError as e: - if "REAUTH_REQUIRED" in str(e): - connector.config = {**connector.config, "auth_expired": True} - flag_modified(connector, "config") - await session.commit() - await session.refresh(connector) - raise HTTPException( - status_code=401, detail="Notion MCP authentication expired. Please re-authenticate." 
- ) from e - raise HTTPException(status_code=400, detail=f"Token refresh failed: {e!s}") from e - - updated_config = { - **connector.config, - "access_token": token_encryption.encrypt_token(token_set.access_token), - "refresh_token": token_encryption.encrypt_token(token_set.refresh_token) - if token_set.refresh_token - else connector.config.get("refresh_token"), - "expires_in": token_set.expires_in, - "expires_at": token_set.expires_at.isoformat() if token_set.expires_at else None, - "_token_encrypted": True, - } - updated_config.pop("auth_expired", None) - - connector.config = updated_config - flag_modified(connector, "config") - await session.commit() - await session.refresh(connector) - - logger.info("Refreshed Notion MCP token for connector %s", connector.id) - return connector diff --git a/surfsense_backend/app/services/notion_mcp/__init__.py b/surfsense_backend/app/services/notion_mcp/__init__.py deleted file mode 100644 index 6a57500b6..000000000 --- a/surfsense_backend/app/services/notion_mcp/__init__.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Notion MCP integration. - -Routes Notion operations through Notion's hosted MCP server -at https://mcp.notion.com/mcp instead of direct API calls. 
-""" - -from sqlalchemy import select -from sqlalchemy.ext.asyncio import AsyncSession - -from app.db import SearchSourceConnector, SearchSourceConnectorType - - -async def has_mcp_notion_connector( - session: AsyncSession, - search_space_id: int, -) -> bool: - """Check whether the search space has at least one MCP-mode Notion connector.""" - result = await session.execute( - select(SearchSourceConnector.id, SearchSourceConnector.config).filter( - SearchSourceConnector.search_space_id == search_space_id, - SearchSourceConnector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR, - ) - ) - for _, config in result.all(): - if isinstance(config, dict) and config.get("mcp_mode"): - return True - return False diff --git a/surfsense_backend/app/services/notion_mcp/adapter.py b/surfsense_backend/app/services/notion_mcp/adapter.py deleted file mode 100644 index 76eac6305..000000000 --- a/surfsense_backend/app/services/notion_mcp/adapter.py +++ /dev/null @@ -1,253 +0,0 @@ -"""Notion MCP Adapter. - -Connects to Notion's hosted MCP server at ``https://mcp.notion.com/mcp`` -and exposes the same method signatures as ``NotionHistoryConnector``'s -write operations so that tool factories can swap with a one-line change. - -Includes an optional fallback to ``NotionHistoryConnector`` when the MCP -server returns known serialization errors (GitHub issues #215, #216). 
-""" - -import logging -from datetime import UTC, datetime -from typing import Any - -from mcp import ClientSession -from mcp.client.streamable_http import streamablehttp_client -from sqlalchemy import select -from sqlalchemy.ext.asyncio import AsyncSession - -from app.config import config -from app.db import SearchSourceConnector -from app.schemas.notion_auth_credentials import NotionAuthCredentialsBase -from app.utils.oauth_security import TokenEncryption - -from .response_parser import ( - extract_text_from_mcp_response, - is_mcp_serialization_error, - parse_create_page_response, - parse_delete_page_response, - parse_fetch_page_response, - parse_health_check_response, - parse_update_page_response, -) - -logger = logging.getLogger(__name__) - -NOTION_MCP_URL = "https://mcp.notion.com/mcp" - - -class NotionMCPAdapter: - """Routes Notion operations through the hosted MCP server. - - Drop-in replacement for ``NotionHistoryConnector`` write methods. - Returns the same dict structure so KB sync works unchanged. 
- """ - - def __init__(self, session: AsyncSession, connector_id: int): - self._session = session - self._connector_id = connector_id - self._access_token: str | None = None - - async def _get_valid_token(self) -> str: - """Get a valid MCP access token, refreshing if expired.""" - result = await self._session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == self._connector_id - ) - ) - connector = result.scalars().first() - if not connector: - raise ValueError(f"Connector {self._connector_id} not found") - - cfg = connector.config or {} - - if not cfg.get("mcp_mode"): - raise ValueError( - f"Connector {self._connector_id} is not an MCP connector" - ) - - access_token = cfg.get("access_token") - if not access_token: - raise ValueError("No access token in MCP connector config") - - is_encrypted = cfg.get("_token_encrypted", False) - if is_encrypted and config.SECRET_KEY: - token_encryption = TokenEncryption(config.SECRET_KEY) - access_token = token_encryption.decrypt_token(access_token) - - expires_at_str = cfg.get("expires_at") - if expires_at_str: - expires_at = datetime.fromisoformat(expires_at_str) - if expires_at.tzinfo is None: - expires_at = expires_at.replace(tzinfo=UTC) - if expires_at <= datetime.now(UTC): - from app.routes.notion_mcp_connector_route import refresh_notion_mcp_token - - connector = await refresh_notion_mcp_token(self._session, connector) - cfg = connector.config or {} - access_token = cfg.get("access_token", "") - if is_encrypted and config.SECRET_KEY: - token_encryption = TokenEncryption(config.SECRET_KEY) - access_token = token_encryption.decrypt_token(access_token) - - self._access_token = access_token - return access_token - - async def _call_mcp_tool( - self, tool_name: str, arguments: dict[str, Any] - ) -> str: - """Connect to Notion MCP server and call a tool. 
Returns raw text.""" - token = await self._get_valid_token() - headers = {"Authorization": f"Bearer {token}"} - - async with ( - streamablehttp_client(NOTION_MCP_URL, headers=headers) as (read, write, _), - ClientSession(read, write) as session, - ): - await session.initialize() - response = await session.call_tool(tool_name, arguments=arguments) - return extract_text_from_mcp_response(response) - - async def _call_with_fallback( - self, - tool_name: str, - arguments: dict[str, Any], - parser, - fallback_method: str | None = None, - fallback_kwargs: dict[str, Any] | None = None, - ) -> dict[str, Any]: - """Call MCP tool, parse response, and fall back on serialization errors.""" - try: - raw_text = await self._call_mcp_tool(tool_name, arguments) - result = parser(raw_text) - - if result.get("mcp_serialization_error") and fallback_method: - logger.warning( - "MCP tool '%s' hit serialization bug, falling back to direct API", - tool_name, - ) - return await self._fallback(fallback_method, fallback_kwargs or {}) - - return result - - except Exception as e: - error_str = str(e) - if is_mcp_serialization_error(error_str) and fallback_method: - logger.warning( - "MCP tool '%s' raised serialization error, falling back: %s", - tool_name, - error_str, - ) - return await self._fallback(fallback_method, fallback_kwargs or {}) - - logger.error("MCP tool '%s' failed: %s", tool_name, e, exc_info=True) - return {"status": "error", "message": f"MCP call failed: {e!s}"} - - async def _fallback( - self, method_name: str, kwargs: dict[str, Any] - ) -> dict[str, Any]: - """Fall back to NotionHistoryConnector for the given method. - - Uses the already-refreshed MCP access token directly with the - Notion SDK, bypassing the connector's config-based token loading. 
- """ - from app.connectors.notion_history import NotionHistoryConnector - from app.schemas.notion_auth_credentials import NotionAuthCredentialsBase - - token = self._access_token - if not token: - token = await self._get_valid_token() - - connector = NotionHistoryConnector( - session=self._session, - connector_id=self._connector_id, - ) - connector._credentials = NotionAuthCredentialsBase(access_token=token) - connector._using_legacy_token = True - - method = getattr(connector, method_name) - return await method(**kwargs) - - # ------------------------------------------------------------------ - # Public API — same signatures as NotionHistoryConnector - # ------------------------------------------------------------------ - - async def create_page( - self, - title: str, - content: str, - parent_page_id: str | None = None, - ) -> dict[str, Any]: - arguments: dict[str, Any] = { - "pages": [ - { - "title": title, - "content": content, - } - ] - } - if parent_page_id: - arguments["pages"][0]["parent_page_url"] = parent_page_id - - return await self._call_with_fallback( - tool_name="notion-create-pages", - arguments=arguments, - parser=parse_create_page_response, - fallback_method="create_page", - fallback_kwargs={ - "title": title, - "content": content, - "parent_page_id": parent_page_id, - }, - ) - - async def update_page( - self, - page_id: str, - content: str | None = None, - ) -> dict[str, Any]: - arguments: dict[str, Any] = { - "page_id": page_id, - "command": "replace_content", - } - if content: - arguments["new_str"] = content - - return await self._call_with_fallback( - tool_name="notion-update-page", - arguments=arguments, - parser=parse_update_page_response, - fallback_method="update_page", - fallback_kwargs={"page_id": page_id, "content": content}, - ) - - async def delete_page(self, page_id: str) -> dict[str, Any]: - arguments: dict[str, Any] = { - "page_id": page_id, - "command": "update_properties", - "archived": True, - } - - return await 
self._call_with_fallback( - tool_name="notion-update-page", - arguments=arguments, - parser=parse_delete_page_response, - fallback_method="delete_page", - fallback_kwargs={"page_id": page_id}, - ) - - async def fetch_page(self, page_url_or_id: str) -> dict[str, Any]: - """Fetch page content via ``notion-fetch``.""" - raw_text = await self._call_mcp_tool( - "notion-fetch", {"url": page_url_or_id} - ) - return parse_fetch_page_response(raw_text) - - async def health_check(self) -> dict[str, Any]: - """Check MCP connection via ``notion-get-self``.""" - try: - raw_text = await self._call_mcp_tool("notion-get-self", {}) - return parse_health_check_response(raw_text) - except Exception as e: - return {"status": "error", "message": str(e)} diff --git a/surfsense_backend/app/services/notion_mcp/oauth.py b/surfsense_backend/app/services/notion_mcp/oauth.py deleted file mode 100644 index cfa6ad3e0..000000000 --- a/surfsense_backend/app/services/notion_mcp/oauth.py +++ /dev/null @@ -1,298 +0,0 @@ -"""OAuth 2.0 + PKCE utilities for Notion's remote MCP server. - -Implements the flow described in the official guide: -https://developers.notion.com/guides/mcp/build-mcp-client - -Steps: - 1. Discover OAuth metadata (RFC 9470 → RFC 8414) - 2. Dynamic client registration (RFC 7591) - 3. Build authorization URL with PKCE code_challenge - 4. Exchange authorization code + code_verifier for tokens - 5. Refresh access tokens (with refresh-token rotation) - -All functions are stateless — callers (route handlers) manage storage. 
-""" - -import logging -from dataclasses import dataclass -from datetime import UTC, datetime, timedelta -from typing import Any - -import httpx - -logger = logging.getLogger(__name__) - -NOTION_MCP_SERVER_URL = "https://mcp.notion.com/mcp" -_HTTP_TIMEOUT = 30.0 - - -@dataclass(frozen=True) -class OAuthMetadata: - issuer: str - authorization_endpoint: str - token_endpoint: str - registration_endpoint: str | None - code_challenge_methods_supported: list[str] - - -@dataclass(frozen=True) -class ClientCredentials: - client_id: str - client_secret: str | None = None - client_id_issued_at: int | None = None - client_secret_expires_at: int | None = None - - -@dataclass(frozen=True) -class TokenSet: - access_token: str - refresh_token: str | None - token_type: str - expires_in: int | None - expires_at: datetime | None - scope: str | None - - -# --------------------------------------------------------------------------- -# Step 1 — OAuth discovery -# --------------------------------------------------------------------------- - - -async def discover_oauth_metadata( - mcp_server_url: str = NOTION_MCP_SERVER_URL, -) -> OAuthMetadata: - """Discover OAuth endpoints via RFC 9470 + RFC 8414. - - 1. Fetch protected-resource metadata to find the authorization server. - 2. Fetch authorization-server metadata to get OAuth endpoints. 
- """ - from urllib.parse import urlparse - - parsed = urlparse(mcp_server_url) - origin = f"{parsed.scheme}://{parsed.netloc}" - path = parsed.path.rstrip("/") - - async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: - # RFC 9470 — Protected Resource Metadata - # URL format: {origin}/.well-known/oauth-protected-resource{path} - pr_url = f"{origin}/.well-known/oauth-protected-resource{path}" - pr_resp = await client.get(pr_url) - pr_resp.raise_for_status() - pr_data = pr_resp.json() - - auth_servers = pr_data.get("authorization_servers", []) - if not auth_servers: - raise ValueError("No authorization_servers in protected resource metadata") - auth_server_url = auth_servers[0] - - # RFC 8414 — Authorization Server Metadata - as_url = f"{auth_server_url}/.well-known/oauth-authorization-server" - as_resp = await client.get(as_url) - as_resp.raise_for_status() - as_data = as_resp.json() - - if not as_data.get("authorization_endpoint") or not as_data.get("token_endpoint"): - raise ValueError("Missing required OAuth endpoints in server metadata") - - return OAuthMetadata( - issuer=as_data.get("issuer", auth_server_url), - authorization_endpoint=as_data["authorization_endpoint"], - token_endpoint=as_data["token_endpoint"], - registration_endpoint=as_data.get("registration_endpoint"), - code_challenge_methods_supported=as_data.get( - "code_challenge_methods_supported", [] - ), - ) - - -# --------------------------------------------------------------------------- -# Step 2 — Dynamic client registration (RFC 7591) -# --------------------------------------------------------------------------- - - -async def register_client( - metadata: OAuthMetadata, - redirect_uri: str, - client_name: str = "SurfSense", -) -> ClientCredentials: - """Dynamically register an OAuth client with the Notion MCP server.""" - if not metadata.registration_endpoint: - raise ValueError("Server does not support dynamic client registration") - - payload = { - "client_name": client_name, - 
"redirect_uris": [redirect_uri], - "grant_types": ["authorization_code", "refresh_token"], - "response_types": ["code"], - "token_endpoint_auth_method": "none", - } - - async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: - resp = await client.post( - metadata.registration_endpoint, - json=payload, - headers={"Content-Type": "application/json", "Accept": "application/json"}, - ) - if not resp.is_success: - logger.error( - "Dynamic client registration failed (%s): %s", - resp.status_code, - resp.text, - ) - resp.raise_for_status() - data = resp.json() - - return ClientCredentials( - client_id=data["client_id"], - client_secret=data.get("client_secret"), - client_id_issued_at=data.get("client_id_issued_at"), - client_secret_expires_at=data.get("client_secret_expires_at"), - ) - - -# --------------------------------------------------------------------------- -# Step 3 — Build authorization URL -# --------------------------------------------------------------------------- - - -def build_authorization_url( - metadata: OAuthMetadata, - client_id: str, - redirect_uri: str, - code_challenge: str, - state: str, -) -> str: - """Build the OAuth authorization URL with PKCE parameters.""" - from urllib.parse import urlencode - - params = { - "response_type": "code", - "client_id": client_id, - "redirect_uri": redirect_uri, - "code_challenge": code_challenge, - "code_challenge_method": "S256", - "state": state, - "prompt": "consent", - } - return f"{metadata.authorization_endpoint}?{urlencode(params)}" - - -# --------------------------------------------------------------------------- -# Step 4 — Exchange authorization code for tokens -# --------------------------------------------------------------------------- - - -async def exchange_code_for_tokens( - code: str, - code_verifier: str, - metadata: OAuthMetadata, - client_id: str, - redirect_uri: str, - client_secret: str | None = None, -) -> TokenSet: - """Exchange an authorization code + PKCE verifier for tokens.""" - 
form_data: dict[str, Any] = { - "grant_type": "authorization_code", - "code": code, - "client_id": client_id, - "redirect_uri": redirect_uri, - "code_verifier": code_verifier, - } - if client_secret: - form_data["client_secret"] = client_secret - - async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: - resp = await client.post( - metadata.token_endpoint, - data=form_data, - headers={ - "Content-Type": "application/x-www-form-urlencoded", - "Accept": "application/json", - }, - ) - if not resp.is_success: - body = resp.text - raise ValueError(f"Token exchange failed ({resp.status_code}): {body}") - tokens = resp.json() - - if not tokens.get("access_token"): - raise ValueError("No access_token in token response") - - expires_at = None - if tokens.get("expires_in"): - expires_at = datetime.now(UTC) + timedelta(seconds=int(tokens["expires_in"])) - - return TokenSet( - access_token=tokens["access_token"], - refresh_token=tokens.get("refresh_token"), - token_type=tokens.get("token_type", "Bearer"), - expires_in=tokens.get("expires_in"), - expires_at=expires_at, - scope=tokens.get("scope"), - ) - - -# --------------------------------------------------------------------------- -# Step 5 — Refresh access token -# --------------------------------------------------------------------------- - - -async def refresh_access_token( - refresh_token: str, - metadata: OAuthMetadata, - client_id: str, - client_secret: str | None = None, -) -> TokenSet: - """Refresh an access token. - - Notion MCP uses refresh-token rotation: each refresh returns a new - refresh_token and invalidates the old one. Callers MUST persist the - new refresh_token atomically with the new access_token. 
- """ - form_data: dict[str, Any] = { - "grant_type": "refresh_token", - "refresh_token": refresh_token, - "client_id": client_id, - } - if client_secret: - form_data["client_secret"] = client_secret - - async with httpx.AsyncClient(timeout=_HTTP_TIMEOUT) as client: - resp = await client.post( - metadata.token_endpoint, - data=form_data, - headers={ - "Content-Type": "application/x-www-form-urlencoded", - "Accept": "application/json", - }, - ) - - if not resp.is_success: - body = resp.text - try: - error_data = resp.json() - error_code = error_data.get("error", "") - if error_code == "invalid_grant": - raise ValueError("REAUTH_REQUIRED") - except ValueError: - if "REAUTH_REQUIRED" in str(resp.text) or resp.status_code == 401: - raise - raise ValueError(f"Token refresh failed ({resp.status_code}): {body}") - - tokens = resp.json() - - if not tokens.get("access_token"): - raise ValueError("No access_token in refresh response") - - expires_at = None - if tokens.get("expires_in"): - expires_at = datetime.now(UTC) + timedelta(seconds=int(tokens["expires_in"])) - - return TokenSet( - access_token=tokens["access_token"], - refresh_token=tokens.get("refresh_token"), - token_type=tokens.get("token_type", "Bearer"), - expires_in=tokens.get("expires_in"), - expires_at=expires_at, - scope=tokens.get("scope"), - ) diff --git a/surfsense_backend/app/services/notion_mcp/response_parser.py b/surfsense_backend/app/services/notion_mcp/response_parser.py deleted file mode 100644 index 34d5ef332..000000000 --- a/surfsense_backend/app/services/notion_mcp/response_parser.py +++ /dev/null @@ -1,212 +0,0 @@ -"""Parse Notion MCP tool responses into structured dicts. - -The Notion MCP server returns responses as MCP TextContent where the -``text`` field contains JSON-stringified Notion API response data. 
-See: https://deepwiki.com/makenotion/notion-mcp-server/4.3-request-and-response-handling - -This module extracts that JSON and normalises it into the same dict -format that ``NotionHistoryConnector`` methods return, so downstream -code (KB sync, tool factories) works unchanged. -""" - -import json -import logging -from typing import Any - -logger = logging.getLogger(__name__) - -MCP_SERIALIZATION_ERROR_MARKERS = [ - "Expected array, received string", - "Expected object, received string", - "should be defined, instead was `undefined`", -] - - -def is_mcp_serialization_error(text: str) -> bool: - """Return True if the MCP error text matches a known serialization bug.""" - return any(marker in text for marker in MCP_SERIALIZATION_ERROR_MARKERS) - - -def extract_text_from_mcp_response(response) -> str: - """Pull the concatenated text out of an MCP ``CallToolResult``. - - Args: - response: The ``CallToolResult`` returned by ``session.call_tool()``. - - Returns: - Concatenated text content from the response. 
- """ - parts: list[str] = [] - for content in response.content: - if hasattr(content, "text"): - parts.append(content.text) - elif hasattr(content, "data"): - parts.append(str(content.data)) - else: - parts.append(str(content)) - return "\n".join(parts) if parts else "" - - -def _try_parse_json(text: str) -> dict[str, Any] | None: - """Attempt to parse *text* as JSON, returning None on failure.""" - try: - parsed = json.loads(text) - if isinstance(parsed, dict): - return parsed - except (json.JSONDecodeError, TypeError): - pass - return None - - -def _extract_page_title(page_data: dict[str, Any]) -> str: - """Best-effort extraction of the page title from a Notion page object.""" - props = page_data.get("properties", {}) - for prop in props.values(): - if prop.get("type") == "title": - title_parts = prop.get("title", []) - if title_parts: - return " ".join(t.get("plain_text", "") for t in title_parts) - return page_data.get("id", "Untitled") - - -def parse_create_page_response(raw_text: str) -> dict[str, Any]: - """Parse a ``notion-create-pages`` MCP response. 
- - Returns a dict compatible with ``NotionHistoryConnector.create_page()``: - ``{status, page_id, url, title, message}`` - """ - data = _try_parse_json(raw_text) - - if data is None: - if is_mcp_serialization_error(raw_text): - return { - "status": "mcp_error", - "message": raw_text, - "mcp_serialization_error": True, - } - return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} - - if data.get("status") == "error" or "error" in data: - return { - "status": "error", - "message": data.get("message", data.get("error", str(data))), - } - - page_id = data.get("id", "") - url = data.get("url", "") - title = _extract_page_title(data) - - return { - "status": "success", - "page_id": page_id, - "url": url, - "title": title, - "message": f"Created Notion page '{title}'", - } - - -def parse_update_page_response(raw_text: str) -> dict[str, Any]: - """Parse a ``notion-update-page`` MCP response. - - Returns a dict compatible with ``NotionHistoryConnector.update_page()``: - ``{status, page_id, url, title, message}`` - """ - data = _try_parse_json(raw_text) - - if data is None: - if is_mcp_serialization_error(raw_text): - return { - "status": "mcp_error", - "message": raw_text, - "mcp_serialization_error": True, - } - return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} - - if data.get("status") == "error" or "error" in data: - return { - "status": "error", - "message": data.get("message", data.get("error", str(data))), - } - - page_id = data.get("id", "") - url = data.get("url", "") - title = _extract_page_title(data) - - return { - "status": "success", - "page_id": page_id, - "url": url, - "title": title, - "message": f"Updated Notion page '{title}' (content appended)", - } - - -def parse_delete_page_response(raw_text: str) -> dict[str, Any]: - """Parse an archive (delete) MCP response. - - The Notion API responds to ``pages.update(archived=True)`` with - the archived page object. 
- - Returns a dict compatible with ``NotionHistoryConnector.delete_page()``: - ``{status, page_id, message}`` - """ - data = _try_parse_json(raw_text) - - if data is None: - if is_mcp_serialization_error(raw_text): - return { - "status": "mcp_error", - "message": raw_text, - "mcp_serialization_error": True, - } - return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} - - if data.get("status") == "error" or "error" in data: - return { - "status": "error", - "message": data.get("message", data.get("error", str(data))), - } - - page_id = data.get("id", "") - title = _extract_page_title(data) - - return { - "status": "success", - "page_id": page_id, - "message": f"Deleted Notion page '{title}'", - } - - -def parse_fetch_page_response(raw_text: str) -> dict[str, Any]: - """Parse a ``notion-fetch`` MCP response. - - Returns the raw parsed dict (Notion page/block data) or an error dict. - """ - data = _try_parse_json(raw_text) - - if data is None: - return {"status": "error", "message": f"Unexpected MCP response: {raw_text[:500]}"} - - if data.get("status") == "error" or "error" in data: - return { - "status": "error", - "message": data.get("message", data.get("error", str(data))), - } - - return {"status": "success", "data": data} - - -def parse_health_check_response(raw_text: str) -> dict[str, Any]: - """Parse a ``notion-get-self`` MCP response for health checking.""" - data = _try_parse_json(raw_text) - - if data is None: - return {"status": "error", "message": raw_text[:500]} - - if data.get("status") == "error" or "error" in data: - return { - "status": "error", - "message": data.get("message", data.get("error", str(data))), - } - - return {"status": "success", "data": data} From 291c1078c3ada5702c891d48e332c8af88e3d24c Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:33:18 +0200 Subject: [PATCH 13/57] remove Notion MCP router from routes --- surfsense_backend/app/routes/__init__.py | 2 -- 1 file changed, 2 deletions(-) 
diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py index faec7fe09..ad40666cd 100644 --- a/surfsense_backend/app/routes/__init__.py +++ b/surfsense_backend/app/routes/__init__.py @@ -37,7 +37,6 @@ from .new_llm_config_routes import router as new_llm_config_router from .notes_routes import router as notes_router from .notifications_routes import router as notifications_router from .notion_add_connector_route import router as notion_add_connector_router -from .notion_mcp_connector_route import router as notion_mcp_connector_router from .onedrive_add_connector_route import router as onedrive_add_connector_router from .podcasts_routes import router as podcasts_router from .prompts_routes import router as prompts_router @@ -82,7 +81,6 @@ router.include_router(airtable_add_connector_router) router.include_router(linear_add_connector_router) router.include_router(luma_add_connector_router) router.include_router(notion_add_connector_router) -router.include_router(notion_mcp_connector_router) router.include_router(slack_add_connector_router) router.include_router(teams_add_connector_router) router.include_router(onedrive_add_connector_router) From 48158740aec368179b5531a5fa014c26552b2999 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:33:22 +0200 Subject: [PATCH 14/57] remove Notion MCP tool definitions from registry --- .../app/agents/new_chat/tools/registry.py | 42 ------------------- 1 file changed, 42 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/tools/registry.py b/surfsense_backend/app/agents/new_chat/tools/registry.py index f9b9287de..6f7a5a03f 100644 --- a/surfsense_backend/app/agents/new_chat/tools/registry.py +++ b/surfsense_backend/app/agents/new_chat/tools/registry.py @@ -86,11 +86,6 @@ from .notion import ( create_delete_notion_page_tool, create_update_notion_page_tool, ) -from .notion_mcp import ( - create_page as notion_mcp_create_page_mod, - delete_page as 
notion_mcp_delete_page_mod, - update_page as notion_mcp_update_page_mod, -) from .onedrive import ( create_create_onedrive_file_tool, create_delete_onedrive_file_tool, @@ -330,43 +325,6 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ required_connector="NOTION_CONNECTOR", ), # ========================================================================= - # NOTION MCP TOOLS - MCP-backed variants (disabled until swap) - # These route through Notion's hosted MCP server instead of direct API. - # ========================================================================= - ToolDefinition( - name="create_notion_page_mcp", - description="Create a new page in Notion via MCP server", - factory=lambda deps: notion_mcp_create_page_mod.create_create_notion_page_mcp_tool( - db_session=deps["db_session"], - search_space_id=deps["search_space_id"], - user_id=deps["user_id"], - ), - requires=["db_session", "search_space_id", "user_id"], - required_connector="NOTION_CONNECTOR", - ), - ToolDefinition( - name="update_notion_page_mcp", - description="Append new content to an existing Notion page via MCP server", - factory=lambda deps: notion_mcp_update_page_mod.create_update_notion_page_mcp_tool( - db_session=deps["db_session"], - search_space_id=deps["search_space_id"], - user_id=deps["user_id"], - ), - requires=["db_session", "search_space_id", "user_id"], - required_connector="NOTION_CONNECTOR", - ), - ToolDefinition( - name="delete_notion_page_mcp", - description="Delete an existing Notion page via MCP server", - factory=lambda deps: notion_mcp_delete_page_mod.create_delete_notion_page_mcp_tool( - db_session=deps["db_session"], - search_space_id=deps["search_space_id"], - user_id=deps["user_id"], - ), - requires=["db_session", "search_space_id", "user_id"], - required_connector="NOTION_CONNECTOR", - ), - # ========================================================================= # GOOGLE DRIVE TOOLS - create files, delete files # Auto-disabled when no Google Drive connector is configured 
(see chat_deepagent.py) # ========================================================================= From 177a34667388e7fd84e60edba4130d557c5e195f Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:33:30 +0200 Subject: [PATCH 15/57] remove Notion MCP gating from agent --- .../app/agents/new_chat/chat_deepagent.py | 21 ------------------- 1 file changed, 21 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/chat_deepagent.py b/surfsense_backend/app/agents/new_chat/chat_deepagent.py index 480cae8c9..17334d66a 100644 --- a/surfsense_backend/app/agents/new_chat/chat_deepagent.py +++ b/surfsense_backend/app/agents/new_chat/chat_deepagent.py @@ -290,27 +290,6 @@ async def create_surfsense_deep_agent( get_connector_gated_tools(available_connectors) ) - # TODO(phase-1): Remove Notion MCP gating after revert. - has_notion_connector = ( - available_connectors is not None and "NOTION_CONNECTOR" in available_connectors - ) - if has_notion_connector: - from app.services.notion_mcp import has_mcp_notion_connector - - _use_mcp = await has_mcp_notion_connector(db_session, search_space_id) - if _use_mcp: - modified_disabled_tools.extend([ - "create_notion_page", - "update_notion_page", - "delete_notion_page", - ]) - else: - modified_disabled_tools.extend([ - "create_notion_page_mcp", - "update_notion_page_mcp", - "delete_notion_page_mcp", - ]) - # Remove direct KB search tool; we now pre-seed a scoped filesystem via middleware. 
if "search_knowledge_base" not in modified_disabled_tools: modified_disabled_tools.append("search_knowledge_base") From 978a8e2e071bedbc97557db4c5c6d4cf9e58879e Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:33:35 +0200 Subject: [PATCH 16/57] remove MCP health check branch from Notion metadata service --- .../services/notion/tool_metadata_service.py | 21 ------------------- 1 file changed, 21 deletions(-) diff --git a/surfsense_backend/app/services/notion/tool_metadata_service.py b/surfsense_backend/app/services/notion/tool_metadata_service.py index 8a58d5e62..19dc1fd89 100644 --- a/surfsense_backend/app/services/notion/tool_metadata_service.py +++ b/surfsense_backend/app/services/notion/tool_metadata_service.py @@ -227,30 +227,9 @@ class NotionToolMetadataService: async def _check_account_health(self, connector_id: int) -> bool: """Check if a Notion connector's token is still valid. - For regular connectors: uses ``users.me()`` via the Notion SDK. - For MCP-mode connectors: uses ``notion-get-self`` via the MCP adapter. - Returns True if the token is expired/invalid, False if healthy. 
""" try: - result = await self._db_session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == connector_id - ) - ) - db_connector = result.scalars().first() - if not db_connector: - return True - - if (db_connector.config or {}).get("mcp_mode"): - from app.services.notion_mcp.adapter import NotionMCPAdapter - - adapter = NotionMCPAdapter( - session=self._db_session, connector_id=connector_id - ) - health = await adapter.health_check() - return health.get("status") != "success" - connector = NotionHistoryConnector( session=self._db_session, connector_id=connector_id ) From e02fbbef6c5c2054e9ed0db3452e5742de68715c Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:33:57 +0200 Subject: [PATCH 17/57] remove MCP-mode skip from Notion indexer --- .../app/tasks/connector_indexers/notion_indexer.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py b/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py index 6a3a99b5c..77aac795a 100644 --- a/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py +++ b/surfsense_backend/app/tasks/connector_indexers/notion_indexer.py @@ -129,18 +129,6 @@ async def index_notion_pages( f"Connector with ID {connector_id} not found or is not a Notion connector", ) - if (connector.config or {}).get("mcp_mode"): - msg = ( - f"Connector {connector_id} is an MCP-mode connector. " - "Background indexing is not supported for MCP connectors — " - "use a regular Notion connector for indexing." 
- ) - logger.info(msg) - await task_logger.log_task_completion( - log_entry, msg, {"skipped": True, "reason": "mcp_mode"} - ) - return 0, 0, None - if not connector.config.get("access_token") and not connector.config.get( "NOTION_INTEGRATION_TOKEN" ): From c70f0ccf49b21343ddae20153e2caa4ce45a60e4 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:34:08 +0200 Subject: [PATCH 18/57] revert Notion auth URLs to classic OAuth endpoints --- .../connector-configs/views/connector-edit-view.tsx | 2 +- .../connector-popup/constants/connector-constants.ts | 2 +- .../connector-popup/views/connector-accounts-list-view.tsx | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx index 274fc0fc7..e19600ab2 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx @@ -21,7 +21,7 @@ import { getConnectorConfigComponent } from "../index"; const REAUTH_ENDPOINTS: Partial> = { [EnumConnectorName.LINEAR_CONNECTOR]: "/api/v1/auth/linear/connector/reauth", - [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion-mcp/connector/reauth", + [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion/connector/reauth", [EnumConnectorName.GOOGLE_DRIVE_CONNECTOR]: "/api/v1/auth/google/drive/connector/reauth", [EnumConnectorName.GOOGLE_GMAIL_CONNECTOR]: "/api/v1/auth/google/gmail/connector/reauth", [EnumConnectorName.GOOGLE_CALENDAR_CONNECTOR]: "/api/v1/auth/google/calendar/connector/reauth", diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 
0e517b38e..5b61e8bdf 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -38,7 +38,7 @@ export const OAUTH_CONNECTORS = [ title: "Notion", description: "Search your Notion pages", connectorType: EnumConnectorName.NOTION_CONNECTOR, - authEndpoint: "/api/v1/auth/notion-mcp/connector/add", + authEndpoint: "/api/v1/auth/notion/connector/add", }, { id: "linear-connector", diff --git a/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx index 6cdd535db..b4c049c5c 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/views/connector-accounts-list-view.tsx @@ -18,7 +18,7 @@ import { getConnectorDisplayName } from "../tabs/all-connectors-tab"; const REAUTH_ENDPOINTS: Partial> = { [EnumConnectorName.LINEAR_CONNECTOR]: "/api/v1/auth/linear/connector/reauth", - [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion-mcp/connector/reauth", + [EnumConnectorName.NOTION_CONNECTOR]: "/api/v1/auth/notion/connector/reauth", [EnumConnectorName.GOOGLE_DRIVE_CONNECTOR]: "/api/v1/auth/google/drive/connector/reauth", [EnumConnectorName.GOOGLE_GMAIL_CONNECTOR]: "/api/v1/auth/google/gmail/connector/reauth", [EnumConnectorName.GOOGLE_CALENDAR_CONNECTOR]: "/api/v1/auth/google/calendar/connector/reauth", From d2cb778c08bf6f8dbc81d06b2d422ab8f5f51b44 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:48:40 +0200 Subject: [PATCH 19/57] add Gmail search and read email tools --- .../agents/new_chat/tools/gmail/__init__.py | 8 + .../agents/new_chat/tools/gmail/read_email.py | 87 ++++++++++ .../new_chat/tools/gmail/search_emails.py | 148 ++++++++++++++++++ 3 files changed, 243 insertions(+) create 
mode 100644 surfsense_backend/app/agents/new_chat/tools/gmail/read_email.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/gmail/search_emails.py diff --git a/surfsense_backend/app/agents/new_chat/tools/gmail/__init__.py b/surfsense_backend/app/agents/new_chat/tools/gmail/__init__.py index efb2fb0fa..294840122 100644 --- a/surfsense_backend/app/agents/new_chat/tools/gmail/__init__.py +++ b/surfsense_backend/app/agents/new_chat/tools/gmail/__init__.py @@ -1,6 +1,12 @@ from app.agents.new_chat.tools.gmail.create_draft import ( create_create_gmail_draft_tool, ) +from app.agents.new_chat.tools.gmail.read_email import ( + create_read_gmail_email_tool, +) +from app.agents.new_chat.tools.gmail.search_emails import ( + create_search_gmail_tool, +) from app.agents.new_chat.tools.gmail.send_email import ( create_send_gmail_email_tool, ) @@ -13,6 +19,8 @@ from app.agents.new_chat.tools.gmail.update_draft import ( __all__ = [ "create_create_gmail_draft_tool", + "create_read_gmail_email_tool", + "create_search_gmail_tool", "create_send_gmail_email_tool", "create_trash_gmail_email_tool", "create_update_gmail_draft_tool", diff --git a/surfsense_backend/app/agents/new_chat/tools/gmail/read_email.py b/surfsense_backend/app/agents/new_chat/tools/gmail/read_email.py new file mode 100644 index 000000000..9071f129a --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/gmail/read_email.py @@ -0,0 +1,87 @@ +import logging +from typing import Any + +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.db import SearchSourceConnector, SearchSourceConnectorType + +logger = logging.getLogger(__name__) + +_GMAIL_TYPES = [ + SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR, + SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR, +] + + +def create_read_gmail_email_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + user_id: str | None = None, +): + 
@tool + async def read_gmail_email(message_id: str) -> dict[str, Any]: + """Read the full content of a specific Gmail email by its message ID. + + Use after search_gmail to get the complete body of an email. + + Args: + message_id: The Gmail message ID (from search_gmail results). + + Returns: + Dictionary with status and the full email content formatted as markdown. + """ + if db_session is None or search_space_id is None or user_id is None: + return {"status": "error", "message": "Gmail tool not properly configured."} + + try: + result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type.in_(_GMAIL_TYPES), + ) + ) + connector = result.scalars().first() + if not connector: + return { + "status": "error", + "message": "No Gmail connector found. Please connect Gmail in your workspace settings.", + } + + from app.agents.new_chat.tools.gmail.search_emails import _build_credentials + + creds = _build_credentials(connector) + + from app.connectors.google_gmail_connector import GoogleGmailConnector + + gmail = GoogleGmailConnector( + credentials=creds, + session=db_session, + user_id=user_id, + connector_id=connector.id, + ) + + detail, error = await gmail.get_message_details(message_id) + if error: + if "re-authenticate" in error.lower() or "authentication failed" in error.lower(): + return {"status": "auth_error", "message": error, "connector_type": "gmail"} + return {"status": "error", "message": error} + + if not detail: + return {"status": "not_found", "message": f"Email with ID '{message_id}' not found."} + + content = gmail.format_message_to_markdown(detail) + + return {"status": "success", "message_id": message_id, "content": content} + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + logger.error("Error reading Gmail email: %s", e, 
exc_info=True) + return {"status": "error", "message": "Failed to read email. Please try again."} + + return read_gmail_email diff --git a/surfsense_backend/app/agents/new_chat/tools/gmail/search_emails.py b/surfsense_backend/app/agents/new_chat/tools/gmail/search_emails.py new file mode 100644 index 000000000..bfc328389 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/gmail/search_emails.py @@ -0,0 +1,148 @@ +import logging +from datetime import datetime +from typing import Any + +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.db import SearchSourceConnector, SearchSourceConnectorType + +logger = logging.getLogger(__name__) + +_GMAIL_TYPES = [ + SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR, + SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR, +] + + +def _build_credentials(connector: SearchSourceConnector): + """Build Google OAuth Credentials from a Gmail connector's config.""" + if connector.connector_type == SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR: + from app.utils.google_credentials import build_composio_credentials + + cca_id = connector.config.get("composio_connected_account_id") + if not cca_id: + raise ValueError("Composio connected account ID not found.") + return build_composio_credentials(cca_id) + + from google.oauth2.credentials import Credentials + + from app.config import config + from app.utils.oauth_security import TokenEncryption + + cfg = dict(connector.config) + if cfg.get("_token_encrypted") and config.SECRET_KEY: + enc = TokenEncryption(config.SECRET_KEY) + for key in ("token", "refresh_token", "client_secret"): + if cfg.get(key): + cfg[key] = enc.decrypt_token(cfg[key]) + + exp = (cfg.get("expiry") or "").replace("Z", "") + return Credentials( + token=cfg.get("token"), + refresh_token=cfg.get("refresh_token"), + token_uri=cfg.get("token_uri"), + client_id=cfg.get("client_id"), + client_secret=cfg.get("client_secret"), + 
scopes=cfg.get("scopes", []), + expiry=datetime.fromisoformat(exp) if exp else None, + ) + + +def create_search_gmail_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + user_id: str | None = None, +): + @tool + async def search_gmail( + query: str, + max_results: int = 10, + ) -> dict[str, Any]: + """Search emails in the user's Gmail inbox using Gmail search syntax. + + Args: + query: Gmail search query, same syntax as the Gmail search bar. + Examples: "from:alice@example.com", "subject:meeting", + "is:unread", "after:2024/01/01 before:2024/02/01", + "has:attachment", "in:sent". + max_results: Number of emails to return (default 10, max 20). + + Returns: + Dictionary with status and a list of email summaries including + message_id, subject, from, date, snippet. + """ + if db_session is None or search_space_id is None or user_id is None: + return {"status": "error", "message": "Gmail tool not properly configured."} + + max_results = min(max_results, 20) + + try: + result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type.in_(_GMAIL_TYPES), + ) + ) + connector = result.scalars().first() + if not connector: + return { + "status": "error", + "message": "No Gmail connector found. 
Please connect Gmail in your workspace settings.", + } + + creds = _build_credentials(connector) + + from app.connectors.google_gmail_connector import GoogleGmailConnector + + gmail = GoogleGmailConnector( + credentials=creds, + session=db_session, + user_id=user_id, + connector_id=connector.id, + ) + + messages_list, error = await gmail.get_messages_list( + max_results=max_results, query=query + ) + if error: + if "re-authenticate" in error.lower() or "authentication failed" in error.lower(): + return {"status": "auth_error", "message": error, "connector_type": "gmail"} + return {"status": "error", "message": error} + + if not messages_list: + return {"status": "success", "emails": [], "total": 0, "message": "No emails found."} + + emails = [] + for msg in messages_list: + detail, err = await gmail.get_message_details(msg["id"]) + if err: + continue + headers = { + h["name"].lower(): h["value"] + for h in detail.get("payload", {}).get("headers", []) + } + emails.append({ + "message_id": detail.get("id"), + "thread_id": detail.get("threadId"), + "subject": headers.get("subject", "No Subject"), + "from": headers.get("from", "Unknown"), + "to": headers.get("to", ""), + "date": headers.get("date", ""), + "snippet": detail.get("snippet", ""), + "labels": detail.get("labelIds", []), + }) + + return {"status": "success", "emails": emails, "total": len(emails)} + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + logger.error("Error searching Gmail: %s", e, exc_info=True) + return {"status": "error", "message": "Failed to search Gmail. 
Please try again."} + + return search_gmail From 07a5fac15d5f5a10722c9febed527bd2632e3023 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:48:47 +0200 Subject: [PATCH 20/57] add Calendar search events tool --- .../tools/google_calendar/__init__.py | 4 + .../tools/google_calendar/search_events.py | 148 ++++++++++++++++++ 2 files changed, 152 insertions(+) create mode 100644 surfsense_backend/app/agents/new_chat/tools/google_calendar/search_events.py diff --git a/surfsense_backend/app/agents/new_chat/tools/google_calendar/__init__.py b/surfsense_backend/app/agents/new_chat/tools/google_calendar/__init__.py index d1ce4e795..13d4c06cb 100644 --- a/surfsense_backend/app/agents/new_chat/tools/google_calendar/__init__.py +++ b/surfsense_backend/app/agents/new_chat/tools/google_calendar/__init__.py @@ -4,6 +4,9 @@ from app.agents.new_chat.tools.google_calendar.create_event import ( from app.agents.new_chat.tools.google_calendar.delete_event import ( create_delete_calendar_event_tool, ) +from app.agents.new_chat.tools.google_calendar.search_events import ( + create_search_calendar_events_tool, +) from app.agents.new_chat.tools.google_calendar.update_event import ( create_update_calendar_event_tool, ) @@ -11,5 +14,6 @@ from app.agents.new_chat.tools.google_calendar.update_event import ( __all__ = [ "create_create_calendar_event_tool", "create_delete_calendar_event_tool", + "create_search_calendar_events_tool", "create_update_calendar_event_tool", ] diff --git a/surfsense_backend/app/agents/new_chat/tools/google_calendar/search_events.py b/surfsense_backend/app/agents/new_chat/tools/google_calendar/search_events.py new file mode 100644 index 000000000..ad66775ef --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/google_calendar/search_events.py @@ -0,0 +1,148 @@ +import logging +from datetime import datetime +from typing import Any + +from langchain_core.tools import tool +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future 
import select + +from app.db import SearchSourceConnector, SearchSourceConnectorType + +logger = logging.getLogger(__name__) + +_CALENDAR_TYPES = [ + SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR, + SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR, +] + + +def _build_credentials(connector: SearchSourceConnector): + """Build Google OAuth Credentials from a Calendar connector's config.""" + if connector.connector_type == SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR: + from app.utils.google_credentials import build_composio_credentials + + cca_id = connector.config.get("composio_connected_account_id") + if not cca_id: + raise ValueError("Composio connected account ID not found.") + return build_composio_credentials(cca_id) + + from google.oauth2.credentials import Credentials + + from app.config import config + from app.utils.oauth_security import TokenEncryption + + cfg = dict(connector.config) + if cfg.get("_token_encrypted") and config.SECRET_KEY: + enc = TokenEncryption(config.SECRET_KEY) + for key in ("token", "refresh_token", "client_secret"): + if cfg.get(key): + cfg[key] = enc.decrypt_token(cfg[key]) + + exp = (cfg.get("expiry") or "").replace("Z", "") + return Credentials( + token=cfg.get("token"), + refresh_token=cfg.get("refresh_token"), + token_uri=cfg.get("token_uri"), + client_id=cfg.get("client_id"), + client_secret=cfg.get("client_secret"), + scopes=cfg.get("scopes", []), + expiry=datetime.fromisoformat(exp) if exp else None, + ) + + +def create_search_calendar_events_tool( + db_session: AsyncSession | None = None, + search_space_id: int | None = None, + user_id: str | None = None, +): + @tool + async def search_calendar_events( + start_date: str, + end_date: str, + max_results: int = 25, + ) -> dict[str, Any]: + """Search Google Calendar events within a date range. + + Args: + start_date: Start date in YYYY-MM-DD format (e.g. "2026-04-01"). + end_date: End date in YYYY-MM-DD format (e.g. "2026-04-30"). 
+ max_results: Maximum number of events to return (default 25, max 50). + + Returns: + Dictionary with status and a list of events including + event_id, summary, start, end, location, attendees. + """ + if db_session is None or search_space_id is None or user_id is None: + return {"status": "error", "message": "Calendar tool not properly configured."} + + max_results = min(max_results, 50) + + try: + result = await db_session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type.in_(_CALENDAR_TYPES), + ) + ) + connector = result.scalars().first() + if not connector: + return { + "status": "error", + "message": "No Google Calendar connector found. Please connect Google Calendar in your workspace settings.", + } + + creds = _build_credentials(connector) + + from app.connectors.google_calendar_connector import GoogleCalendarConnector + + cal = GoogleCalendarConnector( + credentials=creds, + session=db_session, + user_id=user_id, + connector_id=connector.id, + ) + + events_raw, error = await cal.get_all_primary_calendar_events( + start_date=start_date, + end_date=end_date, + max_results=max_results, + ) + + if error: + if "re-authenticate" in error.lower() or "authentication failed" in error.lower(): + return {"status": "auth_error", "message": error, "connector_type": "google_calendar"} + if "no events found" in error.lower(): + return {"status": "success", "events": [], "total": 0, "message": error} + return {"status": "error", "message": error} + + events = [] + for ev in events_raw: + start = ev.get("start", {}) + end = ev.get("end", {}) + attendees_raw = ev.get("attendees", []) + events.append({ + "event_id": ev.get("id"), + "summary": ev.get("summary", "No Title"), + "start": start.get("dateTime") or start.get("date", ""), + "end": end.get("dateTime") or end.get("date", ""), + "location": ev.get("location", ""), + 
"description": ev.get("description", ""), + "html_link": ev.get("htmlLink", ""), + "attendees": [ + a.get("email", "") for a in attendees_raw[:10] + ], + "status": ev.get("status", ""), + }) + + return {"status": "success", "events": events, "total": len(events)} + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + logger.error("Error searching calendar events: %s", e, exc_info=True) + return {"status": "error", "message": "Failed to search calendar events. Please try again."} + + return search_calendar_events From 1de2517eae9b381d6fec4dd8a7ffa21f3de7ce18 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:49:02 +0200 Subject: [PATCH 21/57] add Discord list channels, read messages, send message tools --- .../agents/new_chat/tools/discord/__init__.py | 15 +++ .../agents/new_chat/tools/discord/_auth.py | 46 +++++++++ .../new_chat/tools/discord/list_channels.py | 67 +++++++++++++ .../new_chat/tools/discord/read_messages.py | 80 ++++++++++++++++ .../new_chat/tools/discord/send_message.py | 96 +++++++++++++++++++ 5 files changed, 304 insertions(+) create mode 100644 surfsense_backend/app/agents/new_chat/tools/discord/__init__.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/discord/_auth.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/discord/list_channels.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/discord/read_messages.py create mode 100644 surfsense_backend/app/agents/new_chat/tools/discord/send_message.py diff --git a/surfsense_backend/app/agents/new_chat/tools/discord/__init__.py b/surfsense_backend/app/agents/new_chat/tools/discord/__init__.py new file mode 100644 index 000000000..b4eaec1f0 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/discord/__init__.py @@ -0,0 +1,15 @@ +from app.agents.new_chat.tools.discord.list_channels import ( + create_list_discord_channels_tool, +) +from 
# Discord REST API base (v10); bot-token auth, no gateway connection.
DISCORD_API = "https://discord.com/api/v10"


async def get_discord_connector(
    db_session: AsyncSession,
    search_space_id: int,
    user_id: str,
) -> SearchSourceConnector | None:
    """Return the Discord connector row for this search space/user, or None."""
    stmt = select(SearchSourceConnector).filter(
        SearchSourceConnector.search_space_id == search_space_id,
        SearchSourceConnector.user_id == user_id,
        SearchSourceConnector.connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR,
    )
    rows = await db_session.execute(stmt)
    return rows.scalars().first()


def get_bot_token(connector: SearchSourceConnector) -> str:
    """Extract (and, when flagged, decrypt) the bot token from connector config.

    Raises:
        ValueError: when the config contains no bot token.
    """
    cfg = dict(connector.config)
    # Tokens are stored encrypted when _token_encrypted is set and an app
    # secret is configured; decrypt in place on a copy of the config.
    if cfg.get("_token_encrypted") and config.SECRET_KEY:
        cipher = TokenEncryption(config.SECRET_KEY)
        if cfg.get("bot_token"):
            cfg["bot_token"] = cipher.decrypt_token(cfg["bot_token"])
    token = cfg.get("bot_token")
    if not token:
        raise ValueError("Discord bot token not found in connector config.")
    return token


def get_guild_id(connector: SearchSourceConnector) -> str | None:
    """Return the configured guild (server) ID, if present."""
    return connector.config.get("guild_id")


def create_list_discord_channels_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Build the list_discord_channels agent tool bound to a session/space/user."""

    @tool
    async def list_discord_channels() -> dict[str, Any]:
        """List text channels in the connected Discord server.

        Returns:
            Dictionary with status and a list of channels (id, name).
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Discord tool not properly configured."}

        try:
            connector = await get_discord_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Discord connector found."}

            guild_id = get_guild_id(connector)
            if not guild_id:
                return {"status": "error", "message": "No guild ID in Discord connector config."}

            bot_token = get_bot_token(connector)

            async with httpx.AsyncClient() as http:
                response = await http.get(
                    f"{DISCORD_API}/guilds/{guild_id}/channels",
                    headers={"Authorization": f"Bot {bot_token}"},
                    timeout=15.0,
                )

            if response.status_code == 401:
                return {"status": "auth_error", "message": "Discord bot token is invalid.", "connector_type": "discord"}
            if response.status_code != 200:
                return {"status": "error", "message": f"Discord API error: {response.status_code}"}

            text_channels = []
            for channel in response.json():
                if channel.get("type") == 0:  # type 0 = guild text channel
                    text_channels.append({"id": channel["id"], "name": channel["name"]})

            return {
                "status": "success",
                "guild_id": guild_id,
                "channels": text_channels,
                "total": len(text_channels),
            }

        except Exception as exc:
            from langgraph.errors import GraphInterrupt

            # HITL interrupts must propagate to the graph runtime.
            if isinstance(exc, GraphInterrupt):
                raise
            logger.error("Error listing Discord channels: %s", exc, exc_info=True)
            return {"status": "error", "message": "Failed to list Discord channels."}

    return list_discord_channels
def create_read_discord_messages_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Build the read_discord_messages agent tool bound to a session/space/user."""

    @tool
    async def read_discord_messages(
        channel_id: str,
        limit: int = 25,
    ) -> dict[str, Any]:
        """Read recent messages from a Discord text channel.

        Args:
            channel_id: The Discord channel ID (from list_discord_channels).
            limit: Number of messages to fetch (default 25, max 50).

        Returns:
            Dictionary with status and a list of messages including
            id, author, content, timestamp.
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Discord tool not properly configured."}

        # Clamp to [1, 50]; non-positive values would otherwise be sent to the API.
        limit = max(1, min(limit, 50))

        try:
            connector = await get_discord_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Discord connector found."}

            token = get_bot_token(connector)

            async with httpx.AsyncClient() as client:
                resp = await client.get(
                    f"{DISCORD_API}/channels/{channel_id}/messages",
                    headers={"Authorization": f"Bot {token}"},
                    params={"limit": limit},
                    timeout=15.0,
                )

            if resp.status_code == 401:
                return {"status": "auth_error", "message": "Discord bot token is invalid.", "connector_type": "discord"}
            if resp.status_code == 403:
                return {"status": "error", "message": "Bot lacks permission to read this channel."}
            if resp.status_code != 200:
                return {"status": "error", "message": f"Discord API error: {resp.status_code}"}

            messages = [
                {
                    "id": m["id"],
                    "author": m.get("author", {}).get("username", "Unknown"),
                    "content": m.get("content", ""),
                    "timestamp": m.get("timestamp", ""),
                }
                for m in resp.json()
            ]

            return {"status": "success", "channel_id": channel_id, "messages": messages, "total": len(messages)}

        except Exception as e:
            from langgraph.errors import GraphInterrupt

            # HITL interrupts must propagate to the graph runtime.
            if isinstance(e, GraphInterrupt):
                raise
            logger.error("Error reading Discord messages: %s", e, exc_info=True)
            return {"status": "error", "message": "Failed to read Discord messages."}

    return read_discord_messages


def create_send_discord_message_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Build the send_discord_message agent tool (HITL-gated) bound to a session/space/user."""

    @tool
    async def send_discord_message(
        channel_id: str,
        content: str,
    ) -> dict[str, Any]:
        """Send a message to a Discord text channel.

        Args:
            channel_id: The Discord channel ID (from list_discord_channels).
            content: The message text (max 2000 characters).

        Returns:
            Dictionary with status, message_id on success.

        IMPORTANT:
            - If status is "rejected", the user explicitly declined. Do NOT retry.
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Discord tool not properly configured."}

        # Fast pre-check before bothering the user with an approval prompt.
        if len(content) > 2000:
            return {"status": "error", "message": "Message exceeds Discord's 2000-character limit."}

        try:
            connector = await get_discord_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Discord connector found."}

            result = request_approval(
                action_type="discord_send_message",
                tool_name="send_discord_message",
                params={"channel_id": channel_id, "content": content},
                context={"connector_id": connector.id},
            )

            if result.rejected:
                return {"status": "rejected", "message": "User declined. Message was not sent."}

            # The user may have edited the params during approval.
            final_content = result.params.get("content", content)
            final_channel = result.params.get("channel_id", channel_id)

            # Re-validate: the approval step can return edited content that
            # exceeds the limit even though the original content passed.
            if len(final_content) > 2000:
                return {"status": "error", "message": "Message exceeds Discord's 2000-character limit."}

            token = get_bot_token(connector)

            async with httpx.AsyncClient() as client:
                resp = await client.post(
                    f"{DISCORD_API}/channels/{final_channel}/messages",
                    headers={
                        "Authorization": f"Bot {token}",
                        "Content-Type": "application/json",
                    },
                    json={"content": final_content},
                    timeout=15.0,
                )

            if resp.status_code == 401:
                return {"status": "auth_error", "message": "Discord bot token is invalid.", "connector_type": "discord"}
            if resp.status_code == 403:
                return {"status": "error", "message": "Bot lacks permission to send messages in this channel."}
            if resp.status_code not in (200, 201):
                return {"status": "error", "message": f"Discord API error: {resp.status_code}"}

            msg_data = resp.json()
            return {
                "status": "success",
                "message_id": msg_data.get("id"),
                "message": f"Message sent to channel {final_channel}.",
            }

        except Exception as e:
            from langgraph.errors import GraphInterrupt

            # HITL interrupts must propagate to the graph runtime.
            if isinstance(e, GraphInterrupt):
                raise
            logger.error("Error sending Discord message: %s", e, exc_info=True)
            return {"status": "error", "message": "Failed to send Discord message."}

    return send_discord_message
# Microsoft Graph REST API base (v1.0); delegated-token auth.
GRAPH_API = "https://graph.microsoft.com/v1.0"


async def get_teams_connector(
    db_session: AsyncSession,
    search_space_id: int,
    user_id: str,
) -> SearchSourceConnector | None:
    """Return the Teams connector row for this search space/user, or None."""
    stmt = select(SearchSourceConnector).filter(
        SearchSourceConnector.search_space_id == search_space_id,
        SearchSourceConnector.user_id == user_id,
        SearchSourceConnector.connector_type == SearchSourceConnectorType.TEAMS_CONNECTOR,
    )
    rows = await db_session.execute(stmt)
    return rows.scalars().first()


async def get_access_token(
    db_session: AsyncSession,
    connector: SearchSourceConnector,
) -> str:
    """Get a valid Microsoft Graph access token, refreshing if expired.

    Delegates refresh/rotation to the connector implementation.
    NOTE(review): relies on the private ``_get_valid_token`` helper —
    confirm it is intended for reuse outside TeamsConnector.
    """
    from app.connectors.teams_connector import TeamsConnector

    tc = TeamsConnector(
        session=db_session,
        connector_id=connector.id,
    )
    return await tc._get_valid_token()


def create_list_teams_channels_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Build the list_teams_channels agent tool bound to a session/space/user."""

    @tool
    async def list_teams_channels() -> dict[str, Any]:
        """List all Microsoft Teams and their channels the user has access to.

        Returns:
            Dictionary with status and a list of teams, each containing
            team_id, team_name, and a list of channels (id, name).
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Teams tool not properly configured."}

        try:
            connector = await get_teams_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Teams connector found."}

            token = await get_access_token(db_session, connector)
            auth_headers = {"Authorization": f"Bearer {token}"}

            async with httpx.AsyncClient(timeout=20.0) as http:
                teams_resp = await http.get(f"{GRAPH_API}/me/joinedTeams", headers=auth_headers)

            if teams_resp.status_code == 401:
                return {"status": "auth_error", "message": "Teams token expired. Please re-authenticate.", "connector_type": "teams"}
            if teams_resp.status_code != 200:
                return {"status": "error", "message": f"Graph API error: {teams_resp.status_code}"}

            joined = teams_resp.json().get("value", [])
            summaries = []

            # One channel listing per team; a failed channel fetch degrades to
            # an empty channel list rather than failing the whole call.
            async with httpx.AsyncClient(timeout=20.0) as http:
                for entry in joined:
                    team_id = entry["id"]
                    channels_resp = await http.get(
                        f"{GRAPH_API}/teams/{team_id}/channels",
                        headers=auth_headers,
                    )
                    channel_list = []
                    if channels_resp.status_code == 200:
                        channel_list = [
                            {"id": ch["id"], "name": ch.get("displayName", "")}
                            for ch in channels_resp.json().get("value", [])
                        ]
                    summaries.append({
                        "team_id": team_id,
                        "team_name": entry.get("displayName", ""),
                        "channels": channel_list,
                    })

            return {"status": "success", "teams": summaries, "total_teams": len(summaries)}

        except Exception as exc:
            from langgraph.errors import GraphInterrupt

            # HITL interrupts must propagate to the graph runtime.
            if isinstance(exc, GraphInterrupt):
                raise
            logger.error("Error listing Teams channels: %s", exc, exc_info=True)
            return {"status": "error", "message": "Failed to list Teams channels."}

    return list_teams_channels
def create_read_teams_messages_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Build the read_teams_messages agent tool bound to a session/space/user."""

    @tool
    async def read_teams_messages(
        team_id: str,
        channel_id: str,
        limit: int = 25,
    ) -> dict[str, Any]:
        """Read recent messages from a Microsoft Teams channel.

        Args:
            team_id: The team ID (from list_teams_channels).
            channel_id: The channel ID (from list_teams_channels).
            limit: Number of messages to fetch (default 25, max 50).

        Returns:
            Dictionary with status and a list of messages including
            id, sender, content, timestamp.
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Teams tool not properly configured."}

        limit = min(limit, 50)

        try:
            connector = await get_teams_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Teams connector found."}

            token = await get_access_token(db_session, connector)

            async with httpx.AsyncClient(timeout=20.0) as http:
                response = await http.get(
                    f"{GRAPH_API}/teams/{team_id}/channels/{channel_id}/messages",
                    headers={"Authorization": f"Bearer {token}"},
                    params={"$top": limit},
                )

            if response.status_code == 401:
                return {"status": "auth_error", "message": "Teams token expired. Please re-authenticate.", "connector_type": "teams"}
            if response.status_code == 403:
                return {"status": "error", "message": "Insufficient permissions to read this channel."}
            if response.status_code != 200:
                return {"status": "error", "message": f"Graph API error: {response.status_code}"}

            shaped = []
            for item in response.json().get("value", []):
                # "from" may be present-but-null in Graph payloads, so guard it.
                origin = item.get("from", {})
                author = origin.get("user", {}) if origin else {}
                body = item.get("body", {})
                shaped.append({
                    "id": item.get("id"),
                    "sender": author.get("displayName", "Unknown"),
                    "content": body.get("content", ""),
                    "content_type": body.get("contentType", "text"),
                    "timestamp": item.get("createdDateTime", ""),
                })

            return {
                "status": "success",
                "team_id": team_id,
                "channel_id": channel_id,
                "messages": shaped,
                "total": len(shaped),
            }

        except Exception as exc:
            from langgraph.errors import GraphInterrupt

            # HITL interrupts must propagate to the graph runtime.
            if isinstance(exc, GraphInterrupt):
                raise
            logger.error("Error reading Teams messages: %s", exc, exc_info=True)
            return {"status": "error", "message": "Failed to read Teams messages."}

    return read_teams_messages
def create_send_teams_message_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Build the send_teams_message agent tool (HITL-gated) bound to a session/space/user."""

    @tool
    async def send_teams_message(
        team_id: str,
        channel_id: str,
        content: str,
    ) -> dict[str, Any]:
        """Send a message to a Microsoft Teams channel.

        Requires the ChannelMessage.Send OAuth scope. If the user gets a
        permission error, they may need to re-authenticate with updated scopes.

        Args:
            team_id: The team ID (from list_teams_channels).
            channel_id: The channel ID (from list_teams_channels).
            content: The message text (HTML supported).

        Returns:
            Dictionary with status, message_id on success.

        IMPORTANT:
            - If status is "rejected", the user explicitly declined. Do NOT retry.
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Teams tool not properly configured."}

        try:
            connector = await get_teams_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Teams connector found."}

            result = request_approval(
                action_type="teams_send_message",
                tool_name="send_teams_message",
                params={"team_id": team_id, "channel_id": channel_id, "content": content},
                context={"connector_id": connector.id},
            )

            if result.rejected:
                return {"status": "rejected", "message": "User declined. Message was not sent."}

            # The user may have edited the params during approval.
            final_content = result.params.get("content", content)
            final_team = result.params.get("team_id", team_id)
            final_channel = result.params.get("channel_id", channel_id)

            token = await get_access_token(db_session, connector)

            async with httpx.AsyncClient(timeout=20.0) as client:
                resp = await client.post(
                    f"{GRAPH_API}/teams/{final_team}/channels/{final_channel}/messages",
                    headers={
                        "Authorization": f"Bearer {token}",
                        "Content-Type": "application/json",
                    },
                    json={"body": {"content": final_content}},
                )

            if resp.status_code == 401:
                return {"status": "auth_error", "message": "Teams token expired. Please re-authenticate.", "connector_type": "teams"}
            if resp.status_code == 403:
                return {
                    "status": "insufficient_permissions",
                    "message": "Missing ChannelMessage.Send permission. Please re-authenticate with updated scopes.",
                }
            if resp.status_code not in (200, 201):
                return {"status": "error", "message": f"Graph API error: {resp.status_code} — {resp.text[:200]}"}

            msg_data = resp.json()
            return {
                "status": "success",
                "message_id": msg_data.get("id"),
                # Fix: was an f-string with no placeholders (ruff F541).
                "message": "Message sent to Teams channel.",
            }

        except Exception as e:
            from langgraph.errors import GraphInterrupt

            # HITL interrupts must propagate to the graph runtime.
            if isinstance(e, GraphInterrupt):
                raise
            logger.error("Error sending Teams message: %s", e, exc_info=True)
            return {"status": "error", "message": "Failed to send Teams message."}

    return send_teams_message
# Luma Public API base; authenticated via the x-luma-api-key header.
LUMA_API = "https://public-api.luma.com/v1"


async def get_luma_connector(
    db_session: AsyncSession,
    search_space_id: int,
    user_id: str,
) -> SearchSourceConnector | None:
    """Return the Luma connector row for this search space/user, or None."""
    stmt = select(SearchSourceConnector).filter(
        SearchSourceConnector.search_space_id == search_space_id,
        SearchSourceConnector.user_id == user_id,
        SearchSourceConnector.connector_type == SearchSourceConnectorType.LUMA_CONNECTOR,
    )
    rows = await db_session.execute(stmt)
    return rows.scalars().first()


def get_api_key(connector: SearchSourceConnector) -> str:
    """Extract the API key from connector config (handles both key names).

    Raises:
        ValueError: when neither key name yields a value.
    """
    # Older configs used "LUMA_API_KEY"; newer ones use "api_key".
    for field in ("api_key", "LUMA_API_KEY"):
        value = connector.config.get(field)
        if value:
            return value
    raise ValueError("Luma API key not found in connector config.")


def luma_headers(api_key: str) -> dict[str, str]:
    """Build the standard HTTP headers for authenticated Luma API requests."""
    return {
        "Content-Type": "application/json",
        "x-luma-api-key": api_key,
    }
str, + end_at: str, + description: str | None = None, + timezone: str = "UTC", + ) -> dict[str, Any]: + """Create a new event on Luma. + + Args: + name: The event title. + start_at: Start time in ISO 8601 format (e.g. "2026-05-01T18:00:00"). + end_at: End time in ISO 8601 format (e.g. "2026-05-01T20:00:00"). + description: Optional event description (markdown supported). + timezone: Timezone string (default "UTC", e.g. "America/New_York"). + + Returns: + Dictionary with status, event_id on success. + + IMPORTANT: + - If status is "rejected", the user explicitly declined. Do NOT retry. + """ + if db_session is None or search_space_id is None or user_id is None: + return {"status": "error", "message": "Luma tool not properly configured."} + + try: + connector = await get_luma_connector(db_session, search_space_id, user_id) + if not connector: + return {"status": "error", "message": "No Luma connector found."} + + result = request_approval( + action_type="luma_create_event", + tool_name="create_luma_event", + params={ + "name": name, + "start_at": start_at, + "end_at": end_at, + "description": description, + "timezone": timezone, + }, + context={"connector_id": connector.id}, + ) + + if result.rejected: + return {"status": "rejected", "message": "User declined. 
Event was not created."} + + final_name = result.params.get("name", name) + final_start = result.params.get("start_at", start_at) + final_end = result.params.get("end_at", end_at) + final_desc = result.params.get("description", description) + final_tz = result.params.get("timezone", timezone) + + api_key = get_api_key(connector) + headers = luma_headers(api_key) + + body: dict[str, Any] = { + "name": final_name, + "start_at": final_start, + "end_at": final_end, + "timezone": final_tz, + } + if final_desc: + body["description_md"] = final_desc + + async with httpx.AsyncClient(timeout=20.0) as client: + resp = await client.post( + f"{LUMA_API}/event/create", + headers=headers, + json=body, + ) + + if resp.status_code == 401: + return {"status": "auth_error", "message": "Luma API key is invalid.", "connector_type": "luma"} + if resp.status_code == 403: + return {"status": "error", "message": "Luma Plus subscription required to create events via API."} + if resp.status_code not in (200, 201): + return {"status": "error", "message": f"Luma API error: {resp.status_code} — {resp.text[:200]}"} + + data = resp.json() + event_id = data.get("api_id") or data.get("event", {}).get("api_id") + + return { + "status": "success", + "event_id": event_id, + "message": f"Event '{final_name}' created on Luma.", + } + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + logger.error("Error creating Luma event: %s", e, exc_info=True) + return {"status": "error", "message": "Failed to create Luma event."} + + return create_luma_event diff --git a/surfsense_backend/app/agents/new_chat/tools/luma/list_events.py b/surfsense_backend/app/agents/new_chat/tools/luma/list_events.py new file mode 100644 index 000000000..cd4721758 --- /dev/null +++ b/surfsense_backend/app/agents/new_chat/tools/luma/list_events.py @@ -0,0 +1,100 @@ +import logging +from typing import Any + +import httpx +from langchain_core.tools import tool +from 
sqlalchemy.ext.asyncio import AsyncSession

from ._auth import LUMA_API, get_api_key, get_luma_connector, luma_headers

logger = logging.getLogger(__name__)


def create_list_luma_events_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Build the ``list_luma_events`` tool bound to the given session/space/user."""

    @tool
    async def list_luma_events(
        max_results: int = 25,
    ) -> dict[str, Any]:
        """List upcoming and recent Luma events.

        Args:
            max_results: Maximum events to return (default 25, max 50).

        Returns:
            Dictionary with status and a list of events including
            event_id, name, start_at, end_at, location, url.
        """
        if db_session is None or search_space_id is None or user_id is None:
            return {"status": "error", "message": "Luma tool not properly configured."}

        # Hard cap to keep responses small regardless of caller input.
        max_results = min(max_results, 50)

        try:
            connector = await get_luma_connector(db_session, search_space_id, user_id)
            if not connector:
                return {"status": "error", "message": "No Luma connector found."}

            api_key = get_api_key(connector)
            headers = luma_headers(api_key)

            all_entries: list[dict] = []
            cursor = None

            # Cursor-based pagination: keep fetching pages until we have
            # max_results entries, the server returns an empty page, or it
            # stops providing a next_cursor.
            # NOTE(review): verify the pagination parameter names
            # ("limit"/"cursor") against Luma's public API docs — some API
            # versions use "pagination_limit"/"pagination_cursor".
            async with httpx.AsyncClient(timeout=20.0) as client:
                while len(all_entries) < max_results:
                    # Request only as many entries as are still needed,
                    # up to the server-side page maximum of 100.
                    params: dict[str, Any] = {"limit": min(100, max_results - len(all_entries))}
                    if cursor:
                        params["cursor"] = cursor

                    resp = await client.get(
                        f"{LUMA_API}/calendar/list-events",
                        headers=headers,
                        params=params,
                    )

                    if resp.status_code == 401:
                        return {"status": "auth_error", "message": "Luma API key is invalid.", "connector_type": "luma"}
                    if resp.status_code != 200:
                        return {"status": "error", "message": f"Luma API error: {resp.status_code}"}

                    data = resp.json()
                    entries = data.get("entries", [])
                    if not entries:
                        break
                    all_entries.extend(entries)

                    next_cursor = data.get("next_cursor")
                    if not next_cursor:
                        break
                    cursor = next_cursor

            # Flatten the raw entries into the compact shape the agent expects.
            events = []
            for entry in all_entries[:max_results]:
                ev = entry.get("event", {})
                geo = ev.get("geo_info", {})
                events.append({
                    "event_id": entry.get("api_id"),
                    "name": ev.get("name", "Untitled"),
                    "start_at": ev.get("start_at", ""),
                    "end_at": ev.get("end_at", ""),
                    "timezone": ev.get("timezone", ""),
                    "location": geo.get("name", ""),
                    "url": ev.get("url", ""),
                    "visibility": ev.get("visibility", ""),
                })

            return {"status": "success", "events": events, "total": len(events)}

        except Exception as e:
            from langgraph.errors import GraphInterrupt

            # Let LangGraph's HITL interrupt propagate; everything else is
            # logged and reported as a tool-level error.
            if isinstance(e, GraphInterrupt):
                raise
            logger.error("Error listing Luma events: %s", e, exc_info=True)
            return {"status": "error", "message": "Failed to list Luma events."}

    return list_luma_events
diff --git a/surfsense_backend/app/agents/new_chat/tools/luma/read_event.py b/surfsense_backend/app/agents/new_chat/tools/luma/read_event.py
new file mode 100644
index 000000000..eb3ac55c6
--- /dev/null
+++ b/surfsense_backend/app/agents/new_chat/tools/luma/read_event.py
@@ -0,0 +1,82 @@
import logging
from typing import Any

import httpx
from langchain_core.tools import tool
from sqlalchemy.ext.asyncio import AsyncSession

from ._auth import LUMA_API, get_api_key, get_luma_connector, luma_headers

logger = logging.getLogger(__name__)


def create_read_luma_event_tool(
    db_session: AsyncSession | None = None,
    search_space_id: int | None = None,
    user_id: str | None = None,
):
    """Build the ``read_luma_event`` tool bound to the given session/space/user."""

    @tool
    async def read_luma_event(event_id: str) -> dict[str, Any]:
        """Read detailed information about a specific Luma event.

        Args:
            event_id: The Luma event API ID (from list_luma_events).

        Returns:
            Dictionary with status and full event details including
            description, attendees count, meeting URL.
+ """ + if db_session is None or search_space_id is None or user_id is None: + return {"status": "error", "message": "Luma tool not properly configured."} + + try: + connector = await get_luma_connector(db_session, search_space_id, user_id) + if not connector: + return {"status": "error", "message": "No Luma connector found."} + + api_key = get_api_key(connector) + headers = luma_headers(api_key) + + async with httpx.AsyncClient(timeout=15.0) as client: + resp = await client.get( + f"{LUMA_API}/events/{event_id}", + headers=headers, + ) + + if resp.status_code == 401: + return {"status": "auth_error", "message": "Luma API key is invalid.", "connector_type": "luma"} + if resp.status_code == 404: + return {"status": "not_found", "message": f"Event '{event_id}' not found."} + if resp.status_code != 200: + return {"status": "error", "message": f"Luma API error: {resp.status_code}"} + + data = resp.json() + ev = data.get("event", data) + geo = ev.get("geo_info", {}) + + event_detail = { + "event_id": event_id, + "name": ev.get("name", ""), + "description": ev.get("description", ""), + "start_at": ev.get("start_at", ""), + "end_at": ev.get("end_at", ""), + "timezone": ev.get("timezone", ""), + "location_name": geo.get("name", ""), + "address": geo.get("address", ""), + "url": ev.get("url", ""), + "meeting_url": ev.get("meeting_url", ""), + "visibility": ev.get("visibility", ""), + "cover_url": ev.get("cover_url", ""), + } + + return {"status": "success", "event": event_detail} + + except Exception as e: + from langgraph.errors import GraphInterrupt + + if isinstance(e, GraphInterrupt): + raise + logger.error("Error reading Luma event: %s", e, exc_info=True) + return {"status": "error", "message": "Failed to read Luma event."} + + return read_luma_event From 575b2c64d7a20f1a4673f7e2866515dca240e138 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 20:50:42 +0200 Subject: [PATCH 24/57] register all new live connector tools in registry --- 
.../app/agents/new_chat/tools/registry.py | 166 +++++++++++++++++- 1 file changed, 164 insertions(+), 2 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/tools/registry.py b/surfsense_backend/app/agents/new_chat/tools/registry.py index 6f7a5a03f..f74b4271f 100644 --- a/surfsense_backend/app/agents/new_chat/tools/registry.py +++ b/surfsense_backend/app/agents/new_chat/tools/registry.py @@ -50,6 +50,11 @@ from .confluence import ( create_delete_confluence_page_tool, create_update_confluence_page_tool, ) +from .discord import ( + create_list_discord_channels_tool, + create_read_discord_messages_tool, + create_send_discord_message_tool, +) from .dropbox import ( create_create_dropbox_file_tool, create_delete_dropbox_file_tool, @@ -57,6 +62,8 @@ from .dropbox import ( from .generate_image import create_generate_image_tool from .gmail import ( create_create_gmail_draft_tool, + create_read_gmail_email_tool, + create_search_gmail_tool, create_send_gmail_email_tool, create_trash_gmail_email_tool, create_update_gmail_draft_tool, @@ -64,6 +71,7 @@ from .gmail import ( from .google_calendar import ( create_create_calendar_event_tool, create_delete_calendar_event_tool, + create_search_calendar_events_tool, create_update_calendar_event_tool, ) from .google_drive import ( @@ -80,6 +88,11 @@ from .linear import ( create_delete_linear_issue_tool, create_update_linear_issue_tool, ) +from .luma import ( + create_create_luma_event_tool, + create_list_luma_events_tool, + create_read_luma_event_tool, +) from .mcp_tool import load_mcp_tools from .notion import ( create_create_notion_page_tool, @@ -95,6 +108,11 @@ from .report import create_generate_report_tool from .resume import create_generate_resume_tool from .scrape_webpage import create_scrape_webpage_tool from .search_surfsense_docs import create_search_surfsense_docs_tool +from .teams import ( + create_list_teams_channels_tool, + create_read_teams_messages_tool, + create_send_teams_message_tool, +) from 
.update_memory import create_update_memory_tool, create_update_team_memory_tool from .video_presentation import create_generate_video_presentation_tool from .web_search import create_web_search_tool @@ -403,9 +421,20 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ required_connector="ONEDRIVE_FILE", ), # ========================================================================= - # GOOGLE CALENDAR TOOLS - create, update, delete events + # GOOGLE CALENDAR TOOLS - search, create, update, delete events # Auto-disabled when no Google Calendar connector is configured # ========================================================================= + ToolDefinition( + name="search_calendar_events", + description="Search Google Calendar events within a date range", + factory=lambda deps: create_search_calendar_events_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_CALENDAR_CONNECTOR", + ), ToolDefinition( name="create_calendar_event", description="Create a new event on Google Calendar", @@ -440,9 +469,31 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ required_connector="GOOGLE_CALENDAR_CONNECTOR", ), # ========================================================================= - # GMAIL TOOLS - create drafts, update drafts, send emails, trash emails + # GMAIL TOOLS - search, read, create drafts, update drafts, send, trash # Auto-disabled when no Gmail connector is configured # ========================================================================= + ToolDefinition( + name="search_gmail", + description="Search emails in Gmail using Gmail search syntax", + factory=lambda deps: create_search_gmail_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_GMAIL_CONNECTOR", + ), + ToolDefinition( + 
name="read_gmail_email", + description="Read the full content of a specific Gmail email", + factory=lambda deps: create_read_gmail_email_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="GOOGLE_GMAIL_CONNECTOR", + ), ToolDefinition( name="create_gmail_draft", description="Create a draft email in Gmail", @@ -561,6 +612,117 @@ BUILTIN_TOOLS: list[ToolDefinition] = [ requires=["db_session", "search_space_id", "user_id"], required_connector="CONFLUENCE_CONNECTOR", ), + # ========================================================================= + # DISCORD TOOLS - list channels, read messages, send messages + # Auto-disabled when no Discord connector is configured + # ========================================================================= + ToolDefinition( + name="list_discord_channels", + description="List text channels in the connected Discord server", + factory=lambda deps: create_list_discord_channels_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="DISCORD_CONNECTOR", + ), + ToolDefinition( + name="read_discord_messages", + description="Read recent messages from a Discord text channel", + factory=lambda deps: create_read_discord_messages_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="DISCORD_CONNECTOR", + ), + ToolDefinition( + name="send_discord_message", + description="Send a message to a Discord text channel", + factory=lambda deps: create_send_discord_message_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + 
required_connector="DISCORD_CONNECTOR", + ), + # ========================================================================= + # TEAMS TOOLS - list channels, read messages, send messages + # Auto-disabled when no Teams connector is configured + # ========================================================================= + ToolDefinition( + name="list_teams_channels", + description="List Microsoft Teams and their channels", + factory=lambda deps: create_list_teams_channels_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="TEAMS_CONNECTOR", + ), + ToolDefinition( + name="read_teams_messages", + description="Read recent messages from a Microsoft Teams channel", + factory=lambda deps: create_read_teams_messages_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="TEAMS_CONNECTOR", + ), + ToolDefinition( + name="send_teams_message", + description="Send a message to a Microsoft Teams channel", + factory=lambda deps: create_send_teams_message_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="TEAMS_CONNECTOR", + ), + # ========================================================================= + # LUMA TOOLS - list events, read event details, create events + # Auto-disabled when no Luma connector is configured + # ========================================================================= + ToolDefinition( + name="list_luma_events", + description="List upcoming and recent Luma events", + factory=lambda deps: create_list_luma_events_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + 
requires=["db_session", "search_space_id", "user_id"], + required_connector="LUMA_CONNECTOR", + ), + ToolDefinition( + name="read_luma_event", + description="Read detailed information about a specific Luma event", + factory=lambda deps: create_read_luma_event_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="LUMA_CONNECTOR", + ), + ToolDefinition( + name="create_luma_event", + description="Create a new event on Luma", + factory=lambda deps: create_create_luma_event_tool( + db_session=deps["db_session"], + search_space_id=deps["search_space_id"], + user_id=deps["user_id"], + ), + requires=["db_session", "search_space_id", "user_id"], + required_connector="LUMA_CONNECTOR", + ), ] From 7133655eebd04453366f6f416d417c075d0ca841 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:19:08 +0200 Subject: [PATCH 25/57] add MCP service registry for Linear, Jira, ClickUp --- .../app/services/mcp_oauth/__init__.py | 0 .../app/services/mcp_oauth/registry.py | 41 +++++++++++++++++++ 2 files changed, 41 insertions(+) create mode 100644 surfsense_backend/app/services/mcp_oauth/__init__.py create mode 100644 surfsense_backend/app/services/mcp_oauth/registry.py diff --git a/surfsense_backend/app/services/mcp_oauth/__init__.py b/surfsense_backend/app/services/mcp_oauth/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py new file mode 100644 index 000000000..93d5d5448 --- /dev/null +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -0,0 +1,41 @@ +"""Registry of MCP services with OAuth 2.1 support. + +Each entry maps a URL-safe service key to its MCP server endpoint and +authentication strategy. 
Services with ``supports_dcr=True`` will use +RFC 7591 Dynamic Client Registration; the rest require pre-configured +credentials via environment variables. +""" + +from __future__ import annotations + +from dataclasses import dataclass, field + + +@dataclass(frozen=True) +class MCPServiceConfig: + name: str + mcp_url: str + supports_dcr: bool = True + client_id_env: str | None = None + client_secret_env: str | None = None + scopes: list[str] = field(default_factory=list) + + +MCP_SERVICES: dict[str, MCPServiceConfig] = { + "linear": MCPServiceConfig( + name="Linear", + mcp_url="https://mcp.linear.app/mcp", + ), + "jira": MCPServiceConfig( + name="Jira", + mcp_url="https://mcp.atlassian.com/v1/mcp", + ), + "clickup": MCPServiceConfig( + name="ClickUp", + mcp_url="https://mcp.clickup.com/mcp", + ), +} + + +def get_service(key: str) -> MCPServiceConfig | None: + return MCP_SERVICES.get(key) From 4efdee5aed65bc41f61f24d37f1502fe4ece5bc4 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:19:15 +0200 Subject: [PATCH 26/57] add MCP OAuth discovery, DCR, and token exchange --- .../app/services/mcp_oauth/discovery.py | 111 ++++++++++++++++++ 1 file changed, 111 insertions(+) create mode 100644 surfsense_backend/app/services/mcp_oauth/discovery.py diff --git a/surfsense_backend/app/services/mcp_oauth/discovery.py b/surfsense_backend/app/services/mcp_oauth/discovery.py new file mode 100644 index 000000000..e8bcd7076 --- /dev/null +++ b/surfsense_backend/app/services/mcp_oauth/discovery.py @@ -0,0 +1,111 @@ +"""MCP OAuth 2.1 metadata discovery, Dynamic Client Registration, and token exchange.""" + +from __future__ import annotations + +import base64 +import logging +from urllib.parse import urlparse + +import httpx + +logger = logging.getLogger(__name__) + + +async def discover_oauth_metadata(mcp_url: str, *, timeout: float = 15.0) -> dict: + """Fetch OAuth 2.1 metadata from the MCP server's well-known endpoint. 
+ + Per the MCP spec the discovery document lives at the *origin* of the + MCP server URL, not at the MCP endpoint path. + """ + parsed = urlparse(mcp_url) + origin = f"{parsed.scheme}://{parsed.netloc}" + discovery_url = f"{origin}/.well-known/oauth-authorization-server" + + async with httpx.AsyncClient(follow_redirects=True) as client: + resp = await client.get(discovery_url, timeout=timeout) + resp.raise_for_status() + return resp.json() + + +async def register_client( + registration_endpoint: str, + redirect_uri: str, + *, + client_name: str = "SurfSense", + timeout: float = 15.0, +) -> dict: + """Perform Dynamic Client Registration (RFC 7591).""" + payload = { + "client_name": client_name, + "redirect_uris": [redirect_uri], + "grant_types": ["authorization_code", "refresh_token"], + "response_types": ["code"], + "token_endpoint_auth_method": "client_secret_basic", + } + + async with httpx.AsyncClient(follow_redirects=True) as client: + resp = await client.post( + registration_endpoint, json=payload, timeout=timeout, + ) + resp.raise_for_status() + return resp.json() + + +async def exchange_code_for_tokens( + token_endpoint: str, + code: str, + redirect_uri: str, + client_id: str, + client_secret: str, + code_verifier: str, + *, + timeout: float = 30.0, +) -> dict: + """Exchange an authorization code for access + refresh tokens.""" + creds = base64.b64encode(f"{client_id}:{client_secret}".encode()).decode() + + async with httpx.AsyncClient(follow_redirects=True) as client: + resp = await client.post( + token_endpoint, + data={ + "grant_type": "authorization_code", + "code": code, + "redirect_uri": redirect_uri, + "code_verifier": code_verifier, + }, + headers={ + "Content-Type": "application/x-www-form-urlencoded", + "Authorization": f"Basic {creds}", + }, + timeout=timeout, + ) + resp.raise_for_status() + return resp.json() + + +async def refresh_access_token( + token_endpoint: str, + refresh_token: str, + client_id: str, + client_secret: str, + *, + timeout: 
float = 30.0, +) -> dict: + """Refresh an expired access token.""" + creds = base64.b64encode(f"{client_id}:{client_secret}".encode()).decode() + + async with httpx.AsyncClient(follow_redirects=True) as client: + resp = await client.post( + token_endpoint, + data={ + "grant_type": "refresh_token", + "refresh_token": refresh_token, + }, + headers={ + "Content-Type": "application/x-www-form-urlencoded", + "Authorization": f"Basic {creds}", + }, + timeout=timeout, + ) + resp.raise_for_status() + return resp.json() From 45867e5c56a81bf0c308eae56bc8b274d87a980a Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:19:32 +0200 Subject: [PATCH 27/57] add generic MCP OAuth route with DCR + PKCE --- .../app/routes/mcp_oauth_route.py | 508 ++++++++++++++++++ 1 file changed, 508 insertions(+) create mode 100644 surfsense_backend/app/routes/mcp_oauth_route.py diff --git a/surfsense_backend/app/routes/mcp_oauth_route.py b/surfsense_backend/app/routes/mcp_oauth_route.py new file mode 100644 index 000000000..689914ee8 --- /dev/null +++ b/surfsense_backend/app/routes/mcp_oauth_route.py @@ -0,0 +1,508 @@ +"""Generic MCP OAuth 2.1 route for services with official MCP servers. + +Handles the full flow: discovery → DCR → PKCE authorization → token exchange +→ MCP_CONNECTOR creation. Currently supports Linear, Jira, and ClickUp. 
+""" + +from __future__ import annotations + +import logging +from datetime import UTC, datetime, timedelta +from urllib.parse import urlencode +from uuid import UUID + +from fastapi import APIRouter, Depends, HTTPException +from fastapi.responses import RedirectResponse +from sqlalchemy import select +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm.attributes import flag_modified + +from app.config import config +from app.db import ( + SearchSourceConnector, + SearchSourceConnectorType, + User, + get_async_session, +) +from app.users import current_active_user +from app.utils.connector_naming import generate_unique_connector_name +from app.utils.oauth_security import OAuthStateManager, TokenEncryption, generate_pkce_pair + +logger = logging.getLogger(__name__) + +router = APIRouter() + +_state_manager: OAuthStateManager | None = None +_token_encryption: TokenEncryption | None = None + + +def _get_state_manager() -> OAuthStateManager: + global _state_manager + if _state_manager is None: + if not config.SECRET_KEY: + raise HTTPException(status_code=500, detail="SECRET_KEY not configured.") + _state_manager = OAuthStateManager(config.SECRET_KEY) + return _state_manager + + +def _get_token_encryption() -> TokenEncryption: + global _token_encryption + if _token_encryption is None: + if not config.SECRET_KEY: + raise HTTPException(status_code=500, detail="SECRET_KEY not configured.") + _token_encryption = TokenEncryption(config.SECRET_KEY) + return _token_encryption + + +def _build_redirect_uri(service: str) -> str: + base = config.BACKEND_URL + if not base: + raise HTTPException(status_code=500, detail="BACKEND_URL not configured.") + return f"{base.rstrip('/')}/api/v1/auth/mcp/{service}/connector/callback" + + +def _frontend_redirect( + space_id: int | None, + *, + success: bool = False, + connector_id: int | None = None, + error: str | None = None, + service: str = "mcp", +) -> RedirectResponse: + if 
success and space_id: + qs = f"success=true&connector={service}-mcp-connector" + if connector_id: + qs += f"&connectorId={connector_id}" + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?{qs}" + ) + if error and space_id: + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/connectors/callback?error={error}" + ) + return RedirectResponse(url=f"{config.NEXT_FRONTEND_URL}/dashboard") + + +# --------------------------------------------------------------------------- +# /add — start MCP OAuth flow +# --------------------------------------------------------------------------- + +@router.get("/auth/mcp/{service}/connector/add") +async def connect_mcp_service( + service: str, + space_id: int, + user: User = Depends(current_active_user), +): + from app.services.mcp_oauth.registry import get_service + + svc = get_service(service) + if not svc: + raise HTTPException(status_code=404, detail=f"Unknown MCP service: {service}") + + try: + from app.services.mcp_oauth.discovery import ( + discover_oauth_metadata, + register_client, + ) + + metadata = await discover_oauth_metadata(svc.mcp_url) + auth_endpoint = metadata.get("authorization_endpoint") + token_endpoint = metadata.get("token_endpoint") + registration_endpoint = metadata.get("registration_endpoint") + + if not auth_endpoint or not token_endpoint: + raise HTTPException( + status_code=502, + detail=f"{svc.name} MCP server returned incomplete OAuth metadata.", + ) + + redirect_uri = _build_redirect_uri(service) + + if svc.supports_dcr and registration_endpoint: + dcr = await register_client(registration_endpoint, redirect_uri) + client_id = dcr.get("client_id") + client_secret = dcr.get("client_secret", "") + if not client_id: + raise HTTPException( + status_code=502, + detail=f"DCR for {svc.name} did not return a client_id.", + ) + elif not svc.supports_dcr and svc.client_id_env: + client_id = getattr(config, svc.client_id_env, None) + 
client_secret = getattr(config, svc.client_secret_env or "", None) or "" + if not client_id: + raise HTTPException( + status_code=500, + detail=f"{svc.name} MCP OAuth not configured ({svc.client_id_env}).", + ) + else: + raise HTTPException( + status_code=502, + detail=f"{svc.name} MCP server has no DCR and no fallback credentials.", + ) + + verifier, challenge = generate_pkce_pair() + enc = _get_token_encryption() + + state = _get_state_manager().generate_secure_state( + space_id, + user.id, + service=service, + code_verifier=verifier, + mcp_client_id=client_id, + mcp_client_secret=enc.encrypt_token(client_secret) if client_secret else "", + mcp_token_endpoint=token_endpoint, + mcp_url=svc.mcp_url, + ) + + auth_params: dict[str, str] = { + "client_id": client_id, + "response_type": "code", + "redirect_uri": redirect_uri, + "code_challenge": challenge, + "code_challenge_method": "S256", + "state": state, + } + if svc.scopes: + auth_params["scope"] = " ".join(svc.scopes) + + auth_url = f"{auth_endpoint}?{urlencode(auth_params)}" + + logger.info( + "Generated %s MCP OAuth URL for user %s, space %s", + svc.name, user.id, space_id, + ) + return {"auth_url": auth_url} + + except HTTPException: + raise + except Exception as e: + logger.error("Failed to initiate %s MCP OAuth: %s", service, e, exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to initiate {service} MCP OAuth: {e!s}", + ) from e + + +# --------------------------------------------------------------------------- +# /callback — handle OAuth redirect +# --------------------------------------------------------------------------- + +@router.get("/auth/mcp/{service}/connector/callback") +async def mcp_oauth_callback( + service: str, + code: str | None = None, + error: str | None = None, + state: str | None = None, + session: AsyncSession = Depends(get_async_session), +): + if error: + logger.warning("%s MCP OAuth error: %s", service, error) + space_id = None + if state: + try: + data = 
_get_state_manager().validate_state(state) + space_id = data.get("space_id") + except Exception: + pass + return _frontend_redirect( + space_id, error=f"{service}_mcp_oauth_denied", service=service, + ) + + if not code: + raise HTTPException(status_code=400, detail="Missing authorization code") + if not state: + raise HTTPException(status_code=400, detail="Missing state parameter") + + data = _get_state_manager().validate_state(state) + user_id = UUID(data["user_id"]) + space_id = data["space_id"] + svc_key = data.get("service", service) + + from app.services.mcp_oauth.registry import get_service + + svc = get_service(svc_key) + if not svc: + raise HTTPException(status_code=404, detail=f"Unknown MCP service: {svc_key}") + + try: + from app.services.mcp_oauth.discovery import exchange_code_for_tokens + + enc = _get_token_encryption() + client_id = data["mcp_client_id"] + client_secret = ( + enc.decrypt_token(data["mcp_client_secret"]) + if data.get("mcp_client_secret") + else "" + ) + token_endpoint = data["mcp_token_endpoint"] + code_verifier = data["code_verifier"] + mcp_url = data["mcp_url"] + redirect_uri = _build_redirect_uri(service) + + token_json = await exchange_code_for_tokens( + token_endpoint=token_endpoint, + code=code, + redirect_uri=redirect_uri, + client_id=client_id, + client_secret=client_secret, + code_verifier=code_verifier, + ) + + access_token = token_json.get("access_token") + if not access_token: + raise HTTPException( + status_code=400, + detail=f"No access token received from {svc.name}.", + ) + + refresh_token = token_json.get("refresh_token") + expires_at = None + if token_json.get("expires_in"): + expires_at = datetime.now(UTC) + timedelta( + seconds=int(token_json["expires_in"]) + ) + + connector_config = { + "server_config": { + "transport": "streamable-http", + "url": mcp_url, + "headers": {"Authorization": f"Bearer {access_token}"}, + }, + "mcp_service": svc_key, + "mcp_oauth": { + "client_id": client_id, + "client_secret": 
enc.encrypt_token(client_secret) if client_secret else "", + "token_endpoint": token_endpoint, + "access_token": enc.encrypt_token(access_token), + "refresh_token": enc.encrypt_token(refresh_token) if refresh_token else None, + "expires_at": expires_at.isoformat() if expires_at else None, + "scope": token_json.get("scope"), + }, + "_token_encrypted": True, + } + + # ---- Re-auth path ---- + reauth_connector_id = data.get("connector_id") + if reauth_connector_id: + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == reauth_connector_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.search_space_id == space_id, + SearchSourceConnector.connector_type + == SearchSourceConnectorType.MCP_CONNECTOR, + ) + ) + db_connector = result.scalars().first() + if not db_connector: + raise HTTPException( + status_code=404, + detail="Connector not found during re-auth", + ) + + db_connector.config = connector_config + flag_modified(db_connector, "config") + await session.commit() + await session.refresh(db_connector) + + _invalidate_cache(space_id) + + logger.info( + "Re-authenticated %s MCP connector %s for user %s", + svc.name, db_connector.id, user_id, + ) + reauth_return_url = data.get("return_url") + if reauth_return_url and reauth_return_url.startswith("/"): + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}{reauth_return_url}" + ) + return _frontend_redirect( + space_id, success=True, connector_id=db_connector.id, service=service, + ) + + # ---- New connector path ---- + connector_name = await generate_unique_connector_name( + session, + SearchSourceConnectorType.MCP_CONNECTOR, + space_id, + user_id, + f"{svc.name} MCP", + ) + + new_connector = SearchSourceConnector( + name=connector_name, + connector_type=SearchSourceConnectorType.MCP_CONNECTOR, + is_indexable=False, + config=connector_config, + search_space_id=space_id, + user_id=user_id, + ) + session.add(new_connector) + + try: + await 
session.commit() + except IntegrityError as e: + await session.rollback() + raise HTTPException( + status_code=409, detail=f"Database integrity error: {e!s}", + ) from e + + _invalidate_cache(space_id) + + logger.info( + "Created %s MCP connector %s for user %s in space %s", + svc.name, new_connector.id, user_id, space_id, + ) + return _frontend_redirect( + space_id, success=True, connector_id=new_connector.id, service=service, + ) + + except HTTPException: + raise + except Exception as e: + logger.error( + "Failed to complete %s MCP OAuth: %s", service, e, exc_info=True, + ) + raise HTTPException( + status_code=500, + detail=f"Failed to complete {service} MCP OAuth: {e!s}", + ) from e + + +# --------------------------------------------------------------------------- +# /reauth — re-authenticate an existing MCP connector +# --------------------------------------------------------------------------- + +@router.get("/auth/mcp/{service}/connector/reauth") +async def reauth_mcp_service( + service: str, + space_id: int, + connector_id: int, + return_url: str | None = None, + user: User = Depends(current_active_user), + session: AsyncSession = Depends(get_async_session), +): + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == connector_id, + SearchSourceConnector.user_id == user.id, + SearchSourceConnector.search_space_id == space_id, + SearchSourceConnector.connector_type + == SearchSourceConnectorType.MCP_CONNECTOR, + ) + ) + if not result.scalars().first(): + raise HTTPException( + status_code=404, detail="MCP connector not found or access denied", + ) + + from app.services.mcp_oauth.registry import get_service + + svc = get_service(service) + if not svc: + raise HTTPException(status_code=404, detail=f"Unknown MCP service: {service}") + + try: + from app.services.mcp_oauth.discovery import ( + discover_oauth_metadata, + register_client, + ) + + metadata = await discover_oauth_metadata(svc.mcp_url) + auth_endpoint = 
metadata.get("authorization_endpoint") + token_endpoint = metadata.get("token_endpoint") + registration_endpoint = metadata.get("registration_endpoint") + + if not auth_endpoint or not token_endpoint: + raise HTTPException( + status_code=502, + detail=f"{svc.name} MCP server returned incomplete OAuth metadata.", + ) + + redirect_uri = _build_redirect_uri(service) + + if svc.supports_dcr and registration_endpoint: + dcr = await register_client(registration_endpoint, redirect_uri) + client_id = dcr.get("client_id") + client_secret = dcr.get("client_secret", "") + if not client_id: + raise HTTPException( + status_code=502, + detail=f"DCR for {svc.name} did not return a client_id.", + ) + elif not svc.supports_dcr and svc.client_id_env: + client_id = getattr(config, svc.client_id_env, None) + client_secret = getattr(config, svc.client_secret_env or "", None) or "" + if not client_id: + raise HTTPException( + status_code=500, + detail=f"{svc.name} MCP OAuth not configured ({svc.client_id_env}).", + ) + else: + raise HTTPException( + status_code=502, + detail=f"{svc.name} MCP server has no DCR and no fallback credentials.", + ) + + verifier, challenge = generate_pkce_pair() + enc = _get_token_encryption() + + extra: dict = { + "service": service, + "code_verifier": verifier, + "mcp_client_id": client_id, + "mcp_client_secret": enc.encrypt_token(client_secret) if client_secret else "", + "mcp_token_endpoint": token_endpoint, + "mcp_url": svc.mcp_url, + "connector_id": connector_id, + } + if return_url and return_url.startswith("/"): + extra["return_url"] = return_url + + state = _get_state_manager().generate_secure_state( + space_id, user.id, **extra, + ) + + auth_params: dict[str, str] = { + "client_id": client_id, + "response_type": "code", + "redirect_uri": redirect_uri, + "code_challenge": challenge, + "code_challenge_method": "S256", + "state": state, + } + if svc.scopes: + auth_params["scope"] = " ".join(svc.scopes) + + auth_url = 
f"{auth_endpoint}?{urlencode(auth_params)}" + + logger.info( + "Initiating %s MCP re-auth for user %s, connector %s", + svc.name, user.id, connector_id, + ) + return {"auth_url": auth_url} + + except HTTPException: + raise + except Exception as e: + logger.error( + "Failed to initiate %s MCP re-auth: %s", service, e, exc_info=True, + ) + raise HTTPException( + status_code=500, + detail=f"Failed to initiate {service} MCP re-auth: {e!s}", + ) from e + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +def _invalidate_cache(space_id: int) -> None: + try: + from app.agents.new_chat.tools.mcp_tool import invalidate_mcp_tools_cache + + invalidate_mcp_tools_cache(space_id) + except Exception: + logger.debug("MCP cache invalidation skipped", exc_info=True) From 81711c9e5b168a9acc4aa5838fe77d3d8260a7ec Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:19:57 +0200 Subject: [PATCH 28/57] wire MCP OAuth route into app router --- surfsense_backend/app/routes/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py index ad40666cd..925c207a6 100644 --- a/surfsense_backend/app/routes/__init__.py +++ b/surfsense_backend/app/routes/__init__.py @@ -30,6 +30,7 @@ from .jira_add_connector_route import router as jira_add_connector_router from .linear_add_connector_route import router as linear_add_connector_router from .logs_routes import router as logs_router from .luma_add_connector_route import router as luma_add_connector_router +from .mcp_oauth_route import router as mcp_oauth_router from .memory_routes import router as memory_router from .model_list_routes import router as model_list_router from .new_chat_routes import router as new_chat_router @@ -95,6 +96,7 @@ router.include_router(logs_router) router.include_router(circleback_webhook_router) # 
Circleback meeting webhooks router.include_router(surfsense_docs_router) # Surfsense documentation for citations router.include_router(notifications_router) # Notifications with Zero sync +router.include_router(mcp_oauth_router) # MCP OAuth 2.1 for Linear, Jira, ClickUp router.include_router(composio_router) # Composio OAuth and toolkit management router.include_router(public_chat_router) # Public chat sharing and cloning router.include_router(incentive_tasks_router) # Incentive tasks for earning free pages From 9b78fbfe15c36c02e1ed0b958519958d4f93c555 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:20:12 +0200 Subject: [PATCH 29/57] add automatic token refresh for MCP OAuth connectors --- .../app/agents/new_chat/tools/mcp_tool.py | 124 +++++++++++++++++- 1 file changed, 121 insertions(+), 3 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py b/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py index 9743d049d..cf3e51166 100644 --- a/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py +++ b/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py @@ -377,6 +377,118 @@ async def _load_http_mcp_tools( return tools +_TOKEN_REFRESH_BUFFER_SECONDS = 300 # refresh 5 min before expiry + + +async def _maybe_refresh_mcp_oauth_token( + session: AsyncSession, + connector: "SearchSourceConnector", + cfg: dict[str, Any], + server_config: dict[str, Any], +) -> dict[str, Any]: + """Refresh the access token for an MCP OAuth connector if it is about to expire. + + Returns the (possibly updated) ``server_config``. 
+ """ + from datetime import UTC, datetime, timedelta + + mcp_oauth = cfg.get("mcp_oauth", {}) + expires_at_str = mcp_oauth.get("expires_at") + if not expires_at_str: + return server_config + + try: + expires_at = datetime.fromisoformat(expires_at_str) + if expires_at.tzinfo is None: + from datetime import timezone + expires_at = expires_at.replace(tzinfo=timezone.utc) + + if datetime.now(UTC) < expires_at - timedelta(seconds=_TOKEN_REFRESH_BUFFER_SECONDS): + return server_config + except (ValueError, TypeError): + return server_config + + refresh_token = mcp_oauth.get("refresh_token") + if not refresh_token: + logger.warning( + "MCP connector %s token expired but no refresh_token available", + connector.id, + ) + return server_config + + try: + from app.config import config as app_config + from app.services.mcp_oauth.discovery import refresh_access_token + from app.utils.oauth_security import TokenEncryption + + enc = TokenEncryption(app_config.SECRET_KEY) + decrypted_refresh = enc.decrypt_token(refresh_token) + decrypted_secret = ( + enc.decrypt_token(mcp_oauth["client_secret"]) + if mcp_oauth.get("client_secret") + else "" + ) + + token_json = await refresh_access_token( + token_endpoint=mcp_oauth["token_endpoint"], + refresh_token=decrypted_refresh, + client_id=mcp_oauth["client_id"], + client_secret=decrypted_secret, + ) + + new_access = token_json.get("access_token") + if not new_access: + logger.warning( + "MCP connector %s token refresh returned no access_token", + connector.id, + ) + return server_config + + new_expires_at = None + if token_json.get("expires_in"): + new_expires_at = datetime.now(UTC) + timedelta( + seconds=int(token_json["expires_in"]) + ) + + updated_oauth = dict(mcp_oauth) + updated_oauth["access_token"] = enc.encrypt_token(new_access) + if token_json.get("refresh_token"): + updated_oauth["refresh_token"] = enc.encrypt_token( + token_json["refresh_token"] + ) + updated_oauth["expires_at"] = ( + new_expires_at.isoformat() if 
new_expires_at else None + ) + + updated_server_config = dict(server_config) + updated_server_config["headers"] = { + **server_config.get("headers", {}), + "Authorization": f"Bearer {new_access}", + } + + from sqlalchemy.orm.attributes import flag_modified + + connector.config = { + **cfg, + "server_config": updated_server_config, + "mcp_oauth": updated_oauth, + } + flag_modified(connector, "config") + await session.commit() + await session.refresh(connector) + + logger.info("Refreshed MCP OAuth token for connector %s", connector.id) + return updated_server_config + + except Exception: + logger.warning( + "Failed to refresh MCP OAuth token for connector %s", + connector.id, + exc_info=True, + ) + return server_config + + def invalidate_mcp_tools_cache(search_space_id: int | None = None) -> None: """Invalidate cached MCP tools. @@ -429,9 +541,9 @@ async def load_mcp_tools( tools: list[StructuredTool] = [] for connector in result.scalars(): try: - config = connector.config or {} - server_config = config.get("server_config", {}) - trusted_tools = config.get("trusted_tools", []) + cfg = connector.config or {} + server_config = cfg.get("server_config", {}) + trusted_tools = cfg.get("trusted_tools", []) if not server_config or not isinstance(server_config, dict): logger.warning( @@ -439,6 +551,12 @@ async def load_mcp_tools( ) continue + # Refresh OAuth token for MCP OAuth connectors before connecting + if cfg.get("mcp_oauth"): + server_config = await _maybe_refresh_mcp_oauth_token( + session, connector, cfg, server_config, + ) + transport = server_config.get("transport", "stdio") if transport in ("streamable-http", "http", "sse"): From c414cc257f392f84a82da6100e46701bf630404b Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:20:54 +0200 Subject: [PATCH 30/57] add frontend tiles for Linear, Jira, ClickUp MCP connectors --- .../constants/connector-constants.ts | 25 +++++++++++++++++++ .../tabs/all-connectors-tab.tsx | 23 ++++++++++++++++- 2 files changed, 
47 insertions(+), 1 deletion(-) diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 5b61e8bdf..5ce94809a 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -105,6 +105,31 @@ export const OAUTH_CONNECTORS = [ }, ] as const; +// MCP OAuth Connectors (one-click connect via official MCP servers) +export const MCP_OAUTH_CONNECTORS = [ + { + id: "linear-mcp-connector", + title: "Linear (MCP)", + description: "Interact with Linear issues via MCP", + connectorType: EnumConnectorName.MCP_CONNECTOR, + authEndpoint: "/api/v1/auth/mcp/linear/connector/add/", + }, + { + id: "jira-mcp-connector", + title: "Jira (MCP)", + description: "Interact with Jira issues via MCP", + connectorType: EnumConnectorName.MCP_CONNECTOR, + authEndpoint: "/api/v1/auth/mcp/jira/connector/add/", + }, + { + id: "clickup-mcp-connector", + title: "ClickUp (MCP)", + description: "Interact with ClickUp tasks via MCP", + connectorType: EnumConnectorName.MCP_CONNECTOR, + authEndpoint: "/api/v1/auth/mcp/clickup/connector/add/", + }, +] as const; + // Content Sources (tools that extract and import content from external sources) export const CRAWLERS = [ { diff --git a/surfsense_web/components/assistant-ui/connector-popup/tabs/all-connectors-tab.tsx b/surfsense_web/components/assistant-ui/connector-popup/tabs/all-connectors-tab.tsx index 814959ec4..d4f5e2fc1 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/tabs/all-connectors-tab.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/tabs/all-connectors-tab.tsx @@ -10,12 +10,14 @@ import { ConnectorCard } from "../components/connector-card"; import { COMPOSIO_CONNECTORS, CRAWLERS, + MCP_OAUTH_CONNECTORS, OAUTH_CONNECTORS, OTHER_CONNECTORS, } from 
"../constants/connector-constants"; import { getDocumentCountForConnector } from "../utils/connector-document-mapping"; type OAuthConnector = (typeof OAUTH_CONNECTORS)[number]; +type MCPOAuthConnector = (typeof MCP_OAUTH_CONNECTORS)[number]; type ComposioConnector = (typeof COMPOSIO_CONNECTORS)[number]; type OtherConnector = (typeof OTHER_CONNECTORS)[number]; type CrawlerConnector = (typeof CRAWLERS)[number]; @@ -128,6 +130,10 @@ export const AllConnectorsTab: FC = ({ (c) => c.connectorType === EnumConnectorName.AIRTABLE_CONNECTOR ); + const filteredMCPOAuth = MCP_OAUTH_CONNECTORS.filter( + (c) => matchesSearch(c.title, c.description), + ); + const moreIntegrationsComposio = filteredComposio.filter( (c) => !DOCUMENT_FILE_CONNECTOR_TYPES.has(c.connectorType) && @@ -279,6 +285,7 @@ export const AllConnectorsTab: FC = ({ nativeGoogleDriveConnectors.length > 0 || composioGoogleDriveConnectors.length > 0 || fileStorageConnectors.length > 0; + const hasMCPOAuth = filteredMCPOAuth.length > 0; const hasMoreIntegrations = otherDocumentYouTubeConnectors.length > 0 || otherDocumentNotionConnectors.length > 0 || @@ -288,7 +295,7 @@ export const AllConnectorsTab: FC = ({ moreIntegrationsOther.length > 0 || moreIntegrationsCrawlers.length > 0; - const hasAnyResults = hasDocumentFileConnectors || hasMoreIntegrations; + const hasAnyResults = hasDocumentFileConnectors || hasMCPOAuth || hasMoreIntegrations; if (!hasAnyResults && searchQuery) { return ( @@ -318,6 +325,20 @@ export const AllConnectorsTab: FC = ({ )} + {/* Live MCP Integrations */} + {hasMCPOAuth && ( +
+				<div>
+					<div className="flex items-center gap-2">
+						<h3 className="text-sm font-medium">Live MCP Integrations</h3>
+					</div>
+					<div className="grid gap-3 sm:grid-cols-2">
+						{filteredMCPOAuth.map((connector) =>
+							renderOAuthCard(connector as OAuthConnector | ComposioConnector)
+						)}
+					</div>
+				</div>
+			)}
+
+			{/* More Integrations */}
+			{hasMoreIntegrations && (
From 8b8c9b1f5dd8b8c88e0d351c91adbc1fda5030a0 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:38:24 +0200 Subject: [PATCH 31/57] add Slack and Airtable MCP OAuth support --- .../app/routes/mcp_oauth_route.py | 8 ++++++-- .../app/services/mcp_oauth/discovery.py | 18 ++++++++++++++---- .../app/services/mcp_oauth/registry.py | 13 +++++++++++++ .../constants/connector-constants.ts | 14 ++++++++++++++ 4 files changed, 47 insertions(+), 6 deletions(-) diff --git a/surfsense_backend/app/routes/mcp_oauth_route.py b/surfsense_backend/app/routes/mcp_oauth_route.py index 689914ee8..e47dc0a62 100644 --- a/surfsense_backend/app/routes/mcp_oauth_route.py +++ b/surfsense_backend/app/routes/mcp_oauth_route.py @@ -106,7 +106,9 @@ async def connect_mcp_service( register_client, ) - metadata = await discover_oauth_metadata(svc.mcp_url) + metadata = await discover_oauth_metadata( + svc.mcp_url, origin_override=svc.oauth_discovery_origin, + ) auth_endpoint = metadata.get("authorization_endpoint") token_endpoint = metadata.get("token_endpoint") registration_endpoint = metadata.get("registration_endpoint") @@ -409,7 +411,9 @@ async def reauth_mcp_service( register_client, ) - metadata = await discover_oauth_metadata(svc.mcp_url) + metadata = await discover_oauth_metadata( + svc.mcp_url, origin_override=svc.oauth_discovery_origin, + ) auth_endpoint = metadata.get("authorization_endpoint") token_endpoint = metadata.get("token_endpoint") registration_endpoint = metadata.get("registration_endpoint") diff --git a/surfsense_backend/app/services/mcp_oauth/discovery.py b/surfsense_backend/app/services/mcp_oauth/discovery.py index e8bcd7076..b0f3fef2a 100644 --- a/surfsense_backend/app/services/mcp_oauth/discovery.py +++ b/surfsense_backend/app/services/mcp_oauth/discovery.py @@ -11,14 +11,24 @@ import httpx logger = logging.getLogger(__name__) -async def discover_oauth_metadata(mcp_url: str, *, timeout: float = 15.0) -> dict: +async def discover_oauth_metadata( + mcp_url: str, + 
*, + origin_override: str | None = None, + timeout: float = 15.0, +) -> dict: """Fetch OAuth 2.1 metadata from the MCP server's well-known endpoint. Per the MCP spec the discovery document lives at the *origin* of the - MCP server URL, not at the MCP endpoint path. + MCP server URL. ``origin_override`` can be used when the OAuth server + lives on a different domain (e.g. Airtable: MCP at ``mcp.airtable.com``, + OAuth at ``airtable.com``). """ - parsed = urlparse(mcp_url) - origin = f"{parsed.scheme}://{parsed.netloc}" + if origin_override: + origin = origin_override.rstrip("/") + else: + parsed = urlparse(mcp_url) + origin = f"{parsed.scheme}://{parsed.netloc}" discovery_url = f"{origin}/.well-known/oauth-authorization-server" async with httpx.AsyncClient(follow_redirects=True) as client: diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index 93d5d5448..3f9a03fbc 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -16,6 +16,7 @@ class MCPServiceConfig: name: str mcp_url: str supports_dcr: bool = True + oauth_discovery_origin: str | None = None client_id_env: str | None = None client_secret_env: str | None = None scopes: list[str] = field(default_factory=list) @@ -34,6 +35,18 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { name="ClickUp", mcp_url="https://mcp.clickup.com/mcp", ), + "slack": MCPServiceConfig( + name="Slack", + mcp_url="https://mcp.slack.com/mcp", + supports_dcr=False, + client_id_env="SLACK_CLIENT_ID", + client_secret_env="SLACK_CLIENT_SECRET", + ), + "airtable": MCPServiceConfig( + name="Airtable", + mcp_url="https://mcp.airtable.com/mcp", + oauth_discovery_origin="https://airtable.com", + ), } diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 
5ce94809a..dcd63f525 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -128,6 +128,20 @@ export const MCP_OAUTH_CONNECTORS = [ connectorType: EnumConnectorName.MCP_CONNECTOR, authEndpoint: "/api/v1/auth/mcp/clickup/connector/add/", }, + { + id: "slack-mcp-connector", + title: "Slack (MCP)", + description: "Interact with Slack channels via MCP", + connectorType: EnumConnectorName.MCP_CONNECTOR, + authEndpoint: "/api/v1/auth/mcp/slack/connector/add/", + }, + { + id: "airtable-mcp-connector", + title: "Airtable (MCP)", + description: "Interact with Airtable bases via MCP", + connectorType: EnumConnectorName.MCP_CONNECTOR, + authEndpoint: "/api/v1/auth/mcp/airtable/connector/add/", + }, ] as const; // Content Sources (tools that extract and import content from external sources) From 5ff0ec5d5de7ab9d880cb9e6911ecebdf54fed14 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:51:40 +0200 Subject: [PATCH 32/57] disable periodic indexing for live connectors --- .../celery_tasks/schedule_checker_task.py | 22 +++---------------- 1 file changed, 3 insertions(+), 19 deletions(-) diff --git a/surfsense_backend/app/tasks/celery_tasks/schedule_checker_task.py b/surfsense_backend/app/tasks/celery_tasks/schedule_checker_task.py index e6890b0a8..3aee5a4ca 100644 --- a/surfsense_backend/app/tasks/celery_tasks/schedule_checker_task.py +++ b/surfsense_backend/app/tasks/celery_tasks/schedule_checker_task.py @@ -51,43 +51,27 @@ async def _check_and_trigger_schedules(): logger.info(f"Found {len(due_connectors)} connectors due for indexing") - # Import all indexing tasks + # Import indexing tasks for KB connectors only. + # Live connectors (Linear, Slack, Jira, ClickUp, Airtable, Discord, + # Teams, Gmail, Calendar, Luma) use real-time tools instead. 
from app.tasks.celery_tasks.connector_tasks import ( - index_airtable_records_task, - index_clickup_tasks_task, index_confluence_pages_task, index_crawled_urls_task, - index_discord_messages_task, index_elasticsearch_documents_task, index_github_repos_task, index_google_calendar_events_task, index_google_drive_files_task, index_google_gmail_messages_task, - index_jira_issues_task, - index_linear_issues_task, - index_luma_events_task, index_notion_pages_task, - index_slack_messages_task, ) - # Map connector types to their tasks task_map = { - SearchSourceConnectorType.SLACK_CONNECTOR: index_slack_messages_task, SearchSourceConnectorType.NOTION_CONNECTOR: index_notion_pages_task, SearchSourceConnectorType.GITHUB_CONNECTOR: index_github_repos_task, - SearchSourceConnectorType.LINEAR_CONNECTOR: index_linear_issues_task, - SearchSourceConnectorType.JIRA_CONNECTOR: index_jira_issues_task, SearchSourceConnectorType.CONFLUENCE_CONNECTOR: index_confluence_pages_task, - SearchSourceConnectorType.CLICKUP_CONNECTOR: index_clickup_tasks_task, - SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR: index_google_calendar_events_task, - SearchSourceConnectorType.AIRTABLE_CONNECTOR: index_airtable_records_task, - SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR: index_google_gmail_messages_task, - SearchSourceConnectorType.DISCORD_CONNECTOR: index_discord_messages_task, - SearchSourceConnectorType.LUMA_CONNECTOR: index_luma_events_task, SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR: index_elasticsearch_documents_task, SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: index_crawled_urls_task, SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR: index_google_drive_files_task, - # Composio connector types (unified with native Google tasks) SearchSourceConnectorType.COMPOSIO_GOOGLE_DRIVE_CONNECTOR: index_google_drive_files_task, SearchSourceConnectorType.COMPOSIO_GMAIL_CONNECTOR: index_google_gmail_messages_task, SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR: 
index_google_calendar_events_task, From 328219e46fdc9c0b88d194c1e47b5dbc9d4b5d91 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:52:17 +0200 Subject: [PATCH 33/57] disable first-run indexing for live connectors --- .../app/utils/periodic_scheduler.py | 30 ------------------- 1 file changed, 30 deletions(-) diff --git a/surfsense_backend/app/utils/periodic_scheduler.py b/surfsense_backend/app/utils/periodic_scheduler.py index 9ea45df63..923f969d5 100644 --- a/surfsense_backend/app/utils/periodic_scheduler.py +++ b/surfsense_backend/app/utils/periodic_scheduler.py @@ -18,19 +18,9 @@ logger = logging.getLogger(__name__) # Mapping of connector types to their corresponding Celery task names CONNECTOR_TASK_MAP = { - SearchSourceConnectorType.SLACK_CONNECTOR: "index_slack_messages", - SearchSourceConnectorType.TEAMS_CONNECTOR: "index_teams_messages", SearchSourceConnectorType.NOTION_CONNECTOR: "index_notion_pages", SearchSourceConnectorType.GITHUB_CONNECTOR: "index_github_repos", - SearchSourceConnectorType.LINEAR_CONNECTOR: "index_linear_issues", - SearchSourceConnectorType.JIRA_CONNECTOR: "index_jira_issues", SearchSourceConnectorType.CONFLUENCE_CONNECTOR: "index_confluence_pages", - SearchSourceConnectorType.CLICKUP_CONNECTOR: "index_clickup_tasks", - SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR: "index_google_calendar_events", - SearchSourceConnectorType.AIRTABLE_CONNECTOR: "index_airtable_records", - SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR: "index_google_gmail_messages", - SearchSourceConnectorType.DISCORD_CONNECTOR: "index_discord_messages", - SearchSourceConnectorType.LUMA_CONNECTOR: "index_luma_events", SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR: "index_elasticsearch_documents", SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: "index_crawled_urls", SearchSourceConnectorType.BOOKSTACK_CONNECTOR: "index_bookstack_pages", @@ -84,40 +74,20 @@ def create_periodic_schedule( f"(frequency: {frequency_minutes} minutes). 
Triggering first run..." ) - # Import all indexing tasks from app.tasks.celery_tasks.connector_tasks import ( - index_airtable_records_task, index_bookstack_pages_task, - index_clickup_tasks_task, index_confluence_pages_task, index_crawled_urls_task, - index_discord_messages_task, index_elasticsearch_documents_task, index_github_repos_task, - index_google_calendar_events_task, - index_google_gmail_messages_task, - index_jira_issues_task, - index_linear_issues_task, - index_luma_events_task, index_notion_pages_task, index_obsidian_vault_task, - index_slack_messages_task, ) - # Map connector type to task task_map = { - SearchSourceConnectorType.SLACK_CONNECTOR: index_slack_messages_task, SearchSourceConnectorType.NOTION_CONNECTOR: index_notion_pages_task, SearchSourceConnectorType.GITHUB_CONNECTOR: index_github_repos_task, - SearchSourceConnectorType.LINEAR_CONNECTOR: index_linear_issues_task, - SearchSourceConnectorType.JIRA_CONNECTOR: index_jira_issues_task, SearchSourceConnectorType.CONFLUENCE_CONNECTOR: index_confluence_pages_task, - SearchSourceConnectorType.CLICKUP_CONNECTOR: index_clickup_tasks_task, - SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR: index_google_calendar_events_task, - SearchSourceConnectorType.AIRTABLE_CONNECTOR: index_airtable_records_task, - SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR: index_google_gmail_messages_task, - SearchSourceConnectorType.DISCORD_CONNECTOR: index_discord_messages_task, - SearchSourceConnectorType.LUMA_CONNECTOR: index_luma_events_task, SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR: index_elasticsearch_documents_task, SearchSourceConnectorType.WEBCRAWLER_CONNECTOR: index_crawled_urls_task, SearchSourceConnectorType.BOOKSTACK_CONNECTOR: index_bookstack_pages_task, From 53a173a8fdc78a35889ceb028a5e102a11a7ecb8 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:52:23 +0200 Subject: [PATCH 34/57] guard manual indexing for live connectors --- .../routes/search_source_connectors_routes.py | 175 
+++--------------- 1 file changed, 28 insertions(+), 147 deletions(-) diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py index b87ce28c9..7ce3ca9a3 100644 --- a/surfsense_backend/app/routes/search_source_connectors_routes.py +++ b/surfsense_backend/app/routes/search_source_connectors_routes.py @@ -693,27 +693,10 @@ async def index_connector_content( user: User = Depends(current_active_user), ): """ - Index content from a connector to a search space. - Requires CONNECTORS_UPDATE permission (to trigger indexing). + Index content from a KB connector to a search space. - Currently supports: - - SLACK_CONNECTOR: Indexes messages from all accessible Slack channels - - TEAMS_CONNECTOR: Indexes messages from all accessible Microsoft Teams channels - - NOTION_CONNECTOR: Indexes pages from all accessible Notion pages - - GITHUB_CONNECTOR: Indexes code and documentation from GitHub repositories - - LINEAR_CONNECTOR: Indexes issues and comments from Linear - - JIRA_CONNECTOR: Indexes issues and comments from Jira - - DISCORD_CONNECTOR: Indexes messages from all accessible Discord channels - - LUMA_CONNECTOR: Indexes events from Luma - - ELASTICSEARCH_CONNECTOR: Indexes documents from Elasticsearch - - WEBCRAWLER_CONNECTOR: Indexes web pages from crawled websites - - Args: - connector_id: ID of the connector to use - search_space_id: ID of the search space to store indexed content - - Returns: - Dictionary with indexing status + Live connectors (Slack, Teams, Linear, Jira, ClickUp, Calendar, Airtable, + Gmail, Discord, Luma) use real-time agent tools instead. 
""" try: # Get the connector first @@ -770,9 +753,7 @@ async def index_connector_content( # For calendar connectors, default to today but allow future dates if explicitly provided if connector.connector_type in [ - SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR, SearchSourceConnectorType.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR, - SearchSourceConnectorType.LUMA_CONNECTOR, ]: # Default to today if no end_date provided (users can manually select future dates) indexing_to = today_str if end_date is None else end_date @@ -796,33 +777,32 @@ async def index_connector_content( # For non-calendar connectors, cap at today indexing_to = end_date if end_date else today_str - if connector.connector_type == SearchSourceConnectorType.SLACK_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import ( - index_slack_messages_task, - ) + _LIVE_CONNECTOR_TYPES = { + SearchSourceConnectorType.SLACK_CONNECTOR, + SearchSourceConnectorType.TEAMS_CONNECTOR, + SearchSourceConnectorType.LINEAR_CONNECTOR, + SearchSourceConnectorType.JIRA_CONNECTOR, + SearchSourceConnectorType.CLICKUP_CONNECTOR, + SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR, + SearchSourceConnectorType.AIRTABLE_CONNECTOR, + SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR, + SearchSourceConnectorType.DISCORD_CONNECTOR, + SearchSourceConnectorType.LUMA_CONNECTOR, + } + if connector.connector_type in _LIVE_CONNECTOR_TYPES: + return { + "message": ( + f"{connector.connector_type.value} uses real-time agent tools; " + "background indexing is disabled." 
+ ), + "indexing_started": False, + "connector_id": connector_id, + "search_space_id": search_space_id, + "indexing_from": indexing_from, + "indexing_to": indexing_to, + } - logger.info( - f"Triggering Slack indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_slack_messages_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Slack indexing started in the background." - - elif connector.connector_type == SearchSourceConnectorType.TEAMS_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import ( - index_teams_messages_task, - ) - - logger.info( - f"Triggering Teams indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_teams_messages_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Teams indexing started in the background." - - elif connector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR: + if connector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR: from app.tasks.celery_tasks.connector_tasks import index_notion_pages_task logger.info( @@ -844,28 +824,6 @@ async def index_connector_content( ) response_message = "GitHub indexing started in the background." - elif connector.connector_type == SearchSourceConnectorType.LINEAR_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import index_linear_issues_task - - logger.info( - f"Triggering Linear indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_linear_issues_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Linear indexing started in the background." 
- - elif connector.connector_type == SearchSourceConnectorType.JIRA_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import index_jira_issues_task - - logger.info( - f"Triggering Jira indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_jira_issues_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Jira indexing started in the background." - elif connector.connector_type == SearchSourceConnectorType.CONFLUENCE_CONNECTOR: from app.tasks.celery_tasks.connector_tasks import ( index_confluence_pages_task, @@ -892,59 +850,6 @@ async def index_connector_content( ) response_message = "BookStack indexing started in the background." - elif connector.connector_type == SearchSourceConnectorType.CLICKUP_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import index_clickup_tasks_task - - logger.info( - f"Triggering ClickUp indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_clickup_tasks_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "ClickUp indexing started in the background." - - elif ( - connector.connector_type - == SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR - ): - from app.tasks.celery_tasks.connector_tasks import ( - index_google_calendar_events_task, - ) - - logger.info( - f"Triggering Google Calendar indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_google_calendar_events_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Google Calendar indexing started in the background." 
- elif connector.connector_type == SearchSourceConnectorType.AIRTABLE_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import ( - index_airtable_records_task, - ) - - logger.info( - f"Triggering Airtable indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_airtable_records_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Airtable indexing started in the background." - elif ( - connector.connector_type == SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR - ): - from app.tasks.celery_tasks.connector_tasks import ( - index_google_gmail_messages_task, - ) - - logger.info( - f"Triggering Google Gmail indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_google_gmail_messages_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Google Gmail indexing started in the background." - elif ( connector.connector_type == SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR ): @@ -1089,30 +994,6 @@ async def index_connector_content( ) response_message = "Dropbox indexing started in the background." - elif connector.connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import ( - index_discord_messages_task, - ) - - logger.info( - f"Triggering Discord indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_discord_messages_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Discord indexing started in the background." 
- - elif connector.connector_type == SearchSourceConnectorType.LUMA_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import index_luma_events_task - - logger.info( - f"Triggering Luma indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_luma_events_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Luma indexing started in the background." - elif ( connector.connector_type == SearchSourceConnectorType.ELASTICSEARCH_CONNECTOR From 0ab7d6a5e385d071befec0c386121181288b0228 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:52:43 +0200 Subject: [PATCH 35/57] set is_indexable=False for all live connector add routes --- surfsense_backend/app/routes/airtable_add_connector_route.py | 2 +- surfsense_backend/app/routes/clickup_add_connector_route.py | 4 ++-- surfsense_backend/app/routes/discord_add_connector_route.py | 2 +- .../app/routes/google_calendar_add_connector_route.py | 2 +- .../app/routes/google_gmail_add_connector_route.py | 2 +- surfsense_backend/app/routes/jira_add_connector_route.py | 2 +- surfsense_backend/app/routes/linear_add_connector_route.py | 2 +- surfsense_backend/app/routes/luma_add_connector_route.py | 4 ++-- surfsense_backend/app/routes/slack_add_connector_route.py | 2 +- surfsense_backend/app/routes/teams_add_connector_route.py | 2 +- 10 files changed, 12 insertions(+), 12 deletions(-) diff --git a/surfsense_backend/app/routes/airtable_add_connector_route.py b/surfsense_backend/app/routes/airtable_add_connector_route.py index 1e0b1eb5d..f70b9166b 100644 --- a/surfsense_backend/app/routes/airtable_add_connector_route.py +++ b/surfsense_backend/app/routes/airtable_add_connector_route.py @@ -311,7 +311,7 @@ async def airtable_callback( new_connector = SearchSourceConnector( name=connector_name, connector_type=SearchSourceConnectorType.AIRTABLE_CONNECTOR, - is_indexable=True, + is_indexable=False, 
config=credentials_dict, search_space_id=space_id, user_id=user_id, diff --git a/surfsense_backend/app/routes/clickup_add_connector_route.py b/surfsense_backend/app/routes/clickup_add_connector_route.py index 2cd63eca2..f7b0876e5 100644 --- a/surfsense_backend/app/routes/clickup_add_connector_route.py +++ b/surfsense_backend/app/routes/clickup_add_connector_route.py @@ -301,7 +301,7 @@ async def clickup_callback( # Update existing connector existing_connector.config = connector_config existing_connector.name = "ClickUp Connector" - existing_connector.is_indexable = True + existing_connector.is_indexable = False logger.info( f"Updated existing ClickUp connector for user {user_id} in space {space_id}" ) @@ -310,7 +310,7 @@ async def clickup_callback( new_connector = SearchSourceConnector( name="ClickUp Connector", connector_type=SearchSourceConnectorType.CLICKUP_CONNECTOR, - is_indexable=True, + is_indexable=False, config=connector_config, search_space_id=space_id, user_id=user_id, diff --git a/surfsense_backend/app/routes/discord_add_connector_route.py b/surfsense_backend/app/routes/discord_add_connector_route.py index 27bfffc90..4ab48f544 100644 --- a/surfsense_backend/app/routes/discord_add_connector_route.py +++ b/surfsense_backend/app/routes/discord_add_connector_route.py @@ -326,7 +326,7 @@ async def discord_callback( new_connector = SearchSourceConnector( name=connector_name, connector_type=SearchSourceConnectorType.DISCORD_CONNECTOR, - is_indexable=True, + is_indexable=False, config=connector_config, search_space_id=space_id, user_id=user_id, diff --git a/surfsense_backend/app/routes/google_calendar_add_connector_route.py b/surfsense_backend/app/routes/google_calendar_add_connector_route.py index d7ccf62ca..a143fd50d 100644 --- a/surfsense_backend/app/routes/google_calendar_add_connector_route.py +++ b/surfsense_backend/app/routes/google_calendar_add_connector_route.py @@ -340,7 +340,7 @@ async def calendar_callback( config=creds_dict, 
search_space_id=space_id, user_id=user_id, - is_indexable=True, + is_indexable=False, ) session.add(db_connector) await session.commit() diff --git a/surfsense_backend/app/routes/google_gmail_add_connector_route.py b/surfsense_backend/app/routes/google_gmail_add_connector_route.py index dd8feb1c7..9b807a556 100644 --- a/surfsense_backend/app/routes/google_gmail_add_connector_route.py +++ b/surfsense_backend/app/routes/google_gmail_add_connector_route.py @@ -371,7 +371,7 @@ async def gmail_callback( config=creds_dict, search_space_id=space_id, user_id=user_id, - is_indexable=True, + is_indexable=False, ) session.add(db_connector) await session.commit() diff --git a/surfsense_backend/app/routes/jira_add_connector_route.py b/surfsense_backend/app/routes/jira_add_connector_route.py index 6cd6283d7..eeb4f91d9 100644 --- a/surfsense_backend/app/routes/jira_add_connector_route.py +++ b/surfsense_backend/app/routes/jira_add_connector_route.py @@ -386,7 +386,7 @@ async def jira_callback( new_connector = SearchSourceConnector( name=connector_name, connector_type=SearchSourceConnectorType.JIRA_CONNECTOR, - is_indexable=True, + is_indexable=False, config=connector_config, search_space_id=space_id, user_id=user_id, diff --git a/surfsense_backend/app/routes/linear_add_connector_route.py b/surfsense_backend/app/routes/linear_add_connector_route.py index 9345ae495..f59c17d25 100644 --- a/surfsense_backend/app/routes/linear_add_connector_route.py +++ b/surfsense_backend/app/routes/linear_add_connector_route.py @@ -399,7 +399,7 @@ async def linear_callback( new_connector = SearchSourceConnector( name=connector_name, connector_type=SearchSourceConnectorType.LINEAR_CONNECTOR, - is_indexable=True, + is_indexable=False, config=connector_config, search_space_id=space_id, user_id=user_id, diff --git a/surfsense_backend/app/routes/luma_add_connector_route.py b/surfsense_backend/app/routes/luma_add_connector_route.py index 04d840a08..7040581bc 100644 --- 
a/surfsense_backend/app/routes/luma_add_connector_route.py +++ b/surfsense_backend/app/routes/luma_add_connector_route.py @@ -61,7 +61,7 @@ async def add_luma_connector( if existing_connector: # Update existing connector with new API key existing_connector.config = {"api_key": request.api_key} - existing_connector.is_indexable = True + existing_connector.is_indexable = False await session.commit() await session.refresh(existing_connector) @@ -82,7 +82,7 @@ async def add_luma_connector( config={"api_key": request.api_key}, search_space_id=request.space_id, user_id=user.id, - is_indexable=True, + is_indexable=False, ) session.add(db_connector) diff --git a/surfsense_backend/app/routes/slack_add_connector_route.py b/surfsense_backend/app/routes/slack_add_connector_route.py index 405ab2c4f..f6a1458a0 100644 --- a/surfsense_backend/app/routes/slack_add_connector_route.py +++ b/surfsense_backend/app/routes/slack_add_connector_route.py @@ -312,7 +312,7 @@ async def slack_callback( new_connector = SearchSourceConnector( name=connector_name, connector_type=SearchSourceConnectorType.SLACK_CONNECTOR, - is_indexable=True, + is_indexable=False, config=connector_config, search_space_id=space_id, user_id=user_id, diff --git a/surfsense_backend/app/routes/teams_add_connector_route.py b/surfsense_backend/app/routes/teams_add_connector_route.py index bbaae3a5f..9d0f5144f 100644 --- a/surfsense_backend/app/routes/teams_add_connector_route.py +++ b/surfsense_backend/app/routes/teams_add_connector_route.py @@ -321,7 +321,7 @@ async def teams_callback( new_connector = SearchSourceConnector( name=connector_name, connector_type=SearchSourceConnectorType.TEAMS_CONNECTOR, - is_indexable=True, + is_indexable=False, config=connector_config, search_space_id=space_id, user_id=user_id, From e676ebfabeb0584cee14232eb90575646dd8b040 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 21 Apr 2026 21:52:54 +0200 Subject: [PATCH 36/57] remove live connectors from AUTO_INDEX_DEFAULTS --- 
.../constants/connector-constants.ts | 54 ------------------- 1 file changed, 54 deletions(-) diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index dcd63f525..39e827d1a 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -297,66 +297,18 @@ export interface AutoIndexConfig { } export const AUTO_INDEX_DEFAULTS: Record = { - [EnumConnectorName.GOOGLE_GMAIL_CONNECTOR]: { - daysBack: 30, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 30 days of emails.", - }, [EnumConnectorName.COMPOSIO_GMAIL_CONNECTOR]: { daysBack: 30, daysForward: 0, frequencyMinutes: 1440, syncDescription: "Syncing your last 30 days of emails.", }, - [EnumConnectorName.SLACK_CONNECTOR]: { - daysBack: 30, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 30 days of messages.", - }, - [EnumConnectorName.DISCORD_CONNECTOR]: { - daysBack: 30, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 30 days of messages.", - }, - [EnumConnectorName.TEAMS_CONNECTOR]: { - daysBack: 30, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 30 days of messages.", - }, - [EnumConnectorName.GOOGLE_CALENDAR_CONNECTOR]: { - daysBack: 90, - daysForward: 90, - frequencyMinutes: 1440, - syncDescription: "Syncing 90 days of past and upcoming events.", - }, [EnumConnectorName.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR]: { daysBack: 90, daysForward: 90, frequencyMinutes: 1440, syncDescription: "Syncing 90 days of past and upcoming events.", }, - [EnumConnectorName.LINEAR_CONNECTOR]: { - daysBack: 90, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 90 days of issues.", - }, - 
[EnumConnectorName.JIRA_CONNECTOR]: { - daysBack: 90, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 90 days of issues.", - }, - [EnumConnectorName.CLICKUP_CONNECTOR]: { - daysBack: 90, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 90 days of tasks.", - }, [EnumConnectorName.NOTION_CONNECTOR]: { daysBack: 365, daysForward: 0, @@ -369,12 +321,6 @@ export const AUTO_INDEX_DEFAULTS: Record = { frequencyMinutes: 1440, syncDescription: "Syncing your documentation.", }, - [EnumConnectorName.AIRTABLE_CONNECTOR]: { - daysBack: 365, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your bases.", - }, }; export const AUTO_INDEX_CONNECTOR_TYPES = new Set(Object.keys(AUTO_INDEX_DEFAULTS)); From 940889c291fd8ca6fa445efa5bf73fdde1949d0c Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 08:42:38 +0200 Subject: [PATCH 37/57] fix open redirect, error leaking, unused imports, state validation --- .../app/agents/new_chat/tools/discord/_auth.py | 4 ---- .../app/agents/new_chat/tools/luma/_auth.py | 4 ---- .../app/agents/new_chat/tools/teams/_auth.py | 6 ------ surfsense_backend/app/routes/__init__.py | 2 +- surfsense_backend/app/routes/mcp_oauth_route.py | 13 ++++++++----- .../app/routes/oauth_connector_base.py | 8 ++++---- 6 files changed, 13 insertions(+), 24 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/tools/discord/_auth.py b/surfsense_backend/app/agents/new_chat/tools/discord/_auth.py index b369c10f1..1f51e3660 100644 --- a/surfsense_backend/app/agents/new_chat/tools/discord/_auth.py +++ b/surfsense_backend/app/agents/new_chat/tools/discord/_auth.py @@ -1,7 +1,5 @@ """Shared auth helper for Discord agent tools (REST API, not gateway bot).""" -import logging - from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select @@ -9,8 +7,6 @@ from app.config import config from app.db import SearchSourceConnector, SearchSourceConnectorType 
from app.utils.oauth_security import TokenEncryption -logger = logging.getLogger(__name__) - DISCORD_API = "https://discord.com/api/v10" diff --git a/surfsense_backend/app/agents/new_chat/tools/luma/_auth.py b/surfsense_backend/app/agents/new_chat/tools/luma/_auth.py index ef2fa8540..1d88161d6 100644 --- a/surfsense_backend/app/agents/new_chat/tools/luma/_auth.py +++ b/surfsense_backend/app/agents/new_chat/tools/luma/_auth.py @@ -1,14 +1,10 @@ """Shared auth helper for Luma agent tools.""" -import logging - from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select from app.db import SearchSourceConnector, SearchSourceConnectorType -logger = logging.getLogger(__name__) - LUMA_API = "https://public-api.luma.com/v1" diff --git a/surfsense_backend/app/agents/new_chat/tools/teams/_auth.py b/surfsense_backend/app/agents/new_chat/tools/teams/_auth.py index 989fce7c6..f24f5502e 100644 --- a/surfsense_backend/app/agents/new_chat/tools/teams/_auth.py +++ b/surfsense_backend/app/agents/new_chat/tools/teams/_auth.py @@ -1,15 +1,9 @@ """Shared auth helper for Teams agent tools (Microsoft Graph REST API).""" -import logging - from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select -from app.config import config from app.db import SearchSourceConnector, SearchSourceConnectorType -from app.utils.oauth_security import TokenEncryption - -logger = logging.getLogger(__name__) GRAPH_API = "https://graph.microsoft.com/v1.0" diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py index 925c207a6..40ca7a7e8 100644 --- a/surfsense_backend/app/routes/__init__.py +++ b/surfsense_backend/app/routes/__init__.py @@ -96,7 +96,7 @@ router.include_router(logs_router) router.include_router(circleback_webhook_router) # Circleback meeting webhooks router.include_router(surfsense_docs_router) # Surfsense documentation for citations router.include_router(notifications_router) # Notifications with Zero sync 
-router.include_router(mcp_oauth_router) # MCP OAuth 2.1 for Linear, Jira, ClickUp +router.include_router(mcp_oauth_router) # MCP OAuth 2.1 for Linear, Jira, ClickUp, Slack, Airtable router.include_router(composio_router) # Composio OAuth and toolkit management router.include_router(public_chat_router) # Public chat sharing and cloning router.include_router(incentive_tasks_router) # Incentive tasks for earning free pages diff --git a/surfsense_backend/app/routes/mcp_oauth_route.py b/surfsense_backend/app/routes/mcp_oauth_route.py index e47dc0a62..0870d52fe 100644 --- a/surfsense_backend/app/routes/mcp_oauth_route.py +++ b/surfsense_backend/app/routes/mcp_oauth_route.py @@ -182,7 +182,7 @@ async def connect_mcp_service( except Exception as e: logger.error("Failed to initiate %s MCP OAuth: %s", service, e, exc_info=True) raise HTTPException( - status_code=500, detail=f"Failed to initiate {service} MCP OAuth: {e!s}", + status_code=500, detail=f"Failed to initiate {service} MCP OAuth.", ) from e @@ -221,6 +221,9 @@ async def mcp_oauth_callback( space_id = data["space_id"] svc_key = data.get("service", service) + if svc_key != service: + raise HTTPException(status_code=400, detail="State/path service mismatch") + from app.services.mcp_oauth.registry import get_service svc = get_service(svc_key) @@ -315,7 +318,7 @@ async def mcp_oauth_callback( svc.name, db_connector.id, user_id, ) reauth_return_url = data.get("return_url") - if reauth_return_url and reauth_return_url.startswith("/"): + if reauth_return_url and reauth_return_url.startswith("/") and not reauth_return_url.startswith("//"): return RedirectResponse( url=f"{config.NEXT_FRONTEND_URL}{reauth_return_url}" ) @@ -347,7 +350,7 @@ async def mcp_oauth_callback( except IntegrityError as e: await session.rollback() raise HTTPException( - status_code=409, detail=f"Database integrity error: {e!s}", + status_code=409, detail="A connector for this service already exists.", ) from e _invalidate_cache(space_id) @@ -368,7 
+371,7 @@ async def mcp_oauth_callback( ) raise HTTPException( status_code=500, - detail=f"Failed to complete {service} MCP OAuth: {e!s}", + detail=f"Failed to complete {service} MCP OAuth.", ) from e @@ -495,7 +498,7 @@ async def reauth_mcp_service( ) raise HTTPException( status_code=500, - detail=f"Failed to initiate {service} MCP re-auth: {e!s}", + detail=f"Failed to initiate {service} MCP re-auth.", ) from e diff --git a/surfsense_backend/app/routes/oauth_connector_base.py b/surfsense_backend/app/routes/oauth_connector_base.py index 0483d2540..0638e8f34 100644 --- a/surfsense_backend/app/routes/oauth_connector_base.py +++ b/surfsense_backend/app/routes/oauth_connector_base.py @@ -430,7 +430,7 @@ class OAuthConnectorRoute: state_mgr = oauth._get_state_manager() extra: dict[str, Any] = {"connector_id": connector_id} - if return_url and return_url.startswith("/"): + if return_url and return_url.startswith("/") and not return_url.startswith("//"): extra["return_url"] = return_url auth_params: dict[str, str] = { @@ -498,7 +498,7 @@ class OAuthConnectorRoute: data = state_mgr.validate_state(state) except Exception as e: raise HTTPException( - status_code=400, detail=f"Invalid state parameter: {e!s}" + status_code=400, detail="Invalid or expired state parameter." ) from e user_id = UUID(data["user_id"]) @@ -552,7 +552,7 @@ class OAuthConnectorRoute: db_connector.id, user_id, ) - if reauth_return_url and reauth_return_url.startswith("/"): + if reauth_return_url and reauth_return_url.startswith("/") and not reauth_return_url.startswith("//"): return RedirectResponse( url=f"{config.NEXT_FRONTEND_URL}{reauth_return_url}" ) @@ -603,7 +603,7 @@ class OAuthConnectorRoute: except IntegrityError as e: await session.rollback() raise HTTPException( - status_code=409, detail=f"Database integrity error: {e!s}" + status_code=409, detail="A connector for this service already exists." 
) from e logger.info( From ea3508cb25db5369dc01c5443fc318830089673f Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 08:57:28 +0200 Subject: [PATCH 38/57] use native connector types for MCP OAuth, restore original UI --- .../app/agents/new_chat/tools/mcp_tool.py | 5 +- .../app/routes/mcp_oauth_route.py | 42 ++++++++--------- .../app/services/mcp_oauth/registry.py | 6 +++ .../constants/connector-constants.ts | 47 ++----------------- .../tabs/all-connectors-tab.tsx | 23 +-------- 5 files changed, 34 insertions(+), 89 deletions(-) diff --git a/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py b/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py index cf3e51166..47ee16f7d 100644 --- a/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py +++ b/surfsense_backend/app/agents/new_chat/tools/mcp_tool.py @@ -530,11 +530,12 @@ async def load_mcp_tools( return list(cached_tools) try: + # Find all connectors with MCP server config: generic MCP_CONNECTOR type + # and service-specific types (LINEAR_CONNECTOR, etc.) created via MCP OAuth. 
result = await session.execute( select(SearchSourceConnector).filter( - SearchSourceConnector.connector_type - == SearchSourceConnectorType.MCP_CONNECTOR, SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.config.has_key("server_config"), # noqa: W601 ), ) diff --git a/surfsense_backend/app/routes/mcp_oauth_route.py b/surfsense_backend/app/routes/mcp_oauth_route.py index 0870d52fe..f7164eab3 100644 --- a/surfsense_backend/app/routes/mcp_oauth_route.py +++ b/surfsense_backend/app/routes/mcp_oauth_route.py @@ -56,9 +56,7 @@ def _get_token_encryption() -> TokenEncryption: def _build_redirect_uri(service: str) -> str: - base = config.BACKEND_URL - if not base: - raise HTTPException(status_code=500, detail="BACKEND_URL not configured.") + base = config.BACKEND_URL or "http://localhost:8000" return f"{base.rstrip('/')}/api/v1/auth/mcp/{service}/connector/callback" @@ -288,6 +286,7 @@ async def mcp_oauth_callback( } # ---- Re-auth path ---- + db_connector_type = SearchSourceConnectorType(svc.connector_type) reauth_connector_id = data.get("connector_id") if reauth_connector_id: result = await session.execute( @@ -295,8 +294,7 @@ async def mcp_oauth_callback( SearchSourceConnector.id == reauth_connector_id, SearchSourceConnector.user_id == user_id, SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.connector_type - == SearchSourceConnectorType.MCP_CONNECTOR, + SearchSourceConnector.connector_type == db_connector_type, ) ) db_connector = result.scalars().first() @@ -329,15 +327,15 @@ async def mcp_oauth_callback( # ---- New connector path ---- connector_name = await generate_unique_connector_name( session, - SearchSourceConnectorType.MCP_CONNECTOR, + db_connector_type, space_id, user_id, - f"{svc.name} MCP", + svc.name, ) new_connector = SearchSourceConnector( name=connector_name, - connector_type=SearchSourceConnectorType.MCP_CONNECTOR, + connector_type=db_connector_type, is_indexable=False, config=connector_config, 
search_space_id=space_id, @@ -388,26 +386,26 @@ async def reauth_mcp_service( user: User = Depends(current_active_user), session: AsyncSession = Depends(get_async_session), ): - result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == connector_id, - SearchSourceConnector.user_id == user.id, - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.connector_type - == SearchSourceConnectorType.MCP_CONNECTOR, - ) - ) - if not result.scalars().first(): - raise HTTPException( - status_code=404, detail="MCP connector not found or access denied", - ) - from app.services.mcp_oauth.registry import get_service svc = get_service(service) if not svc: raise HTTPException(status_code=404, detail=f"Unknown MCP service: {service}") + db_connector_type = SearchSourceConnectorType(svc.connector_type) + result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == connector_id, + SearchSourceConnector.user_id == user.id, + SearchSourceConnector.search_space_id == space_id, + SearchSourceConnector.connector_type == db_connector_type, + ) + ) + if not result.scalars().first(): + raise HTTPException( + status_code=404, detail="Connector not found or access denied", + ) + try: from app.services.mcp_oauth.discovery import ( discover_oauth_metadata, diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index 3f9a03fbc..e6a9d20a5 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -15,6 +15,7 @@ from dataclasses import dataclass, field class MCPServiceConfig: name: str mcp_url: str + connector_type: str supports_dcr: bool = True oauth_discovery_origin: str | None = None client_id_env: str | None = None @@ -26,18 +27,22 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { "linear": MCPServiceConfig( name="Linear", mcp_url="https://mcp.linear.app/mcp", 
+ connector_type="LINEAR_CONNECTOR", ), "jira": MCPServiceConfig( name="Jira", mcp_url="https://mcp.atlassian.com/v1/mcp", + connector_type="JIRA_CONNECTOR", ), "clickup": MCPServiceConfig( name="ClickUp", mcp_url="https://mcp.clickup.com/mcp", + connector_type="CLICKUP_CONNECTOR", ), "slack": MCPServiceConfig( name="Slack", mcp_url="https://mcp.slack.com/mcp", + connector_type="SLACK_CONNECTOR", supports_dcr=False, client_id_env="SLACK_CLIENT_ID", client_secret_env="SLACK_CLIENT_SECRET", @@ -45,6 +50,7 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { "airtable": MCPServiceConfig( name="Airtable", mcp_url="https://mcp.airtable.com/mcp", + connector_type="AIRTABLE_CONNECTOR", oauth_discovery_origin="https://airtable.com", ), } diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 39e827d1a..08ffde9ae 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -31,7 +31,7 @@ export const OAUTH_CONNECTORS = [ title: "Airtable", description: "Search your Airtable bases", connectorType: EnumConnectorName.AIRTABLE_CONNECTOR, - authEndpoint: "/api/v1/auth/airtable/connector/add/", + authEndpoint: "/api/v1/auth/mcp/airtable/connector/add/", }, { id: "notion-connector", @@ -45,14 +45,14 @@ export const OAUTH_CONNECTORS = [ title: "Linear", description: "Search issues & projects", connectorType: EnumConnectorName.LINEAR_CONNECTOR, - authEndpoint: "/api/v1/auth/linear/connector/add/", + authEndpoint: "/api/v1/auth/mcp/linear/connector/add/", }, { id: "slack-connector", title: "Slack", description: "Search Slack messages", connectorType: EnumConnectorName.SLACK_CONNECTOR, - authEndpoint: "/api/v1/auth/slack/connector/add/", + authEndpoint: "/api/v1/auth/mcp/slack/connector/add/", }, { id: 
"teams-connector", @@ -87,7 +87,7 @@ export const OAUTH_CONNECTORS = [ title: "Jira", description: "Search Jira issues", connectorType: EnumConnectorName.JIRA_CONNECTOR, - authEndpoint: "/api/v1/auth/jira/connector/add/", + authEndpoint: "/api/v1/auth/mcp/jira/connector/add/", }, { id: "confluence-connector", @@ -101,47 +101,8 @@ export const OAUTH_CONNECTORS = [ title: "ClickUp", description: "Search ClickUp tasks", connectorType: EnumConnectorName.CLICKUP_CONNECTOR, - authEndpoint: "/api/v1/auth/clickup/connector/add/", - }, -] as const; - -// MCP OAuth Connectors (one-click connect via official MCP servers) -export const MCP_OAUTH_CONNECTORS = [ - { - id: "linear-mcp-connector", - title: "Linear (MCP)", - description: "Interact with Linear issues via MCP", - connectorType: EnumConnectorName.MCP_CONNECTOR, - authEndpoint: "/api/v1/auth/mcp/linear/connector/add/", - }, - { - id: "jira-mcp-connector", - title: "Jira (MCP)", - description: "Interact with Jira issues via MCP", - connectorType: EnumConnectorName.MCP_CONNECTOR, - authEndpoint: "/api/v1/auth/mcp/jira/connector/add/", - }, - { - id: "clickup-mcp-connector", - title: "ClickUp (MCP)", - description: "Interact with ClickUp tasks via MCP", - connectorType: EnumConnectorName.MCP_CONNECTOR, authEndpoint: "/api/v1/auth/mcp/clickup/connector/add/", }, - { - id: "slack-mcp-connector", - title: "Slack (MCP)", - description: "Interact with Slack channels via MCP", - connectorType: EnumConnectorName.MCP_CONNECTOR, - authEndpoint: "/api/v1/auth/mcp/slack/connector/add/", - }, - { - id: "airtable-mcp-connector", - title: "Airtable (MCP)", - description: "Interact with Airtable bases via MCP", - connectorType: EnumConnectorName.MCP_CONNECTOR, - authEndpoint: "/api/v1/auth/mcp/airtable/connector/add/", - }, ] as const; // Content Sources (tools that extract and import content from external sources) diff --git a/surfsense_web/components/assistant-ui/connector-popup/tabs/all-connectors-tab.tsx 
b/surfsense_web/components/assistant-ui/connector-popup/tabs/all-connectors-tab.tsx index d4f5e2fc1..814959ec4 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/tabs/all-connectors-tab.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/tabs/all-connectors-tab.tsx @@ -10,14 +10,12 @@ import { ConnectorCard } from "../components/connector-card"; import { COMPOSIO_CONNECTORS, CRAWLERS, - MCP_OAUTH_CONNECTORS, OAUTH_CONNECTORS, OTHER_CONNECTORS, } from "../constants/connector-constants"; import { getDocumentCountForConnector } from "../utils/connector-document-mapping"; type OAuthConnector = (typeof OAUTH_CONNECTORS)[number]; -type MCPOAuthConnector = (typeof MCP_OAUTH_CONNECTORS)[number]; type ComposioConnector = (typeof COMPOSIO_CONNECTORS)[number]; type OtherConnector = (typeof OTHER_CONNECTORS)[number]; type CrawlerConnector = (typeof CRAWLERS)[number]; @@ -130,10 +128,6 @@ export const AllConnectorsTab: FC = ({ (c) => c.connectorType === EnumConnectorName.AIRTABLE_CONNECTOR ); - const filteredMCPOAuth = MCP_OAUTH_CONNECTORS.filter( - (c) => matchesSearch(c.title, c.description), - ); - const moreIntegrationsComposio = filteredComposio.filter( (c) => !DOCUMENT_FILE_CONNECTOR_TYPES.has(c.connectorType) && @@ -285,7 +279,6 @@ export const AllConnectorsTab: FC = ({ nativeGoogleDriveConnectors.length > 0 || composioGoogleDriveConnectors.length > 0 || fileStorageConnectors.length > 0; - const hasMCPOAuth = filteredMCPOAuth.length > 0; const hasMoreIntegrations = otherDocumentYouTubeConnectors.length > 0 || otherDocumentNotionConnectors.length > 0 || @@ -295,7 +288,7 @@ export const AllConnectorsTab: FC = ({ moreIntegrationsOther.length > 0 || moreIntegrationsCrawlers.length > 0; - const hasAnyResults = hasDocumentFileConnectors || hasMCPOAuth || hasMoreIntegrations; + const hasAnyResults = hasDocumentFileConnectors || hasMoreIntegrations; if (!hasAnyResults && searchQuery) { return ( @@ -325,20 +318,6 @@ export const AllConnectorsTab: FC = 
({
)} - {/* Live MCP Integrations */} - {hasMCPOAuth && ( -
-
-

- Live MCP Integrations -

-
-
- {filteredMCPOAuth.map((connector) => renderOAuthCard(connector as OAuthConnector | ComposioConnector))} -
-
- )} - {/* More Integrations */} {hasMoreIntegrations && (
From c277b6c1219bd4794d7f89da72034e0161e2326e Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 09:01:35 +0200 Subject: [PATCH 39/57] skip indexing config dialog for non-indexable connectors --- .../assistant-ui/connector-popup/hooks/use-connector-dialog.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts index caa85ba2d..4a07693ce 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts @@ -314,6 +314,9 @@ export const useConnectorDialog = () => { oauthConnector.title, oauthConnector.connectorType ); + } else if (!newConnector.is_indexable) { + toast.success(`${oauthConnector.title} connected successfully!`); + await refetchAllConnectors(); } else { toast.dismiss("auto-index"); const config = validateIndexingConfigState({ From 2f4052aa71cfea2ea1d77ba9815eca4634b491ca Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 09:15:06 +0200 Subject: [PATCH 40/57] use pre-configured credentials for Airtable MCP OAuth --- surfsense_backend/app/services/mcp_oauth/registry.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index e6a9d20a5..769f2c88a 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -52,6 +52,9 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { mcp_url="https://mcp.airtable.com/mcp", connector_type="AIRTABLE_CONNECTOR", oauth_discovery_origin="https://airtable.com", + supports_dcr=False, + client_id_env="AIRTABLE_CLIENT_ID", + client_secret_env="AIRTABLE_CLIENT_SECRET", ), } From 0cc2475f6b766f990ff49cca1903c3305c035543 Mon Sep 17 00:00:00 2001 From: 
CREDO23 Date: Wed, 22 Apr 2026 09:21:19 +0200 Subject: [PATCH 41/57] add required OAuth scopes for Airtable MCP --- surfsense_backend/app/services/mcp_oauth/registry.py | 1 + 1 file changed, 1 insertion(+) diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index 769f2c88a..173fcf49d 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -55,6 +55,7 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { supports_dcr=False, client_id_env="AIRTABLE_CLIENT_ID", client_secret_env="AIRTABLE_CLIENT_SECRET", + scopes=["data.records:read", "data.records:write", "schema.bases:read", "schema.bases:write"], ), } From 225236e6f1d4a5de2a11280321cb213d4d22471b Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 09:35:15 +0200 Subject: [PATCH 42/57] add required OAuth scopes for Slack MCP --- surfsense_backend/app/services/mcp_oauth/registry.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index 173fcf49d..ea7832f70 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -46,6 +46,14 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { supports_dcr=False, client_id_env="SLACK_CLIENT_ID", client_secret_env="SLACK_CLIENT_SECRET", + scopes=[ + "search:read.public", "search:read.private", "search:read.mpim", + "search:read.im", "search:read.files", "search:read.users", + "chat:write", "channels:history", "groups:history", + "mpim:history", "im:history", + "canvases:read", "canvases:write", + "users:read", "users:read.email", + ], ), "airtable": MCPServiceConfig( name="Airtable", From 3638d72b298e2cebab7cce4d46f80b7bce787d08 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 09:41:19 +0200 Subject: [PATCH 43/57] restore full Slack MCP scopes for 
all MCP tools --- surfsense_backend/app/services/mcp_oauth/registry.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index ea7832f70..4d87ceb40 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -49,8 +49,8 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { scopes=[ "search:read.public", "search:read.private", "search:read.mpim", "search:read.im", "search:read.files", "search:read.users", - "chat:write", "channels:history", "groups:history", - "mpim:history", "im:history", + "chat:write", + "channels:history", "groups:history", "mpim:history", "im:history", "canvases:read", "canvases:write", "users:read", "users:read.email", ], From 820326e3ee53386cc5c6605e00d4602cb57c7b16 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 09:54:16 +0200 Subject: [PATCH 44/57] use user_scope param for Slack OAuth --- surfsense_backend/app/routes/mcp_oauth_route.py | 4 ++-- surfsense_backend/app/services/mcp_oauth/registry.py | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/surfsense_backend/app/routes/mcp_oauth_route.py b/surfsense_backend/app/routes/mcp_oauth_route.py index f7164eab3..98ca2be0f 100644 --- a/surfsense_backend/app/routes/mcp_oauth_route.py +++ b/surfsense_backend/app/routes/mcp_oauth_route.py @@ -165,7 +165,7 @@ async def connect_mcp_service( "state": state, } if svc.scopes: - auth_params["scope"] = " ".join(svc.scopes) + auth_params[svc.scope_param] = " ".join(svc.scopes) auth_url = f"{auth_endpoint}?{urlencode(auth_params)}" @@ -478,7 +478,7 @@ async def reauth_mcp_service( "state": state, } if svc.scopes: - auth_params["scope"] = " ".join(svc.scopes) + auth_params[svc.scope_param] = " ".join(svc.scopes) auth_url = f"{auth_endpoint}?{urlencode(auth_params)}" diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py 
b/surfsense_backend/app/services/mcp_oauth/registry.py index 4d87ceb40..62eb2077f 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -21,6 +21,7 @@ class MCPServiceConfig: client_id_env: str | None = None client_secret_env: str | None = None scopes: list[str] = field(default_factory=list) + scope_param: str = "scope" MCP_SERVICES: dict[str, MCPServiceConfig] = { @@ -46,6 +47,7 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { supports_dcr=False, client_id_env="SLACK_CLIENT_ID", client_secret_env="SLACK_CLIENT_SECRET", + scope_param="user_scope", scopes=[ "search:read.public", "search:read.private", "search:read.mpim", "search:read.im", "search:read.files", "search:read.users", From 970f62278b3677541526e6eeba78bf27bb15cbe0 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 09:56:18 +0200 Subject: [PATCH 45/57] revert scope_param, use standard scope for Slack v2_user endpoint --- surfsense_backend/app/routes/mcp_oauth_route.py | 4 ++-- surfsense_backend/app/services/mcp_oauth/registry.py | 2 -- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/surfsense_backend/app/routes/mcp_oauth_route.py b/surfsense_backend/app/routes/mcp_oauth_route.py index 98ca2be0f..f7164eab3 100644 --- a/surfsense_backend/app/routes/mcp_oauth_route.py +++ b/surfsense_backend/app/routes/mcp_oauth_route.py @@ -165,7 +165,7 @@ async def connect_mcp_service( "state": state, } if svc.scopes: - auth_params[svc.scope_param] = " ".join(svc.scopes) + auth_params["scope"] = " ".join(svc.scopes) auth_url = f"{auth_endpoint}?{urlencode(auth_params)}" @@ -478,7 +478,7 @@ async def reauth_mcp_service( "state": state, } if svc.scopes: - auth_params[svc.scope_param] = " ".join(svc.scopes) + auth_params["scope"] = " ".join(svc.scopes) auth_url = f"{auth_endpoint}?{urlencode(auth_params)}" diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py 
index 62eb2077f..4d87ceb40 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -21,7 +21,6 @@ class MCPServiceConfig: client_id_env: str | None = None client_secret_env: str | None = None scopes: list[str] = field(default_factory=list) - scope_param: str = "scope" MCP_SERVICES: dict[str, MCPServiceConfig] = { @@ -47,7 +46,6 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { supports_dcr=False, client_id_env="SLACK_CLIENT_ID", client_secret_env="SLACK_CLIENT_SECRET", - scope_param="user_scope", scopes=[ "search:read.public", "search:read.private", "search:read.mpim", "search:read.im", "search:read.files", "search:read.users", From dde1948a5c8782d96e9a478518940439f1114373 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 10:06:12 +0200 Subject: [PATCH 46/57] fix Slack MCP OAuth: v2 endpoint, user_scope param, nested token extraction --- .../app/routes/mcp_oauth_route.py | 30 ++++++++++++------- .../app/services/mcp_oauth/registry.py | 6 ++++ 2 files changed, 26 insertions(+), 10 deletions(-) diff --git a/surfsense_backend/app/routes/mcp_oauth_route.py b/surfsense_backend/app/routes/mcp_oauth_route.py index f7164eab3..efe928fd1 100644 --- a/surfsense_backend/app/routes/mcp_oauth_route.py +++ b/surfsense_backend/app/routes/mcp_oauth_route.py @@ -107,8 +107,8 @@ async def connect_mcp_service( metadata = await discover_oauth_metadata( svc.mcp_url, origin_override=svc.oauth_discovery_origin, ) - auth_endpoint = metadata.get("authorization_endpoint") - token_endpoint = metadata.get("token_endpoint") + auth_endpoint = svc.auth_endpoint_override or metadata.get("authorization_endpoint") + token_endpoint = svc.token_endpoint_override or metadata.get("token_endpoint") registration_endpoint = metadata.get("registration_endpoint") if not auth_endpoint or not token_endpoint: @@ -165,7 +165,7 @@ async def connect_mcp_service( "state": state, } if svc.scopes: - auth_params["scope"] = " 
".join(svc.scopes) + auth_params[svc.scope_param] = " ".join(svc.scopes) auth_url = f"{auth_endpoint}?{urlencode(auth_params)}" @@ -253,17 +253,27 @@ async def mcp_oauth_callback( ) access_token = token_json.get("access_token") + refresh_token = token_json.get("refresh_token") + expires_in = token_json.get("expires_in") + scope = token_json.get("scope") + + if not access_token and "authed_user" in token_json: + authed = token_json["authed_user"] + access_token = authed.get("access_token") + refresh_token = refresh_token or authed.get("refresh_token") + scope = scope or authed.get("scope") + expires_in = expires_in or authed.get("expires_in") + if not access_token: raise HTTPException( status_code=400, detail=f"No access token received from {svc.name}.", ) - refresh_token = token_json.get("refresh_token") expires_at = None - if token_json.get("expires_in"): + if expires_in: expires_at = datetime.now(UTC) + timedelta( - seconds=int(token_json["expires_in"]) + seconds=int(expires_in) ) connector_config = { @@ -280,7 +290,7 @@ async def mcp_oauth_callback( "access_token": enc.encrypt_token(access_token), "refresh_token": enc.encrypt_token(refresh_token) if refresh_token else None, "expires_at": expires_at.isoformat() if expires_at else None, - "scope": token_json.get("scope"), + "scope": scope, }, "_token_encrypted": True, } @@ -415,8 +425,8 @@ async def reauth_mcp_service( metadata = await discover_oauth_metadata( svc.mcp_url, origin_override=svc.oauth_discovery_origin, ) - auth_endpoint = metadata.get("authorization_endpoint") - token_endpoint = metadata.get("token_endpoint") + auth_endpoint = svc.auth_endpoint_override or metadata.get("authorization_endpoint") + token_endpoint = svc.token_endpoint_override or metadata.get("token_endpoint") registration_endpoint = metadata.get("registration_endpoint") if not auth_endpoint or not token_endpoint: @@ -478,7 +488,7 @@ async def reauth_mcp_service( "state": state, } if svc.scopes: - auth_params["scope"] = " 
".join(svc.scopes) + auth_params[svc.scope_param] = " ".join(svc.scopes) auth_url = f"{auth_endpoint}?{urlencode(auth_params)}" diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index 4d87ceb40..df6c6bb18 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -21,6 +21,9 @@ class MCPServiceConfig: client_id_env: str | None = None client_secret_env: str | None = None scopes: list[str] = field(default_factory=list) + scope_param: str = "scope" + auth_endpoint_override: str | None = None + token_endpoint_override: str | None = None MCP_SERVICES: dict[str, MCPServiceConfig] = { @@ -46,6 +49,9 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { supports_dcr=False, client_id_env="SLACK_CLIENT_ID", client_secret_env="SLACK_CLIENT_SECRET", + scope_param="user_scope", + auth_endpoint_override="https://slack.com/oauth/v2/authorize", + token_endpoint_override="https://slack.com/api/oauth.v2.access", scopes=[ "search:read.public", "search:read.private", "search:read.mpim", "search:read.im", "search:read.files", "search:read.users", From dfa40b88018e09f1e4f743d1cedd8e1bb4744441 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 10:50:43 +0200 Subject: [PATCH 47/57] fix MCP OAuth for all 5 services, add MCP connector edit view --- .../app/routes/mcp_oauth_route.py | 4 +-- .../app/services/mcp_oauth/registry.py | 10 +++---- .../components/mcp-service-config.tsx | 30 +++++++++++++++++++ .../views/connector-edit-view.tsx | 25 ++++++++++------ 4 files changed, 53 insertions(+), 16 deletions(-) create mode 100644 surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx diff --git a/surfsense_backend/app/routes/mcp_oauth_route.py b/surfsense_backend/app/routes/mcp_oauth_route.py index efe928fd1..b7c605089 100644 --- a/surfsense_backend/app/routes/mcp_oauth_route.py +++ 
b/surfsense_backend/app/routes/mcp_oauth_route.py @@ -128,7 +128,7 @@ async def connect_mcp_service( status_code=502, detail=f"DCR for {svc.name} did not return a client_id.", ) - elif not svc.supports_dcr and svc.client_id_env: + elif svc.client_id_env: client_id = getattr(config, svc.client_id_env, None) client_secret = getattr(config, svc.client_secret_env or "", None) or "" if not client_id: @@ -446,7 +446,7 @@ async def reauth_mcp_service( status_code=502, detail=f"DCR for {svc.name} did not return a client_id.", ) - elif not svc.supports_dcr and svc.client_id_env: + elif svc.client_id_env: client_id = getattr(config, svc.client_id_env, None) client_secret = getattr(config, svc.client_secret_env or "", None) or "" if not client_id: diff --git a/surfsense_backend/app/services/mcp_oauth/registry.py b/surfsense_backend/app/services/mcp_oauth/registry.py index df6c6bb18..cd1a0ae8c 100644 --- a/surfsense_backend/app/services/mcp_oauth/registry.py +++ b/surfsense_backend/app/services/mcp_oauth/registry.py @@ -1,9 +1,9 @@ -"""Registry of MCP services with OAuth 2.1 support. +"""Registry of MCP services with OAuth support. Each entry maps a URL-safe service key to its MCP server endpoint and -authentication strategy. Services with ``supports_dcr=True`` will use -RFC 7591 Dynamic Client Registration; the rest require pre-configured -credentials via environment variables. +authentication configuration. Services with ``supports_dcr=True`` use +RFC 7591 Dynamic Client Registration (the MCP server issues its own +credentials); the rest use pre-configured credentials via env vars. 
""" from __future__ import annotations @@ -65,8 +65,8 @@ MCP_SERVICES: dict[str, MCPServiceConfig] = { name="Airtable", mcp_url="https://mcp.airtable.com/mcp", connector_type="AIRTABLE_CONNECTOR", - oauth_discovery_origin="https://airtable.com", supports_dcr=False, + oauth_discovery_origin="https://airtable.com", client_id_env="AIRTABLE_CLIENT_ID", client_secret_env="AIRTABLE_CLIENT_SECRET", scopes=["data.records:read", "data.records:write", "schema.bases:read", "schema.bases:write"], diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx new file mode 100644 index 000000000..4f43694ad --- /dev/null +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx @@ -0,0 +1,30 @@ +"use client"; + +import { CheckCircle2 } from "lucide-react"; +import type { FC } from "react"; +import type { ConnectorConfigProps } from "../index"; + +export const MCPServiceConfig: FC = ({ connector }) => { + const serviceName = connector.config?.mcp_service as string | undefined; + + return ( +
+
+
+ +
+
+

Connected via MCP

+

+ Your agent can search, read, and take actions in{" "} + {serviceName + ? serviceName.charAt(0).toUpperCase() + serviceName.slice(1) + : "this service"}{" "} + in real time. No background indexing needed. +

+
+
+ +
+ ); +}; diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx index e19600ab2..3c92320da 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx @@ -17,7 +17,7 @@ import { PeriodicSyncConfig } from "../../components/periodic-sync-config"; import { SummaryConfig } from "../../components/summary-config"; import { VisionLLMConfig } from "../../components/vision-llm-config"; import { getConnectorDisplayName } from "../../tabs/all-connectors-tab"; -import { getConnectorConfigComponent } from "../index"; +import { type ConnectorConfigProps, getConnectorConfigComponent } from "../index"; const REAUTH_ENDPOINTS: Partial> = { [EnumConnectorName.LINEAR_CONNECTOR]: "/api/v1/auth/linear/connector/reauth", @@ -118,11 +118,16 @@ export const ConnectorEditView: FC = ({ } }, [searchSpaceId, searchSpaceIdAtom, reauthEndpoint, connector.id]); - // Get connector-specific config component - const ConnectorConfigComponent = useMemo( - () => getConnectorConfigComponent(connector.connector_type), - [connector.connector_type] - ); + const isMCPBacked = Boolean(connector.config?.server_config); + + // Get connector-specific config component (MCP-backed connectors use a generic view) + const ConnectorConfigComponent = useMemo(() => { + if (isMCPBacked) { + const { MCPServiceConfig } = require("../components/mcp-service-config"); + return MCPServiceConfig as FC; + } + return getConnectorConfigComponent(connector.connector_type); + }, [connector.connector_type, isMCPBacked]); const [isScrolled, setIsScrolled] = useState(false); const [hasMoreContent, setHasMoreContent] = useState(false); const [showDisconnectConfirm, setShowDisconnectConfirm] = useState(false); @@ 
-223,7 +228,9 @@ export const ConnectorEditView: FC = ({ {getConnectorDisplayName(connector.name)}

- Manage your connector settings and sync configuration + {isMCPBacked + ? "Connected — your agent can interact with this service in real time" + : "Manage your connector settings and sync configuration"}

@@ -421,7 +428,7 @@ export const ConnectorEditView: FC = ({ Re-authenticate - ) : ( + ) : !isMCPBacked ? ( - )} + ) : null} ); From a4bc621c2acae3a1305da77c3ff8046d7ab40c68 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 22 Apr 2026 11:22:04 +0200 Subject: [PATCH 48/57] uniform connector UX across all connector types --- .../components/connector-card.tsx | 16 ++++---- .../components/discord-config.tsx | 17 ++++----- .../components/mcp-service-config.tsx | 14 +++---- .../components/teams-config.tsx | 6 +-- .../views/connector-edit-view.tsx | 14 ++++--- .../constants/connector-constants.ts | 37 ++++++++++++++----- .../tabs/active-connectors-tab.tsx | 14 +++++-- .../views/connector-accounts-list-view.tsx | 25 +++++-------- 8 files changed, 82 insertions(+), 61 deletions(-) diff --git a/surfsense_web/components/assistant-ui/connector-popup/components/connector-card.tsx b/surfsense_web/components/assistant-ui/connector-popup/components/connector-card.tsx index d24057b1c..e0df73e66 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/components/connector-card.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/components/connector-card.tsx @@ -8,6 +8,7 @@ import { Spinner } from "@/components/ui/spinner"; import { EnumConnectorName } from "@/contracts/enums/connector"; import { getConnectorIcon } from "@/contracts/enums/connectorIcons"; import { cn } from "@/lib/utils"; +import { LIVE_CONNECTOR_TYPES } from "../constants/connector-constants"; import { useConnectorStatus } from "../hooks/use-connector-status"; import { ConnectorStatusBadge } from "./connector-status-badge"; @@ -55,6 +56,7 @@ export const ConnectorCard: FC = ({ onManage, }) => { const isMCP = connectorType === EnumConnectorName.MCP_CONNECTOR; + const isLive = !!connectorType && LIVE_CONNECTOR_TYPES.has(connectorType); // Get connector status const { getConnectorStatus, isConnectorEnabled, getConnectorStatusMessage, shouldShowWarnings } = useConnectorStatus(); @@ -123,14 +125,14 
@@ export const ConnectorCard: FC = ({ ) : ( <> - {formatDocumentCount(documentCount)} + {!isLive && {formatDocumentCount(documentCount)}} + {!isLive && accountCount !== undefined && accountCount > 0 && ( + + )} {accountCount !== undefined && accountCount > 0 && ( - <> - - - {accountCount} {accountCount === 1 ? "Account" : "Accounts"} - - + + {accountCount} {accountCount === 1 ? "Account" : "Accounts"} + )} )} diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/discord-config.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/discord-config.tsx index f782a6f4d..c8714ba40 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/discord-config.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/discord-config.tsx @@ -53,8 +53,7 @@ export const DiscordConfig: FC = ({ connector }) => { return () => document.removeEventListener("visibilitychange", handleVisibilityChange); }, [connector?.id, fetchChannels]); - // Separate channels by indexing capability - const readyToIndex = channels.filter((ch) => ch.can_index); + const accessible = channels.filter((ch) => ch.can_index); const needsPermissions = channels.filter((ch) => !ch.can_index); // Format last fetched time @@ -80,7 +79,7 @@ export const DiscordConfig: FC = ({ connector }) => {

- The bot needs "Read Message History" permission to index channels. Ask a + The bot needs "Read Message History" permission to access channels. Ask a server admin to grant this permission for channels shown below.

@@ -127,18 +126,18 @@ export const DiscordConfig: FC = ({ connector }) => { ) : (
- {/* Ready to index */} - {readyToIndex.length > 0 && ( + {/* Accessible channels */} + {accessible.length > 0 && (
0 && "border-b border-border")}>
- Ready to index + Accessible - {readyToIndex.length} {readyToIndex.length === 1 ? "channel" : "channels"} + {accessible.length} {accessible.length === 1 ? "channel" : "channels"}
- {readyToIndex.map((channel) => ( + {accessible.map((channel) => ( ))}
@@ -150,7 +149,7 @@ export const DiscordConfig: FC = ({ connector }) => {
- Grant permissions to index + Needs permissions {needsPermissions.length}{" "} {needsPermissions.length === 1 ? "channel" : "channels"} diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx index 4f43694ad..71d0e31a8 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/mcp-service-config.tsx @@ -6,25 +6,23 @@ import type { ConnectorConfigProps } from "../index"; export const MCPServiceConfig: FC = ({ connector }) => { const serviceName = connector.config?.mcp_service as string | undefined; + const displayName = serviceName + ? serviceName.charAt(0).toUpperCase() + serviceName.slice(1) + : "this service"; return (
-
+
-

Connected via MCP

+

Connected

- Your agent can search, read, and take actions in{" "} - {serviceName - ? serviceName.charAt(0).toUpperCase() + serviceName.slice(1) - : "this service"}{" "} - in real time. No background indexing needed. + Your agent can search, read, and take actions in {displayName}.

-
); }; diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx index ac08a6c03..e96ddfd29 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx @@ -18,9 +18,9 @@ export const TeamsConfig: FC = () => {

Microsoft Teams Access

- SurfSense will index messages from Teams channels that you have access to. The app can - only read messages from teams and channels where you are a member. Make sure you're a - member of the teams you want to index before connecting. + Your agent can search and read messages from Teams channels you have access to, + and send messages on your behalf. Make sure you're a member of the teams + you want to interact with.

diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx index 3c92320da..aa3c8d193 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx @@ -16,6 +16,7 @@ import { DateRangeSelector } from "../../components/date-range-selector"; import { PeriodicSyncConfig } from "../../components/periodic-sync-config"; import { SummaryConfig } from "../../components/summary-config"; import { VisionLLMConfig } from "../../components/vision-llm-config"; +import { LIVE_CONNECTOR_TYPES } from "../../constants/connector-constants"; import { getConnectorDisplayName } from "../../tabs/all-connectors-tab"; import { type ConnectorConfigProps, getConnectorConfigComponent } from "../index"; @@ -119,6 +120,7 @@ export const ConnectorEditView: FC = ({ }, [searchSpaceId, searchSpaceIdAtom, reauthEndpoint, connector.id]); const isMCPBacked = Boolean(connector.config?.server_config); + const isLive = isMCPBacked || LIVE_CONNECTOR_TYPES.has(connector.connector_type); // Get connector-specific config component (MCP-backed connectors use a generic view) const ConnectorConfigComponent = useMemo(() => { @@ -228,8 +230,8 @@ export const ConnectorEditView: FC = ({ {getConnectorDisplayName(connector.name)}

- {isMCPBacked - ? "Connected — your agent can interact with this service in real time" + {isLive + ? "Manage your connected account" : "Manage your connector settings and sync configuration"}

@@ -381,10 +383,12 @@ export const ConnectorEditView: FC = ({ {/* Fixed Footer - Action buttons */}
- {showDisconnectConfirm ? ( -
+ {showDisconnectConfirm ? ( +
- Are you sure? + {isLive + ? "Your agent will lose access to this service." + : "This will remove all indexed data."}
@@ -234,15 +231,13 @@ export const ConnectorAccountsListView: FC = ({ Syncing

- ) : ( -

- {isIndexableConnector(connector.connector_type) - ? connector.last_indexed_at - ? `Last indexed: ${formatRelativeDate(connector.last_indexed_at)}` - : "Never indexed" - : "Active"} + ) : !isLiveConnector(connector.connector_type) ? ( +

+ {connector.last_indexed_at + ? `Last indexed: ${formatRelativeDate(connector.last_indexed_at)}` + : "Never indexed"}

- )} + ) : null}
{isAuthExpired ? (
- {/* Quick Index Button - hidden when auth is expired */} - {connector.is_indexable && onQuickIndex && !isAuthExpired && ( + {/* Quick Index Button - hidden for live connectors and when auth is expired */} + {connector.is_indexable && !isLive && onQuickIndex && !isAuthExpired && ( - ) : !isMCPBacked ? ( + ) : !isLive ? (

- Configure when to start syncing your data + {isLive + ? "Your account is ready to use" + : "Configure when to start syncing your data"}

@@ -157,8 +161,8 @@ export const IndexingConfigurationView: FC = ({ )} - {/* Summary and sync settings - only shown for indexable connectors */} - {connector?.is_indexable && ( + {/* Summary and sync settings - hidden for live connectors */} + {connector?.is_indexable && !isLive && ( <> {/* AI Summary toggle */} @@ -209,8 +213,8 @@ export const IndexingConfigurationView: FC = ({ )} - {/* Info box - only shown for indexable connectors */} - {connector?.is_indexable && ( + {/* Info box - hidden for live connectors */} + {connector?.is_indexable && !isLive && (
@@ -238,14 +242,20 @@ export const IndexingConfigurationView: FC = ({ {/* Fixed Footer - Action buttons */}
- + {isLive ? ( + + ) : ( + + )}
); diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 1f324d53e..05f866d0f 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -13,7 +13,9 @@ export const LIVE_CONNECTOR_TYPES = new Set([ EnumConnectorName.DISCORD_CONNECTOR, EnumConnectorName.TEAMS_CONNECTOR, EnumConnectorName.GOOGLE_CALENDAR_CONNECTOR, + EnumConnectorName.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR, EnumConnectorName.GOOGLE_GMAIL_CONNECTOR, + EnumConnectorName.COMPOSIO_GMAIL_CONNECTOR, EnumConnectorName.LUMA_CONNECTOR, ]); @@ -30,7 +32,7 @@ export const OAUTH_CONNECTORS = [ { id: "google-gmail-connector", title: "Gmail", - description: "Search and read your emails", + description: "Search, read, draft, and send emails", connectorType: EnumConnectorName.GOOGLE_GMAIL_CONNECTOR, authEndpoint: "/api/v1/auth/google/gmail/connector/add/", selfHostedOnly: true, @@ -46,7 +48,7 @@ export const OAUTH_CONNECTORS = [ { id: "airtable-connector", title: "Airtable", - description: "Search, read, and manage records", + description: "Browse bases, tables, and records", connectorType: EnumConnectorName.AIRTABLE_CONNECTOR, authEndpoint: "/api/v1/auth/mcp/airtable/connector/add/", }, @@ -67,7 +69,7 @@ export const OAUTH_CONNECTORS = [ { id: "slack-connector", title: "Slack", - description: "Search, read, and send messages", + description: "Search and read channels and threads", connectorType: EnumConnectorName.SLACK_CONNECTOR, authEndpoint: "/api/v1/auth/mcp/slack/connector/add/", }, @@ -116,7 +118,7 @@ export const OAUTH_CONNECTORS = [ { id: "clickup-connector", title: "ClickUp", - description: "Search, read, and manage tasks", + description: "Search and read tasks", connectorType: EnumConnectorName.CLICKUP_CONNECTOR, authEndpoint: 
"/api/v1/auth/mcp/clickup/connector/add/", }, @@ -155,7 +157,7 @@ export const OTHER_CONNECTORS = [ { id: "luma-connector", title: "Luma", - description: "Search and manage events", + description: "Browse, read, and create events", connectorType: EnumConnectorName.LUMA_CONNECTOR, }, { @@ -214,14 +216,14 @@ export const COMPOSIO_CONNECTORS = [ { id: "composio-gmail", title: "Gmail", - description: "Search through your emails via Composio", + description: "Search, read, draft, and send emails via Composio", connectorType: EnumConnectorName.COMPOSIO_GMAIL_CONNECTOR, authEndpoint: "/api/v1/auth/composio/connector/add/?toolkit_id=gmail", }, { id: "composio-googlecalendar", title: "Google Calendar", - description: "Search through your events via Composio", + description: "Search and manage your events via Composio", connectorType: EnumConnectorName.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR, authEndpoint: "/api/v1/auth/composio/connector/add/?toolkit_id=googlecalendar", }, @@ -238,14 +240,14 @@ export const COMPOSIO_TOOLKITS = [ { id: "gmail", name: "Gmail", - description: "Search through your emails", - isIndexable: true, + description: "Search, read, draft, and send emails", + isIndexable: false, }, { id: "googlecalendar", name: "Google Calendar", - description: "Search through your events", - isIndexable: true, + description: "Search and manage your events", + isIndexable: false, }, { id: "slack", @@ -275,18 +277,6 @@ export interface AutoIndexConfig { } export const AUTO_INDEX_DEFAULTS: Record = { - [EnumConnectorName.COMPOSIO_GMAIL_CONNECTOR]: { - daysBack: 30, - daysForward: 0, - frequencyMinutes: 1440, - syncDescription: "Syncing your last 30 days of emails.", - }, - [EnumConnectorName.COMPOSIO_GOOGLE_CALENDAR_CONNECTOR]: { - daysBack: 90, - daysForward: 90, - frequencyMinutes: 1440, - syncDescription: "Syncing 90 days of past and upcoming events.", - }, [EnumConnectorName.NOTION_CONNECTOR]: { daysBack: 365, daysForward: 0, diff --git 
a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts index 9f968e2a7..a8d395e5c 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts @@ -38,6 +38,7 @@ import { AUTO_INDEX_CONNECTOR_TYPES, AUTO_INDEX_DEFAULTS, COMPOSIO_CONNECTORS, + LIVE_CONNECTOR_TYPES, OAUTH_CONNECTORS, OTHER_CONNECTORS, } from "../constants/connector-constants"; @@ -317,7 +318,12 @@ export const useConnectorDialog = () => { newConnector.id ); - if ( + const isLiveConnector = LIVE_CONNECTOR_TYPES.has(oauthConnector.connectorType); + + if (isLiveConnector) { + toast.success(`${oauthConnector.title} connected successfully!`); + await refetchAllConnectors(); + } else if ( newConnector.is_indexable && AUTO_INDEX_CONNECTOR_TYPES.has(oauthConnector.connectorType) ) {