From caf8d43afc7662c75b51b50d0b88ba0074a006b2 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 7 Jan 2026 17:22:38 +0200 Subject: [PATCH 01/28] auto-create default search space on registration --- surfsense_backend/app/users.py | 67 +++++++++++++++++++++++++++++++++- 1 file changed, 65 insertions(+), 2 deletions(-) diff --git a/surfsense_backend/app/users.py b/surfsense_backend/app/users.py index d51b30bd7..dd284307f 100644 --- a/surfsense_backend/app/users.py +++ b/surfsense_backend/app/users.py @@ -1,3 +1,4 @@ +import logging import uuid from fastapi import Depends, Request, Response @@ -12,7 +13,17 @@ from fastapi_users.db import SQLAlchemyUserDatabase from pydantic import BaseModel from app.config import config -from app.db import User, get_user_db +from app.db import ( + SearchSpace, + SearchSpaceMembership, + SearchSpaceRole, + User, + async_session_maker, + get_default_roles_config, + get_user_db, +) + +logger = logging.getLogger(__name__) class BearerResponse(BaseModel): @@ -36,7 +47,59 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]): verification_token_secret = SECRET async def on_after_register(self, user: User, request: Request | None = None): - print(f"User {user.id} has registered.") + """ + Called after a user registers. Creates a default search space for the user + so they can start chatting immediately without manual setup. + """ + logger.info(f"User {user.id} has registered. 
Creating default search space...") + + try: + async with async_session_maker() as session: + # Create default search space + default_search_space = SearchSpace( + name="My Search Space", + description="Your personal search space", + user_id=user.id, + ) + session.add(default_search_space) + await session.flush() # Get the search space ID + + # Create default roles + default_roles = get_default_roles_config() + owner_role_id = None + + for role_config in default_roles: + db_role = SearchSpaceRole( + name=role_config["name"], + description=role_config["description"], + permissions=role_config["permissions"], + is_default=role_config["is_default"], + is_system_role=role_config["is_system_role"], + search_space_id=default_search_space.id, + ) + session.add(db_role) + await session.flush() + + if role_config["name"] == "Owner": + owner_role_id = db_role.id + + # Create owner membership + owner_membership = SearchSpaceMembership( + user_id=user.id, + search_space_id=default_search_space.id, + role_id=owner_role_id, + is_owner=True, + ) + session.add(owner_membership) + + await session.commit() + logger.info( + f"Created default search space (ID: {default_search_space.id}) for user {user.id}" + ) + except Exception as e: + logger.error( + f"Failed to create default search space for user {user.id}: {e}" + ) async def on_after_forgot_password( self, user: User, token: str, request: Request | None = None From 2f8919baef7ef6cea947f4a2c618972ec85a1f43 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 7 Jan 2026 17:22:49 +0200 Subject: [PATCH 02/28] auto-redirect to chat on dashboard load --- surfsense_web/app/dashboard/page.tsx | 47 ++++++++++++++++++++++++++-- 1 file changed, 45 insertions(+), 2 deletions(-) diff --git a/surfsense_web/app/dashboard/page.tsx b/surfsense_web/app/dashboard/page.tsx index e241428d1..d343a28e3 100644 --- a/surfsense_web/app/dashboard/page.tsx +++ b/surfsense_web/app/dashboard/page.tsx @@ -7,7 +7,11 @@ import Image from "next/image"; import Link 
from "next/link"; import { useRouter } from "next/navigation"; import { useTranslations } from "next-intl"; -import { deleteSearchSpaceMutationAtom } from "@/atoms/search-spaces/search-space-mutation.atoms"; +import { useEffect, useRef, useState } from "react"; +import { + createSearchSpaceMutationAtom, + deleteSearchSpaceMutationAtom, +} from "@/atoms/search-spaces/search-space-mutation.atoms"; import { searchSpacesAtom } from "@/atoms/search-spaces/search-space-query.atoms"; import { currentUserAtom } from "@/atoms/user/user-query.atoms"; import { Logo } from "@/components/Logo"; @@ -129,6 +133,11 @@ const ErrorScreen = ({ message }: { message: string }) => { const DashboardPage = () => { const t = useTranslations("dashboard"); const tCommon = useTranslations("common"); + const router = useRouter(); + + // State for auto-creating search space + const [isAutoCreating, setIsAutoCreating] = useState(false); + const hasAttemptedAutoCreate = useRef(false); // Animation variants const containerVariants: Variants = { @@ -161,9 +170,42 @@ const DashboardPage = () => { refetch: refreshSearchSpaces, } = useAtomValue(searchSpacesAtom); const { mutateAsync: deleteSearchSpace } = useAtomValue(deleteSearchSpaceMutationAtom); + const { mutateAsync: createSearchSpace } = useAtomValue(createSearchSpaceMutationAtom); const { data: user, isPending: isLoadingUser, error: userError } = useAtomValue(currentUserAtom); + // Auto-redirect to chat or auto-create search space + useEffect(() => { + const handleAutoRedirect = async () => { + // Don't run if still loading or already attempted + if (loading || hasAttemptedAutoCreate.current) return; + + // If user has search spaces, redirect to the first one's chat + if (searchSpaces.length > 0) { + router.replace(`/dashboard/${searchSpaces[0].id}/new-chat`); + return; + } + + // If no search spaces exist (edge case for users who registered before this feature), + // auto-create one and redirect + hasAttemptedAutoCreate.current = true; + 
setIsAutoCreating(true); + + try { + const newSearchSpace = await createSearchSpace({ + name: "My Search Space", + description: "Your personal search space", + }); + router.replace(`/dashboard/${newSearchSpace.id}/new-chat`); + } catch (err) { + console.error("Failed to auto-create search space:", err); + setIsAutoCreating(false); + } + }; + + handleAutoRedirect(); + }, [loading, searchSpaces, router, createSearchSpace]); + // Create user object for UserDropdown const customUser = { name: user?.email ? user.email.split("@")[0] : "User", @@ -173,7 +215,8 @@ const DashboardPage = () => { avatar: "/icon-128.png", // Default avatar }; - if (loading) return ; + // Show loading while loading, auto-redirecting, or auto-creating + if (loading || isAutoCreating || (searchSpaces.length > 0 && !error)) return ; if (error) return ; const handleDeleteSearchSpace = async (id: number) => { From 348898b08b76599dfcf3648cd2f0e7a875a2f31a Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 7 Jan 2026 17:22:57 +0200 Subject: [PATCH 03/28] check search space access before redirect --- surfsense_web/components/TokenHandler.tsx | 74 +++++++++++++++-------- 1 file changed, 48 insertions(+), 26 deletions(-) diff --git a/surfsense_web/components/TokenHandler.tsx b/surfsense_web/components/TokenHandler.tsx index 42905ac0d..6117f6ede 100644 --- a/surfsense_web/components/TokenHandler.tsx +++ b/surfsense_web/components/TokenHandler.tsx @@ -2,8 +2,10 @@ import { useRouter, useSearchParams } from "next/navigation"; import { useEffect } from "react"; +import { membersApiService } from "@/lib/apis/members-api.service"; import { getAndClearRedirectPath, setBearerToken } from "@/lib/auth-utils"; import { trackLoginSuccess } from "@/lib/posthog/events"; +import { queryClient } from "@/lib/query-client/client"; interface TokenHandlerProps { redirectPath?: string; // Default path to redirect after storing token (if no saved path) @@ -36,34 +38,54 @@ const TokenHandler = ({ const token = 
searchParams.get(tokenParamName); if (token) { - try { - // Track login success for OAuth flows (e.g., Google) - // Local login already tracks success before redirecting here - const alreadyTracked = sessionStorage.getItem("login_success_tracked"); - if (!alreadyTracked) { - // This is an OAuth flow (Google login) - track success - trackLoginSuccess("google"); + const handleAuth = async () => { + try { + // Track login success for OAuth flows (e.g., Google) + // Local login already tracks success before redirecting here + const alreadyTracked = sessionStorage.getItem("login_success_tracked"); + if (!alreadyTracked) { + // This is an OAuth flow (Google login) - track success + trackLoginSuccess("google"); + } + // Clear the flag for future logins + sessionStorage.removeItem("login_success_tracked"); + + // Store token in localStorage using both methods for compatibility + localStorage.setItem(storageKey, token); + setBearerToken(token); + + // Clear any cached data from previous sessions + queryClient.clear(); + + // Check if there's a saved redirect path from before the auth flow + const savedRedirectPath = getAndClearRedirectPath(); + + // Check if saved path contains a search space ID and verify access + const searchSpaceMatch = savedRedirectPath?.match(/^\/dashboard\/(\d+)/); + if (searchSpaceMatch && savedRedirectPath) { + const searchSpaceId = Number(searchSpaceMatch[1]); + try { + await membersApiService.getMyAccess({ search_space_id: searchSpaceId }); + router.push(savedRedirectPath); + return; + } catch { + // User doesn't have access, fall through to default + } + } + + // Use the saved path if available, otherwise use the default redirectPath + const finalRedirectPath = savedRedirectPath || redirectPath; + + // Redirect to the appropriate path + router.push(finalRedirectPath); + } catch (error) { + console.error("Error storing token in localStorage:", error); + // Even if there's an error, try to redirect to the default path + router.push(redirectPath); } 
- // Clear the flag for future logins - sessionStorage.removeItem("login_success_tracked"); + }; - // Store token in localStorage using both methods for compatibility - localStorage.setItem(storageKey, token); - setBearerToken(token); - - // Check if there's a saved redirect path from before the auth flow - const savedRedirectPath = getAndClearRedirectPath(); - - // Use the saved path if available, otherwise use the default redirectPath - const finalRedirectPath = savedRedirectPath || redirectPath; - - // Redirect to the appropriate path - router.push(finalRedirectPath); - } catch (error) { - console.error("Error storing token in localStorage:", error); - // Even if there's an error, try to redirect to the default path - router.push(redirectPath); - } + handleAuth(); } }, [searchParams, tokenParamName, storageKey, redirectPath, router]); From 5151ba381e25395b3259ab126b7cf88a1114b77c Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 7 Jan 2026 18:13:31 +0200 Subject: [PATCH 04/28] use hard navigation on login and logout to clear state --- surfsense_web/components/TokenHandler.tsx | 68 ++++++----------------- surfsense_web/components/UserDropdown.tsx | 4 +- 2 files changed, 19 insertions(+), 53 deletions(-) diff --git a/surfsense_web/components/TokenHandler.tsx b/surfsense_web/components/TokenHandler.tsx index 6117f6ede..a190fe73f 100644 --- a/surfsense_web/components/TokenHandler.tsx +++ b/surfsense_web/components/TokenHandler.tsx @@ -1,11 +1,8 @@ "use client"; -import { useRouter, useSearchParams } from "next/navigation"; +import { useSearchParams } from "next/navigation"; import { useEffect } from "react"; -import { membersApiService } from "@/lib/apis/members-api.service"; import { getAndClearRedirectPath, setBearerToken } from "@/lib/auth-utils"; -import { trackLoginSuccess } from "@/lib/posthog/events"; -import { queryClient } from "@/lib/query-client/client"; interface TokenHandlerProps { redirectPath?: string; // Default path to redirect after storing token 
(if no saved path) @@ -27,7 +24,6 @@ const TokenHandler = ({ tokenParamName = "token", storageKey = "surfsense_bearer_token", }: TokenHandlerProps) => { - const router = useRouter(); const searchParams = useSearchParams(); useEffect(() => { @@ -38,56 +34,26 @@ const TokenHandler = ({ const token = searchParams.get(tokenParamName); if (token) { - const handleAuth = async () => { - try { - // Track login success for OAuth flows (e.g., Google) - // Local login already tracks success before redirecting here - const alreadyTracked = sessionStorage.getItem("login_success_tracked"); - if (!alreadyTracked) { - // This is an OAuth flow (Google login) - track success - trackLoginSuccess("google"); - } - // Clear the flag for future logins - sessionStorage.removeItem("login_success_tracked"); + try { + // Store token in localStorage using both methods for compatibility + localStorage.setItem(storageKey, token); + setBearerToken(token); - // Store token in localStorage using both methods for compatibility - localStorage.setItem(storageKey, token); - setBearerToken(token); + // Check if there's a saved redirect path from before the auth flow + const savedRedirectPath = getAndClearRedirectPath(); - // Clear any cached data from previous sessions - queryClient.clear(); + // Use the saved path if available, otherwise use the default redirectPath + const finalRedirectPath = savedRedirectPath || redirectPath; - // Check if there's a saved redirect path from before the auth flow - const savedRedirectPath = getAndClearRedirectPath(); - - // Check if saved path contains a search space ID and verify access - const searchSpaceMatch = savedRedirectPath?.match(/^\/dashboard\/(\d+)/); - if (searchSpaceMatch && savedRedirectPath) { - const searchSpaceId = Number(searchSpaceMatch[1]); - try { - await membersApiService.getMyAccess({ search_space_id: searchSpaceId }); - router.push(savedRedirectPath); - return; - } catch { - // User doesn't have access, fall through to default - } - } - - // 
Use the saved path if available, otherwise use the default redirectPath - const finalRedirectPath = savedRedirectPath || redirectPath; - - // Redirect to the appropriate path - router.push(finalRedirectPath); - } catch (error) { - console.error("Error storing token in localStorage:", error); - // Even if there's an error, try to redirect to the default path - router.push(redirectPath); - } - }; - - handleAuth(); + // Use hard navigation to clear all React/jotai state from previous session + window.location.href = finalRedirectPath; + } catch (error) { + console.error("Error storing token in localStorage:", error); + // Even if there's an error, try to redirect to the default path + window.location.href = redirectPath; + } } - }, [searchParams, tokenParamName, storageKey, redirectPath, router]); + }, [searchParams, tokenParamName, storageKey, redirectPath]); return (
diff --git a/surfsense_web/components/UserDropdown.tsx b/surfsense_web/components/UserDropdown.tsx index 966193c7f..a7f9c89ac 100644 --- a/surfsense_web/components/UserDropdown.tsx +++ b/surfsense_web/components/UserDropdown.tsx @@ -34,14 +34,14 @@ export function UserDropdown({ if (typeof window !== "undefined") { localStorage.removeItem("surfsense_bearer_token"); - router.push("/"); + window.location.href = "/"; } } catch (error) { console.error("Error during logout:", error); // Optionally, provide user feedback if (typeof window !== "undefined") { alert("Logout failed. Please try again."); - router.push("/"); + window.location.href = "/"; } } }; From edc5f379d38413fb0903958cd5353d57f5ebd7ec Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 7 Jan 2026 18:33:32 +0200 Subject: [PATCH 05/28] auto-redirect only for single search space users --- surfsense_web/app/dashboard/page.tsx | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/surfsense_web/app/dashboard/page.tsx b/surfsense_web/app/dashboard/page.tsx index d343a28e3..22a2307f9 100644 --- a/surfsense_web/app/dashboard/page.tsx +++ b/surfsense_web/app/dashboard/page.tsx @@ -174,18 +174,22 @@ const DashboardPage = () => { const { data: user, isPending: isLoadingUser, error: userError } = useAtomValue(currentUserAtom); - // Auto-redirect to chat or auto-create search space + // Auto-redirect to chat for users with exactly 1 search space, or auto-create if none useEffect(() => { const handleAutoRedirect = async () => { // Don't run if still loading or already attempted if (loading || hasAttemptedAutoCreate.current) return; - // If user has search spaces, redirect to the first one's chat - if (searchSpaces.length > 0) { + // If user has exactly 1 search space, redirect to its chat + if (searchSpaces.length === 1) { router.replace(`/dashboard/${searchSpaces[0].id}/new-chat`); return; } + if (searchSpaces.length > 1) { + return; + } + // If no search spaces exist (edge case for users who 
registered before this feature), // auto-create one and redirect hasAttemptedAutoCreate.current = true; @@ -215,8 +219,8 @@ const DashboardPage = () => { avatar: "/icon-128.png", // Default avatar }; - // Show loading while loading, auto-redirecting, or auto-creating - if (loading || isAutoCreating || (searchSpaces.length > 0 && !error)) return ; + // Show loading while loading, auto-redirecting (single search space), or auto-creating + if (loading || isAutoCreating || (searchSpaces.length === 1 && !error)) return ; if (error) return ; const handleDeleteSearchSpace = async (id: number) => { From 1d3fd8d47c3e7212e83740cc86c33751e7dafb5b Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 7 Jan 2026 18:34:51 +0200 Subject: [PATCH 06/28] remove redundant comments --- surfsense_web/app/dashboard/page.tsx | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/surfsense_web/app/dashboard/page.tsx b/surfsense_web/app/dashboard/page.tsx index 22a2307f9..fbf567cba 100644 --- a/surfsense_web/app/dashboard/page.tsx +++ b/surfsense_web/app/dashboard/page.tsx @@ -177,10 +177,10 @@ const DashboardPage = () => { // Auto-redirect to chat for users with exactly 1 search space, or auto-create if none useEffect(() => { const handleAutoRedirect = async () => { - // Don't run if still loading or already attempted + if (loading || hasAttemptedAutoCreate.current) return; - // If user has exactly 1 search space, redirect to its chat + if (searchSpaces.length === 1) { router.replace(`/dashboard/${searchSpaces[0].id}/new-chat`); return; @@ -190,8 +190,7 @@ const DashboardPage = () => { return; } - // If no search spaces exist (edge case for users who registered before this feature), - // auto-create one and redirect + hasAttemptedAutoCreate.current = true; setIsAutoCreating(true); From a919f8d9ee5c1d6f7205be9661ea1fb756179ee3 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Thu, 8 Jan 2026 19:10:40 +0200 Subject: [PATCH 07/28] feat: add new layout system (Slack/ClickUp inspired) 
--- .../components/layout/hooks/index.ts | 1 + .../layout/hooks/useSidebarState.ts | 61 +++ surfsense_web/components/layout/index.ts | 30 ++ .../layout/providers/LayoutDataProvider.tsx | 486 ++++++++++++++++++ .../components/layout/providers/index.ts | 1 + .../components/layout/types/layout.types.ts | 139 +++++ .../components/layout/ui/header/Header.tsx | 49 ++ .../components/layout/ui/header/index.ts | 1 + .../layout/ui/icon-rail/IconRail.tsx | 60 +++ .../layout/ui/icon-rail/NavIcon.tsx | 34 ++ .../layout/ui/icon-rail/WorkspaceAvatar.tsx | 72 +++ .../components/layout/ui/icon-rail/index.ts | 3 + surfsense_web/components/layout/ui/index.ts | 16 + .../layout/ui/shell/LayoutShell.tsx | 203 ++++++++ .../components/layout/ui/shell/index.ts | 1 + .../layout/ui/sidebar/AllChatsSidebar.tsx | 443 ++++++++++++++++ .../layout/ui/sidebar/AllNotesSidebar.tsx | 407 +++++++++++++++ .../layout/ui/sidebar/ChatListItem.tsx | 65 +++ .../layout/ui/sidebar/MobileSidebar.tsx | 154 ++++++ .../layout/ui/sidebar/NavSection.tsx | 73 +++ .../layout/ui/sidebar/NoteListItem.tsx | 76 +++ .../layout/ui/sidebar/PageUsageDisplay.tsx | 34 ++ .../components/layout/ui/sidebar/Sidebar.tsx | 294 +++++++++++ .../ui/sidebar/SidebarCollapseButton.tsx | 31 ++ .../layout/ui/sidebar/SidebarHeader.tsx | 69 +++ .../layout/ui/sidebar/SidebarSection.tsx | 56 ++ .../layout/ui/sidebar/SidebarUserProfile.tsx | 188 +++++++ .../components/layout/ui/sidebar/index.ts | 12 + 28 files changed, 3059 insertions(+) create mode 100644 surfsense_web/components/layout/hooks/index.ts create mode 100644 surfsense_web/components/layout/hooks/useSidebarState.ts create mode 100644 surfsense_web/components/layout/index.ts create mode 100644 surfsense_web/components/layout/providers/LayoutDataProvider.tsx create mode 100644 surfsense_web/components/layout/providers/index.ts create mode 100644 surfsense_web/components/layout/types/layout.types.ts create mode 100644 surfsense_web/components/layout/ui/header/Header.tsx create mode 
100644 surfsense_web/components/layout/ui/header/index.ts create mode 100644 surfsense_web/components/layout/ui/icon-rail/IconRail.tsx create mode 100644 surfsense_web/components/layout/ui/icon-rail/NavIcon.tsx create mode 100644 surfsense_web/components/layout/ui/icon-rail/WorkspaceAvatar.tsx create mode 100644 surfsense_web/components/layout/ui/icon-rail/index.ts create mode 100644 surfsense_web/components/layout/ui/index.ts create mode 100644 surfsense_web/components/layout/ui/shell/LayoutShell.tsx create mode 100644 surfsense_web/components/layout/ui/shell/index.ts create mode 100644 surfsense_web/components/layout/ui/sidebar/AllChatsSidebar.tsx create mode 100644 surfsense_web/components/layout/ui/sidebar/AllNotesSidebar.tsx create mode 100644 surfsense_web/components/layout/ui/sidebar/ChatListItem.tsx create mode 100644 surfsense_web/components/layout/ui/sidebar/MobileSidebar.tsx create mode 100644 surfsense_web/components/layout/ui/sidebar/NavSection.tsx create mode 100644 surfsense_web/components/layout/ui/sidebar/NoteListItem.tsx create mode 100644 surfsense_web/components/layout/ui/sidebar/PageUsageDisplay.tsx create mode 100644 surfsense_web/components/layout/ui/sidebar/Sidebar.tsx create mode 100644 surfsense_web/components/layout/ui/sidebar/SidebarCollapseButton.tsx create mode 100644 surfsense_web/components/layout/ui/sidebar/SidebarHeader.tsx create mode 100644 surfsense_web/components/layout/ui/sidebar/SidebarSection.tsx create mode 100644 surfsense_web/components/layout/ui/sidebar/SidebarUserProfile.tsx create mode 100644 surfsense_web/components/layout/ui/sidebar/index.ts diff --git a/surfsense_web/components/layout/hooks/index.ts b/surfsense_web/components/layout/hooks/index.ts new file mode 100644 index 000000000..51cf8f7a0 --- /dev/null +++ b/surfsense_web/components/layout/hooks/index.ts @@ -0,0 +1 @@ +export { useSidebarState } from "./useSidebarState"; diff --git a/surfsense_web/components/layout/hooks/useSidebarState.ts 
b/surfsense_web/components/layout/hooks/useSidebarState.ts new file mode 100644 index 000000000..9caa0b451 --- /dev/null +++ b/surfsense_web/components/layout/hooks/useSidebarState.ts @@ -0,0 +1,61 @@ +"use client"; + +import { useCallback, useEffect, useState } from "react"; + +const SIDEBAR_COOKIE_NAME = "sidebar_collapsed"; +const SIDEBAR_COOKIE_MAX_AGE = 60 * 60 * 24 * 365; // 1 year + +interface UseSidebarStateReturn { + isCollapsed: boolean; + setIsCollapsed: (collapsed: boolean) => void; + toggleCollapsed: () => void; +} + +export function useSidebarState(defaultCollapsed = false): UseSidebarStateReturn { + const [isCollapsed, setIsCollapsedState] = useState(defaultCollapsed); + + // Initialize from cookie on mount + useEffect(() => { + try { + const match = document.cookie.match(/(?:^|; )sidebar_collapsed=([^;]+)/); + if (match) { + setIsCollapsedState(match[1] === "true"); + } + } catch { + // Ignore cookie read errors + } + }, []); + + // Persist to cookie when state changes + const setIsCollapsed = useCallback((collapsed: boolean) => { + setIsCollapsedState(collapsed); + try { + document.cookie = `${SIDEBAR_COOKIE_NAME}=${collapsed}; path=/; max-age=${SIDEBAR_COOKIE_MAX_AGE}`; + } catch { + // Ignore cookie write errors + } + }, []); + + const toggleCollapsed = useCallback(() => { + setIsCollapsed(!isCollapsed); + }, [isCollapsed, setIsCollapsed]); + + // Keyboard shortcut: Cmd/Ctrl + B + useEffect(() => { + const handleKeyDown = (event: KeyboardEvent) => { + if (event.key === "b" && (event.metaKey || event.ctrlKey)) { + event.preventDefault(); + toggleCollapsed(); + } + }; + + window.addEventListener("keydown", handleKeyDown); + return () => window.removeEventListener("keydown", handleKeyDown); + }, [toggleCollapsed]); + + return { + isCollapsed, + setIsCollapsed, + toggleCollapsed, + }; +} diff --git a/surfsense_web/components/layout/index.ts b/surfsense_web/components/layout/index.ts new file mode 100644 index 000000000..745075b6f --- /dev/null +++ 
b/surfsense_web/components/layout/index.ts @@ -0,0 +1,30 @@ +export { useSidebarState } from "./hooks"; +export { LayoutDataProvider } from "./providers"; +export type { + ChatItem, + IconRailProps, + NavItem, + NoteItem, + PageUsage, + SidebarSectionProps, + User, + Workspace, +} from "./types/layout.types"; +export { + ChatListItem, + Header, + IconRail, + LayoutShell, + MobileSidebar, + MobileSidebarTrigger, + NavIcon, + NavSection, + NoteListItem, + PageUsageDisplay, + Sidebar, + SidebarCollapseButton, + SidebarHeader, + SidebarSection, + SidebarUserProfile, + WorkspaceAvatar, +} from "./ui"; diff --git a/surfsense_web/components/layout/providers/LayoutDataProvider.tsx b/surfsense_web/components/layout/providers/LayoutDataProvider.tsx new file mode 100644 index 000000000..ea750a365 --- /dev/null +++ b/surfsense_web/components/layout/providers/LayoutDataProvider.tsx @@ -0,0 +1,486 @@ +"use client"; + +import { useQuery, useQueryClient } from "@tanstack/react-query"; +import { useAtomValue, useSetAtom } from "jotai"; +import { Logs, SquareLibrary, Trash2 } from "lucide-react"; +import { useParams, usePathname, useRouter } from "next/navigation"; +import { useTranslations } from "next-intl"; +import { useTheme } from "next-themes"; +import { useCallback, useMemo, useState } from "react"; +import { hasUnsavedEditorChangesAtom, pendingEditorNavigationAtom } from "@/atoms/editor/ui.atoms"; +import { searchSpacesAtom } from "@/atoms/search-spaces/search-space-query.atoms"; +import { currentUserAtom } from "@/atoms/user/user-query.atoms"; +import { Button } from "@/components/ui/button"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { useLogsSummary } from "@/hooks/use-logs"; +import { notesApiService } from "@/lib/apis/notes-api.service"; +import { searchSpacesApiService } from "@/lib/apis/search-spaces-api.service"; +import { deleteThread, fetchThreads } from 
"@/lib/chat/thread-persistence"; +import { resetUser, trackLogout } from "@/lib/posthog/events"; +import { cacheKeys } from "@/lib/query-client/cache-keys"; +import type { ChatItem, NavItem, NoteItem, Workspace } from "../types/layout.types"; +import { LayoutShell } from "../ui/shell"; +import { AllChatsSidebar } from "../ui/sidebar/AllChatsSidebar"; +import { AllNotesSidebar } from "../ui/sidebar/AllNotesSidebar"; + +interface LayoutDataProviderProps { + searchSpaceId: string; + children: React.ReactNode; + breadcrumb?: React.ReactNode; + languageSwitcher?: React.ReactNode; +} + +export function LayoutDataProvider({ + searchSpaceId, + children, + breadcrumb, + languageSwitcher, +}: LayoutDataProviderProps) { + const t = useTranslations("dashboard"); + const tCommon = useTranslations("common"); + const router = useRouter(); + const params = useParams(); + const pathname = usePathname(); + const queryClient = useQueryClient(); + const { theme, setTheme } = useTheme(); + + // Atoms + const { data: user } = useAtomValue(currentUserAtom); + const { data: searchSpacesData } = useAtomValue(searchSpacesAtom); + const hasUnsavedEditorChanges = useAtomValue(hasUnsavedEditorChangesAtom); + const setPendingNavigation = useSetAtom(pendingEditorNavigationAtom); + + // Current IDs from URL + const currentChatId = params?.chat_id + ? Number(Array.isArray(params.chat_id) ? params.chat_id[0] : params.chat_id) + : null; + const currentNoteId = params?.note_id + ? Number(Array.isArray(params.note_id) ? 
params.note_id[0] : params.note_id) + : null; + + // Fetch current search space + const { data: searchSpace } = useQuery({ + queryKey: cacheKeys.searchSpaces.detail(searchSpaceId), + queryFn: () => searchSpacesApiService.getSearchSpace({ id: Number(searchSpaceId) }), + enabled: !!searchSpaceId, + }); + + // Fetch threads + const { data: threadsData, refetch: refetchThreads } = useQuery({ + queryKey: ["threads", searchSpaceId, { limit: 4 }], + queryFn: () => fetchThreads(Number(searchSpaceId), 4), + enabled: !!searchSpaceId, + }); + + // Fetch notes + const { data: notesData, refetch: refetchNotes } = useQuery({ + queryKey: ["notes", searchSpaceId], + queryFn: () => + notesApiService.getNotes({ + search_space_id: Number(searchSpaceId), + page_size: 4, + }), + enabled: !!searchSpaceId, + }); + + // Poll for active reindexing tasks to show inline loading indicators + const { summary } = useLogsSummary(searchSpaceId ? Number(searchSpaceId) : 0, 24, { + enablePolling: true, + refetchInterval: 5000, + }); + + // Create a Set of document IDs that are currently being reindexed + const reindexingDocumentIds = useMemo(() => { + if (!summary?.active_tasks) return new Set(); + return new Set( + summary.active_tasks + .filter((task) => task.document_id != null) + .map((task) => task.document_id as number) + ); + }, [summary?.active_tasks]); + + // All chats/notes sidebars state + const [isAllChatsSidebarOpen, setIsAllChatsSidebarOpen] = useState(false); + const [isAllNotesSidebarOpen, setIsAllNotesSidebarOpen] = useState(false); + + // Delete dialogs state + const [showDeleteChatDialog, setShowDeleteChatDialog] = useState(false); + const [chatToDelete, setChatToDelete] = useState<{ id: number; name: string } | null>(null); + const [isDeletingChat, setIsDeletingChat] = useState(false); + + const [showDeleteNoteDialog, setShowDeleteNoteDialog] = useState(false); + const [noteToDelete, setNoteToDelete] = useState<{ + id: number; + name: string; + search_space_id: number; + } | 
null>(null); + const [isDeletingNote, setIsDeletingNote] = useState(false); + + // Transform workspaces (API returns array directly, not { items: [...] }) + const workspaces: Workspace[] = useMemo(() => { + if (!searchSpacesData || !Array.isArray(searchSpacesData)) return []; + return searchSpacesData.map((space) => ({ + id: space.id, + name: space.name, + description: space.description, + isOwner: space.is_owner, + memberCount: space.member_count || 0, + })); + }, [searchSpacesData]); + + // Use searchSpace query result for current workspace (more reliable than finding in list) + const activeWorkspace: Workspace | null = searchSpace + ? { + id: searchSpace.id, + name: searchSpace.name, + description: searchSpace.description, + isOwner: searchSpace.is_owner, + memberCount: searchSpace.member_count || 0, + } + : null; + + // Transform chats + const chats: ChatItem[] = useMemo(() => { + if (!threadsData?.threads) return []; + return threadsData.threads.map((thread) => ({ + id: thread.id, + name: thread.title || `Chat ${thread.id}`, + url: `/dashboard/${searchSpaceId}/new-chat/${thread.id}`, + })); + }, [threadsData, searchSpaceId]); + + // Transform notes + const notes: NoteItem[] = useMemo(() => { + if (!notesData?.items) return []; + const sortedNotes = [...notesData.items].sort((a, b) => { + const dateA = a.updated_at + ? new Date(a.updated_at).getTime() + : new Date(a.created_at).getTime(); + const dateB = b.updated_at + ? 
new Date(b.updated_at).getTime() + : new Date(b.created_at).getTime(); + return dateB - dateA; + }); + return sortedNotes.slice(0, 4).map((note) => ({ + id: note.id, + name: note.title, + url: `/dashboard/${note.search_space_id}/editor/${note.id}`, + isReindexing: reindexingDocumentIds.has(note.id), + })); + }, [notesData, reindexingDocumentIds]); + + // Navigation items + const navItems: NavItem[] = useMemo( + () => [ + { + title: "Documents", + url: `/dashboard/${searchSpaceId}/documents`, + icon: SquareLibrary, + isActive: pathname?.includes("/documents"), + }, + { + title: "Logs", + url: `/dashboard/${searchSpaceId}/logs`, + icon: Logs, + isActive: pathname?.includes("/logs"), + }, + ], + [searchSpaceId, pathname] + ); + + // Handlers + const handleWorkspaceSelect = useCallback( + (id: number) => { + router.push(`/dashboard/${id}/new-chat`); + }, + [router] + ); + + const handleAddWorkspace = useCallback(() => { + router.push("/dashboard/searchspaces"); + }, [router]); + + const handleSeeAllWorkspaces = useCallback(() => { + router.push("/dashboard"); + }, [router]); + + const handleNavItemClick = useCallback( + (item: NavItem) => { + router.push(item.url); + }, + [router] + ); + + const handleNewChat = useCallback(() => { + router.push(`/dashboard/${searchSpaceId}/new-chat`); + }, [router, searchSpaceId]); + + const handleChatSelect = useCallback( + (chat: ChatItem) => { + router.push(chat.url); + }, + [router] + ); + + const handleChatDelete = useCallback((chat: ChatItem) => { + setChatToDelete({ id: chat.id, name: chat.name }); + setShowDeleteChatDialog(true); + }, []); + + const handleNoteSelect = useCallback( + (note: NoteItem) => { + if (hasUnsavedEditorChanges) { + setPendingNavigation(note.url); + } else { + router.push(note.url); + } + }, + [router, hasUnsavedEditorChanges, setPendingNavigation] + ); + + const handleNoteDelete = useCallback( + (note: NoteItem) => { + setNoteToDelete({ id: note.id, name: note.name, search_space_id: Number(searchSpaceId) 
}); + setShowDeleteNoteDialog(true); + }, + [searchSpaceId] + ); + + const handleAddNote = useCallback(() => { + const newNoteUrl = `/dashboard/${searchSpaceId}/editor/new`; + if (hasUnsavedEditorChanges) { + setPendingNavigation(newNoteUrl); + } else { + router.push(newNoteUrl); + } + }, [router, searchSpaceId, hasUnsavedEditorChanges, setPendingNavigation]); + + const handleSettings = useCallback(() => { + router.push(`/dashboard/${searchSpaceId}/settings`); + }, [router, searchSpaceId]); + + const handleInviteMembers = useCallback(() => { + router.push(`/dashboard/${searchSpaceId}/team`); + }, [router, searchSpaceId]); + + const handleLogout = useCallback(() => { + try { + trackLogout(); + resetUser(); + if (typeof window !== "undefined") { + localStorage.removeItem("surfsense_bearer_token"); + router.push("/"); + } + } catch (error) { + console.error("Error during logout:", error); + router.push("/"); + } + }, [router]); + + const handleToggleTheme = useCallback(() => { + setTheme(theme === "dark" ? 
"light" : "dark"); + }, [theme, setTheme]); + + const handleViewAllChats = useCallback(() => { + setIsAllChatsSidebarOpen(true); + }, []); + + const handleViewAllNotes = useCallback(() => { + setIsAllNotesSidebarOpen(true); + }, []); + + // Delete handlers + const confirmDeleteChat = useCallback(async () => { + if (!chatToDelete) return; + setIsDeletingChat(true); + try { + await deleteThread(chatToDelete.id); + queryClient.invalidateQueries({ queryKey: ["threads", searchSpaceId] }); + if (currentChatId === chatToDelete.id) { + router.push(`/dashboard/${searchSpaceId}/new-chat`); + } + } catch (error) { + console.error("Error deleting thread:", error); + } finally { + setIsDeletingChat(false); + setShowDeleteChatDialog(false); + setChatToDelete(null); + } + }, [chatToDelete, queryClient, searchSpaceId, router, currentChatId]); + + const confirmDeleteNote = useCallback(async () => { + if (!noteToDelete) return; + setIsDeletingNote(true); + try { + await notesApiService.deleteNote({ + search_space_id: noteToDelete.search_space_id, + note_id: noteToDelete.id, + }); + refetchNotes(); + } catch (error) { + console.error("Error deleting note:", error); + } finally { + setIsDeletingNote(false); + setShowDeleteNoteDialog(false); + setNoteToDelete(null); + } + }, [noteToDelete, refetchNotes]); + + // Page usage + const pageUsage = user + ? { + pagesUsed: user.pages_used, + pagesLimit: user.pages_limit, + } + : undefined; + + // Detect if we're on the chat page (needs overflow-hidden for chat's own scroll) + const isChatPage = pathname?.includes("/new-chat") ?? 
false; + + return ( + <> + + {children} + + + {/* Delete Chat Dialog */} + + + + + + {t("delete_chat")} + + + {t("delete_chat_confirm")} {chatToDelete?.name}?{" "} + {t("action_cannot_undone")} + + + + + + + + + + {/* All Chats Sidebar */} + + + {/* All Notes Sidebar */} + + + {/* Delete Note Dialog */} + + + + + + {t("delete_note")} + + + {t("delete_note_confirm")} {noteToDelete?.name}?{" "} + {t("action_cannot_undone")} + + + + + + + + + + ); +} diff --git a/surfsense_web/components/layout/providers/index.ts b/surfsense_web/components/layout/providers/index.ts new file mode 100644 index 000000000..61ea094de --- /dev/null +++ b/surfsense_web/components/layout/providers/index.ts @@ -0,0 +1 @@ +export { LayoutDataProvider } from "./LayoutDataProvider"; diff --git a/surfsense_web/components/layout/types/layout.types.ts b/surfsense_web/components/layout/types/layout.types.ts new file mode 100644 index 000000000..b11619c60 --- /dev/null +++ b/surfsense_web/components/layout/types/layout.types.ts @@ -0,0 +1,139 @@ +import type { LucideIcon } from "lucide-react"; + +export interface Workspace { + id: number; + name: string; + description?: string | null; + isOwner: boolean; + memberCount: number; +} + +export interface User { + email: string; + name?: string; +} + +export interface NavItem { + title: string; + url: string; + icon: LucideIcon; + isActive?: boolean; + badge?: string | number; +} + +export interface ChatItem { + id: number; + name: string; + url: string; + isActive?: boolean; +} + +export interface NoteItem { + id: number; + name: string; + url: string; + isActive?: boolean; + isReindexing?: boolean; +} + +export interface PageUsage { + pagesUsed: number; + pagesLimit: number; +} + +export interface IconRailProps { + workspaces: Workspace[]; + activeWorkspaceId: number | null; + onWorkspaceSelect: (id: number) => void; + onAddWorkspace: () => void; + className?: string; +} + +export interface SidebarHeaderProps { + workspace: Workspace | null; + 
onSettings?: () => void; +} + +export interface SidebarSectionProps { + title: string; + defaultOpen?: boolean; + children: React.ReactNode; + action?: React.ReactNode; +} + +export interface NavSectionProps { + items: NavItem[]; + onItemClick?: (item: NavItem) => void; +} + +export interface ChatsSectionProps { + chats: ChatItem[]; + activeChatId?: number | null; + onChatSelect: (chat: ChatItem) => void; + onChatDelete?: (chat: ChatItem) => void; + onViewAllChats?: () => void; + searchSpaceId?: string; +} + +export interface NotesSectionProps { + notes: NoteItem[]; + activeNoteId?: number | null; + onNoteSelect: (note: NoteItem) => void; + onNoteDelete?: (note: NoteItem) => void; + onAddNote?: () => void; + onViewAllNotes?: () => void; + searchSpaceId?: string; +} + +export interface PageUsageDisplayProps { + pagesUsed: number; + pagesLimit: number; +} + +export interface SidebarUserProfileProps { + user: User; + searchSpaceId?: string; + onSettings?: () => void; + onInviteMembers?: () => void; + onSwitchWorkspace?: () => void; + onToggleTheme?: () => void; + onLogout?: () => void; + theme?: string; +} + +export interface SidebarProps { + workspace: Workspace | null; + searchSpaceId?: string; + navItems: NavItem[]; + chats: ChatItem[]; + activeChatId?: number | null; + onNewChat: () => void; + onChatSelect: (chat: ChatItem) => void; + onChatDelete?: (chat: ChatItem) => void; + onViewAllChats?: () => void; + notes: NoteItem[]; + activeNoteId?: number | null; + onNoteSelect: (note: NoteItem) => void; + onNoteDelete?: (note: NoteItem) => void; + onAddNote?: () => void; + onViewAllNotes?: () => void; + user: User; + theme?: string; + onSettings?: () => void; + onInviteMembers?: () => void; + onSwitchWorkspace?: () => void; + onToggleTheme?: () => void; + onLogout?: () => void; + pageUsage?: PageUsage; + className?: string; +} + +export interface LayoutShellProps { + workspaces: Workspace[]; + activeWorkspaceId: number | null; + onWorkspaceSelect: (id: number) => void; 
+ onAddWorkspace: () => void; + sidebarProps: Omit; + children: React.ReactNode; + className?: string; +} diff --git a/surfsense_web/components/layout/ui/header/Header.tsx b/surfsense_web/components/layout/ui/header/Header.tsx new file mode 100644 index 000000000..a03761ef5 --- /dev/null +++ b/surfsense_web/components/layout/ui/header/Header.tsx @@ -0,0 +1,49 @@ +"use client"; + +import { Moon, Sun } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; + +interface HeaderProps { + breadcrumb?: React.ReactNode; + languageSwitcher?: React.ReactNode; + theme?: string; + onToggleTheme?: () => void; + mobileMenuTrigger?: React.ReactNode; +} + +export function Header({ + breadcrumb, + languageSwitcher, + theme, + onToggleTheme, + mobileMenuTrigger, +}: HeaderProps) { + return ( +
+ {/* Left side - Mobile menu trigger + Breadcrumb */} +
+ {mobileMenuTrigger} + {breadcrumb} +
+ + {/* Right side - Actions */} +
+ {/* Theme toggle */} + {onToggleTheme && ( + + + + + {theme === "dark" ? "Light mode" : "Dark mode"} + + )} + + {languageSwitcher} +
+
+ ); +} diff --git a/surfsense_web/components/layout/ui/header/index.ts b/surfsense_web/components/layout/ui/header/index.ts new file mode 100644 index 000000000..c940126c9 --- /dev/null +++ b/surfsense_web/components/layout/ui/header/index.ts @@ -0,0 +1 @@ +export { Header } from "./Header"; diff --git a/surfsense_web/components/layout/ui/icon-rail/IconRail.tsx b/surfsense_web/components/layout/ui/icon-rail/IconRail.tsx new file mode 100644 index 000000000..0d6b39cdc --- /dev/null +++ b/surfsense_web/components/layout/ui/icon-rail/IconRail.tsx @@ -0,0 +1,60 @@ +"use client"; + +import { Plus } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { ScrollArea } from "@/components/ui/scroll-area"; +import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; +import { cn } from "@/lib/utils"; +import type { Workspace } from "../../types/layout.types"; +import { WorkspaceAvatar } from "./WorkspaceAvatar"; + +interface IconRailProps { + workspaces: Workspace[]; + activeWorkspaceId: number | null; + onWorkspaceSelect: (id: number) => void; + onAddWorkspace: () => void; + className?: string; +} + +export function IconRail({ + workspaces, + activeWorkspaceId, + onWorkspaceSelect, + onAddWorkspace, + className, +}: IconRailProps) { + return ( +
+ +
+ {workspaces.map((workspace) => ( + onWorkspaceSelect(workspace.id)} + size="md" + /> + ))} + + + + + + + Add workspace + + +
+
+
+ ); +} diff --git a/surfsense_web/components/layout/ui/icon-rail/NavIcon.tsx b/surfsense_web/components/layout/ui/icon-rail/NavIcon.tsx new file mode 100644 index 000000000..3efb48748 --- /dev/null +++ b/surfsense_web/components/layout/ui/icon-rail/NavIcon.tsx @@ -0,0 +1,34 @@ +"use client"; + +import type { LucideIcon } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; +import { cn } from "@/lib/utils"; + +interface NavIconProps { + icon: LucideIcon; + label: string; + isActive?: boolean; + onClick?: () => void; +} + +export function NavIcon({ icon: Icon, label, isActive, onClick }: NavIconProps) { + return ( + + + + + + {label} + + + ); +} diff --git a/surfsense_web/components/layout/ui/icon-rail/WorkspaceAvatar.tsx b/surfsense_web/components/layout/ui/icon-rail/WorkspaceAvatar.tsx new file mode 100644 index 000000000..1c4798d2a --- /dev/null +++ b/surfsense_web/components/layout/ui/icon-rail/WorkspaceAvatar.tsx @@ -0,0 +1,72 @@ +"use client"; + +import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; +import { cn } from "@/lib/utils"; + +interface WorkspaceAvatarProps { + name: string; + isActive?: boolean; + onClick?: () => void; + size?: "sm" | "md"; +} + +/** + * Generates a consistent color based on workspace name + */ +function stringToColor(str: string): string { + let hash = 0; + for (let i = 0; i < str.length; i++) { + hash = str.charCodeAt(i) + ((hash << 5) - hash); + } + const colors = [ + "#6366f1", // indigo + "#22c55e", // green + "#f59e0b", // amber + "#ef4444", // red + "#8b5cf6", // violet + "#06b6d4", // cyan + "#ec4899", // pink + "#14b8a6", // teal + ]; + return colors[Math.abs(hash) % colors.length]; +} + +/** + * Gets initials from workspace name (max 2 chars) + */ +function getInitials(name: string): string { + const words = name.trim().split(/\s+/); + if (words.length >= 2) { + return (words[0][0] + 
words[1][0]).toUpperCase(); + } + return name.slice(0, 2).toUpperCase(); +} + +export function WorkspaceAvatar({ name, isActive, onClick, size = "md" }: WorkspaceAvatarProps) { + const bgColor = stringToColor(name); + const initials = getInitials(name); + const sizeClasses = size === "sm" ? "h-8 w-8 text-xs" : "h-10 w-10 text-sm"; + + return ( + + + + + + {name} + + + ); +} diff --git a/surfsense_web/components/layout/ui/icon-rail/index.ts b/surfsense_web/components/layout/ui/icon-rail/index.ts new file mode 100644 index 000000000..0e7e8cd29 --- /dev/null +++ b/surfsense_web/components/layout/ui/icon-rail/index.ts @@ -0,0 +1,3 @@ +export { IconRail } from "./IconRail"; +export { NavIcon } from "./NavIcon"; +export { WorkspaceAvatar } from "./WorkspaceAvatar"; diff --git a/surfsense_web/components/layout/ui/index.ts b/surfsense_web/components/layout/ui/index.ts new file mode 100644 index 000000000..74b1e9240 --- /dev/null +++ b/surfsense_web/components/layout/ui/index.ts @@ -0,0 +1,16 @@ +export { Header } from "./header"; +export { IconRail, NavIcon, WorkspaceAvatar } from "./icon-rail"; +export { LayoutShell } from "./shell"; +export { + ChatListItem, + MobileSidebar, + MobileSidebarTrigger, + NavSection, + NoteListItem, + PageUsageDisplay, + Sidebar, + SidebarCollapseButton, + SidebarHeader, + SidebarSection, + SidebarUserProfile, +} from "./sidebar"; diff --git a/surfsense_web/components/layout/ui/shell/LayoutShell.tsx b/surfsense_web/components/layout/ui/shell/LayoutShell.tsx new file mode 100644 index 000000000..0d7b24113 --- /dev/null +++ b/surfsense_web/components/layout/ui/shell/LayoutShell.tsx @@ -0,0 +1,203 @@ +"use client"; + +import { useState } from "react"; +import { TooltipProvider } from "@/components/ui/tooltip"; +import { useIsMobile } from "@/hooks/use-mobile"; +import { cn } from "@/lib/utils"; +import { useSidebarState } from "../../hooks"; +import type { + ChatItem, + NavItem, + NoteItem, + PageUsage, + User, + Workspace, +} from 
"../../types/layout.types"; +import { Header } from "../header"; +import { IconRail } from "../icon-rail"; +import { MobileSidebar, MobileSidebarTrigger, Sidebar } from "../sidebar"; + +interface LayoutShellProps { + workspaces: Workspace[]; + activeWorkspaceId: number | null; + onWorkspaceSelect: (id: number) => void; + onAddWorkspace: () => void; + workspace: Workspace | null; + navItems: NavItem[]; + onNavItemClick?: (item: NavItem) => void; + chats: ChatItem[]; + activeChatId?: number | null; + onNewChat: () => void; + onChatSelect: (chat: ChatItem) => void; + onChatDelete?: (chat: ChatItem) => void; + onViewAllChats?: () => void; + notes: NoteItem[]; + activeNoteId?: number | null; + onNoteSelect: (note: NoteItem) => void; + onNoteDelete?: (note: NoteItem) => void; + onAddNote?: () => void; + onViewAllNotes?: () => void; + user: User; + onSettings?: () => void; + onInviteMembers?: () => void; + onSeeAllWorkspaces?: () => void; + onLogout?: () => void; + pageUsage?: PageUsage; + breadcrumb?: React.ReactNode; + languageSwitcher?: React.ReactNode; + theme?: string; + onToggleTheme?: () => void; + defaultCollapsed?: boolean; + isChatPage?: boolean; + children: React.ReactNode; + className?: string; +} + +export function LayoutShell({ + workspaces, + activeWorkspaceId, + onWorkspaceSelect, + onAddWorkspace, + workspace, + navItems, + onNavItemClick, + chats, + activeChatId, + onNewChat, + onChatSelect, + onChatDelete, + onViewAllChats, + notes, + activeNoteId, + onNoteSelect, + onNoteDelete, + onAddNote, + onViewAllNotes, + user, + onSettings, + onInviteMembers, + onSeeAllWorkspaces, + onLogout, + pageUsage, + breadcrumb, + languageSwitcher, + theme, + onToggleTheme, + defaultCollapsed = false, + isChatPage = false, + children, + className, +}: LayoutShellProps) { + const isMobile = useIsMobile(); + const [mobileMenuOpen, setMobileMenuOpen] = useState(false); + const { isCollapsed, toggleCollapsed } = useSidebarState(defaultCollapsed); + + // Mobile layout + if 
(isMobile) { + return ( + +
+
setMobileMenuOpen(true)} />} + /> + + + +
+ {children} +
+
+
+ ); + } + + // Desktop layout + return ( + +
+
+ +
+ +
+ + +
+
+ +
+ {children} +
+
+
+
+
+ ); +} diff --git a/surfsense_web/components/layout/ui/shell/index.ts b/surfsense_web/components/layout/ui/shell/index.ts new file mode 100644 index 000000000..d7d96a574 --- /dev/null +++ b/surfsense_web/components/layout/ui/shell/index.ts @@ -0,0 +1 @@ +export { LayoutShell } from "./LayoutShell"; diff --git a/surfsense_web/components/layout/ui/sidebar/AllChatsSidebar.tsx b/surfsense_web/components/layout/ui/sidebar/AllChatsSidebar.tsx new file mode 100644 index 000000000..02459f2b9 --- /dev/null +++ b/surfsense_web/components/layout/ui/sidebar/AllChatsSidebar.tsx @@ -0,0 +1,443 @@ +"use client"; + +import { useQuery, useQueryClient } from "@tanstack/react-query"; +import { format } from "date-fns"; +import { + ArchiveIcon, + Loader2, + MessageCircleMore, + MoreHorizontal, + RotateCcwIcon, + Search, + Trash2, + X, +} from "lucide-react"; +import { AnimatePresence, motion } from "motion/react"; +import { useParams, useRouter } from "next/navigation"; +import { useTranslations } from "next-intl"; +import { useCallback, useEffect, useState } from "react"; +import { createPortal } from "react-dom"; +import { toast } from "sonner"; +import { Button } from "@/components/ui/button"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuSeparator, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; +import { Input } from "@/components/ui/input"; +import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; +import { useDebouncedValue } from "@/hooks/use-debounced-value"; +import { + deleteThread, + fetchThreads, + searchThreads, + type ThreadListItem, + updateThread, +} from "@/lib/chat/thread-persistence"; +import { cn } from "@/lib/utils"; + +interface AllChatsSidebarProps { + open: boolean; + onOpenChange: (open: boolean) => void; + searchSpaceId: string; + onCloseMobileSidebar?: () => void; +} + +export function AllChatsSidebar({ + open, + onOpenChange, + searchSpaceId, + onCloseMobileSidebar, +}: 
AllChatsSidebarProps) { + const t = useTranslations("sidebar"); + const router = useRouter(); + const params = useParams(); + const queryClient = useQueryClient(); + + // Get the current chat ID from URL to check if user is deleting the currently open chat + const currentChatId = Array.isArray(params.chat_id) + ? Number(params.chat_id[0]) + : params.chat_id + ? Number(params.chat_id) + : null; + const [deletingThreadId, setDeletingThreadId] = useState(null); + const [archivingThreadId, setArchivingThreadId] = useState(null); + const [searchQuery, setSearchQuery] = useState(""); + const [showArchived, setShowArchived] = useState(false); + const [mounted, setMounted] = useState(false); + const [openDropdownId, setOpenDropdownId] = useState(null); + const debouncedSearchQuery = useDebouncedValue(searchQuery, 300); + + const isSearchMode = !!debouncedSearchQuery.trim(); + + // Handle mounting for portal + useEffect(() => { + setMounted(true); + }, []); + + // Handle escape key + useEffect(() => { + const handleEscape = (e: KeyboardEvent) => { + if (e.key === "Escape" && open) { + onOpenChange(false); + } + }; + document.addEventListener("keydown", handleEscape); + return () => document.removeEventListener("keydown", handleEscape); + }, [open, onOpenChange]); + + // Lock body scroll when open + useEffect(() => { + if (open) { + document.body.style.overflow = "hidden"; + } else { + document.body.style.overflow = ""; + } + return () => { + document.body.style.overflow = ""; + }; + }, [open]); + + // Fetch all threads (when not searching) + const { + data: threadsData, + error: threadsError, + isLoading: isLoadingThreads, + } = useQuery({ + queryKey: ["all-threads", searchSpaceId], + queryFn: () => fetchThreads(Number(searchSpaceId)), + enabled: !!searchSpaceId && open && !isSearchMode, + }); + + // Search threads (when searching) + const { + data: searchData, + error: searchError, + isLoading: isLoadingSearch, + } = useQuery({ + queryKey: ["search-threads", searchSpaceId, 
debouncedSearchQuery], + queryFn: () => searchThreads(Number(searchSpaceId), debouncedSearchQuery.trim()), + enabled: !!searchSpaceId && open && isSearchMode, + }); + + // Handle thread navigation + const handleThreadClick = useCallback( + (threadId: number) => { + router.push(`/dashboard/${searchSpaceId}/new-chat/${threadId}`); + onOpenChange(false); + // Also close the main sidebar on mobile + onCloseMobileSidebar?.(); + }, + [router, onOpenChange, searchSpaceId, onCloseMobileSidebar] + ); + + // Handle thread deletion + const handleDeleteThread = useCallback( + async (threadId: number) => { + setDeletingThreadId(threadId); + try { + await deleteThread(threadId); + toast.success(t("chat_deleted") || "Chat deleted successfully"); + queryClient.invalidateQueries({ queryKey: ["all-threads", searchSpaceId] }); + queryClient.invalidateQueries({ queryKey: ["search-threads", searchSpaceId] }); + queryClient.invalidateQueries({ queryKey: ["threads", searchSpaceId] }); + + // If the deleted chat is currently open, close sidebar first then redirect + if (currentChatId === threadId) { + onOpenChange(false); + // Wait for sidebar close animation to complete before navigating + setTimeout(() => { + router.push(`/dashboard/${searchSpaceId}/new-chat`); + }, 250); + } + } catch (error) { + console.error("Error deleting thread:", error); + toast.error(t("error_deleting_chat") || "Failed to delete chat"); + } finally { + setDeletingThreadId(null); + } + }, + [queryClient, searchSpaceId, t, currentChatId, router, onOpenChange] + ); + + // Handle thread archive/unarchive + const handleToggleArchive = useCallback( + async (threadId: number, currentlyArchived: boolean) => { + setArchivingThreadId(threadId); + try { + await updateThread(threadId, { archived: !currentlyArchived }); + toast.success( + currentlyArchived + ? 
t("chat_unarchived") || "Chat restored" + : t("chat_archived") || "Chat archived" + ); + queryClient.invalidateQueries({ queryKey: ["all-threads", searchSpaceId] }); + queryClient.invalidateQueries({ queryKey: ["search-threads", searchSpaceId] }); + queryClient.invalidateQueries({ queryKey: ["threads", searchSpaceId] }); + } catch (error) { + console.error("Error archiving thread:", error); + toast.error(t("error_archiving_chat") || "Failed to archive chat"); + } finally { + setArchivingThreadId(null); + } + }, + [queryClient, searchSpaceId, t] + ); + + // Clear search + const handleClearSearch = useCallback(() => { + setSearchQuery(""); + }, []); + + // Determine which data source to use + let threads: ThreadListItem[] = []; + if (isSearchMode) { + threads = searchData ?? []; + } else if (threadsData) { + threads = showArchived ? threadsData.archived_threads : threadsData.threads; + } + + const isLoading = isSearchMode ? isLoadingSearch : isLoadingThreads; + const error = isSearchMode ? searchError : threadsError; + + // Get counts for tabs + const activeCount = threadsData?.threads.length ?? 0; + const archivedCount = threadsData?.archived_threads.length ?? 0; + + if (!mounted) return null; + + return createPortal( + + {open && ( + <> + {/* Backdrop */} + onOpenChange(false)} + aria-hidden="true" + /> + + {/* Panel */} + + {/* Header */} +
+
+

{t("all_chats") || "All Chats"}

+ +
+ + {/* Search Input */} +
+ + setSearchQuery(e.target.value)} + className="pl-9 pr-8 h-9" + /> + {searchQuery && ( + + )} +
+
+ + {/* Tab toggle for active/archived (only show when not searching) */} + {!isSearchMode && ( +
+ + +
+ )} + + {/* Scrollable Content */} +
+ {isLoading ? ( +
+ +
+ ) : error ? ( +
+ {t("error_loading_chats") || "Error loading chats"} +
+ ) : threads.length > 0 ? ( +
+ {threads.map((thread) => { + const isDeleting = deletingThreadId === thread.id; + const isArchiving = archivingThreadId === thread.id; + const isBusy = isDeleting || isArchiving; + const isActive = currentChatId === thread.id; + + return ( +
+ {/* Main clickable area for navigation */} + + + + + +

+ {t("updated") || "Updated"}:{" "} + {format(new Date(thread.updatedAt), "MMM d, yyyy 'at' h:mm a")} +

+
+
+ + {/* Actions dropdown */} + setOpenDropdownId(isOpen ? thread.id : null)} + > + + + + + handleToggleArchive(thread.id, thread.archived)} + disabled={isArchiving} + > + {thread.archived ? ( + <> + + {t("unarchive") || "Restore"} + + ) : ( + <> + + {t("archive") || "Archive"} + + )} + + + handleDeleteThread(thread.id)} + className="text-destructive focus:text-destructive" + > + + {t("delete") || "Delete"} + + + +
+ ); + })} +
+ ) : isSearchMode ? ( +
+ +

+ {t("no_chats_found") || "No chats found"} +

+

+ {t("try_different_search") || "Try a different search term"} +

+
+ ) : ( +
+ +

+ {showArchived + ? t("no_archived_chats") || "No archived chats" + : t("no_chats") || "No chats yet"} +

+ {!showArchived && ( +

+ {t("start_new_chat_hint") || "Start a new chat from the chat page"} +

+ )} +
+ )} +
+
+ + )} +
, + document.body + ); +} diff --git a/surfsense_web/components/layout/ui/sidebar/AllNotesSidebar.tsx b/surfsense_web/components/layout/ui/sidebar/AllNotesSidebar.tsx new file mode 100644 index 000000000..67d1b4ba6 --- /dev/null +++ b/surfsense_web/components/layout/ui/sidebar/AllNotesSidebar.tsx @@ -0,0 +1,407 @@ +"use client"; + +import { useQuery, useQueryClient } from "@tanstack/react-query"; +import { format } from "date-fns"; +import { FileText, Loader2, MoreHorizontal, Plus, Search, Trash2, X } from "lucide-react"; +import { AnimatePresence, motion } from "motion/react"; +import { useParams, useRouter } from "next/navigation"; +import { useTranslations } from "next-intl"; +import { useCallback, useEffect, useMemo, useState } from "react"; +import { createPortal } from "react-dom"; +import { Button } from "@/components/ui/button"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; +import { Input } from "@/components/ui/input"; +import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; +import { useDebouncedValue } from "@/hooks/use-debounced-value"; +import { documentsApiService } from "@/lib/apis/documents-api.service"; +import { notesApiService } from "@/lib/apis/notes-api.service"; +import { cn } from "@/lib/utils"; + +interface AllNotesSidebarProps { + open: boolean; + onOpenChange: (open: boolean) => void; + searchSpaceId: string; + onAddNote?: () => void; + onCloseMobileSidebar?: () => void; +} + +export function AllNotesSidebar({ + open, + onOpenChange, + searchSpaceId, + onAddNote, + onCloseMobileSidebar, +}: AllNotesSidebarProps) { + const t = useTranslations("sidebar"); + const router = useRouter(); + const params = useParams(); + const queryClient = useQueryClient(); + + // Get the current note ID from URL to highlight the open note + const currentNoteId = params.note_id ? 
Number(params.note_id) : null; + const [deletingNoteId, setDeletingNoteId] = useState(null); + const [searchQuery, setSearchQuery] = useState(""); + const [mounted, setMounted] = useState(false); + const [openDropdownId, setOpenDropdownId] = useState(null); + const debouncedSearchQuery = useDebouncedValue(searchQuery, 300); + + // Handle mounting for portal + useEffect(() => { + setMounted(true); + }, []); + + // Handle escape key + useEffect(() => { + const handleEscape = (e: KeyboardEvent) => { + if (e.key === "Escape" && open) { + onOpenChange(false); + } + }; + document.addEventListener("keydown", handleEscape); + return () => document.removeEventListener("keydown", handleEscape); + }, [open, onOpenChange]); + + // Lock body scroll when open + useEffect(() => { + if (open) { + document.body.style.overflow = "hidden"; + } else { + document.body.style.overflow = ""; + } + return () => { + document.body.style.overflow = ""; + }; + }, [open]); + + // Fetch all notes (when no search query) + const { + data: notesData, + error: notesError, + isLoading: isLoadingNotes, + } = useQuery({ + queryKey: ["all-notes", searchSpaceId], + queryFn: () => + notesApiService.getNotes({ + search_space_id: Number(searchSpaceId), + page_size: 1000, + }), + enabled: !!searchSpaceId && open && !debouncedSearchQuery, + }); + + // Search notes (when there's a search query) + const { + data: searchData, + error: searchError, + isLoading: isSearching, + } = useQuery({ + queryKey: ["search-notes", searchSpaceId, debouncedSearchQuery], + queryFn: () => + documentsApiService.searchDocuments({ + queryParams: { + search_space_id: Number(searchSpaceId), + document_types: ["NOTE"], + title: debouncedSearchQuery, + page_size: 100, + }, + }), + enabled: !!searchSpaceId && open && !!debouncedSearchQuery, + }); + + // Handle note navigation + const handleNoteClick = useCallback( + (noteId: number, noteSearchSpaceId: number) => { + router.push(`/dashboard/${noteSearchSpaceId}/editor/${noteId}`); + 
onOpenChange(false); + // Also close the main sidebar on mobile + onCloseMobileSidebar?.(); + }, + [router, onOpenChange, onCloseMobileSidebar] + ); + + // Handle note deletion + const handleDeleteNote = useCallback( + async (noteId: number, noteSearchSpaceId: number) => { + setDeletingNoteId(noteId); + try { + await notesApiService.deleteNote({ + search_space_id: noteSearchSpaceId, + note_id: noteId, + }); + queryClient.invalidateQueries({ queryKey: ["all-notes", searchSpaceId] }); + queryClient.invalidateQueries({ queryKey: ["notes", searchSpaceId] }); + queryClient.invalidateQueries({ queryKey: ["search-notes", searchSpaceId] }); + } catch (error) { + console.error("Error deleting note:", error); + } finally { + setDeletingNoteId(null); + } + }, + [queryClient, searchSpaceId] + ); + + // Clear search + const handleClearSearch = useCallback(() => { + setSearchQuery(""); + }, []); + + // Determine which data to show + const isSearchMode = !!debouncedSearchQuery; + const isLoading = isSearchMode ? isSearching : isLoadingNotes; + const error = isSearchMode ? searchError : notesError; + + // Transform and sort notes data - handle both regular notes and search results + const notes = useMemo(() => { + let notesList: { + id: number; + title: string; + search_space_id: number; + created_at: string; + updated_at?: string | null; + }[]; + + if (isSearchMode && searchData?.items) { + notesList = searchData.items.map((doc) => ({ + id: doc.id, + title: doc.title, + search_space_id: doc.search_space_id, + created_at: doc.created_at, + updated_at: doc.updated_at, + })); + } else { + notesList = notesData?.items ?? []; + } + + // Sort notes by updated_at (most recent first), fallback to created_at + return [...notesList].sort((a, b) => { + const dateA = a.updated_at + ? new Date(a.updated_at).getTime() + : new Date(a.created_at).getTime(); + const dateB = b.updated_at + ? 
new Date(b.updated_at).getTime() + : new Date(b.created_at).getTime(); + return dateB - dateA; // Descending order (most recent first) + }); + }, [isSearchMode, searchData, notesData]); + + if (!mounted) return null; + + return createPortal( + + {open && ( + <> + {/* Backdrop */} + onOpenChange(false)} + aria-hidden="true" + /> + + {/* Panel */} + + {/* Header */} +
+
+

{t("all_notes") || "All Notes"}

+ +
+ + {/* Search Input */} +
+ + setSearchQuery(e.target.value)} + className="pl-9 pr-8 h-9" + /> + {searchQuery && ( + + )} +
+
+ + {/* Scrollable Content */} +
+ {isLoading ? ( +
+ +
+ ) : error ? ( +
+ {t("error_loading_notes") || "Error loading notes"} +
+ ) : notes.length > 0 ? ( +
+ {notes.map((note) => { + const isDeleting = deletingNoteId === note.id; + const isActive = currentNoteId === note.id; + + return ( +
+ {/* Main clickable area for navigation */} + + + + + +
+

+ {t("created") || "Created"}:{" "} + {format(new Date(note.created_at), "MMM d, yyyy 'at' h:mm a")} +

+ {note.updated_at && ( +

+ {t("updated") || "Updated"}:{" "} + {format(new Date(note.updated_at), "MMM d, yyyy 'at' h:mm a")} +

+ )} +
+
+
+ + {/* Actions dropdown - separate from main click area */} + setOpenDropdownId(isOpen ? note.id : null)} + > + + + + + handleDeleteNote(note.id, note.search_space_id)} + className="text-destructive focus:text-destructive" + > + + {t("delete") || "Delete"} + + + +
+ ); + })} +
+ ) : isSearchMode ? ( +
+ +

+ {t("no_results_found") || "No notes found"} +

+

+ {t("try_different_search") || "Try a different search term"} +

+
+ ) : ( +
+ +

+ {t("no_notes") || "No notes yet"} +

+ {onAddNote && ( + + )} +
+ )} +
+ + {/* Footer with Add Note button */} + {onAddNote && notes.length > 0 && ( +
+ +
+ )} +
+ + )} +
, + document.body + ); +} diff --git a/surfsense_web/components/layout/ui/sidebar/ChatListItem.tsx b/surfsense_web/components/layout/ui/sidebar/ChatListItem.tsx new file mode 100644 index 000000000..7f5ede04c --- /dev/null +++ b/surfsense_web/components/layout/ui/sidebar/ChatListItem.tsx @@ -0,0 +1,65 @@ +"use client"; + +import { MessageSquare, MoreHorizontal } from "lucide-react"; +import { useTranslations } from "next-intl"; +import { Button } from "@/components/ui/button"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; +import { cn } from "@/lib/utils"; + +interface ChatListItemProps { + name: string; + isActive?: boolean; + onClick?: () => void; + onDelete?: () => void; +} + +export function ChatListItem({ name, isActive, onClick, onDelete }: ChatListItemProps) { + const t = useTranslations("sidebar"); + + return ( +
+ + + {/* Actions dropdown */} +
+ + + + + + { + e.stopPropagation(); + onDelete?.(); + }} + className="text-destructive focus:text-destructive" + > + {t("delete")} + + + +
+
+ ); +} diff --git a/surfsense_web/components/layout/ui/sidebar/MobileSidebar.tsx b/surfsense_web/components/layout/ui/sidebar/MobileSidebar.tsx new file mode 100644 index 000000000..8429d6671 --- /dev/null +++ b/surfsense_web/components/layout/ui/sidebar/MobileSidebar.tsx @@ -0,0 +1,154 @@ +"use client"; + +import { Menu } from "lucide-react"; +import { Button } from "@/components/ui/button"; +import { ScrollArea } from "@/components/ui/scroll-area"; +import { Sheet, SheetContent, SheetTitle } from "@/components/ui/sheet"; +import type { + ChatItem, + NavItem, + NoteItem, + PageUsage, + User, + Workspace, +} from "../../types/layout.types"; +import { IconRail } from "../icon-rail"; +import { Sidebar } from "./Sidebar"; + +interface MobileSidebarProps { + isOpen: boolean; + onOpenChange: (open: boolean) => void; + workspaces: Workspace[]; + activeWorkspaceId: number | null; + onWorkspaceSelect: (id: number) => void; + onAddWorkspace: () => void; + workspace: Workspace | null; + navItems: NavItem[]; + onNavItemClick?: (item: NavItem) => void; + chats: ChatItem[]; + activeChatId?: number | null; + onNewChat: () => void; + onChatSelect: (chat: ChatItem) => void; + onChatDelete?: (chat: ChatItem) => void; + onViewAllChats?: () => void; + notes: NoteItem[]; + activeNoteId?: number | null; + onNoteSelect: (note: NoteItem) => void; + onNoteDelete?: (note: NoteItem) => void; + onAddNote?: () => void; + onViewAllNotes?: () => void; + user: User; + onSettings?: () => void; + onInviteMembers?: () => void; + onSeeAllWorkspaces?: () => void; + onLogout?: () => void; + pageUsage?: PageUsage; +} + +export function MobileSidebarTrigger({ onClick }: { onClick: () => void }) { + return ( + + ); +} + +export function MobileSidebar({ + isOpen, + onOpenChange, + workspaces, + activeWorkspaceId, + onWorkspaceSelect, + onAddWorkspace, + workspace, + navItems, + onNavItemClick, + chats, + activeChatId, + onNewChat, + onChatSelect, + onChatDelete, + onViewAllChats, + notes, + activeNoteId, 
+ onNoteSelect, + onNoteDelete, + onAddNote, + onViewAllNotes, + user, + onSettings, + onInviteMembers, + onSeeAllWorkspaces, + onLogout, + pageUsage, +}: MobileSidebarProps) { + const handleWorkspaceSelect = (id: number) => { + onWorkspaceSelect(id); + }; + + const handleNavItemClick = (item: NavItem) => { + onNavItemClick?.(item); + onOpenChange(false); + }; + + const handleChatSelect = (chat: ChatItem) => { + onChatSelect(chat); + onOpenChange(false); + }; + + const handleNoteSelect = (note: NoteItem) => { + onNoteSelect(note); + onOpenChange(false); + }; + + return ( + + + Navigation + +
+ + + +
+ +
+ { + onNewChat(); + onOpenChange(false); + }} + onChatSelect={handleChatSelect} + onChatDelete={onChatDelete} + onViewAllChats={onViewAllChats} + notes={notes} + activeNoteId={activeNoteId} + onNoteSelect={handleNoteSelect} + onNoteDelete={onNoteDelete} + onAddNote={onAddNote} + onViewAllNotes={onViewAllNotes} + user={user} + onSettings={onSettings} + onInviteMembers={onInviteMembers} + onSeeAllWorkspaces={onSeeAllWorkspaces} + onLogout={onLogout} + pageUsage={pageUsage} + className="w-full border-none" + /> +
+
+
+ ); +} diff --git a/surfsense_web/components/layout/ui/sidebar/NavSection.tsx b/surfsense_web/components/layout/ui/sidebar/NavSection.tsx new file mode 100644 index 000000000..7b694055b --- /dev/null +++ b/surfsense_web/components/layout/ui/sidebar/NavSection.tsx @@ -0,0 +1,73 @@ +"use client"; + +import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; +import { cn } from "@/lib/utils"; +import type { NavItem } from "../../types/layout.types"; + +interface NavSectionProps { + items: NavItem[]; + onItemClick?: (item: NavItem) => void; + isCollapsed?: boolean; +} + +export function NavSection({ items, onItemClick, isCollapsed = false }: NavSectionProps) { + return ( +
+ {items.map((item) => { + const Icon = item.icon; + + // Add data-joyride for onboarding tour + const joyrideAttr = + item.title === "Documents" || item.title.toLowerCase().includes("documents") + ? { "data-joyride": "documents-sidebar" } + : {}; + + if (isCollapsed) { + return ( + + + + + + {item.title} + {item.badge && ` (${item.badge})`} + + + ); + } + + return ( + + ); + })} +
+ ); +} diff --git a/surfsense_web/components/layout/ui/sidebar/NoteListItem.tsx b/surfsense_web/components/layout/ui/sidebar/NoteListItem.tsx new file mode 100644 index 000000000..0491ebcca --- /dev/null +++ b/surfsense_web/components/layout/ui/sidebar/NoteListItem.tsx @@ -0,0 +1,76 @@ +"use client"; + +import { FileText, Loader2, MoreHorizontal } from "lucide-react"; +import { useTranslations } from "next-intl"; +import { Button } from "@/components/ui/button"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; +import { cn } from "@/lib/utils"; + +interface NoteListItemProps { + name: string; + isActive?: boolean; + isReindexing?: boolean; + onClick?: () => void; + onDelete?: () => void; +} + +export function NoteListItem({ + name, + isActive, + isReindexing, + onClick, + onDelete, +}: NoteListItemProps) { + const t = useTranslations("sidebar"); + + return ( +
+ + + {/* Actions dropdown */} +
+ + + + + + { + e.stopPropagation(); + onDelete?.(); + }} + className="text-destructive focus:text-destructive" + > + {t("delete")} + + + +
+
+ ); +} diff --git a/surfsense_web/components/layout/ui/sidebar/PageUsageDisplay.tsx b/surfsense_web/components/layout/ui/sidebar/PageUsageDisplay.tsx new file mode 100644 index 000000000..85abae19b --- /dev/null +++ b/surfsense_web/components/layout/ui/sidebar/PageUsageDisplay.tsx @@ -0,0 +1,34 @@ +"use client"; + +import { Mail } from "lucide-react"; +import { Progress } from "@/components/ui/progress"; + +interface PageUsageDisplayProps { + pagesUsed: number; + pagesLimit: number; +} + +export function PageUsageDisplay({ pagesUsed, pagesLimit }: PageUsageDisplayProps) { + const usagePercentage = (pagesUsed / pagesLimit) * 100; + + return ( +
+
+
+ + {pagesUsed.toLocaleString()} / {pagesLimit.toLocaleString()} pages + + {usagePercentage.toFixed(0)}% +
+ + + + Contact to increase limits + +
+
+ ); +} diff --git a/surfsense_web/components/layout/ui/sidebar/Sidebar.tsx b/surfsense_web/components/layout/ui/sidebar/Sidebar.tsx new file mode 100644 index 000000000..5031b08b5 --- /dev/null +++ b/surfsense_web/components/layout/ui/sidebar/Sidebar.tsx @@ -0,0 +1,294 @@ +"use client"; + +import { FileText, FolderOpen, MessageSquare, PenSquare, Plus } from "lucide-react"; +import { useTranslations } from "next-intl"; +import { Button } from "@/components/ui/button"; +import { ScrollArea } from "@/components/ui/scroll-area"; +import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; +import { cn } from "@/lib/utils"; +import type { + ChatItem, + NavItem, + NoteItem, + PageUsage, + User, + Workspace, +} from "../../types/layout.types"; +import { ChatListItem } from "./ChatListItem"; +import { NavSection } from "./NavSection"; +import { NoteListItem } from "./NoteListItem"; +import { PageUsageDisplay } from "./PageUsageDisplay"; +import { SidebarCollapseButton } from "./SidebarCollapseButton"; +import { SidebarHeader } from "./SidebarHeader"; +import { SidebarSection } from "./SidebarSection"; +import { SidebarUserProfile } from "./SidebarUserProfile"; + +interface SidebarProps { + workspace: Workspace | null; + isCollapsed?: boolean; + onToggleCollapse?: () => void; + navItems: NavItem[]; + onNavItemClick?: (item: NavItem) => void; + chats: ChatItem[]; + activeChatId?: number | null; + onNewChat: () => void; + onChatSelect: (chat: ChatItem) => void; + onChatDelete?: (chat: ChatItem) => void; + onViewAllChats?: () => void; + notes: NoteItem[]; + activeNoteId?: number | null; + onNoteSelect: (note: NoteItem) => void; + onNoteDelete?: (note: NoteItem) => void; + onAddNote?: () => void; + onViewAllNotes?: () => void; + user: User; + onSettings?: () => void; + onInviteMembers?: () => void; + onSeeAllWorkspaces?: () => void; + onLogout?: () => void; + pageUsage?: PageUsage; + className?: string; +} + +export function Sidebar({ + workspace, + 
isCollapsed = false, + onToggleCollapse, + navItems, + onNavItemClick, + chats, + activeChatId, + onNewChat, + onChatSelect, + onChatDelete, + onViewAllChats, + notes, + activeNoteId, + onNoteSelect, + onNoteDelete, + onAddNote, + onViewAllNotes, + user, + onSettings, + onInviteMembers, + onSeeAllWorkspaces, + onLogout, + pageUsage, + className, +}: SidebarProps) { + const t = useTranslations("sidebar"); + + return ( +
+ {/* Header - workspace name or collapse button when collapsed */} + {isCollapsed ? ( +
+ {})} + /> +
+ ) : ( +
+ +
+ {})} + /> +
+
+ )} + + {/* New chat button */} +
+ {isCollapsed ? ( + + + + + {t("new_chat")} + + ) : ( + + )} +
+ + {/* Platform navigation */} + {navItems.length > 0 && ( + + )} + + {/* Scrollable content */} + + {isCollapsed ? ( +
+ {chats.length > 0 && ( + + + + + + {t("recent_chats")} ({chats.length}) + + + )} + {notes.length > 0 && ( + + + + + + {t("notes")} ({notes.length}) + + + )} +
+ ) : ( +
+ 0 ? ( + + + + + {t("view_all_chats")} + + ) : undefined + } + > + {chats.length > 0 ? ( +
+ {chats.map((chat) => ( + onChatSelect(chat)} + onDelete={() => onChatDelete?.(chat)} + /> + ))} +
+ ) : ( +

{t("no_recent_chats")}

+ )} +
+ + 0 ? ( + + + + + {t("view_all_notes")} + + ) : undefined + } + persistentAction={ + onAddNote && notes.length > 0 ? ( + + + + + {t("add_note")} + + ) : undefined + } + > + {notes.length > 0 ? ( +
+ {notes.map((note) => ( + onNoteSelect(note)} + onDelete={() => onNoteDelete?.(note)} + /> + ))} +
+ ) : onAddNote ? ( + + ) : ( +

{t("no_notes")}

+ )} +
+
+ )} +
+ + {/* Footer */} +
+ {pageUsage && !isCollapsed && ( + + )} + + +
+
+ ); +} diff --git a/surfsense_web/components/layout/ui/sidebar/SidebarCollapseButton.tsx b/surfsense_web/components/layout/ui/sidebar/SidebarCollapseButton.tsx new file mode 100644 index 000000000..3eaa87070 --- /dev/null +++ b/surfsense_web/components/layout/ui/sidebar/SidebarCollapseButton.tsx @@ -0,0 +1,31 @@ +"use client"; + +import { PanelLeft, PanelLeftClose } from "lucide-react"; +import { useTranslations } from "next-intl"; +import { Button } from "@/components/ui/button"; +import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; + +interface SidebarCollapseButtonProps { + isCollapsed: boolean; + onToggle: () => void; +} + +export function SidebarCollapseButton({ isCollapsed, onToggle }: SidebarCollapseButtonProps) { + const t = useTranslations("sidebar"); + + return ( + + + + + + {isCollapsed ? `${t("expand_sidebar")} (⌘B)` : `${t("collapse_sidebar")} (⌘B)`} + + + ); +} diff --git a/surfsense_web/components/layout/ui/sidebar/SidebarHeader.tsx b/surfsense_web/components/layout/ui/sidebar/SidebarHeader.tsx new file mode 100644 index 000000000..cf15a367e --- /dev/null +++ b/surfsense_web/components/layout/ui/sidebar/SidebarHeader.tsx @@ -0,0 +1,69 @@ +"use client"; + +import { ChevronsUpDown, LayoutGrid, Settings, UserPlus } from "lucide-react"; +import { useTranslations } from "next-intl"; +import { Button } from "@/components/ui/button"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuSeparator, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; +import { cn } from "@/lib/utils"; +import type { Workspace } from "../../types/layout.types"; + +interface SidebarHeaderProps { + workspace: Workspace | null; + isCollapsed?: boolean; + onSettings?: () => void; + onInviteMembers?: () => void; + onSeeAllWorkspaces?: () => void; + className?: string; +} + +export function SidebarHeader({ + workspace, + isCollapsed, + onSettings, + onInviteMembers, + onSeeAllWorkspaces, + className, +}: 
SidebarHeaderProps) { + const t = useTranslations("sidebar"); + + return ( +
+ + + + + + + + {t("invite_members")} + + + + + {t("workspace_settings")} + + + + + {t("see_all_workspaces")} + + + +
+ ); +} diff --git a/surfsense_web/components/layout/ui/sidebar/SidebarSection.tsx b/surfsense_web/components/layout/ui/sidebar/SidebarSection.tsx new file mode 100644 index 000000000..4d161e3fa --- /dev/null +++ b/surfsense_web/components/layout/ui/sidebar/SidebarSection.tsx @@ -0,0 +1,56 @@ +"use client"; + +import { ChevronRight } from "lucide-react"; +import { useState } from "react"; +import { Collapsible, CollapsibleContent, CollapsibleTrigger } from "@/components/ui/collapsible"; +import { cn } from "@/lib/utils"; + +interface SidebarSectionProps { + title: string; + defaultOpen?: boolean; + children: React.ReactNode; + action?: React.ReactNode; + persistentAction?: React.ReactNode; +} + +export function SidebarSection({ + title, + defaultOpen = true, + children, + action, + persistentAction, +}: SidebarSectionProps) { + const [isOpen, setIsOpen] = useState(defaultOpen); + + return ( + +
+ + + {title} + + + {/* Action button - visible on hover (always visible on mobile) */} + {action && ( +
+ {action} +
+ )} + + {/* Persistent action - always visible */} + {persistentAction && ( +
{persistentAction}
+ )} +
+ + +
{children}
+
+
+ ); +} diff --git a/surfsense_web/components/layout/ui/sidebar/SidebarUserProfile.tsx b/surfsense_web/components/layout/ui/sidebar/SidebarUserProfile.tsx new file mode 100644 index 000000000..29b35b9a9 --- /dev/null +++ b/surfsense_web/components/layout/ui/sidebar/SidebarUserProfile.tsx @@ -0,0 +1,188 @@ +"use client"; + +import { ChevronUp, LogOut } from "lucide-react"; +import { useTranslations } from "next-intl"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuLabel, + DropdownMenuSeparator, + DropdownMenuTrigger, +} from "@/components/ui/dropdown-menu"; +import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; +import { cn } from "@/lib/utils"; +import type { User } from "../../types/layout.types"; + +interface SidebarUserProfileProps { + user: User; + onLogout?: () => void; + isCollapsed?: boolean; +} + +/** + * Generates a consistent color based on email + */ +function stringToColor(str: string): string { + let hash = 0; + for (let i = 0; i < str.length; i++) { + hash = str.charCodeAt(i) + ((hash << 5) - hash); + } + const colors = [ + "#6366f1", + "#8b5cf6", + "#a855f7", + "#d946ef", + "#ec4899", + "#f43f5e", + "#ef4444", + "#f97316", + "#eab308", + "#84cc16", + "#22c55e", + "#14b8a6", + "#06b6d4", + "#0ea5e9", + "#3b82f6", + ]; + return colors[Math.abs(hash) % colors.length]; +} + +/** + * Gets initials from email + */ +function getInitials(email: string): string { + const name = email.split("@")[0]; + const parts = name.split(/[._-]/); + if (parts.length >= 2) { + return (parts[0][0] + parts[1][0]).toUpperCase(); + } + return name.slice(0, 2).toUpperCase(); +} + +export function SidebarUserProfile({ + user, + onLogout, + isCollapsed = false, +}: SidebarUserProfileProps) { + const t = useTranslations("sidebar"); + const bgColor = stringToColor(user.email); + const initials = getInitials(user.email); + const displayName = user.name || user.email.split("@")[0]; + + // Collapsed view - just show 
avatar with dropdown + if (isCollapsed) { + return ( +
+ + + + + + + + {displayName} + + + + +
+
+ {initials} +
+
+

{displayName}

+

{user.email}

+
+
+
+ + + + + + {t("logout")} + +
+
+
+ ); + } + + // Expanded view + return ( +
+ + + + + + + +
+
+ {initials} +
+
+

{displayName}

+

{user.email}

+
+
+
+ + + + + + {t("logout")} + +
+
+
+ ); +} diff --git a/surfsense_web/components/layout/ui/sidebar/index.ts b/surfsense_web/components/layout/ui/sidebar/index.ts new file mode 100644 index 000000000..d98b45ca5 --- /dev/null +++ b/surfsense_web/components/layout/ui/sidebar/index.ts @@ -0,0 +1,12 @@ +export { AllChatsSidebar } from "./AllChatsSidebar"; +export { AllNotesSidebar } from "./AllNotesSidebar"; +export { ChatListItem } from "./ChatListItem"; +export { MobileSidebar, MobileSidebarTrigger } from "./MobileSidebar"; +export { NavSection } from "./NavSection"; +export { NoteListItem } from "./NoteListItem"; +export { PageUsageDisplay } from "./PageUsageDisplay"; +export { Sidebar } from "./Sidebar"; +export { SidebarCollapseButton } from "./SidebarCollapseButton"; +export { SidebarHeader } from "./SidebarHeader"; +export { SidebarSection } from "./SidebarSection"; +export { SidebarUserProfile } from "./SidebarUserProfile"; From 65a10518d5ffc2e1d88088f926daf1cf55f5f219 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Thu, 8 Jan 2026 19:10:53 +0200 Subject: [PATCH 08/28] feat: add sidebar translations --- surfsense_web/messages/en.json | 10 +++++++++- surfsense_web/messages/zh.json | 10 +++++++++- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/surfsense_web/messages/en.json b/surfsense_web/messages/en.json index 6c64e62ba..b803d4b69 100644 --- a/surfsense_web/messages/en.json +++ b/surfsense_web/messages/en.json @@ -622,7 +622,15 @@ "chat_archived": "Chat archived", "chat_unarchived": "Chat restored", "no_archived_chats": "No archived chats", - "error_archiving_chat": "Failed to archive chat" + "error_archiving_chat": "Failed to archive chat", + "new_chat": "New chat", + "select_workspace": "Select Workspace", + "invite_members": "Invite members", + "workspace_settings": "Workspace settings", + "see_all_workspaces": "See all search spaces", + "expand_sidebar": "Expand sidebar", + "collapse_sidebar": "Collapse sidebar", + "logout": "Logout" }, "errors": { "something_went_wrong": 
"Something went wrong", diff --git a/surfsense_web/messages/zh.json b/surfsense_web/messages/zh.json index 67069cf55..fa690bf39 100644 --- a/surfsense_web/messages/zh.json +++ b/surfsense_web/messages/zh.json @@ -616,7 +616,15 @@ "more_options": "更多选项", "clear_search": "清除搜索", "view_all_notes": "查看所有笔记", - "add_note": "添加笔记" + "add_note": "添加笔记", + "new_chat": "新对话", + "select_workspace": "选择工作空间", + "invite_members": "邀请成员", + "workspace_settings": "工作空间设置", + "see_all_workspaces": "查看所有搜索空间", + "expand_sidebar": "展开侧边栏", + "collapse_sidebar": "收起侧边栏", + "logout": "退出登录" }, "errors": { "something_went_wrong": "出错了", From ab052bf1f20d56bb07780227b9ace42ea848b773 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Thu, 8 Jan 2026 19:11:07 +0200 Subject: [PATCH 09/28] refactor: use new layout in dashboard --- .../[search_space_id]/client-layout.tsx | 89 +++---------------- 1 file changed, 11 insertions(+), 78 deletions(-) diff --git a/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx b/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx index c78cc7762..7b1bb61b0 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/client-layout.tsx @@ -5,7 +5,7 @@ import { Loader2 } from "lucide-react"; import { useParams, usePathname, useRouter } from "next/navigation"; import { useTranslations } from "next-intl"; import type React from "react"; -import { useCallback, useEffect, useMemo, useRef, useState } from "react"; +import { useCallback, useEffect, useRef, useState } from "react"; import { toast } from "sonner"; import { myAccessAtom } from "@/atoms/members/members-query.atoms"; import { updateLLMPreferencesMutationAtom } from "@/atoms/new-llm-config/new-llm-config-mutation.atoms"; @@ -17,22 +17,18 @@ import { activeSearchSpaceIdAtom } from "@/atoms/search-spaces/search-space-quer import { DocumentUploadDialogProvider } from "@/components/assistant-ui/document-upload-popup"; import { 
DashboardBreadcrumb } from "@/components/dashboard-breadcrumb"; import { LanguageSwitcher } from "@/components/LanguageSwitcher"; +import { LayoutDataProvider } from "@/components/layout"; import { OnboardingTour } from "@/components/onboarding-tour"; -import { AppSidebarProvider } from "@/components/sidebar/AppSidebarProvider"; import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"; -import { Separator } from "@/components/ui/separator"; -import { SidebarInset, SidebarProvider, SidebarTrigger } from "@/components/ui/sidebar"; export function DashboardClientLayout({ children, searchSpaceId, - navSecondary, - navMain, }: { children: React.ReactNode; searchSpaceId: string; - navSecondary: any[]; - navMain: any[]; + navSecondary?: any[]; + navMain?: any[]; }) { const t = useTranslations("dashboard"); const router = useRouter(); @@ -59,50 +55,15 @@ export function DashboardClientLayout({ const [isAutoConfiguring, setIsAutoConfiguring] = useState(false); const hasAttemptedAutoConfig = useRef(false); - // Skip onboarding check if we're already on the onboarding page const isOnboardingPage = pathname?.includes("/onboard"); - - // Only owners should see onboarding - invited members use existing config const isOwner = access?.is_owner ?? false; - // Translate navigation items - const tNavMenu = useTranslations("nav_menu"); - const translatedNavMain = useMemo(() => { - return navMain.map((item) => ({ - ...item, - title: tNavMenu(item.title.toLowerCase().replace(/ /g, "_")), - items: item.items?.map((subItem: any) => ({ - ...subItem, - title: tNavMenu(subItem.title.toLowerCase().replace(/ /g, "_")), - })), - })); - }, [navMain, tNavMenu]); - - const translatedNavSecondary = useMemo(() => { - return navSecondary.map((item) => ({ - ...item, - title: item.title === "All Search Spaces" ? 
tNavMenu("all_search_spaces") : item.title, - })); - }, [navSecondary, tNavMenu]); - - const [open, setOpen] = useState(() => { - try { - const match = document.cookie.match(/(?:^|; )sidebar_state=([^;]+)/); - if (match) return match[1] === "true"; - } catch { - // ignore - } - return true; - }); - useEffect(() => { - // Skip check if already on onboarding page if (isOnboardingPage) { setHasCheckedOnboarding(true); return; } - // Wait for all data to load if ( !loading && !accessLoading && @@ -112,19 +73,16 @@ export function DashboardClientLayout({ ) { const onboardingComplete = isOnboardingComplete(); - // If onboarding is complete, nothing to do if (onboardingComplete) { setHasCheckedOnboarding(true); return; } - // Only handle onboarding for owners if (!isOwner) { setHasCheckedOnboarding(true); return; } - // If global configs available, auto-configure without going to onboard page if (globalConfigs.length > 0 && !hasAttemptedAutoConfig.current) { hasAttemptedAutoConfig.current = true; setIsAutoConfiguring(true); @@ -149,7 +107,6 @@ export function DashboardClientLayout({ setHasCheckedOnboarding(true); } catch (error) { console.error("Auto-configuration failed:", error); - // Fall back to onboard page router.push(`/dashboard/${searchSpaceId}/onboard`); } finally { setIsAutoConfiguring(false); @@ -160,7 +117,6 @@ export function DashboardClientLayout({ return; } - // No global configs - redirect to onboard page router.push(`/dashboard/${searchSpaceId}/onboard`); setHasCheckedOnboarding(true); } @@ -180,7 +136,6 @@ export function DashboardClientLayout({ refetchPreferences, ]); - // Synchronize active search space and chat IDs with URL useEffect(() => { const activeSeacrhSpaceId = typeof search_space_id === "string" @@ -192,7 +147,6 @@ export function DashboardClientLayout({ setActiveSearchSpaceIdState(activeSeacrhSpaceId); }, [search_space_id, setActiveSearchSpaceIdState]); - // Show loading screen while checking onboarding status or auto-configuring if ( 
(!hasCheckedOnboarding && (loading || accessLoading || globalConfigsLoading) && @@ -220,7 +174,6 @@ export function DashboardClientLayout({ ); } - // Show error screen if there's an error loading preferences (but not on onboarding page) if (error && !hasCheckedOnboarding && !isOnboardingPage) { return (
@@ -244,33 +197,13 @@ export function DashboardClientLayout({ return ( - - {/* Use AppSidebarProvider which fetches user, search space, and recent chats */} - - -
-
-
-
- -
- - -
-
-
- -
-
-
-
{children}
-
-
-
+ } + languageSwitcher={} + > + {children} +
); } From 16c94d1c47f9a672c95a5cf62c39403cb4c845a3 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Thu, 8 Jan 2026 19:11:18 +0200 Subject: [PATCH 10/28] chore: remove old sidebar components --- .../components/sidebar/AppSidebarProvider.tsx | 383 -------------- .../components/sidebar/all-chats-sidebar.tsx | 443 ---------------- .../components/sidebar/all-notes-sidebar.tsx | 407 --------------- .../components/sidebar/app-sidebar.tsx | 473 ------------------ .../components/sidebar/nav-chats.tsx | 237 --------- surfsense_web/components/sidebar/nav-main.tsx | 207 -------- .../components/sidebar/nav-notes.tsx | 287 ----------- .../components/sidebar/nav-secondary.tsx | 59 --- .../components/sidebar/page-usage-display.tsx | 57 --- 9 files changed, 2553 deletions(-) delete mode 100644 surfsense_web/components/sidebar/AppSidebarProvider.tsx delete mode 100644 surfsense_web/components/sidebar/all-chats-sidebar.tsx delete mode 100644 surfsense_web/components/sidebar/all-notes-sidebar.tsx delete mode 100644 surfsense_web/components/sidebar/app-sidebar.tsx delete mode 100644 surfsense_web/components/sidebar/nav-chats.tsx delete mode 100644 surfsense_web/components/sidebar/nav-main.tsx delete mode 100644 surfsense_web/components/sidebar/nav-notes.tsx delete mode 100644 surfsense_web/components/sidebar/nav-secondary.tsx delete mode 100644 surfsense_web/components/sidebar/page-usage-display.tsx diff --git a/surfsense_web/components/sidebar/AppSidebarProvider.tsx b/surfsense_web/components/sidebar/AppSidebarProvider.tsx deleted file mode 100644 index f5146c427..000000000 --- a/surfsense_web/components/sidebar/AppSidebarProvider.tsx +++ /dev/null @@ -1,383 +0,0 @@ -"use client"; - -import { useQuery, useQueryClient } from "@tanstack/react-query"; -import { useAtomValue, useSetAtom } from "jotai"; -import { Trash2 } from "lucide-react"; -import { useParams, useRouter } from "next/navigation"; -import { useTranslations } from "next-intl"; -import { useCallback, useMemo, useState } from 
"react"; -import { hasUnsavedEditorChangesAtom, pendingEditorNavigationAtom } from "@/atoms/editor/ui.atoms"; -import { currentUserAtom } from "@/atoms/user/user-query.atoms"; -import { AppSidebar } from "@/components/sidebar/app-sidebar"; -import { Button } from "@/components/ui/button"; -import { - Dialog, - DialogContent, - DialogDescription, - DialogFooter, - DialogHeader, - DialogTitle, -} from "@/components/ui/dialog"; -import { notesApiService } from "@/lib/apis/notes-api.service"; -import { searchSpacesApiService } from "@/lib/apis/search-spaces-api.service"; -import { deleteThread, fetchThreads } from "@/lib/chat/thread-persistence"; -import { cacheKeys } from "@/lib/query-client/cache-keys"; - -interface AppSidebarProviderProps { - searchSpaceId: string; - navSecondary: { - title: string; - url: string; - icon: string; - }[]; - navMain: { - title: string; - url: string; - icon: string; - isActive?: boolean; - items?: { - title: string; - url: string; - }[]; - }[]; -} - -export function AppSidebarProvider({ - searchSpaceId, - navSecondary, - navMain, -}: AppSidebarProviderProps) { - const t = useTranslations("dashboard"); - const tCommon = useTranslations("common"); - const router = useRouter(); - const params = useParams(); - const queryClient = useQueryClient(); - - // Get current chat ID from URL params - const currentChatId = params?.chat_id - ? Number(Array.isArray(params.chat_id) ? 
params.chat_id[0] : params.chat_id) - : null; - const [isDeletingThread, setIsDeletingThread] = useState(false); - - // Editor state for handling unsaved changes - const hasUnsavedEditorChanges = useAtomValue(hasUnsavedEditorChangesAtom); - const setPendingNavigation = useSetAtom(pendingEditorNavigationAtom); - - // Fetch new chat threads - const { - data: threadsData, - error: threadError, - refetch: refetchThreads, - } = useQuery({ - queryKey: ["threads", searchSpaceId], - queryFn: () => fetchThreads(Number(searchSpaceId), 4), - enabled: !!searchSpaceId, - }); - - const { - data: searchSpace, - isLoading: isLoadingSearchSpace, - error: searchSpaceError, - } = useQuery({ - queryKey: cacheKeys.searchSpaces.detail(searchSpaceId), - queryFn: () => searchSpacesApiService.getSearchSpace({ id: Number(searchSpaceId) }), - enabled: !!searchSpaceId, - }); - - const { data: user } = useAtomValue(currentUserAtom); - - // Fetch notes - const { data: notesData, refetch: refetchNotes } = useQuery({ - queryKey: ["notes", searchSpaceId], - queryFn: () => - notesApiService.getNotes({ - search_space_id: Number(searchSpaceId), - page_size: 4, // Get 4 notes for compact sidebar - }), - enabled: !!searchSpaceId, - }); - - const [showDeleteDialog, setShowDeleteDialog] = useState(false); - const [threadToDelete, setThreadToDelete] = useState<{ id: number; name: string } | null>(null); - const [showDeleteNoteDialog, setShowDeleteNoteDialog] = useState(false); - const [noteToDelete, setNoteToDelete] = useState<{ - id: number; - name: string; - search_space_id: number; - } | null>(null); - const [isDeletingNote, setIsDeletingNote] = useState(false); - - // Transform threads to the format expected by AppSidebar - const recentChats = useMemo(() => { - if (!threadsData?.threads) return []; - - // Threads are already sorted by updated_at desc from the API - return threadsData.threads.map((thread) => ({ - name: thread.title || `Chat ${thread.id}`, - url: 
`/dashboard/${searchSpaceId}/new-chat/${thread.id}`, - icon: "MessageCircleMore", - id: thread.id, - search_space_id: Number(searchSpaceId), - actions: [ - { - name: "Delete", - icon: "Trash2", - onClick: () => { - setThreadToDelete({ - id: thread.id, - name: thread.title || `Chat ${thread.id}`, - }); - setShowDeleteDialog(true); - }, - }, - ], - })); - }, [threadsData, searchSpaceId]); - - // Handle delete thread - const handleDeleteThread = useCallback(async () => { - if (!threadToDelete) return; - - setIsDeletingThread(true); - try { - await deleteThread(threadToDelete.id); - // Invalidate threads query to refresh the list - queryClient.invalidateQueries({ queryKey: ["threads", searchSpaceId] }); - // Only navigate to new-chat if the deleted chat is currently open - if (currentChatId === threadToDelete.id) { - router.push(`/dashboard/${searchSpaceId}/new-chat`); - } - } catch (error) { - console.error("Error deleting thread:", error); - } finally { - setIsDeletingThread(false); - setShowDeleteDialog(false); - setThreadToDelete(null); - } - }, [threadToDelete, queryClient, searchSpaceId, router, currentChatId]); - - // Handle delete note with confirmation - const handleDeleteNote = useCallback(async () => { - if (!noteToDelete) return; - - setIsDeletingNote(true); - try { - await notesApiService.deleteNote({ - search_space_id: noteToDelete.search_space_id, - note_id: noteToDelete.id, - }); - refetchNotes(); - } catch (error) { - console.error("Error deleting note:", error); - } finally { - setIsDeletingNote(false); - setShowDeleteNoteDialog(false); - setNoteToDelete(null); - } - }, [noteToDelete, refetchNotes]); - - // Memoized fallback chats - const fallbackChats = useMemo(() => { - if (threadError) { - return [ - { - name: t("error_loading_chats"), - url: "#", - icon: "AlertCircle", - id: 0, - search_space_id: Number(searchSpaceId), - actions: [ - { - name: tCommon("retry"), - icon: "RefreshCw", - onClick: () => refetchThreads(), - }, - ], - }, - ]; - } - - 
return []; - }, [threadError, searchSpaceId, refetchThreads, t, tCommon]); - - // Use fallback chats if there's an error or no chats - const displayChats = recentChats.length > 0 ? recentChats : fallbackChats; - - // Transform notes to the format expected by NavNotes - const recentNotes = useMemo(() => { - if (!notesData?.items) return []; - - // Sort notes by updated_at (most recent first), fallback to created_at if updated_at is null - const sortedNotes = [...notesData.items].sort((a, b) => { - const dateA = a.updated_at - ? new Date(a.updated_at).getTime() - : new Date(a.created_at).getTime(); - const dateB = b.updated_at - ? new Date(b.updated_at).getTime() - : new Date(b.created_at).getTime(); - return dateB - dateA; // Descending order (most recent first) - }); - - // Limit to 4 notes for compact sidebar - return sortedNotes.slice(0, 4).map((note) => ({ - name: note.title, - url: `/dashboard/${note.search_space_id}/editor/${note.id}`, - icon: "FileText", - id: note.id, - search_space_id: note.search_space_id, - actions: [ - { - name: "Delete", - icon: "Trash2", - onClick: () => { - setNoteToDelete({ - id: note.id, - name: note.title, - search_space_id: note.search_space_id, - }); - setShowDeleteNoteDialog(true); - }, - }, - ], - })); - }, [notesData]); - - // Handle add note - check for unsaved changes first - const handleAddNote = useCallback(() => { - const newNoteUrl = `/dashboard/${searchSpaceId}/editor/new`; - - if (hasUnsavedEditorChanges) { - // Set pending navigation - the editor will show the unsaved changes dialog - setPendingNavigation(newNoteUrl); - } else { - // No unsaved changes, navigate directly - router.push(newNoteUrl); - } - }, [router, searchSpaceId, hasUnsavedEditorChanges, setPendingNavigation]); - - // Memoized updated navSecondary - const updatedNavSecondary = useMemo(() => { - const updated = [...navSecondary]; - if (updated.length > 0) { - updated[0] = { - ...updated[0], - title: - searchSpace?.name || - (isLoadingSearchSpace - ? 
tCommon("loading") - : searchSpaceError - ? t("error_loading_space") - : t("unknown_search_space")), - }; - } - return updated; - }, [navSecondary, searchSpace?.name, isLoadingSearchSpace, searchSpaceError, t, tCommon]); - - // Prepare page usage data - const pageUsage = user - ? { - pagesUsed: user.pages_used, - pagesLimit: user.pages_limit, - } - : undefined; - - return ( - <> - - - {/* Delete Confirmation Dialog */} - - - - - - {t("delete_chat")} - - - {t("delete_chat_confirm")} {threadToDelete?.name} - ? {t("action_cannot_undone")} - - - - - - - - - - {/* Delete Note Confirmation Dialog */} - - - - - - {t("delete_note")} - - - {t("delete_note_confirm")} {noteToDelete?.name}?{" "} - {t("action_cannot_undone")} - - - - - - - - - - ); -} diff --git a/surfsense_web/components/sidebar/all-chats-sidebar.tsx b/surfsense_web/components/sidebar/all-chats-sidebar.tsx deleted file mode 100644 index 02459f2b9..000000000 --- a/surfsense_web/components/sidebar/all-chats-sidebar.tsx +++ /dev/null @@ -1,443 +0,0 @@ -"use client"; - -import { useQuery, useQueryClient } from "@tanstack/react-query"; -import { format } from "date-fns"; -import { - ArchiveIcon, - Loader2, - MessageCircleMore, - MoreHorizontal, - RotateCcwIcon, - Search, - Trash2, - X, -} from "lucide-react"; -import { AnimatePresence, motion } from "motion/react"; -import { useParams, useRouter } from "next/navigation"; -import { useTranslations } from "next-intl"; -import { useCallback, useEffect, useState } from "react"; -import { createPortal } from "react-dom"; -import { toast } from "sonner"; -import { Button } from "@/components/ui/button"; -import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuItem, - DropdownMenuSeparator, - DropdownMenuTrigger, -} from "@/components/ui/dropdown-menu"; -import { Input } from "@/components/ui/input"; -import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; -import { useDebouncedValue } from "@/hooks/use-debounced-value"; -import { - 
deleteThread, - fetchThreads, - searchThreads, - type ThreadListItem, - updateThread, -} from "@/lib/chat/thread-persistence"; -import { cn } from "@/lib/utils"; - -interface AllChatsSidebarProps { - open: boolean; - onOpenChange: (open: boolean) => void; - searchSpaceId: string; - onCloseMobileSidebar?: () => void; -} - -export function AllChatsSidebar({ - open, - onOpenChange, - searchSpaceId, - onCloseMobileSidebar, -}: AllChatsSidebarProps) { - const t = useTranslations("sidebar"); - const router = useRouter(); - const params = useParams(); - const queryClient = useQueryClient(); - - // Get the current chat ID from URL to check if user is deleting the currently open chat - const currentChatId = Array.isArray(params.chat_id) - ? Number(params.chat_id[0]) - : params.chat_id - ? Number(params.chat_id) - : null; - const [deletingThreadId, setDeletingThreadId] = useState(null); - const [archivingThreadId, setArchivingThreadId] = useState(null); - const [searchQuery, setSearchQuery] = useState(""); - const [showArchived, setShowArchived] = useState(false); - const [mounted, setMounted] = useState(false); - const [openDropdownId, setOpenDropdownId] = useState(null); - const debouncedSearchQuery = useDebouncedValue(searchQuery, 300); - - const isSearchMode = !!debouncedSearchQuery.trim(); - - // Handle mounting for portal - useEffect(() => { - setMounted(true); - }, []); - - // Handle escape key - useEffect(() => { - const handleEscape = (e: KeyboardEvent) => { - if (e.key === "Escape" && open) { - onOpenChange(false); - } - }; - document.addEventListener("keydown", handleEscape); - return () => document.removeEventListener("keydown", handleEscape); - }, [open, onOpenChange]); - - // Lock body scroll when open - useEffect(() => { - if (open) { - document.body.style.overflow = "hidden"; - } else { - document.body.style.overflow = ""; - } - return () => { - document.body.style.overflow = ""; - }; - }, [open]); - - // Fetch all threads (when not searching) - const { - 
data: threadsData, - error: threadsError, - isLoading: isLoadingThreads, - } = useQuery({ - queryKey: ["all-threads", searchSpaceId], - queryFn: () => fetchThreads(Number(searchSpaceId)), - enabled: !!searchSpaceId && open && !isSearchMode, - }); - - // Search threads (when searching) - const { - data: searchData, - error: searchError, - isLoading: isLoadingSearch, - } = useQuery({ - queryKey: ["search-threads", searchSpaceId, debouncedSearchQuery], - queryFn: () => searchThreads(Number(searchSpaceId), debouncedSearchQuery.trim()), - enabled: !!searchSpaceId && open && isSearchMode, - }); - - // Handle thread navigation - const handleThreadClick = useCallback( - (threadId: number) => { - router.push(`/dashboard/${searchSpaceId}/new-chat/${threadId}`); - onOpenChange(false); - // Also close the main sidebar on mobile - onCloseMobileSidebar?.(); - }, - [router, onOpenChange, searchSpaceId, onCloseMobileSidebar] - ); - - // Handle thread deletion - const handleDeleteThread = useCallback( - async (threadId: number) => { - setDeletingThreadId(threadId); - try { - await deleteThread(threadId); - toast.success(t("chat_deleted") || "Chat deleted successfully"); - queryClient.invalidateQueries({ queryKey: ["all-threads", searchSpaceId] }); - queryClient.invalidateQueries({ queryKey: ["search-threads", searchSpaceId] }); - queryClient.invalidateQueries({ queryKey: ["threads", searchSpaceId] }); - - // If the deleted chat is currently open, close sidebar first then redirect - if (currentChatId === threadId) { - onOpenChange(false); - // Wait for sidebar close animation to complete before navigating - setTimeout(() => { - router.push(`/dashboard/${searchSpaceId}/new-chat`); - }, 250); - } - } catch (error) { - console.error("Error deleting thread:", error); - toast.error(t("error_deleting_chat") || "Failed to delete chat"); - } finally { - setDeletingThreadId(null); - } - }, - [queryClient, searchSpaceId, t, currentChatId, router, onOpenChange] - ); - - // Handle thread 
archive/unarchive - const handleToggleArchive = useCallback( - async (threadId: number, currentlyArchived: boolean) => { - setArchivingThreadId(threadId); - try { - await updateThread(threadId, { archived: !currentlyArchived }); - toast.success( - currentlyArchived - ? t("chat_unarchived") || "Chat restored" - : t("chat_archived") || "Chat archived" - ); - queryClient.invalidateQueries({ queryKey: ["all-threads", searchSpaceId] }); - queryClient.invalidateQueries({ queryKey: ["search-threads", searchSpaceId] }); - queryClient.invalidateQueries({ queryKey: ["threads", searchSpaceId] }); - } catch (error) { - console.error("Error archiving thread:", error); - toast.error(t("error_archiving_chat") || "Failed to archive chat"); - } finally { - setArchivingThreadId(null); - } - }, - [queryClient, searchSpaceId, t] - ); - - // Clear search - const handleClearSearch = useCallback(() => { - setSearchQuery(""); - }, []); - - // Determine which data source to use - let threads: ThreadListItem[] = []; - if (isSearchMode) { - threads = searchData ?? []; - } else if (threadsData) { - threads = showArchived ? threadsData.archived_threads : threadsData.threads; - } - - const isLoading = isSearchMode ? isLoadingSearch : isLoadingThreads; - const error = isSearchMode ? searchError : threadsError; - - // Get counts for tabs - const activeCount = threadsData?.threads.length ?? 0; - const archivedCount = threadsData?.archived_threads.length ?? 0; - - if (!mounted) return null; - - return createPortal( - - {open && ( - <> - {/* Backdrop */} - onOpenChange(false)} - aria-hidden="true" - /> - - {/* Panel */} - - {/* Header */} -
-
-

{t("all_chats") || "All Chats"}

- -
- - {/* Search Input */} -
- - setSearchQuery(e.target.value)} - className="pl-9 pr-8 h-9" - /> - {searchQuery && ( - - )} -
-
- - {/* Tab toggle for active/archived (only show when not searching) */} - {!isSearchMode && ( -
- - -
- )} - - {/* Scrollable Content */} -
- {isLoading ? ( -
- -
- ) : error ? ( -
- {t("error_loading_chats") || "Error loading chats"} -
- ) : threads.length > 0 ? ( -
- {threads.map((thread) => { - const isDeleting = deletingThreadId === thread.id; - const isArchiving = archivingThreadId === thread.id; - const isBusy = isDeleting || isArchiving; - const isActive = currentChatId === thread.id; - - return ( -
- {/* Main clickable area for navigation */} - - - - - -

- {t("updated") || "Updated"}:{" "} - {format(new Date(thread.updatedAt), "MMM d, yyyy 'at' h:mm a")} -

-
-
- - {/* Actions dropdown */} - setOpenDropdownId(isOpen ? thread.id : null)} - > - - - - - handleToggleArchive(thread.id, thread.archived)} - disabled={isArchiving} - > - {thread.archived ? ( - <> - - {t("unarchive") || "Restore"} - - ) : ( - <> - - {t("archive") || "Archive"} - - )} - - - handleDeleteThread(thread.id)} - className="text-destructive focus:text-destructive" - > - - {t("delete") || "Delete"} - - - -
- ); - })} -
- ) : isSearchMode ? ( -
- -

- {t("no_chats_found") || "No chats found"} -

-

- {t("try_different_search") || "Try a different search term"} -

-
- ) : ( -
- -

- {showArchived - ? t("no_archived_chats") || "No archived chats" - : t("no_chats") || "No chats yet"} -

- {!showArchived && ( -

- {t("start_new_chat_hint") || "Start a new chat from the chat page"} -

- )} -
- )} -
-
- - )} -
, - document.body - ); -} diff --git a/surfsense_web/components/sidebar/all-notes-sidebar.tsx b/surfsense_web/components/sidebar/all-notes-sidebar.tsx deleted file mode 100644 index 67d1b4ba6..000000000 --- a/surfsense_web/components/sidebar/all-notes-sidebar.tsx +++ /dev/null @@ -1,407 +0,0 @@ -"use client"; - -import { useQuery, useQueryClient } from "@tanstack/react-query"; -import { format } from "date-fns"; -import { FileText, Loader2, MoreHorizontal, Plus, Search, Trash2, X } from "lucide-react"; -import { AnimatePresence, motion } from "motion/react"; -import { useParams, useRouter } from "next/navigation"; -import { useTranslations } from "next-intl"; -import { useCallback, useEffect, useMemo, useState } from "react"; -import { createPortal } from "react-dom"; -import { Button } from "@/components/ui/button"; -import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuItem, - DropdownMenuTrigger, -} from "@/components/ui/dropdown-menu"; -import { Input } from "@/components/ui/input"; -import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; -import { useDebouncedValue } from "@/hooks/use-debounced-value"; -import { documentsApiService } from "@/lib/apis/documents-api.service"; -import { notesApiService } from "@/lib/apis/notes-api.service"; -import { cn } from "@/lib/utils"; - -interface AllNotesSidebarProps { - open: boolean; - onOpenChange: (open: boolean) => void; - searchSpaceId: string; - onAddNote?: () => void; - onCloseMobileSidebar?: () => void; -} - -export function AllNotesSidebar({ - open, - onOpenChange, - searchSpaceId, - onAddNote, - onCloseMobileSidebar, -}: AllNotesSidebarProps) { - const t = useTranslations("sidebar"); - const router = useRouter(); - const params = useParams(); - const queryClient = useQueryClient(); - - // Get the current note ID from URL to highlight the open note - const currentNoteId = params.note_id ? 
Number(params.note_id) : null; - const [deletingNoteId, setDeletingNoteId] = useState(null); - const [searchQuery, setSearchQuery] = useState(""); - const [mounted, setMounted] = useState(false); - const [openDropdownId, setOpenDropdownId] = useState(null); - const debouncedSearchQuery = useDebouncedValue(searchQuery, 300); - - // Handle mounting for portal - useEffect(() => { - setMounted(true); - }, []); - - // Handle escape key - useEffect(() => { - const handleEscape = (e: KeyboardEvent) => { - if (e.key === "Escape" && open) { - onOpenChange(false); - } - }; - document.addEventListener("keydown", handleEscape); - return () => document.removeEventListener("keydown", handleEscape); - }, [open, onOpenChange]); - - // Lock body scroll when open - useEffect(() => { - if (open) { - document.body.style.overflow = "hidden"; - } else { - document.body.style.overflow = ""; - } - return () => { - document.body.style.overflow = ""; - }; - }, [open]); - - // Fetch all notes (when no search query) - const { - data: notesData, - error: notesError, - isLoading: isLoadingNotes, - } = useQuery({ - queryKey: ["all-notes", searchSpaceId], - queryFn: () => - notesApiService.getNotes({ - search_space_id: Number(searchSpaceId), - page_size: 1000, - }), - enabled: !!searchSpaceId && open && !debouncedSearchQuery, - }); - - // Search notes (when there's a search query) - const { - data: searchData, - error: searchError, - isLoading: isSearching, - } = useQuery({ - queryKey: ["search-notes", searchSpaceId, debouncedSearchQuery], - queryFn: () => - documentsApiService.searchDocuments({ - queryParams: { - search_space_id: Number(searchSpaceId), - document_types: ["NOTE"], - title: debouncedSearchQuery, - page_size: 100, - }, - }), - enabled: !!searchSpaceId && open && !!debouncedSearchQuery, - }); - - // Handle note navigation - const handleNoteClick = useCallback( - (noteId: number, noteSearchSpaceId: number) => { - router.push(`/dashboard/${noteSearchSpaceId}/editor/${noteId}`); - 
onOpenChange(false); - // Also close the main sidebar on mobile - onCloseMobileSidebar?.(); - }, - [router, onOpenChange, onCloseMobileSidebar] - ); - - // Handle note deletion - const handleDeleteNote = useCallback( - async (noteId: number, noteSearchSpaceId: number) => { - setDeletingNoteId(noteId); - try { - await notesApiService.deleteNote({ - search_space_id: noteSearchSpaceId, - note_id: noteId, - }); - queryClient.invalidateQueries({ queryKey: ["all-notes", searchSpaceId] }); - queryClient.invalidateQueries({ queryKey: ["notes", searchSpaceId] }); - queryClient.invalidateQueries({ queryKey: ["search-notes", searchSpaceId] }); - } catch (error) { - console.error("Error deleting note:", error); - } finally { - setDeletingNoteId(null); - } - }, - [queryClient, searchSpaceId] - ); - - // Clear search - const handleClearSearch = useCallback(() => { - setSearchQuery(""); - }, []); - - // Determine which data to show - const isSearchMode = !!debouncedSearchQuery; - const isLoading = isSearchMode ? isSearching : isLoadingNotes; - const error = isSearchMode ? searchError : notesError; - - // Transform and sort notes data - handle both regular notes and search results - const notes = useMemo(() => { - let notesList: { - id: number; - title: string; - search_space_id: number; - created_at: string; - updated_at?: string | null; - }[]; - - if (isSearchMode && searchData?.items) { - notesList = searchData.items.map((doc) => ({ - id: doc.id, - title: doc.title, - search_space_id: doc.search_space_id, - created_at: doc.created_at, - updated_at: doc.updated_at, - })); - } else { - notesList = notesData?.items ?? []; - } - - // Sort notes by updated_at (most recent first), fallback to created_at - return [...notesList].sort((a, b) => { - const dateA = a.updated_at - ? new Date(a.updated_at).getTime() - : new Date(a.created_at).getTime(); - const dateB = b.updated_at - ? 
new Date(b.updated_at).getTime() - : new Date(b.created_at).getTime(); - return dateB - dateA; // Descending order (most recent first) - }); - }, [isSearchMode, searchData, notesData]); - - if (!mounted) return null; - - return createPortal( - - {open && ( - <> - {/* Backdrop */} - onOpenChange(false)} - aria-hidden="true" - /> - - {/* Panel */} - - {/* Header */} -
-
-

{t("all_notes") || "All Notes"}

- -
- - {/* Search Input */} -
- - setSearchQuery(e.target.value)} - className="pl-9 pr-8 h-9" - /> - {searchQuery && ( - - )} -
-
- - {/* Scrollable Content */} -
- {isLoading ? ( -
- -
- ) : error ? ( -
- {t("error_loading_notes") || "Error loading notes"} -
- ) : notes.length > 0 ? ( -
- {notes.map((note) => { - const isDeleting = deletingNoteId === note.id; - const isActive = currentNoteId === note.id; - - return ( -
- {/* Main clickable area for navigation */} - - - - - -
-

- {t("created") || "Created"}:{" "} - {format(new Date(note.created_at), "MMM d, yyyy 'at' h:mm a")} -

- {note.updated_at && ( -

- {t("updated") || "Updated"}:{" "} - {format(new Date(note.updated_at), "MMM d, yyyy 'at' h:mm a")} -

- )} -
-
-
- - {/* Actions dropdown - separate from main click area */} - setOpenDropdownId(isOpen ? note.id : null)} - > - - - - - handleDeleteNote(note.id, note.search_space_id)} - className="text-destructive focus:text-destructive" - > - - {t("delete") || "Delete"} - - - -
- ); - })} -
- ) : isSearchMode ? ( -
- -

- {t("no_results_found") || "No notes found"} -

-

- {t("try_different_search") || "Try a different search term"} -

-
- ) : ( -
- -

- {t("no_notes") || "No notes yet"} -

- {onAddNote && ( - - )} -
- )} -
- - {/* Footer with Add Note button */} - {onAddNote && notes.length > 0 && ( -
- -
- )} -
- - )} -
, - document.body - ); -} diff --git a/surfsense_web/components/sidebar/app-sidebar.tsx b/surfsense_web/components/sidebar/app-sidebar.tsx deleted file mode 100644 index 8030cb9d2..000000000 --- a/surfsense_web/components/sidebar/app-sidebar.tsx +++ /dev/null @@ -1,473 +0,0 @@ -"use client"; - -import { useAtomValue } from "jotai"; -import { - AlertCircle, - ArrowLeftRight, - BookOpen, - Cable, - ChevronsUpDown, - Database, - ExternalLink, - FileStack, - FileText, - Info, - LogOut, - Logs, - type LucideIcon, - MessageCircle, - MessageCircleMore, - MoonIcon, - Podcast, - RefreshCw, - Settings2, - SquareLibrary, - SquareTerminal, - SunIcon, - Trash2, - Undo2, - UserPlus, - Users, -} from "lucide-react"; -import { useRouter } from "next/navigation"; -import { useTheme } from "next-themes"; -import { memo, useEffect, useMemo, useState } from "react"; -import { currentUserAtom } from "@/atoms/user/user-query.atoms"; -import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuGroup, - DropdownMenuItem, - DropdownMenuLabel, - DropdownMenuSeparator, - DropdownMenuTrigger, -} from "@/components/ui/dropdown-menu"; -import { resetUser, trackLogout } from "@/lib/posthog/events"; - -/** - * Generates a consistent color based on a string (email) - */ -function stringToColor(str: string): string { - let hash = 0; - for (let i = 0; i < str.length; i++) { - hash = str.charCodeAt(i) + ((hash << 5) - hash); - } - const colors = [ - "#6366f1", // indigo - "#8b5cf6", // violet - "#a855f7", // purple - "#d946ef", // fuchsia - "#ec4899", // pink - "#f43f5e", // rose - "#ef4444", // red - "#f97316", // orange - "#eab308", // yellow - "#84cc16", // lime - "#22c55e", // green - "#14b8a6", // teal - "#06b6d4", // cyan - "#0ea5e9", // sky - "#3b82f6", // blue - ]; - return colors[Math.abs(hash) % colors.length]; -} - -/** - * Gets initials from an email address - */ -function getInitials(email: string): string { - const name = email.split("@")[0]; - const parts = name.split(/[._-]/); - if 
(parts.length >= 2) { - return (parts[0][0] + parts[1][0]).toUpperCase(); - } - return name.slice(0, 2).toUpperCase(); -} - -/** - * Dynamic avatar component that generates an SVG based on email - */ -function UserAvatar({ email, size = 32 }: { email: string; size?: number }) { - const bgColor = stringToColor(email); - const initials = getInitials(email); - - return ( - - Avatar for {email} - - - {initials} - - - ); -} - -import { NavChats } from "@/components/sidebar/nav-chats"; -import { NavMain } from "@/components/sidebar/nav-main"; -import { NavNotes } from "@/components/sidebar/nav-notes"; -import { NavSecondary } from "@/components/sidebar/nav-secondary"; -import { PageUsageDisplay } from "@/components/sidebar/page-usage-display"; -import { - Sidebar, - SidebarContent, - SidebarFooter, - SidebarHeader, - SidebarMenu, - SidebarMenuButton, - SidebarMenuItem, -} from "@/components/ui/sidebar"; - -// Map of icon names to their components -export const iconMap: Record = { - BookOpen, - Cable, - Database, - FileStack, - Undo2, - MessageCircleMore, - Settings2, - SquareLibrary, - FileText, - SquareTerminal, - AlertCircle, - Info, - ExternalLink, - Trash2, - Podcast, - Users, - RefreshCw, - MessageCircle, - Logs, -}; - -const defaultData = { - user: { - name: "Surf", - email: "m@example.com", - avatar: "/icon-128.png", - }, - navMain: [ - { - title: "Chat", - url: "#", - icon: "SquareTerminal", - isActive: true, - items: [], - }, - { - title: "Sources", - url: "#", - icon: "Database", - items: [ - { - title: "Manage Documents", - url: "#", - }, - { - title: "Manage Connectors", - url: "#", - }, - ], - }, - ], - navSecondary: [ - { - title: "SEARCH SPACE", - url: "#", - icon: "LifeBuoy", - }, - ], - RecentChats: [ - { - name: "Design Engineering", - url: "#", - icon: "MessageCircleMore", - id: 1001, - }, - { - name: "Sales & Marketing", - url: "#", - icon: "MessageCircleMore", - id: 1002, - }, - { - name: "Travel", - url: "#", - icon: "MessageCircleMore", - id: 1003, 
- }, - ], - RecentNotes: [ - { - name: "Meeting Notes", - url: "#", - icon: "FileText", - id: 2001, - }, - { - name: "Project Ideas", - url: "#", - icon: "FileText", - id: 2002, - }, - ], -}; - -interface AppSidebarProps extends React.ComponentProps { - searchSpaceId?: string; - navMain?: { - title: string; - url: string; - icon: string; - isActive?: boolean; - items?: { - title: string; - url: string; - }[]; - }[]; - navSecondary?: { - title: string; - url: string; - icon: string; - }[]; - RecentChats?: { - name: string; - url: string; - icon: string; - id?: number; - search_space_id?: number; - actions?: { - name: string; - icon: string; - onClick: () => void; - }[]; - }[]; - RecentNotes?: { - name: string; - url: string; - icon: string; - id?: number; - search_space_id?: number; - actions?: { - name: string; - icon: string; - onClick: () => void; - }[]; - }[]; - user?: { - name: string; - email: string; - avatar: string; - }; - pageUsage?: { - pagesUsed: number; - pagesLimit: number; - }; - onAddNote?: () => void; -} - -// Memoized AppSidebar component for better performance -export const AppSidebar = memo(function AppSidebar({ - searchSpaceId, - navMain = defaultData.navMain, - navSecondary = defaultData.navSecondary, - RecentChats = defaultData.RecentChats, - RecentNotes = defaultData.RecentNotes, - pageUsage, - onAddNote, - ...props -}: AppSidebarProps) { - const router = useRouter(); - const { theme, setTheme } = useTheme(); - const { data: user, isPending: isLoadingUser } = useAtomValue(currentUserAtom); - const [isClient, setIsClient] = useState(false); - - useEffect(() => { - setIsClient(true); - }, []); - - // Process navMain to resolve icon names to components - const processedNavMain = useMemo(() => { - return navMain.map((item) => ({ - ...item, - icon: iconMap[item.icon] || SquareTerminal, - })); - }, [navMain]); - - // Process navSecondary to resolve icon names to components - const processedNavSecondary = useMemo(() => { - return 
navSecondary.map((item) => ({ - ...item, - icon: iconMap[item.icon] || Undo2, - })); - }, [navSecondary]); - - // Process RecentChats to resolve icon names to components - const processedRecentChats = useMemo(() => { - return ( - RecentChats?.map((item) => ({ - ...item, - icon: iconMap[item.icon] || MessageCircleMore, - })) || [] - ); - }, [RecentChats]); - - // Process RecentNotes to resolve icon names to components - const processedRecentNotes = useMemo(() => { - return ( - RecentNotes?.map((item) => ({ - ...item, - icon: iconMap[item.icon] || FileText, - })) || [] - ); - }, [RecentNotes]); - - // Get user display name from email - const userDisplayName = user?.email ? user.email.split("@")[0] : "User"; - const userEmail = user?.email || (isLoadingUser ? "Loading..." : "Unknown"); - - const handleLogout = () => { - try { - // Track logout event and reset PostHog identity - trackLogout(); - resetUser(); - - if (typeof window !== "undefined") { - localStorage.removeItem("surfsense_bearer_token"); - router.push("/"); - } - } catch (error) { - console.error("Error during logout:", error); - router.push("/"); - } - }; - - return ( - - - - - - - -
- {user?.email ? ( - - ) : ( -
- )} -
-
- {userDisplayName} - {userEmail} -
- - - - - -
-
- {user?.email ? ( - - ) : ( -
- )} -
-
- {userDisplayName} - {userEmail} -
-
- - - - {searchSpaceId && ( - <> - router.push(`/dashboard/${searchSpaceId}/settings`)} - > - - Settings - - router.push(`/dashboard/${searchSpaceId}/team`)} - > - - Invite members - - - )} - router.push("/dashboard")}> - - Switch workspace - - - - - {isClient && ( - setTheme(theme === "dark" ? "light" : "dark")}> - {theme === "dark" ? ( - - ) : ( - - )} - {theme === "dark" ? "Light mode" : "Dark mode"} - - )} - - - - - Logout - - - - - - - - - - - - - - - - {pageUsage && ( - - )} - - - - ); -}); diff --git a/surfsense_web/components/sidebar/nav-chats.tsx b/surfsense_web/components/sidebar/nav-chats.tsx deleted file mode 100644 index ba0004fc8..000000000 --- a/surfsense_web/components/sidebar/nav-chats.tsx +++ /dev/null @@ -1,237 +0,0 @@ -"use client"; - -import { - ChevronRight, - FolderOpen, - Loader2, - type LucideIcon, - MessageCircleMore, - MoreHorizontal, - RefreshCw, - Trash2, -} from "lucide-react"; -import { usePathname, useRouter } from "next/navigation"; -import { useTranslations } from "next-intl"; -import { useCallback, useState } from "react"; -import { Button } from "@/components/ui/button"; -import { Collapsible, CollapsibleContent, CollapsibleTrigger } from "@/components/ui/collapsible"; -import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuItem, - DropdownMenuTrigger, -} from "@/components/ui/dropdown-menu"; -import { - SidebarGroup, - SidebarGroupContent, - SidebarGroupLabel, - SidebarMenu, - SidebarMenuButton, - SidebarMenuItem, - useSidebar, -} from "@/components/ui/sidebar"; -import { cn } from "@/lib/utils"; -import { AllChatsSidebar } from "./all-chats-sidebar"; - -interface ChatAction { - name: string; - icon: string; - onClick: () => void; -} - -interface ChatItem { - name: string; - url: string; - icon: LucideIcon; - id?: number; - search_space_id?: number; - actions?: ChatAction[]; -} - -interface NavChatsProps { - chats: ChatItem[]; - defaultOpen?: boolean; - searchSpaceId?: string; -} - -// Map of icon names to their 
components -const actionIconMap: Record = { - MessageCircleMore, - Trash2, - MoreHorizontal, - RefreshCw, -}; - -export function NavChats({ chats, defaultOpen = true, searchSpaceId }: NavChatsProps) { - const t = useTranslations("sidebar"); - const router = useRouter(); - const pathname = usePathname(); - const { setOpenMobile } = useSidebar(); - const [isDeleting, setIsDeleting] = useState(null); - const [isOpen, setIsOpen] = useState(defaultOpen); - const [isAllChatsSidebarOpen, setIsAllChatsSidebarOpen] = useState(false); - - // Handle chat deletion with loading state - const handleDeleteChat = useCallback(async (chatId: number, deleteAction: () => void) => { - setIsDeleting(chatId); - try { - await deleteAction(); - } finally { - setIsDeleting(null); - } - }, []); - - // Handle chat navigation - const handleChatClick = useCallback( - (url: string) => { - router.push(url); - }, - [router] - ); - - return ( - - -
- - - - {t("recent_chats") || "Recent Chats"} - - - - {/* Action buttons - always visible on hover */} -
- {searchSpaceId && chats.length > 0 && ( - - )} -
-
- - - {chats.length > 0 ? ( - - - {chats.map((chat) => { - const isDeletingChat = isDeleting === chat.id; - const isActive = pathname === chat.url; - - return ( - - {/* Main navigation button */} - handleChatClick(chat.url)} - disabled={isDeletingChat} - className={cn( - "pr-8", // Make room for the action button - isActive && "bg-sidebar-accent text-sidebar-accent-foreground", - isDeletingChat && "opacity-50" - )} - > - - {chat.name} - - - {/* Actions dropdown - positioned absolutely */} - {chat.actions && chat.actions.length > 0 && ( -
- - - - - - {chat.actions.map((action, actionIndex) => { - const ActionIcon = actionIconMap[action.icon] || MessageCircleMore; - const isDeleteAction = action.name.toLowerCase().includes("delete"); - - return ( - { - if (isDeleteAction) { - handleDeleteChat(chat.id || 0, action.onClick); - } else { - action.onClick(); - } - }} - disabled={isDeletingChat} - className={ - isDeleteAction - ? "text-destructive focus:text-destructive" - : "" - } - > - - - {isDeletingChat && isDeleteAction - ? t("deleting") || "Deleting..." - : action.name} - - - ); - })} - - -
- )} -
- ); - })} -
-
- ) : ( -
- - {t("no_recent_chats") || "No recent chats"} -
- )} -
-
- - {/* All Chats Sheet */} - {searchSpaceId && ( - setOpenMobile(false)} - /> - )} -
- ); -} diff --git a/surfsense_web/components/sidebar/nav-main.tsx b/surfsense_web/components/sidebar/nav-main.tsx deleted file mode 100644 index a0dbe912f..000000000 --- a/surfsense_web/components/sidebar/nav-main.tsx +++ /dev/null @@ -1,207 +0,0 @@ -"use client"; - -import { ChevronRight, type LucideIcon } from "lucide-react"; -import { usePathname } from "next/navigation"; -import { useTranslations } from "next-intl"; -import { useCallback, useMemo, useState } from "react"; - -import { Collapsible, CollapsibleContent, CollapsibleTrigger } from "@/components/ui/collapsible"; -import { - SidebarGroup, - SidebarGroupLabel, - SidebarMenu, - SidebarMenuAction, - SidebarMenuButton, - SidebarMenuItem, - SidebarMenuSub, - SidebarMenuSubButton, - SidebarMenuSubItem, -} from "@/components/ui/sidebar"; - -interface NavItem { - title: string; - url: string; - icon: LucideIcon; - isActive?: boolean; - items?: { - title: string; - url: string; - }[]; -} - -interface NavMainProps { - items: NavItem[]; -} - -export function NavMain({ items }: NavMainProps) { - const t = useTranslations("nav_menu"); - const pathname = usePathname(); - - // Translation function that handles both exact matches and fallback to original - const translateTitle = (title: string): string => { - const titleMap: Record = { - Researcher: "researcher", - "Manage LLMs": "manage_llms", - Sources: "sources", - "Manage Documents": "manage_documents", - "Manage Connectors": "manage_connectors", - Podcasts: "podcasts", - Logs: "logs", - Platform: "platform", - Team: "team", - }; - - const key = titleMap[title]; - return key ? 
t(key) : title; - }; - - // Check if an item is active based on pathname - const isItemActive = useCallback( - (item: NavItem): boolean => { - if (!pathname) return false; - - // For items without sub-items, check if pathname matches or starts with the URL - if (!item.items?.length) { - // Chat item: active ONLY when on new-chat page without a specific chat ID - // (i.e., exactly /dashboard/{id}/new-chat, not /dashboard/{id}/new-chat/123) - if (item.url.includes("/new-chat")) { - // Match exactly the new-chat base URL (ends with /new-chat) - return pathname.endsWith("/new-chat"); - } - // Logs item: active when on logs page - if (item.url.includes("/logs")) { - return pathname.includes("/logs"); - } - // Check exact match or prefix match - return pathname === item.url || pathname.startsWith(`${item.url}/`); - } - - // For items with sub-items (like Sources), check if any sub-item URL matches - return item.items.some( - (subItem) => pathname === subItem.url || pathname.startsWith(subItem.url) - ); - }, - [pathname] - ); - - // Memoize items to prevent unnecessary re-renders - const memoizedItems = useMemo(() => items, [items]); - - // Track expanded state for items with sub-menus (like Sources) - const [expandedItems, setExpandedItems] = useState>(() => { - const initial: Record = {}; - items.forEach((item) => { - if (item.items?.length) { - initial[item.title] = item.isActive ?? false; - } - }); - return initial; - }); - - // Handle collapsible state change - const handleOpenChange = useCallback((title: string, isOpen: boolean) => { - setExpandedItems((prev) => ({ ...prev, [title]: isOpen })); - }, []); - - return ( - - {translateTitle("Platform")} - - {memoizedItems.map((item, index) => { - const translatedTitle = translateTitle(item.title); - const hasSub = !!item.items?.length; - const isActive = isItemActive(item); - const isItemOpen = expandedItems[item.title] ?? isActive ?? 
false; - return ( - handleOpenChange(item.title, open) : undefined} - defaultOpen={!hasSub ? isActive : undefined} - > - - {hasSub ? ( - // When the item has children, make the whole row a collapsible trigger - <> - - - - - - - - - - Toggle submenu - - - - - - {item.items?.map((subItem, subIndex) => { - const translatedSubTitle = translateTitle(subItem.title); - const isDocumentsLink = - subItem.title === "Manage Documents" || - translatedSubTitle.toLowerCase().includes("documents"); - return ( - - - - {translatedSubTitle} - - - - ); - })} - - - - ) : ( - // Leaf item: treat as a normal link - - - - {translatedTitle} - - - )} - - - ); - })} - - - ); -} diff --git a/surfsense_web/components/sidebar/nav-notes.tsx b/surfsense_web/components/sidebar/nav-notes.tsx deleted file mode 100644 index e9f94fe80..000000000 --- a/surfsense_web/components/sidebar/nav-notes.tsx +++ /dev/null @@ -1,287 +0,0 @@ -"use client"; - -import { - ChevronRight, - FileText, - FolderOpen, - Loader2, - type LucideIcon, - MoreHorizontal, - Plus, - Trash2, -} from "lucide-react"; -import { usePathname, useRouter } from "next/navigation"; -import { useTranslations } from "next-intl"; -import { useCallback, useMemo, useState } from "react"; -import { Button } from "@/components/ui/button"; -import { Collapsible, CollapsibleContent, CollapsibleTrigger } from "@/components/ui/collapsible"; -import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuItem, - DropdownMenuTrigger, -} from "@/components/ui/dropdown-menu"; -import { - SidebarGroup, - SidebarGroupContent, - SidebarGroupLabel, - SidebarMenu, - SidebarMenuButton, - SidebarMenuItem, - useSidebar, -} from "@/components/ui/sidebar"; -import { useLogsSummary } from "@/hooks/use-logs"; -import { cn } from "@/lib/utils"; -import { AllNotesSidebar } from "./all-notes-sidebar"; - -interface NoteAction { - name: string; - icon: string; - onClick: () => void; -} - -interface NoteItem { - name: string; - url: string; - icon: LucideIcon; - id?: 
number; - search_space_id?: number; - actions?: NoteAction[]; -} - -interface NavNotesProps { - notes: NoteItem[]; - onAddNote?: () => void; - defaultOpen?: boolean; - searchSpaceId?: string; -} - -// Map of icon names to their components -const actionIconMap: Record = { - FileText, - Trash2, - MoreHorizontal, -}; - -export function NavNotes({ notes, onAddNote, defaultOpen = true, searchSpaceId }: NavNotesProps) { - const t = useTranslations("sidebar"); - const router = useRouter(); - const pathname = usePathname(); - const { setOpenMobile } = useSidebar(); - const [isDeleting, setIsDeleting] = useState(null); - const [isOpen, setIsOpen] = useState(defaultOpen); - const [isAllNotesSidebarOpen, setIsAllNotesSidebarOpen] = useState(false); - - // Poll for active reindexing tasks to show inline loading indicators - // Smart polling: only polls when there are active tasks, stops when idle - const { summary } = useLogsSummary(searchSpaceId ? Number(searchSpaceId) : 0, 24, { - enablePolling: true, - refetchInterval: 5000, // Poll every 5 seconds when tasks are active - }); - - // Create a Set of document IDs that are currently being reindexed - const reindexingDocumentIds = useMemo(() => { - if (!summary?.active_tasks) return new Set(); - return new Set( - summary.active_tasks - .filter((task) => task.document_id != null) - .map((task) => task.document_id as number) - ); - }, [summary?.active_tasks]); - - // Handle note deletion with loading state - const handleDeleteNote = useCallback(async (noteId: number, deleteAction: () => void) => { - setIsDeleting(noteId); - try { - await deleteAction(); - } finally { - setIsDeleting(null); - } - }, []); - - // Handle note navigation - const handleNoteClick = useCallback( - (url: string) => { - router.push(url); - }, - [router] - ); - - return ( - - -
- - - - {t("notes") || "Notes"} - - - - {/* Action buttons - always visible on hover */} -
- {searchSpaceId && notes.length > 0 && ( - - )} - {onAddNote && ( - - )} -
-
- - - - - {notes.length > 0 ? ( - notes.map((note) => { - const isDeletingNote = isDeleting === note.id; - const isActive = pathname === note.url; - const isReindexing = note.id ? reindexingDocumentIds.has(note.id) : false; - - return ( - - {/* Main navigation button */} - handleNoteClick(note.url)} - disabled={isDeletingNote} - className={cn( - "pr-8", // Make room for the action button - isActive && "bg-sidebar-accent text-sidebar-accent-foreground", - isDeletingNote && "opacity-50" - )} - > - {isReindexing ? ( - - ) : ( - - )} - {note.name} - - - {/* Actions dropdown - positioned absolutely */} - {note.actions && note.actions.length > 0 && ( -
- - - - - - {note.actions.map((action, actionIndex) => { - const ActionIcon = actionIconMap[action.icon] || FileText; - const isDeleteAction = action.name.toLowerCase().includes("delete"); - - return ( - { - if (isDeleteAction) { - handleDeleteNote(note.id || 0, action.onClick); - } else { - action.onClick(); - } - }} - disabled={isDeletingNote} - className={ - isDeleteAction - ? "text-destructive focus:text-destructive" - : "" - } - > - - - {isDeletingNote && isDeleteAction - ? t("deleting") || "Deleting..." - : action.name} - - - ); - })} - - -
- )} -
- ); - }) - ) : ( - - {onAddNote ? ( - - - {t("create_new_note") || "Create a new note"} - - ) : ( - - - {t("no_notes") || "No notes yet"} - - )} - - )} -
-
-
-
- - {/* All Notes Sheet */} - {searchSpaceId && ( - setOpenMobile(false)} - /> - )} -
- ); -} diff --git a/surfsense_web/components/sidebar/nav-secondary.tsx b/surfsense_web/components/sidebar/nav-secondary.tsx deleted file mode 100644 index 23aeabc38..000000000 --- a/surfsense_web/components/sidebar/nav-secondary.tsx +++ /dev/null @@ -1,59 +0,0 @@ -"use client"; - -import type { LucideIcon } from "lucide-react"; -import { useTranslations } from "next-intl"; -import type * as React from "react"; -import { useMemo } from "react"; - -import { - SidebarGroup, - SidebarGroupLabel, - SidebarMenu, - SidebarMenuButton, - SidebarMenuItem, -} from "@/components/ui/sidebar"; - -interface NavSecondaryItem { - title: string; - url: string; - icon: LucideIcon; -} - -export function NavSecondary({ - items, - ...props -}: { - items: NavSecondaryItem[]; -} & React.ComponentPropsWithoutRef) { - const t = useTranslations("sidebar"); - - // Memoize items to prevent unnecessary re-renders - const memoizedItems = useMemo(() => items, [items]); - - return ( - - {t("search_space")} - - {memoizedItems.map((item, index) => ( - - {item.url === "#" ? ( - // Non-interactive display item (e.g., search space name) -
- - {item.title} -
- ) : ( - // Interactive link item - - - - {item.title} - - - )} -
- ))} -
-
- ); -} diff --git a/surfsense_web/components/sidebar/page-usage-display.tsx b/surfsense_web/components/sidebar/page-usage-display.tsx deleted file mode 100644 index 6c640c0aa..000000000 --- a/surfsense_web/components/sidebar/page-usage-display.tsx +++ /dev/null @@ -1,57 +0,0 @@ -"use client"; - -import { Mail } from "lucide-react"; -import { Progress } from "@/components/ui/progress"; -import { - SidebarGroup, - SidebarGroupContent, - SidebarGroupLabel, - useSidebar, -} from "@/components/ui/sidebar"; - -interface PageUsageDisplayProps { - pagesUsed: number; - pagesLimit: number; -} - -export function PageUsageDisplay({ pagesUsed, pagesLimit }: PageUsageDisplayProps) { - const { state } = useSidebar(); - const usagePercentage = (pagesUsed / pagesLimit) * 100; - const isCollapsed = state === "collapsed"; - - return ( - - - Page Usage - - -
- {isCollapsed ? ( - // Show only a compact progress indicator when collapsed -
- -
- ) : ( - // Show full details when expanded - <> -
- - {pagesUsed.toLocaleString()} / {pagesLimit.toLocaleString()} pages - - {usagePercentage.toFixed(0)}% -
- - - - Contact to increase limits - - - )} -
-
-
- ); -} From d689a8728aa351cefe7686f3fd0ed82f57abef04 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Thu, 8 Jan 2026 19:11:26 +0200 Subject: [PATCH 11/28] fix: query key caching for threads --- surfsense_web/components/onboarding-tour.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/surfsense_web/components/onboarding-tour.tsx b/surfsense_web/components/onboarding-tour.tsx index 958bb43b0..717a27607 100644 --- a/surfsense_web/components/onboarding-tour.tsx +++ b/surfsense_web/components/onboarding-tour.tsx @@ -407,7 +407,7 @@ export function OnboardingTour() { // Fetch threads data const { data: threadsData } = useQuery({ - queryKey: ["threads", searchSpaceId], + queryKey: ["threads", searchSpaceId, { limit: 1 }], queryFn: () => fetchThreads(Number(searchSpaceId), 1), // Only need to check if any exist enabled: !!searchSpaceId, }); From 4aeb05e2e529fdfb6a21fcfea14c973c94047c9b Mon Sep 17 00:00:00 2001 From: Anish Sarkar <104695310+AnishSarkar22@users.noreply.github.com> Date: Fri, 9 Jan 2026 13:20:12 +0530 Subject: [PATCH 12/28] feat: allow future dates for Google Calendar and Luma connectors - Updated date handling in indexing functions to permit future dates for Google Calendar and Luma connectors. - Enhanced UI components to support future date selection, including a new button for selecting the next 30 days. - Adjusted documentation and descriptions to clarify date range options for users. 
--- .../routes/search_source_connectors_routes.py | 16 +++++++- .../google_calendar_indexer.py | 27 ++++++------- .../tasks/connector_indexers/luma_indexer.py | 27 ++++++------- .../components/date-range-selector.tsx | 40 +++++++++++++++---- .../components/luma-connect-form.tsx | 1 + .../views/connector-edit-view.tsx | 4 ++ .../views/indexing-configuration-view.tsx | 4 ++ 7 files changed, 80 insertions(+), 39 deletions(-) diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py index d6fdedd7c..c4b438cd7 100644 --- a/surfsense_backend/app/routes/search_source_connectors_routes.py +++ b/surfsense_backend/app/routes/search_source_connectors_routes.py @@ -541,7 +541,7 @@ async def index_connector_content( ), end_date: str = Query( None, - description="End date for indexing (YYYY-MM-DD format). If not provided, uses today's date", + description="End date for indexing (YYYY-MM-DD format). If not provided, uses today's date. 
For calendar connectors (Google Calendar, Luma), future dates can be selected to index upcoming events.", ), drive_items: GoogleDriveIndexRequest | None = Body( None, @@ -614,7 +614,19 @@ async def index_connector_content( else: indexing_from = start_date - indexing_to = end_date if end_date else today_str + # For calendar connectors, default to today but allow future dates if explicitly provided + if connector.connector_type in [ + SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR, + SearchSourceConnectorType.LUMA_CONNECTOR, + ]: + # Default to today if no end_date provided (users can manually select future dates) + if end_date is None: + indexing_to = today_str + else: + indexing_to = end_date + else: + # For non-calendar connectors, cap at today + indexing_to = end_date if end_date else today_str if connector.connector_type == SearchSourceConnectorType.SLACK_CONNECTOR: from app.tasks.celery_tasks.connector_tasks import ( diff --git a/surfsense_backend/app/tasks/connector_indexers/google_calendar_indexer.py b/surfsense_backend/app/tasks/connector_indexers/google_calendar_indexer.py index 499f01d66..b8c0e564d 100644 --- a/surfsense_backend/app/tasks/connector_indexers/google_calendar_indexer.py +++ b/surfsense_backend/app/tasks/connector_indexers/google_calendar_indexer.py @@ -45,8 +45,9 @@ async def index_google_calendar_events( connector_id: ID of the Google Calendar connector search_space_id: ID of the search space to store documents in user_id: User ID - start_date: Start date for indexing (YYYY-MM-DD format) - end_date: End date for indexing (YYYY-MM-DD format) + start_date: Start date for indexing (YYYY-MM-DD format). Can be in the past or future. + end_date: End date for indexing (YYYY-MM-DD format). Can be in the future to index upcoming events. + Defaults to today if not provided. 
update_last_indexed: Whether to update the last_indexed_at timestamp (default: True) Returns: @@ -165,8 +166,10 @@ async def index_google_calendar_events( end_date = None # Calculate date range + # For calendar connectors, allow future dates to index upcoming events if start_date is None or end_date is None: # Fall back to calculating dates based on last_indexed_at + # Default to today (users can manually select future dates if needed) calculated_end_date = datetime.now() # Use last_indexed_at as start date if available, otherwise use 30 days ago @@ -178,19 +181,13 @@ async def index_google_calendar_events( else connector.last_indexed_at ) - # Check if last_indexed_at is in the future or after end_date - if last_indexed_naive > calculated_end_date: - logger.warning( - f"Last indexed date ({last_indexed_naive.strftime('%Y-%m-%d')}) is in the future. Using 30 days ago instead." - ) - calculated_start_date = calculated_end_date - timedelta(days=30) - else: - calculated_start_date = last_indexed_naive - logger.info( - f"Using last_indexed_at ({calculated_start_date.strftime('%Y-%m-%d')}) as start date" - ) + # Allow future dates - use last_indexed_at as start date + calculated_start_date = last_indexed_naive + logger.info( + f"Using last_indexed_at ({calculated_start_date.strftime('%Y-%m-%d')}) as start date" + ) else: - calculated_start_date = calculated_end_date - timedelta( + calculated_start_date = datetime.now() - timedelta( days=30 ) # Use 30 days as default for calendar events logger.info( @@ -205,7 +202,7 @@ async def index_google_calendar_events( end_date if end_date else calculated_end_date.strftime("%Y-%m-%d") ) else: - # Use provided dates + # Use provided dates (including future dates) start_date_str = start_date end_date_str = end_date diff --git a/surfsense_backend/app/tasks/connector_indexers/luma_indexer.py b/surfsense_backend/app/tasks/connector_indexers/luma_indexer.py index 4d5ddc47c..91f81ac20 100644 --- 
a/surfsense_backend/app/tasks/connector_indexers/luma_indexer.py +++ b/surfsense_backend/app/tasks/connector_indexers/luma_indexer.py @@ -45,8 +45,9 @@ async def index_luma_events( connector_id: ID of the Luma connector search_space_id: ID of the search space to store documents in user_id: User ID - start_date: Start date for indexing (YYYY-MM-DD format) - end_date: End date for indexing (YYYY-MM-DD format) + start_date: Start date for indexing (YYYY-MM-DD format). Can be in the past or future. + end_date: End date for indexing (YYYY-MM-DD format). Can be in the future to index upcoming events. + Defaults to today if not provided. update_last_indexed: Whether to update the last_indexed_at timestamp (default: True) Returns: @@ -116,8 +117,10 @@ async def index_luma_events( luma_client = LumaConnector(api_key=api_key) # Calculate date range + # For calendar connectors, allow future dates to index upcoming events if start_date is None or end_date is None: # Fall back to calculating dates based on last_indexed_at + # Default to today (users can manually select future dates if needed) calculated_end_date = datetime.now() # Use last_indexed_at as start date if available, otherwise use 30 days ago @@ -129,19 +132,13 @@ async def index_luma_events( else connector.last_indexed_at ) - # Check if last_indexed_at is in the future or after end_date - if last_indexed_naive > calculated_end_date: - logger.warning( - f"Last indexed date ({last_indexed_naive.strftime('%Y-%m-%d')}) is in the future. Using 30 days ago instead." 
- ) - calculated_start_date = calculated_end_date - timedelta(days=30) - else: - calculated_start_date = last_indexed_naive - logger.info( - f"Using last_indexed_at ({calculated_start_date.strftime('%Y-%m-%d')}) as start date" - ) + # Allow future dates - use last_indexed_at as start date + calculated_start_date = last_indexed_naive + logger.info( + f"Using last_indexed_at ({calculated_start_date.strftime('%Y-%m-%d')}) as start date" + ) else: - calculated_start_date = calculated_end_date - timedelta(days=30) + calculated_start_date = datetime.now() - timedelta(days=30) logger.info( f"No last_indexed_at found, using {calculated_start_date.strftime('%Y-%m-%d')} (30 days ago) as start date" ) @@ -154,7 +151,7 @@ async def index_luma_events( end_date if end_date else calculated_end_date.strftime("%Y-%m-%d") ) else: - # Use provided dates + # Use provided dates (including future dates) start_date_str = start_date end_date_str = end_date diff --git a/surfsense_web/components/assistant-ui/connector-popup/components/date-range-selector.tsx b/surfsense_web/components/assistant-ui/connector-popup/components/date-range-selector.tsx index bbb2ea482..322cf7cc1 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/components/date-range-selector.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/components/date-range-selector.tsx @@ -1,6 +1,6 @@ "use client"; -import { format, subDays, subYears } from "date-fns"; +import { addDays, format, subDays, subYears } from "date-fns"; import { Calendar as CalendarIcon } from "lucide-react"; import type { FC } from "react"; import { Button } from "@/components/ui/button"; @@ -14,6 +14,7 @@ interface DateRangeSelectorProps { endDate: Date | undefined; onStartDateChange: (date: Date | undefined) => void; onEndDateChange: (date: Date | undefined) => void; + allowFutureDates?: boolean; // Allow future dates for calendar connectors } export const DateRangeSelector: FC = ({ @@ -21,6 +22,7 @@ export const 
DateRangeSelector: FC = ({ endDate, onStartDateChange, onEndDateChange, + allowFutureDates = false, }) => { const handleLast30Days = () => { const today = new Date(); @@ -28,6 +30,12 @@ export const DateRangeSelector: FC = ({ onEndDateChange(today); }; + const handleNext30Days = () => { + const today = new Date(); + onStartDateChange(today); + onEndDateChange(addDays(today, 30)); + }; + const handleLastYear = () => { const today = new Date(); onStartDateChange(subYears(today, 1)); @@ -43,8 +51,9 @@ export const DateRangeSelector: FC = ({

Select Date Range

- Choose how far back you want to sync your data. You can always re-index later with different - dates. + {allowFutureDates + ? "Choose the date range to sync your data. You can select future dates to index upcoming events." + : "Choose how far back you want to sync your data. You can always re-index later with different dates."}

@@ -72,7 +81,7 @@ export const DateRangeSelector: FC = ({ mode="single" selected={startDate} onSelect={onStartDateChange} - disabled={(date) => date > new Date()} + disabled={allowFutureDates ? false : (date) => date > new Date()} /> @@ -93,8 +102,10 @@ export const DateRangeSelector: FC = ({ !endDate && "text-muted-foreground" )} > - - {endDate ? format(endDate, "PPP") : "Default (Today)"} + + {endDate + ? format(endDate, "PPP") + : "Default (Today)"} @@ -102,7 +113,11 @@ export const DateRangeSelector: FC = ({ mode="single" selected={endDate} onSelect={onEndDateChange} - disabled={(date) => date > new Date() || (startDate ? date < startDate : false)} + disabled={ + allowFutureDates + ? (date) => (startDate ? date < startDate : false) + : (date) => date > new Date() || (startDate ? date < startDate : false) + } /> @@ -129,6 +144,17 @@ export const DateRangeSelector: FC = ({ > Last 30 Days + {allowFutureDates && ( + + )} -
-
- {getConnectorIcon(connectorType, "size-5")} -
-
-

{connectorTitle} Accounts

-

- {typeConnectors.length} connected account{typeConnectors.length !== 1 ? "s" : ""} -

-
+
+ {/* Back button */} + + + {/* Connector header */} +
+
+
+ {getConnectorIcon(connectorType, "size-7")} +
+
+

+ {connectorTitle} +

+

+ Manage your connector settings and sync configuration +

{/* Add Account Button with dashed border */} @@ -100,7 +103,7 @@ export const ConnectorAccountsListView: FC = ({ onClick={onAddAccount} disabled={isConnecting} className={cn( - "flex items-center gap-2 px-3 py-2 rounded-lg mr-4 border-2 border-dashed border-border/70 text-left transition-all duration-200", + "flex items-center gap-2 px-3 py-2 rounded-lg border-2 border-dashed border-border/70 text-left transition-all duration-200 shrink-0 self-center sm:self-auto sm:w-auto", "border-primary/50 hover:bg-primary/5", isConnecting && "opacity-50 cursor-not-allowed" )} @@ -120,7 +123,7 @@ export const ConnectorAccountsListView: FC = ({
{/* Content */} -
+
{/* Connected Accounts Grid */}
{typeConnectors.map((connector) => { From a099bcf5fb35ae9094ebb0090267b14c4b016f89 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Fri, 9 Jan 2026 14:47:00 +0200 Subject: [PATCH 16/28] refactor: remove frontend auto-create search space logic - Removed auto-creation of default search space from frontend dashboard - Frontend now only handles auto-redirect for users with exactly 1 search space - All default search space creation should be handled by backend on_after_register --- surfsense_web/app/dashboard/page.tsx | 54 ++++++---------------------- 1 file changed, 10 insertions(+), 44 deletions(-) diff --git a/surfsense_web/app/dashboard/page.tsx b/surfsense_web/app/dashboard/page.tsx index fbf567cba..951e17a8c 100644 --- a/surfsense_web/app/dashboard/page.tsx +++ b/surfsense_web/app/dashboard/page.tsx @@ -7,11 +7,8 @@ import Image from "next/image"; import Link from "next/link"; import { useRouter } from "next/navigation"; import { useTranslations } from "next-intl"; -import { useEffect, useRef, useState } from "react"; -import { - createSearchSpaceMutationAtom, - deleteSearchSpaceMutationAtom, -} from "@/atoms/search-spaces/search-space-mutation.atoms"; +import { useEffect } from "react"; +import { deleteSearchSpaceMutationAtom } from "@/atoms/search-spaces/search-space-mutation.atoms"; import { searchSpacesAtom } from "@/atoms/search-spaces/search-space-query.atoms"; import { currentUserAtom } from "@/atoms/user/user-query.atoms"; import { Logo } from "@/components/Logo"; @@ -135,10 +132,6 @@ const DashboardPage = () => { const tCommon = useTranslations("common"); const router = useRouter(); - // State for auto-creating search space - const [isAutoCreating, setIsAutoCreating] = useState(false); - const hasAttemptedAutoCreate = useRef(false); - // Animation variants const containerVariants: Variants = { hidden: { opacity: 0 }, @@ -170,44 +163,17 @@ const DashboardPage = () => { refetch: refreshSearchSpaces, } = useAtomValue(searchSpacesAtom); const { mutateAsync: 
deleteSearchSpace } = useAtomValue(deleteSearchSpaceMutationAtom); - const { mutateAsync: createSearchSpace } = useAtomValue(createSearchSpaceMutationAtom); const { data: user, isPending: isLoadingUser, error: userError } = useAtomValue(currentUserAtom); - // Auto-redirect to chat for users with exactly 1 search space, or auto-create if none + // Auto-redirect to chat for users with exactly 1 search space useEffect(() => { - const handleAutoRedirect = async () => { - - if (loading || hasAttemptedAutoCreate.current) return; + if (loading) return; - - if (searchSpaces.length === 1) { - router.replace(`/dashboard/${searchSpaces[0].id}/new-chat`); - return; - } - - if (searchSpaces.length > 1) { - return; - } - - - hasAttemptedAutoCreate.current = true; - setIsAutoCreating(true); - - try { - const newSearchSpace = await createSearchSpace({ - name: "My Search Space", - description: "Your personal search space", - }); - router.replace(`/dashboard/${newSearchSpace.id}/new-chat`); - } catch (err) { - console.error("Failed to auto-create search space:", err); - setIsAutoCreating(false); - } - }; - - handleAutoRedirect(); - }, [loading, searchSpaces, router, createSearchSpace]); + if (searchSpaces.length === 1) { + router.replace(`/dashboard/${searchSpaces[0].id}/new-chat`); + } + }, [loading, searchSpaces, router]); // Create user object for UserDropdown const customUser = { @@ -218,8 +184,8 @@ const DashboardPage = () => { avatar: "/icon-128.png", // Default avatar }; - // Show loading while loading, auto-redirecting (single search space), or auto-creating - if (loading || isAutoCreating || (searchSpaces.length === 1 && !error)) return ; + // Show loading while loading or auto-redirecting (single search space) + if (loading || (searchSpaces.length === 1 && !error)) return ; if (error) return ; const handleDeleteSearchSpace = async (id: number) => { From 61560e5d4f85698c97efbadfbc8e73b3c23c8884 Mon Sep 17 00:00:00 2001 From: Anish Sarkar 
<104695310+AnishSarkar22@users.noreply.github.com> Date: Fri, 9 Jan 2026 18:22:54 +0530 Subject: [PATCH 17/28] chore: ran frontend linting --- surfsense_web/components/Logo.tsx | 8 +++++++- .../connector-popup/components/date-range-selector.tsx | 6 ++---- .../connector-popup/hooks/use-connector-dialog.ts | 2 +- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/surfsense_web/components/Logo.tsx b/surfsense_web/components/Logo.tsx index 79799942b..58f8d1c9f 100644 --- a/surfsense_web/components/Logo.tsx +++ b/surfsense_web/components/Logo.tsx @@ -7,7 +7,13 @@ import { cn } from "@/lib/utils"; export const Logo = ({ className }: { className?: string }) => { return ( - logo + logo ); }; diff --git a/surfsense_web/components/assistant-ui/connector-popup/components/date-range-selector.tsx b/surfsense_web/components/assistant-ui/connector-popup/components/date-range-selector.tsx index 322cf7cc1..48dc2a6c2 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/components/date-range-selector.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/components/date-range-selector.tsx @@ -102,10 +102,8 @@ export const DateRangeSelector: FC = ({ !endDate && "text-muted-foreground" )} > - - {endDate - ? format(endDate, "PPP") - : "Default (Today)"} + + {endDate ? 
format(endDate, "PPP") : "Default (Today)"} diff --git a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts index fcce90e9c..99bd11f53 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts @@ -79,7 +79,7 @@ export const useConnectorDialog = () => { connectorType: string; connectorTitle: string; } | null>(null); - + // Track if we came from accounts list when entering edit mode const [cameFromAccountsList, setCameFromAccountsList] = useState<{ connectorType: string; From 101dd5745c0a46bcb6a344ebaad9117fa2587bc5 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Fri, 9 Jan 2026 15:00:15 +0200 Subject: [PATCH 18/28] merge dev --- surfsense_web/components/Logo.tsx | 8 +++++++- surfsense_web/components/TokenHandler.tsx | 11 +++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/surfsense_web/components/Logo.tsx b/surfsense_web/components/Logo.tsx index 79799942b..58f8d1c9f 100644 --- a/surfsense_web/components/Logo.tsx +++ b/surfsense_web/components/Logo.tsx @@ -7,7 +7,13 @@ import { cn } from "@/lib/utils"; export const Logo = ({ className }: { className?: string }) => { return ( - logo + logo ); }; diff --git a/surfsense_web/components/TokenHandler.tsx b/surfsense_web/components/TokenHandler.tsx index a190fe73f..24260f485 100644 --- a/surfsense_web/components/TokenHandler.tsx +++ b/surfsense_web/components/TokenHandler.tsx @@ -3,6 +3,7 @@ import { useSearchParams } from "next/navigation"; import { useEffect } from "react"; import { getAndClearRedirectPath, setBearerToken } from "@/lib/auth-utils"; +import { trackLoginSuccess } from "@/lib/posthog/events"; interface TokenHandlerProps { redirectPath?: string; // Default path to redirect after storing token (if no saved path) @@ -35,6 +36,16 @@ const 
TokenHandler = ({ if (token) { try { + // Track login success for OAuth flows (e.g., Google) + // Local login already tracks success before redirecting here + const alreadyTracked = sessionStorage.getItem("login_success_tracked"); + if (!alreadyTracked) { + // This is an OAuth flow (Google login) - track success + trackLoginSuccess("google"); + } + // Clear the flag for future logins + sessionStorage.removeItem("login_success_tracked"); + // Store token in localStorage using both methods for compatibility localStorage.setItem(storageKey, token); setBearerToken(token); From 532f0039d59c3306ccb5a64fded5a021d4322506 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Fri, 9 Jan 2026 15:01:33 +0200 Subject: [PATCH 19/28] merge dev --- surfsense_web/components/TokenHandler.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/surfsense_web/components/TokenHandler.tsx b/surfsense_web/components/TokenHandler.tsx index 24260f485..b4ca36298 100644 --- a/surfsense_web/components/TokenHandler.tsx +++ b/surfsense_web/components/TokenHandler.tsx @@ -45,7 +45,7 @@ const TokenHandler = ({ } // Clear the flag for future logins sessionStorage.removeItem("login_success_tracked"); - + // Store token in localStorage using both methods for compatibility localStorage.setItem(storageKey, token); setBearerToken(token); @@ -56,7 +56,7 @@ const TokenHandler = ({ // Use the saved path if available, otherwise use the default redirectPath const finalRedirectPath = savedRedirectPath || redirectPath; - // Use hard navigation to clear all React/jotai state from previous session + // Redirect to the appropriate path window.location.href = finalRedirectPath; } catch (error) { console.error("Error storing token in localStorage:", error); From 786fd63e5bee4cbcce04af9539cb3b595f07275f Mon Sep 17 00:00:00 2001 From: Manoj Aggarwal <64570816+manojag115@users.noreply.github.com> Date: Fri, 9 Jan 2026 12:33:26 -0800 Subject: [PATCH 20/28] Revert "Add Microsoft Teams Connector" --- 
surfsense_backend/.env.example | 5 - .../versions/59_add_teams_connector_enums.py | 160 ------ .../agents/new_chat/tools/knowledge_base.py | 2 - surfsense_backend/app/config/__init__.py | 5 - .../app/connectors/teams_connector.py | 338 ------------- .../app/connectors/teams_history.py | 254 ---------- surfsense_backend/app/db.py | 2 - surfsense_backend/app/routes/__init__.py | 2 - .../routes/search_source_connectors_routes.py | 72 --- .../app/routes/teams_add_connector_route.py | 474 ------------------ .../app/schemas/teams_auth_credentials.py | 79 --- .../app/services/connector_service.py | 74 --- .../app/tasks/celery_tasks/connector_tasks.py | 43 -- .../tasks/connector_indexers/teams_indexer.py | 473 ----------------- .../app/utils/connector_naming.py | 4 - .../app/utils/periodic_scheduler.py | 1 - .../components/teams-config.tsx | 29 -- .../connector-configs/index.tsx | 3 - .../constants/connector-constants.ts | 7 - .../utils/connector-document-mapping.ts | 1 - surfsense_web/contracts/enums/connector.ts | 1 - .../contracts/enums/connectorIcons.tsx | 2 - .../contracts/types/connector.types.ts | 1 - 23 files changed, 2032 deletions(-) delete mode 100644 surfsense_backend/alembic/versions/59_add_teams_connector_enums.py delete mode 100644 surfsense_backend/app/connectors/teams_connector.py delete mode 100644 surfsense_backend/app/connectors/teams_history.py delete mode 100644 surfsense_backend/app/routes/teams_add_connector_route.py delete mode 100644 surfsense_backend/app/schemas/teams_auth_credentials.py delete mode 100644 surfsense_backend/app/tasks/connector_indexers/teams_indexer.py delete mode 100644 surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx diff --git a/surfsense_backend/.env.example b/surfsense_backend/.env.example index 6ac7c55de..2c2fec48b 100644 --- a/surfsense_backend/.env.example +++ b/surfsense_backend/.env.example @@ -76,11 +76,6 @@ SLACK_CLIENT_ID=your_slack_client_id_here 
SLACK_CLIENT_SECRET=your_slack_client_secret_here SLACK_REDIRECT_URI=http://localhost:8000/api/v1/auth/slack/connector/callback -# Teams OAuth Configuration -TEAMS_CLIENT_ID=your_teams_client_id_here -TEAMS_CLIENT_SECRET=your_teams_client_secret_here -TEAMS_REDIRECT_URI=http://localhost:8000/api/v1/auth/teams/connector/callback - # Embedding Model # Examples: # # Get sentence transformers embeddings diff --git a/surfsense_backend/alembic/versions/59_add_teams_connector_enums.py b/surfsense_backend/alembic/versions/59_add_teams_connector_enums.py deleted file mode 100644 index f13fbe9e5..000000000 --- a/surfsense_backend/alembic/versions/59_add_teams_connector_enums.py +++ /dev/null @@ -1,160 +0,0 @@ -"""Add TEAMS_CONNECTOR to SearchSourceConnectorType and DocumentType enums - -Revision ID: 59 -Revises: 58 -""" - -from collections.abc import Sequence - -from alembic import op - -# revision identifiers, used by Alembic. -revision: str = "59" -down_revision: str | None = "58" -branch_labels: str | Sequence[str] | None = None -depends_on: str | Sequence[str] | None = None - -# Define the ENUM type name and the new value -CONNECTOR_ENUM = "searchsourceconnectortype" -CONNECTOR_NEW_VALUE = "TEAMS_CONNECTOR" -DOCUMENT_ENUM = "documenttype" -DOCUMENT_NEW_VALUE = "TEAMS_CONNECTOR" - - -def upgrade() -> None: - """Upgrade schema - add TEAMS_CONNECTOR to connector and document enum safely.""" - # Add TEAMS_CONNECTOR to searchsourceconnectortype only if not exists - op.execute( - f""" - DO $$ - BEGIN - IF NOT EXISTS ( - SELECT 1 FROM pg_enum - WHERE enumlabel = '{CONNECTOR_NEW_VALUE}' - AND enumtypid = (SELECT oid FROM pg_type WHERE typname = '{CONNECTOR_ENUM}') - ) THEN - ALTER TYPE {CONNECTOR_ENUM} ADD VALUE '{CONNECTOR_NEW_VALUE}'; - END IF; - END$$; - """ - ) - - # Add TEAMS_CONNECTOR to documenttype only if not exists - op.execute( - f""" - DO $$ - BEGIN - IF NOT EXISTS ( - SELECT 1 FROM pg_enum - WHERE enumlabel = '{DOCUMENT_NEW_VALUE}' - AND enumtypid = (SELECT oid FROM 
pg_type WHERE typname = '{DOCUMENT_ENUM}') - ) THEN - ALTER TYPE {DOCUMENT_ENUM} ADD VALUE '{DOCUMENT_NEW_VALUE}'; - END IF; - END$$; - """ - ) - - -def downgrade() -> None: - """Downgrade schema - remove TEAMS_CONNECTOR from connector and document enum.""" - - # Old enum name - old_connector_enum_name = f"{CONNECTOR_ENUM}_old" - old_document_enum_name = f"{DOCUMENT_ENUM}_old" - - # All connector values except TEAMS_CONNECTOR - old_connector_values = ( - "SERPER_API", - "TAVILY_API", - "SEARXNG_API", - "LINKUP_API", - "BAIDU_SEARCH_API", - "SLACK_CONNECTOR", - "NOTION_CONNECTOR", - "GITHUB_CONNECTOR", - "LINEAR_CONNECTOR", - "DISCORD_CONNECTOR", - "JIRA_CONNECTOR", - "CONFLUENCE_CONNECTOR", - "CLICKUP_CONNECTOR", - "GOOGLE_CALENDAR_CONNECTOR", - "GOOGLE_GMAIL_CONNECTOR", - "GOOGLE_DRIVE_CONNECTOR", - "AIRTABLE_CONNECTOR", - "LUMA_CONNECTOR", - "ELASTICSEARCH_CONNECTOR", - "WEBCRAWLER_CONNECTOR", - ) - - # All document values except TEAMS_CONNECTOR - old_document_values = ( - "EXTENSION", - "CRAWLED_URL", - "FILE", - "SLACK_CONNECTOR", - "NOTION_CONNECTOR", - "YOUTUBE_VIDEO", - "GITHUB_CONNECTOR", - "LINEAR_CONNECTOR", - "DISCORD_CONNECTOR", - "JIRA_CONNECTOR", - "CONFLUENCE_CONNECTOR", - "CLICKUP_CONNECTOR", - "GOOGLE_CALENDAR_CONNECTOR", - "GOOGLE_GMAIL_CONNECTOR", - "GOOGLE_DRIVE_FILE", - "AIRTABLE_CONNECTOR", - "LUMA_CONNECTOR", - "ELASTICSEARCH_CONNECTOR", - "BOOKSTACK_CONNECTOR", - "CIRCLEBACK", - "NOTE", - ) - - old_connector_values_sql = ", ".join([f"'{v}'" for v in old_connector_values]) - old_document_values_sql = ", ".join([f"'{v}'" for v in old_document_values]) - - # Table and column names - connector_table_name = "search_source_connectors" - connector_column_name = "connector_type" - document_table_name = "documents" - document_column_name = "document_type" - - # Connector Enum Downgrade Steps - # 1. Rename the current connector enum type - op.execute(f"ALTER TYPE {CONNECTOR_ENUM} RENAME TO {old_connector_enum_name}") - - # 2. 
Create the new connector enum type with the old values - op.execute(f"CREATE TYPE {CONNECTOR_ENUM} AS ENUM({old_connector_values_sql})") - - # 3. Alter the column to use the new connector enum type - op.execute( - f""" - ALTER TABLE {connector_table_name} - ALTER COLUMN {connector_column_name} TYPE {CONNECTOR_ENUM} - USING {connector_column_name}::text::{CONNECTOR_ENUM} - """ - ) - - # 4. Drop the old connector enum type - op.execute(f"DROP TYPE {old_connector_enum_name}") - - # Document Enum Downgrade Steps - # 1. Rename the current document enum type - op.execute(f"ALTER TYPE {DOCUMENT_ENUM} RENAME TO {old_document_enum_name}") - - # 2. Create the new document enum type with the old values - op.execute(f"CREATE TYPE {DOCUMENT_ENUM} AS ENUM({old_document_values_sql})") - - # 3. Alter the column to use the new document enum type - op.execute( - f""" - ALTER TABLE {document_table_name} - ALTER COLUMN {document_column_name} TYPE {DOCUMENT_ENUM} - USING {document_column_name}::text::{DOCUMENT_ENUM} - """ - ) - - # 4. 
Drop the old document enum type - op.execute(f"DROP TYPE {old_document_enum_name}") diff --git a/surfsense_backend/app/agents/new_chat/tools/knowledge_base.py b/surfsense_backend/app/agents/new_chat/tools/knowledge_base.py index e91d865fa..a3cdad359 100644 --- a/surfsense_backend/app/agents/new_chat/tools/knowledge_base.py +++ b/surfsense_backend/app/agents/new_chat/tools/knowledge_base.py @@ -26,7 +26,6 @@ _ALL_CONNECTORS: list[str] = [ "EXTENSION", "FILE", "SLACK_CONNECTOR", - "TEAMS_CONNECTOR", "NOTION_CONNECTOR", "YOUTUBE_VIDEO", "GITHUB_CONNECTOR", @@ -574,7 +573,6 @@ def create_search_knowledge_base_tool( - FILE: "User-uploaded documents (PDFs, Word, etc.)" (personal files) - NOTE: "SurfSense Notes" (notes created inside SurfSense) - SLACK_CONNECTOR: "Slack conversations and shared content" (personal workspace communications) - - TEAMS_CONNECTOR: "Microsoft Teams messages and conversations" (personal Teams communications) - NOTION_CONNECTOR: "Notion workspace pages and databases" (personal knowledge management) - YOUTUBE_VIDEO: "YouTube video transcripts and metadata" (personally saved videos) - GITHUB_CONNECTOR: "GitHub repository content and issues" (personal repositories and interactions) diff --git a/surfsense_backend/app/config/__init__.py b/surfsense_backend/app/config/__init__.py index 448e2c253..e76e69e94 100644 --- a/surfsense_backend/app/config/__init__.py +++ b/surfsense_backend/app/config/__init__.py @@ -117,11 +117,6 @@ class Config: DISCORD_REDIRECT_URI = os.getenv("DISCORD_REDIRECT_URI") DISCORD_BOT_TOKEN = os.getenv("DISCORD_BOT_TOKEN") - # Microsoft Teams OAuth - TEAMS_CLIENT_ID = os.getenv("TEAMS_CLIENT_ID") - TEAMS_CLIENT_SECRET = os.getenv("TEAMS_CLIENT_SECRET") - TEAMS_REDIRECT_URI = os.getenv("TEAMS_REDIRECT_URI") - # ClickUp OAuth CLICKUP_CLIENT_ID = os.getenv("CLICKUP_CLIENT_ID") CLICKUP_CLIENT_SECRET = os.getenv("CLICKUP_CLIENT_SECRET") diff --git a/surfsense_backend/app/connectors/teams_connector.py 
b/surfsense_backend/app/connectors/teams_connector.py deleted file mode 100644 index 29c2db127..000000000 --- a/surfsense_backend/app/connectors/teams_connector.py +++ /dev/null @@ -1,338 +0,0 @@ -""" -Microsoft Teams Connector - -A module for interacting with Microsoft Teams Graph API to retrieve teams, channels, and message history. - -Supports OAuth-based authentication with token refresh. -""" - -import logging -from datetime import datetime, timezone -from typing import Any - -import httpx -from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.future import select - -from app.config import config -from app.db import SearchSourceConnector -from app.routes.teams_add_connector_route import refresh_teams_token -from app.schemas.teams_auth_credentials import TeamsAuthCredentialsBase -from app.utils.oauth_security import TokenEncryption - -logger = logging.getLogger(__name__) - - -class TeamsConnector: - """Class for retrieving teams, channels, and message history from Microsoft Teams.""" - - # Microsoft Graph API endpoints - GRAPH_API_BASE = "https://graph.microsoft.com/v1.0" - - def __init__( - self, - access_token: str | None = None, - session: AsyncSession | None = None, - connector_id: int | None = None, - credentials: TeamsAuthCredentialsBase | None = None, - ): - """ - Initialize the TeamsConnector with an access token or OAuth credentials. - - Args: - access_token: Microsoft Graph API access token (optional, for backward compatibility) - session: Database session for token refresh (optional) - connector_id: Connector ID for token refresh (optional) - credentials: Teams OAuth credentials (optional, will be loaded from DB if not provided) - """ - self._session = session - self._connector_id = connector_id - self._credentials = credentials - self._access_token = access_token - - async def _get_valid_token(self) -> str: - """ - Get valid Microsoft Teams access token, refreshing if needed. 
- - Returns: - Valid access token - - Raises: - ValueError: If credentials are missing or invalid - Exception: If token refresh fails - """ - # If we have a direct token (backward compatibility), use it - if ( - self._access_token - and self._session is None - and self._connector_id is None - and self._credentials is None - ): - return self._access_token - - # Load credentials from DB if not provided - if self._credentials is None: - if not self._session or not self._connector_id: - raise ValueError( - "Cannot load credentials: session and connector_id required" - ) - - result = await self._session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == self._connector_id - ) - ) - connector = result.scalars().first() - - if not connector: - raise ValueError(f"Connector {self._connector_id} not found") - - config_data = connector.config.copy() - - # Decrypt credentials if they are encrypted - token_encrypted = config_data.get("_token_encrypted", False) - if token_encrypted and config.SECRET_KEY: - try: - token_encryption = TokenEncryption(config.SECRET_KEY) - - # Decrypt sensitive fields - if config_data.get("access_token"): - config_data["access_token"] = token_encryption.decrypt_token( - config_data["access_token"] - ) - if config_data.get("refresh_token"): - config_data["refresh_token"] = token_encryption.decrypt_token( - config_data["refresh_token"] - ) - - logger.info( - "Decrypted Teams credentials for connector %s", - self._connector_id, - ) - except Exception as e: - logger.error( - "Failed to decrypt Teams credentials for connector %s: %s", - self._connector_id, - str(e), - ) - raise ValueError( - f"Failed to decrypt Teams credentials: {e!s}" - ) from e - - try: - self._credentials = TeamsAuthCredentialsBase.from_dict(config_data) - except Exception as e: - raise ValueError(f"Invalid Teams credentials: {e!s}") from e - - # Check if token is expired and refreshable - if self._credentials.is_expired and 
self._credentials.is_refreshable: - try: - logger.info( - "Teams token expired for connector %s, refreshing...", - self._connector_id, - ) - - # Get connector for refresh - result = await self._session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.id == self._connector_id - ) - ) - connector = result.scalars().first() - - if not connector: - raise RuntimeError( - f"Connector {self._connector_id} not found; cannot refresh token." - ) - - # Refresh token - connector = await refresh_teams_token(self._session, connector) - - # Reload credentials after refresh - config_data = connector.config.copy() - token_encrypted = config_data.get("_token_encrypted", False) - if token_encrypted and config.SECRET_KEY: - token_encryption = TokenEncryption(config.SECRET_KEY) - if config_data.get("access_token"): - config_data["access_token"] = token_encryption.decrypt_token( - config_data["access_token"] - ) - if config_data.get("refresh_token"): - config_data["refresh_token"] = token_encryption.decrypt_token( - config_data["refresh_token"] - ) - - self._credentials = TeamsAuthCredentialsBase.from_dict(config_data) - - logger.info( - "Successfully refreshed Teams token for connector %s", - self._connector_id, - ) - except Exception as e: - logger.error( - "Failed to refresh Teams token for connector %s: %s", - self._connector_id, - str(e), - ) - raise ValueError( - f"Failed to refresh Teams OAuth credentials: {e!s}" - ) from e - - return self._credentials.access_token - - async def get_joined_teams(self) -> list[dict[str, Any]]: - """ - Get list of all teams the user is a member of. - - Returns: - List of team objects with id, display_name, etc. 
- """ - access_token = await self._get_valid_token() - - async with httpx.AsyncClient() as client: - response = await client.get( - f"{self.GRAPH_API_BASE}/me/joinedTeams", - headers={"Authorization": f"Bearer {access_token}"}, - timeout=30.0, - ) - - if response.status_code != 200: - raise ValueError( - f"Failed to get joined teams: {response.status_code} - {response.text}" - ) - - data = response.json() - return data.get("value", []) - - async def get_team_channels(self, team_id: str) -> list[dict[str, Any]]: - """ - Get list of all channels in a team. - - Args: - team_id: The team ID - - Returns: - List of channel objects - """ - access_token = await self._get_valid_token() - - async with httpx.AsyncClient() as client: - response = await client.get( - f"{self.GRAPH_API_BASE}/teams/{team_id}/channels", - headers={"Authorization": f"Bearer {access_token}"}, - timeout=30.0, - ) - - if response.status_code != 200: - raise ValueError( - f"Failed to get channels for team {team_id}: {response.status_code} - {response.text}" - ) - - data = response.json() - return data.get("value", []) - - async def get_channel_messages( - self, - team_id: str, - channel_id: str, - start_date: datetime | None = None, - end_date: datetime | None = None, - ) -> list[dict[str, Any]]: - """ - Get messages from a specific channel with optional date filtering. 
- - Args: - team_id: The team ID - channel_id: The channel ID - start_date: Optional start date for filtering messages - end_date: Optional end date for filtering messages - - Returns: - List of message objects - """ - access_token = await self._get_valid_token() - - async with httpx.AsyncClient() as client: - url = f"{self.GRAPH_API_BASE}/teams/{team_id}/channels/{channel_id}/messages" - - # Note: The Graph API for channel messages doesn't support $filter parameter - # We fetch all messages and filter them client-side - response = await client.get( - url, - headers={"Authorization": f"Bearer {access_token}"}, - timeout=30.0, - ) - - if response.status_code != 200: - raise ValueError( - f"Failed to get messages from channel {channel_id}: {response.status_code} - {response.text}" - ) - - data = response.json() - messages = data.get("value", []) - - # Filter messages by date if needed (client-side filtering) - if start_date or end_date: - # Make sure comparison dates are timezone-aware (UTC) - if start_date and start_date.tzinfo is None: - start_date = start_date.replace(tzinfo=timezone.utc) - if end_date and end_date.tzinfo is None: - end_date = end_date.replace(tzinfo=timezone.utc) - - filtered_messages = [] - for message in messages: - created_at_str = message.get("createdDateTime") - if not created_at_str: - continue - - # Parse the ISO 8601 datetime string (already timezone-aware) - created_at = datetime.fromisoformat(created_at_str.replace('Z', '+00:00')) - - # Check if message is within date range - if start_date and created_at < start_date: - continue - if end_date and created_at > end_date: - continue - - filtered_messages.append(message) - - return filtered_messages - - return messages - - async def get_message_replies( - self, team_id: str, channel_id: str, message_id: str - ) -> list[dict[str, Any]]: - """ - Get replies to a specific message. 
- - Args: - team_id: The team ID - channel_id: The channel ID - message_id: The message ID - - Returns: - List of reply message objects - """ - access_token = await self._get_valid_token() - - async with httpx.AsyncClient() as client: - url = f"{self.GRAPH_API_BASE}/teams/{team_id}/channels/{channel_id}/messages/{message_id}/replies" - - response = await client.get( - url, - headers={"Authorization": f"Bearer {access_token}"}, - timeout=30.0, - ) - - if response.status_code != 200: - logger.warning( - "Failed to get replies for message %s: %s - %s", - message_id, - response.status_code, - response.text, - ) - return [] - - data = response.json() - return data.get("value", []) diff --git a/surfsense_backend/app/connectors/teams_history.py b/surfsense_backend/app/connectors/teams_history.py deleted file mode 100644 index 314ee6304..000000000 --- a/surfsense_backend/app/connectors/teams_history.py +++ /dev/null @@ -1,254 +0,0 @@ -""" -Microsoft Teams History Module - -A module for retrieving conversation history from Microsoft Teams channels. -Allows fetching team lists, channel lists, and message history with date range filtering. -""" - -import logging -from datetime import datetime -from typing import Any - -from sqlalchemy.ext.asyncio import AsyncSession - -from app.connectors.teams_connector import TeamsConnector -from app.schemas.teams_auth_credentials import TeamsAuthCredentialsBase - -logger = logging.getLogger(__name__) - - -class TeamsHistory: - """Class for retrieving conversation history from Microsoft Teams channels.""" - - def __init__( - self, - access_token: str | None = None, - session: AsyncSession | None = None, - connector_id: int | None = None, - credentials: TeamsAuthCredentialsBase | None = None, - ): - """ - Initialize the TeamsHistory class. 
- - Args: - access_token: Microsoft Graph API access token (optional, for backward compatibility) - session: Database session for token refresh (optional) - connector_id: Connector ID for token refresh (optional) - credentials: Teams OAuth credentials (optional, will be loaded from DB if not provided) - """ - self.connector = TeamsConnector( - access_token=access_token, - session=session, - connector_id=connector_id, - credentials=credentials, - ) - - async def get_all_teams(self) -> list[dict[str, Any]]: - """ - Get list of all teams the user has access to. - - Returns: - List of team objects containing team metadata. - """ - try: - teams = await self.connector.get_joined_teams() - logger.info("Retrieved %s teams", len(teams)) - return teams - except Exception as e: - logger.error("Error fetching teams: %s", str(e)) - raise - - async def get_channels_for_team(self, team_id: str) -> list[dict[str, Any]]: - """ - Get list of all channels in a specific team. - - Args: - team_id: The ID of the team - - Returns: - List of channel objects containing channel metadata. - """ - try: - channels = await self.connector.get_team_channels(team_id) - logger.info("Retrieved %s channels for team %s", len(channels), team_id) - return channels - except Exception as e: - logger.error("Error fetching channels for team %s: %s", team_id, str(e)) - raise - - async def get_messages_from_channel( - self, - team_id: str, - channel_id: str, - start_date: datetime | None = None, - end_date: datetime | None = None, - include_replies: bool = True, - ) -> list[dict[str, Any]]: - """ - Get messages from a specific channel with optional date filtering. - - Args: - team_id: The ID of the team - channel_id: The ID of the channel - start_date: Optional start date for filtering messages - end_date: Optional end date for filtering messages - include_replies: Whether to include reply messages (default: True) - - Returns: - List of message objects with content and metadata. 
- """ - try: - messages = await self.connector.get_channel_messages( - team_id, channel_id, start_date, end_date - ) - - logger.info( - "Retrieved %s messages from channel %s in team %s", - len(messages), - channel_id, - team_id, - ) - - # Fetch replies if requested - if include_replies: - all_messages = [] - for message in messages: - all_messages.append(message) - # Get replies for this message - try: - replies = await self.connector.get_message_replies( - team_id, channel_id, message.get("id") - ) - all_messages.extend(replies) - except Exception: - logger.warning( - "Failed to get replies for message %s", - message.get("id"), - exc_info=True, - ) - # Continue without replies for this message - - logger.info( - "Total messages including replies: %s for channel %s", - len(all_messages), - channel_id, - ) - return all_messages - - return messages - - except Exception as e: - logger.error( - "Error fetching messages from channel %s in team %s: %s", - channel_id, - team_id, - str(e), - ) - raise - - async def get_all_messages_from_team( - self, - team_id: str, - start_date: datetime | None = None, - end_date: datetime | None = None, - include_replies: bool = True, - ) -> dict[str, list[dict[str, Any]]]: - """ - Get all messages from all channels in a team. - - Args: - team_id: The ID of the team - start_date: Optional start date for filtering messages - end_date: Optional end date for filtering messages - include_replies: Whether to include reply messages (default: True) - - Returns: - Dictionary mapping channel IDs to lists of messages. 
- """ - try: - channels = await self.get_channels_for_team(team_id) - all_channel_messages = {} - - for channel in channels: - channel_id = channel.get("id") - channel_name = channel.get("displayName", "Unknown") - - try: - messages = await self.get_messages_from_channel( - team_id, channel_id, start_date, end_date, include_replies - ) - all_channel_messages[channel_id] = messages - logger.info( - "Fetched %s messages from channel '%s' (%s)", - len(messages), - channel_name, - channel_id, - ) - except Exception: - logger.error( - "Failed to fetch messages from channel '%s' (%s)", - channel_name, - channel_id, - exc_info=True, - ) - all_channel_messages[channel_id] = [] - - return all_channel_messages - - except Exception as e: - logger.error("Error fetching messages from team %s: %s", team_id, str(e)) - raise - - async def get_all_messages( - self, - start_date: datetime | None = None, - end_date: datetime | None = None, - include_replies: bool = True, - ) -> dict[str, dict[str, list[dict[str, Any]]]]: - """ - Get all messages from all teams and channels the user has access to. - - Args: - start_date: Optional start date for filtering messages - end_date: Optional end date for filtering messages - include_replies: Whether to include reply messages (default: True) - - Returns: - Nested dictionary: team_id -> channel_id -> list of messages. 
- """ - try: - teams = await self.get_all_teams() - all_messages = {} - - for team in teams: - team_id = team.get("id") - team_name = team.get("displayName", "Unknown") - - try: - team_messages = await self.get_all_messages_from_team( - team_id, start_date, end_date, include_replies - ) - all_messages[team_id] = team_messages - total_messages = sum( - len(messages) for messages in team_messages.values() - ) - logger.info( - "Fetched %s total messages from team '%s' (%s)", - total_messages, - team_name, - team_id, - ) - except Exception: - logger.error( - "Failed to fetch messages from team '%s' (%s)", - team_name, - team_id, - exc_info=True, - ) - all_messages[team_id] = {} - - return all_messages - - except Exception as e: - logger.error("Error fetching all messages: %s", str(e)) - raise diff --git a/surfsense_backend/app/db.py b/surfsense_backend/app/db.py index d54254f9c..fbd53bd06 100644 --- a/surfsense_backend/app/db.py +++ b/surfsense_backend/app/db.py @@ -36,7 +36,6 @@ class DocumentType(str, Enum): CRAWLED_URL = "CRAWLED_URL" FILE = "FILE" SLACK_CONNECTOR = "SLACK_CONNECTOR" - TEAMS_CONNECTOR = "TEAMS_CONNECTOR" NOTION_CONNECTOR = "NOTION_CONNECTOR" YOUTUBE_VIDEO = "YOUTUBE_VIDEO" GITHUB_CONNECTOR = "GITHUB_CONNECTOR" @@ -63,7 +62,6 @@ class SearchSourceConnectorType(str, Enum): LINKUP_API = "LINKUP_API" BAIDU_SEARCH_API = "BAIDU_SEARCH_API" # Baidu AI Search API for Chinese web search SLACK_CONNECTOR = "SLACK_CONNECTOR" - TEAMS_CONNECTOR = "TEAMS_CONNECTOR" NOTION_CONNECTOR = "NOTION_CONNECTOR" GITHUB_CONNECTOR = "GITHUB_CONNECTOR" LINEAR_CONNECTOR = "LINEAR_CONNECTOR" diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py index b4e94c732..47d540e7d 100644 --- a/surfsense_backend/app/routes/__init__.py +++ b/surfsense_backend/app/routes/__init__.py @@ -31,7 +31,6 @@ from .rbac_routes import router as rbac_router from .search_source_connectors_routes import router as search_source_connectors_router from 
.search_spaces_routes import router as search_spaces_router from .slack_add_connector_route import router as slack_add_connector_router -from .teams_add_connector_route import router as teams_add_connector_router router = APIRouter() @@ -51,7 +50,6 @@ router.include_router(linear_add_connector_router) router.include_router(luma_add_connector_router) router.include_router(notion_add_connector_router) router.include_router(slack_add_connector_router) -router.include_router(teams_add_connector_router) router.include_router(discord_add_connector_router) router.include_router(jira_add_connector_router) router.include_router(confluence_add_connector_router) diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py index c9831484d..58a50a6f8 100644 --- a/surfsense_backend/app/routes/search_source_connectors_routes.py +++ b/surfsense_backend/app/routes/search_source_connectors_routes.py @@ -558,7 +558,6 @@ async def index_connector_content( Currently supports: - SLACK_CONNECTOR: Indexes messages from all accessible Slack channels - - TEAMS_CONNECTOR: Indexes messages from all accessible Microsoft Teams channels - NOTION_CONNECTOR: Indexes pages from all accessible Notion pages - GITHUB_CONNECTOR: Indexes code and documentation from GitHub repositories - LINEAR_CONNECTOR: Indexes issues and comments from Linear @@ -632,19 +631,6 @@ async def index_connector_content( ) response_message = "Slack indexing started in the background." 
- elif connector.connector_type == SearchSourceConnectorType.TEAMS_CONNECTOR: - from app.tasks.celery_tasks.connector_tasks import ( - index_teams_messages_task, - ) - - logger.info( - f"Triggering Teams indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" - ) - index_teams_messages_task.delay( - connector_id, search_space_id, str(user.id), indexing_from, indexing_to - ) - response_message = "Teams indexing started in the background." - elif connector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR: from app.tasks.celery_tasks.connector_tasks import index_notion_pages_task @@ -1202,64 +1188,6 @@ async def run_discord_indexing( logger.error(f"Error in background Discord indexing task: {e!s}") -async def run_teams_indexing_with_new_session( - connector_id: int, - search_space_id: int, - user_id: str, - start_date: str, - end_date: str, -): - """ - Create a new session and run the Microsoft Teams indexing task. - This prevents session leaks by creating a dedicated session for the background task. - """ - async with async_session_maker() as session: - await run_teams_indexing( - session, connector_id, search_space_id, user_id, start_date, end_date - ) - - -async def run_teams_indexing( - session: AsyncSession, - connector_id: int, - search_space_id: int, - user_id: str, - start_date: str, - end_date: str, -): - """ - Background task to run Microsoft Teams indexing. 
- Args: - session: Database session - connector_id: ID of the Teams connector - search_space_id: ID of the search space - user_id: ID of the user - start_date: Start date for indexing - end_date: End date for indexing - """ - try: - from app.tasks.connector_indexers.teams_indexer import index_teams_messages - - # Index Teams messages without updating last_indexed_at (we'll do it separately) - documents_processed, error_or_warning = await index_teams_messages( - session=session, - connector_id=connector_id, - search_space_id=search_space_id, - user_id=user_id, - start_date=start_date, - end_date=end_date, - update_last_indexed=False, # Don't update timestamp in the indexing function - ) - - # Update last_indexed_at after successful indexing (even if 0 new docs - they were checked) - await update_connector_last_indexed(session, connector_id) - logger.info( - f"Teams indexing completed successfully: {documents_processed} documents processed. {error_or_warning or ''}" - ) - except Exception as e: - logger.error(f"Error in background Teams indexing task: {e!s}") - - # Add new helper functions for Jira indexing async def run_jira_indexing_with_new_session( connector_id: int, diff --git a/surfsense_backend/app/routes/teams_add_connector_route.py b/surfsense_backend/app/routes/teams_add_connector_route.py deleted file mode 100644 index ce014be0d..000000000 --- a/surfsense_backend/app/routes/teams_add_connector_route.py +++ /dev/null @@ -1,474 +0,0 @@ -""" -Microsoft Teams Connector OAuth Routes. - -Handles OAuth 2.0 authentication flow for Microsoft Teams connector using Microsoft Graph API. 
-""" - -import logging -from datetime import UTC, datetime, timedelta -from uuid import UUID - -import httpx -from fastapi import APIRouter, Depends, HTTPException -from fastapi.responses import RedirectResponse -from sqlalchemy.exc import IntegrityError -from sqlalchemy.ext.asyncio import AsyncSession - -from app.config import config -from app.db import ( - SearchSourceConnector, - SearchSourceConnectorType, - User, - get_async_session, -) -from app.schemas.teams_auth_credentials import TeamsAuthCredentialsBase -from app.users import current_active_user -from app.utils.connector_naming import ( - check_duplicate_connector, - extract_identifier_from_credentials, - generate_unique_connector_name, -) -from app.utils.oauth_security import OAuthStateManager, TokenEncryption - -logger = logging.getLogger(__name__) - -router = APIRouter() - -# Microsoft identity platform endpoints -AUTHORIZATION_URL = "https://login.microsoftonline.com/common/oauth2/v2.0/authorize" -TOKEN_URL = "https://login.microsoftonline.com/common/oauth2/v2.0/token" - -# OAuth scopes for Microsoft Teams (Graph API) -SCOPES = [ - "offline_access", # Required for refresh tokens - "User.Read", # Read user profile - "Team.ReadBasic.All", # Read basic team information - "Channel.ReadBasic.All", # Read basic channel information - "ChannelMessage.Read.All", # Read messages in channels -] - -# Initialize security utilities -_state_manager = None -_token_encryption = None - - -def get_state_manager() -> OAuthStateManager: - """Get or create OAuth state manager instance.""" - global _state_manager - if _state_manager is None: - if not config.SECRET_KEY: - raise ValueError("SECRET_KEY must be set for OAuth security") - _state_manager = OAuthStateManager(config.SECRET_KEY) - return _state_manager - - -def get_token_encryption() -> TokenEncryption: - """Get or create token encryption instance.""" - global _token_encryption - if _token_encryption is None: - if not config.SECRET_KEY: - raise ValueError("SECRET_KEY 
must be set for token encryption") - _token_encryption = TokenEncryption(config.SECRET_KEY) - return _token_encryption - - -@router.get("/auth/teams/connector/add") -async def connect_teams(space_id: int, user: User = Depends(current_active_user)): - """ - Initiate Microsoft Teams OAuth flow. - - Args: - space_id: The search space ID - user: Current authenticated user - - Returns: - Authorization URL for redirect - """ - try: - if not space_id: - raise HTTPException(status_code=400, detail="space_id is required") - - if not config.TEAMS_CLIENT_ID: - raise HTTPException( - status_code=500, detail="Microsoft Teams OAuth not configured." - ) - - if not config.SECRET_KEY: - raise HTTPException( - status_code=500, detail="SECRET_KEY not configured for OAuth security." - ) - - # Generate secure state parameter with HMAC signature - state_manager = get_state_manager() - state_encoded = state_manager.generate_secure_state(space_id, user.id) - - # Build authorization URL - from urllib.parse import urlencode - - auth_params = { - "client_id": config.TEAMS_CLIENT_ID, - "response_type": "code", - "redirect_uri": config.TEAMS_REDIRECT_URI, - "response_mode": "query", - "scope": " ".join(SCOPES), - "state": state_encoded, - } - - auth_url = f"{AUTHORIZATION_URL}?{urlencode(auth_params)}" - - logger.info( - "Generated Microsoft Teams OAuth URL for user %s, space %s", - user.id, - space_id, - ) - return {"auth_url": auth_url} - - except Exception as e: - logger.error( - "Failed to initiate Microsoft Teams OAuth: %s", str(e), exc_info=True - ) - raise HTTPException( - status_code=500, - detail=f"Failed to initiate Microsoft Teams OAuth: {e!s}", - ) from e - - -@router.get("/auth/teams/connector/callback") -async def teams_callback( - code: str | None = None, - error: str | None = None, - error_description: str | None = None, - state: str | None = None, - session: AsyncSession = Depends(get_async_session), -): - """ - Handle Microsoft Teams OAuth callback. 
- - Args: - code: Authorization code from Microsoft (if user granted access) - error: Error code from Microsoft (if user denied access or error occurred) - error_description: Human-readable error description - state: State parameter containing user/space info - session: Database session - - Returns: - Redirect response to frontend - """ - try: - # Handle OAuth errors (e.g., user denied access) - if error: - error_msg = error_description or error - logger.warning("Microsoft Teams OAuth error: %s", error_msg) - redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=teams_auth_failed&message={error_msg}" - return RedirectResponse(url=redirect_url) - - # Validate required parameters - if not code or not state: - raise HTTPException( - status_code=400, detail="Missing required OAuth parameters" - ) - - # Verify and decode state parameter - state_manager = get_state_manager() - try: - data = state_manager.validate_state(state) - space_id = data["space_id"] - user_id = UUID(data["user_id"]) - except (HTTPException, ValueError, KeyError) as e: - logger.error("Invalid OAuth state: %s", str(e)) - redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=invalid_state" - return RedirectResponse(url=redirect_url) - - # Exchange authorization code for access token - token_data = { - "client_id": config.TEAMS_CLIENT_ID, - "client_secret": config.TEAMS_CLIENT_SECRET, - "code": code, - "redirect_uri": config.TEAMS_REDIRECT_URI, - "grant_type": "authorization_code", - } - - async with httpx.AsyncClient() as client: - token_response = await client.post( - TOKEN_URL, - data=token_data, - headers={"Content-Type": "application/x-www-form-urlencoded"}, - timeout=30.0, - ) - - if token_response.status_code != 200: - error_detail = token_response.text - try: - error_json = token_response.json() - error_detail = error_json.get("error_description", error_detail) - except Exception: - pass - raise HTTPException( - status_code=400, detail=f"Token exchange failed: {error_detail}" - ) - 
- token_json = token_response.json() - - # Extract tokens from response - access_token = token_json.get("access_token") - refresh_token = token_json.get("refresh_token") - - if not access_token: - raise HTTPException( - status_code=400, detail="No access token received from Microsoft" - ) - - # Encrypt sensitive tokens before storing - token_encryption = get_token_encryption() - - # Calculate expiration time (UTC, tz-aware) - expires_at = None - if token_json.get("expires_in"): - now_utc = datetime.now(UTC) - expires_at = now_utc + timedelta(seconds=int(token_json["expires_in"])) - - # Fetch user info from Microsoft Graph API - user_info = {} - tenant_info = {} - try: - async with httpx.AsyncClient() as client: - # Get user profile - user_response = await client.get( - "https://graph.microsoft.com/v1.0/me", - headers={"Authorization": f"Bearer {access_token}"}, - timeout=30.0, - ) - if user_response.status_code == 200: - user_data = user_response.json() - user_info = { - "user_id": user_data.get("id"), - "user_name": user_data.get("displayName"), - "user_email": user_data.get("mail") - or user_data.get("userPrincipalName"), - } - - # Get organization/tenant info - org_response = await client.get( - "https://graph.microsoft.com/v1.0/organization", - headers={"Authorization": f"Bearer {access_token}"}, - timeout=30.0, - ) - if org_response.status_code == 200: - org_data = org_response.json() - if org_data.get("value") and len(org_data["value"]) > 0: - org = org_data["value"][0] - tenant_info = { - "tenant_id": org.get("id"), - "tenant_name": org.get("displayName"), - } - except Exception as e: - logger.warning( - "Failed to fetch user/tenant info from Microsoft Graph: %s", str(e) - ) - - # Store the encrypted tokens and user/tenant info in connector config - connector_config = { - "access_token": token_encryption.encrypt_token(access_token), - "refresh_token": token_encryption.encrypt_token(refresh_token) - if refresh_token - else None, - "token_type": 
token_json.get("token_type", "Bearer"), - "expires_in": token_json.get("expires_in"), - "expires_at": expires_at.isoformat() if expires_at else None, - "scope": token_json.get("scope"), - "tenant_id": tenant_info.get("tenant_id"), - "tenant_name": tenant_info.get("tenant_name"), - "user_id": user_info.get("user_id"), - # Mark that token is encrypted for backward compatibility - "_token_encrypted": True, - } - - # Extract unique identifier from connector credentials - connector_identifier = extract_identifier_from_credentials( - SearchSourceConnectorType.TEAMS_CONNECTOR, connector_config - ) - - # Check for duplicate connector (same tenant already connected) - is_duplicate = await check_duplicate_connector( - session, - SearchSourceConnectorType.TEAMS_CONNECTOR, - space_id, - user_id, - connector_identifier, - ) - - if is_duplicate: - logger.warning( - "Duplicate Microsoft Teams connector for user %s, space %s, tenant %s", - user_id, - space_id, - tenant_info.get("tenant_name"), - ) - redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=duplicate_connector&message=This Microsoft Teams tenant is already connected to this space" - return RedirectResponse(url=redirect_url) - - # Generate unique connector name - connector_name = await generate_unique_connector_name( - session, - SearchSourceConnectorType.TEAMS_CONNECTOR, - space_id, - user_id, - connector_identifier, - ) - - # Create new connector - new_connector = SearchSourceConnector( - name=connector_name, - connector_type=SearchSourceConnectorType.TEAMS_CONNECTOR, - is_indexable=True, - config=connector_config, - search_space_id=space_id, - user_id=user_id, - ) - - try: - session.add(new_connector) - await session.commit() - await session.refresh(new_connector) - - logger.info( - "Successfully created Microsoft Teams connector %s for user %s", - new_connector.id, - user_id, - ) - - # Redirect to frontend with success - redirect_url = 
f"{config.NEXT_FRONTEND_URL}/dashboard?success=teams_connected&connector_id={new_connector.id}" - return RedirectResponse(url=redirect_url) - - except IntegrityError as e: - await session.rollback() - logger.error("Database integrity error creating Teams connector: %s", str(e)) - redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=connector_creation_failed" - return RedirectResponse(url=redirect_url) - - except HTTPException: - raise - except (IntegrityError, ValueError) as e: - logger.error("Teams OAuth callback error: %s", str(e), exc_info=True) - redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=teams_auth_error" - return RedirectResponse(url=redirect_url) - - -async def refresh_teams_token( - session: AsyncSession, connector: SearchSourceConnector -) -> SearchSourceConnector: - """ - Refresh Microsoft Teams OAuth tokens. - - Args: - session: Database session - connector: The connector to refresh - - Returns: - Updated connector with refreshed tokens - - Raises: - HTTPException: If token refresh fails - """ - logger.info( - "Refreshing Microsoft Teams OAuth tokens for connector %s", connector.id - ) - - credentials = TeamsAuthCredentialsBase.from_dict(connector.config) - - # Decrypt tokens if they are encrypted - token_encryption = get_token_encryption() - is_encrypted = connector.config.get("_token_encrypted", False) - refresh_token = credentials.refresh_token - - if is_encrypted and refresh_token: - try: - refresh_token = token_encryption.decrypt_token(refresh_token) - except Exception as e: - logger.error("Failed to decrypt refresh token: %s", str(e)) - raise HTTPException( - status_code=500, detail="Failed to decrypt stored refresh token" - ) from e - - if not refresh_token: - raise HTTPException( - status_code=400, - detail=f"No refresh token available for connector {connector.id}", - ) - - # Microsoft uses oauth2/v2.0/token for token refresh - refresh_data = { - "client_id": config.TEAMS_CLIENT_ID, - "client_secret": 
config.TEAMS_CLIENT_SECRET, - "grant_type": "refresh_token", - "refresh_token": refresh_token, - "scope": " ".join(SCOPES), - } - - async with httpx.AsyncClient() as client: - token_response = await client.post( - TOKEN_URL, - data=refresh_data, - headers={"Content-Type": "application/x-www-form-urlencoded"}, - timeout=30.0, - ) - - if token_response.status_code != 200: - error_detail = token_response.text - try: - error_json = token_response.json() - error_detail = error_json.get("error_description", error_detail) - except Exception: - pass - raise HTTPException( - status_code=400, detail=f"Token refresh failed: {error_detail}" - ) - - token_json = token_response.json() - - # Extract new tokens - access_token = token_json.get("access_token") - new_refresh_token = token_json.get("refresh_token") - - if not access_token: - raise HTTPException( - status_code=400, detail="No access token received from Microsoft refresh" - ) - - # Calculate expiration time (UTC, tz-aware) - expires_at = None - expires_in = token_json.get("expires_in") - if expires_in: - now_utc = datetime.now(UTC) - expires_at = now_utc + timedelta(seconds=int(expires_in)) - - # Update credentials object with encrypted tokens - credentials.access_token = token_encryption.encrypt_token(access_token) - if new_refresh_token: - credentials.refresh_token = token_encryption.encrypt_token(new_refresh_token) - credentials.expires_in = expires_in - credentials.expires_at = expires_at - credentials.scope = token_json.get("scope") - - # Preserve tenant/user info - if not credentials.tenant_id: - credentials.tenant_id = connector.config.get("tenant_id") - if not credentials.tenant_name: - credentials.tenant_name = connector.config.get("tenant_name") - if not credentials.user_id: - credentials.user_id = connector.config.get("user_id") - - # Update connector config with encrypted tokens - credentials_dict = credentials.to_dict() - credentials_dict["_token_encrypted"] = True - connector.config = credentials_dict - - 
await session.commit() - await session.refresh(connector) - - logger.info( - "Successfully refreshed Microsoft Teams tokens for connector %s", connector.id - ) - - return connector diff --git a/surfsense_backend/app/schemas/teams_auth_credentials.py b/surfsense_backend/app/schemas/teams_auth_credentials.py deleted file mode 100644 index 41688b102..000000000 --- a/surfsense_backend/app/schemas/teams_auth_credentials.py +++ /dev/null @@ -1,79 +0,0 @@ -""" -Microsoft Teams OAuth credentials schema. -""" - -from datetime import UTC, datetime - -from pydantic import BaseModel, field_validator - - -class TeamsAuthCredentialsBase(BaseModel): - """Microsoft Teams OAuth credentials.""" - - access_token: str - refresh_token: str | None = None - token_type: str = "Bearer" - expires_in: int | None = None - expires_at: datetime | None = None - scope: str | None = None - tenant_id: str | None = None - tenant_name: str | None = None - user_id: str | None = None - - @property - def is_expired(self) -> bool: - """Check if the credentials have expired.""" - if self.expires_at is None: - return False - return self.expires_at <= datetime.now(UTC) - - @property - def is_refreshable(self) -> bool: - """Check if the credentials can be refreshed.""" - return self.refresh_token is not None - - def to_dict(self) -> dict: - """Convert credentials to dictionary for storage.""" - return { - "access_token": self.access_token, - "refresh_token": self.refresh_token, - "token_type": self.token_type, - "expires_in": self.expires_in, - "expires_at": self.expires_at.isoformat() if self.expires_at else None, - "scope": self.scope, - "tenant_id": self.tenant_id, - "tenant_name": self.tenant_name, - "user_id": self.user_id, - } - - @classmethod - def from_dict(cls, data: dict) -> "TeamsAuthCredentialsBase": - """Create credentials from dictionary.""" - expires_at = None - if data.get("expires_at"): - expires_at = datetime.fromisoformat(data["expires_at"]) - - return cls( - 
access_token=data.get("access_token", ""), - refresh_token=data.get("refresh_token"), - token_type=data.get("token_type", "Bearer"), - expires_in=data.get("expires_in"), - expires_at=expires_at, - scope=data.get("scope"), - tenant_id=data.get("tenant_id"), - tenant_name=data.get("tenant_name"), - user_id=data.get("user_id"), - ) - - @field_validator("expires_at", mode="before") - @classmethod - def ensure_aware_utc(cls, v): - """Ensure datetime is timezone-aware (UTC).""" - if isinstance(v, str): - if v.endswith("Z"): - return datetime.fromisoformat(v.replace("Z", "+00:00")) - dt = datetime.fromisoformat(v) - return dt if dt.tzinfo else dt.replace(tzinfo=UTC) - if isinstance(v, datetime): - return v if v.tzinfo else v.replace(tzinfo=UTC) - return v diff --git a/surfsense_backend/app/services/connector_service.py b/surfsense_backend/app/services/connector_service.py index 832aee4cc..4e874729c 100644 --- a/surfsense_backend/app/services/connector_service.py +++ b/surfsense_backend/app/services/connector_service.py @@ -2269,80 +2269,6 @@ class ConnectorService: return result_object, discord_docs - async def search_teams( - self, - user_query: str, - search_space_id: int, - top_k: int = 20, - start_date: datetime | None = None, - end_date: datetime | None = None, - ) -> tuple: - """ - Search for Microsoft Teams messages and return both the source information and langchain documents. - - Uses combined chunk-level and document-level hybrid search with RRF fusion. 
- - Args: - user_query: The user's query - search_space_id: The search space ID to search in - top_k: Maximum number of results to return - start_date: Optional start date for filtering documents by updated_at - end_date: Optional end date for filtering documents by updated_at - - Returns: - tuple: (sources_info, langchain_documents) - """ - teams_docs = await self._combined_rrf_search( - query_text=user_query, - search_space_id=search_space_id, - document_type="TEAMS_CONNECTOR", - top_k=top_k, - start_date=start_date, - end_date=end_date, - ) - - # Early return if no results - if not teams_docs: - return { - "id": 53, - "name": "Microsoft Teams", - "type": "TEAMS_CONNECTOR", - "sources": [], - }, [] - - def _title_fn(_doc_info: dict[str, Any], metadata: dict[str, Any]) -> str: - team_name = metadata.get("team_name", "Unknown Team") - channel_name = metadata.get("channel_name", "Unknown Channel") - message_date = metadata.get("start_date", "") - title = f"Teams: {team_name} - {channel_name}" - if message_date: - title += f" ({message_date})" - return title - - def _url_fn(_doc_info: dict[str, Any], metadata: dict[str, Any]) -> str: - team_id = metadata.get("team_id", "") - channel_id = metadata.get("channel_id", "") - if team_id and channel_id: - return f"https://teams.microsoft.com/l/channel/{channel_id}/General?groupId={team_id}" - return "" - - sources_list = self._build_chunk_sources_from_documents( - teams_docs, - title_fn=_title_fn, - url_fn=_url_fn, - description_fn=lambda chunk, _doc_info, _metadata: chunk.get("content", ""), - ) - - # Create result object - result_object = { - "id": 53, - "name": "Microsoft Teams", - "type": "TEAMS_CONNECTOR", - "sources": sources_list, - } - - return result_object, teams_docs - async def search_luma( self, user_query: str, diff --git a/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py b/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py index 1d1cbe361..3cae1bbdb 100644 --- 
a/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py +++ b/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py @@ -564,49 +564,6 @@ async def _index_discord_messages( ) -@celery_app.task(name="index_teams_messages", bind=True) -def index_teams_messages_task( - self, - connector_id: int, - search_space_id: int, - user_id: str, - start_date: str, - end_date: str, -): - """Celery task to index Microsoft Teams messages.""" - import asyncio - - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - - try: - loop.run_until_complete( - _index_teams_messages( - connector_id, search_space_id, user_id, start_date, end_date - ) - ) - finally: - loop.close() - - -async def _index_teams_messages( - connector_id: int, - search_space_id: int, - user_id: str, - start_date: str, - end_date: str, -): - """Index Microsoft Teams messages with new session.""" - from app.routes.search_source_connectors_routes import ( - run_teams_indexing, - ) - - async with get_celery_session_maker()() as session: - await run_teams_indexing( - session, connector_id, search_space_id, user_id, start_date, end_date - ) - - @celery_app.task(name="index_luma_events", bind=True) def index_luma_events_task( self, diff --git a/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py b/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py deleted file mode 100644 index c1e778768..000000000 --- a/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py +++ /dev/null @@ -1,473 +0,0 @@ -""" -Microsoft Teams connector indexer. 
-""" - -from sqlalchemy.exc import SQLAlchemyError -from sqlalchemy.ext.asyncio import AsyncSession - -from app.config import config -from app.connectors.teams_history import TeamsHistory -from app.db import Document, DocumentType, SearchSourceConnectorType -from app.services.task_logging_service import TaskLoggingService -from app.utils.document_converters import ( - create_document_chunks, - generate_content_hash, - generate_unique_identifier_hash, -) - -from .base import ( - build_document_metadata_markdown, - calculate_date_range, - check_document_by_unique_identifier, - get_connector_by_id, - get_current_timestamp, - logger, - update_connector_last_indexed, -) - - -async def index_teams_messages( - session: AsyncSession, - connector_id: int, - search_space_id: int, - user_id: str, - start_date: str | None = None, - end_date: str | None = None, - update_last_indexed: bool = True, -) -> tuple[int, str | None]: - """ - Index Microsoft Teams messages from all accessible teams and channels. 
- - Args: - session: Database session - connector_id: ID of the Teams connector - search_space_id: ID of the search space to store documents in - user_id: ID of the user - start_date: Start date for indexing (YYYY-MM-DD format) - end_date: End date for indexing (YYYY-MM-DD format) - update_last_indexed: Whether to update the last_indexed_at timestamp (default: True) - - Returns: - Tuple containing (number of documents indexed, error message or None) - """ - task_logger = TaskLoggingService(session, search_space_id) - - # Log task start - log_entry = await task_logger.log_task_start( - task_name="teams_messages_indexing", - source="connector_indexing_task", - message=f"Starting Microsoft Teams messages indexing for connector {connector_id}", - metadata={ - "connector_id": connector_id, - "user_id": str(user_id), - "start_date": start_date, - "end_date": end_date, - }, - ) - - try: - # Get the connector - await task_logger.log_task_progress( - log_entry, - f"Retrieving Teams connector {connector_id} from database", - {"stage": "connector_retrieval"}, - ) - - connector = await get_connector_by_id( - session, connector_id, SearchSourceConnectorType.TEAMS_CONNECTOR - ) - - if not connector: - await task_logger.log_task_failure( - log_entry, - f"Connector with ID {connector_id} not found or is not a Teams connector", - "Connector not found", - {"error_type": "ConnectorNotFound"}, - ) - return ( - 0, - f"Connector with ID {connector_id} not found or is not a Teams connector", - ) - - # Initialize Teams client with auto-refresh support - await task_logger.log_task_progress( - log_entry, - f"Initializing Teams client for connector {connector_id}", - {"stage": "client_initialization"}, - ) - - teams_client = TeamsHistory(session=session, connector_id=connector_id) - - # Handle 'undefined' string from frontend (treat as None) - if start_date == "undefined" or start_date == "": - start_date = None - if end_date == "undefined" or end_date == "": - end_date = None - - # 
Calculate date range - await task_logger.log_task_progress( - log_entry, - "Calculating date range for Teams indexing", - { - "stage": "date_calculation", - "provided_start_date": start_date, - "provided_end_date": end_date, - }, - ) - - start_date_str, end_date_str = calculate_date_range( - connector, start_date, end_date, default_days_back=365 - ) - - logger.info( - "Indexing Teams messages from %s to %s", start_date_str, end_date_str - ) - - await task_logger.log_task_progress( - log_entry, - f"Fetching Teams from {start_date_str} to {end_date_str}", - { - "stage": "fetch_teams", - "start_date": start_date_str, - "end_date": end_date_str, - }, - ) - - # Get all teams - try: - teams = await teams_client.get_all_teams() - except Exception as e: - await task_logger.log_task_failure( - log_entry, - f"Failed to get Teams for connector {connector_id}", - str(e), - {"error_type": "TeamsFetchError"}, - ) - return 0, f"Failed to get Teams: {e!s}" - - if not teams: - await task_logger.log_task_success( - log_entry, - f"No Teams found for connector {connector_id}", - {"teams_found": 0}, - ) - return 0, "No Teams found" - - # Track the number of documents indexed - documents_indexed = 0 - documents_skipped = 0 - skipped_channels = [] - - await task_logger.log_task_progress( - log_entry, - f"Starting to process {len(teams)} Teams", - {"stage": "process_teams", "total_teams": len(teams)}, - ) - - # Convert date strings to datetime objects for filtering - from datetime import datetime, timezone - - start_datetime = None - end_datetime = None - if start_date_str: - # Parse as naive datetime and make it timezone-aware (UTC) - start_datetime = datetime.strptime(start_date_str, "%Y-%m-%d").replace(tzinfo=timezone.utc) - if end_date_str: - # Parse as naive datetime, set to end of day, and make it timezone-aware (UTC) - end_datetime = datetime.strptime(end_date_str, "%Y-%m-%d").replace(hour=23, minute=59, second=59, tzinfo=timezone.utc) - - # Process each team - for team in teams: - 
team_id = team.get("id") - team_name = team.get("displayName", "Unknown Team") - - try: - # Get channels for this team - channels = await teams_client.get_channels_for_team(team_id) - - if not channels: - logger.info("No channels found in team %s", team_name) - continue - - # Process each channel in the team - for channel in channels: - channel_id = channel.get("id") - channel_name = channel.get("displayName", "Unknown Channel") - - try: - # Get messages for this channel - messages = await teams_client.get_messages_from_channel( - team_id, - channel_id, - start_datetime, - end_datetime, - include_replies=True, - ) - - if not messages: - logger.info( - "No messages found in channel %s of team %s for the specified date range.", - channel_name, - team_name, - ) - documents_skipped += 1 - continue - - # Process each message - for msg in messages: - # Skip deleted messages or empty content - if msg.get("deletedDateTime"): - continue - - # Extract message details - message_id = msg.get("id", "") - created_datetime = msg.get("createdDateTime", "") - from_user = msg.get("from", {}) - user_name = from_user.get("user", {}).get( - "displayName", "Unknown User" - ) - user_email = from_user.get("user", {}).get( - "userPrincipalName", "Unknown Email" - ) - - # Extract message content - body = msg.get("body", {}) - content_type = body.get("contentType", "text") - msg_text = body.get("content", "") - - # Skip empty messages - if not msg_text or msg_text.strip() == "": - continue - - # Format document metadata - metadata_sections = [ - ( - "METADATA", - [ - f"TEAM_NAME: {team_name}", - f"TEAM_ID: {team_id}", - f"CHANNEL_NAME: {channel_name}", - f"CHANNEL_ID: {channel_id}", - f"MESSAGE_TIMESTAMP: {created_datetime}", - f"MESSAGE_USER_NAME: {user_name}", - f"MESSAGE_USER_EMAIL: {user_email}", - f"CONTENT_TYPE: {content_type}", - ], - ), - ( - "CONTENT", - [ - f"FORMAT: {content_type}", - "TEXT_START", - msg_text, - "TEXT_END", - ], - ), - ] - - # Build the document string - 
combined_document_string = build_document_metadata_markdown( - metadata_sections - ) - - # Generate unique identifier hash for this Teams message - unique_identifier = f"{team_id}_{channel_id}_{message_id}" - unique_identifier_hash = generate_unique_identifier_hash( - DocumentType.TEAMS_CONNECTOR, - unique_identifier, - search_space_id, - ) - - # Generate content hash - content_hash = generate_content_hash( - combined_document_string, search_space_id - ) - - # Check if document with this unique identifier already exists - existing_document = ( - await check_document_by_unique_identifier( - session, unique_identifier_hash - ) - ) - - if existing_document: - # Document exists - check if content has changed - if existing_document.content_hash == content_hash: - logger.info( - "Document for Teams message %s in channel %s unchanged. Skipping.", - message_id, - channel_name, - ) - documents_skipped += 1 - continue - else: - # Content has changed - update the existing document - logger.info( - "Content changed for Teams message %s in channel %s. 
Updating document.", - message_id, - channel_name, - ) - - # Update chunks and embedding - chunks = await create_document_chunks( - combined_document_string - ) - doc_embedding = config.embedding_model_instance.embed( - combined_document_string - ) - - # Update existing document - existing_document.content = combined_document_string - existing_document.content_hash = content_hash - existing_document.embedding = doc_embedding - existing_document.document_metadata = { - "team_name": team_name, - "team_id": team_id, - "channel_name": channel_name, - "channel_id": channel_id, - "start_date": start_date_str, - "end_date": end_date_str, - "message_count": len(messages), - "indexed_at": datetime.now().strftime( - "%Y-%m-%d %H:%M:%S" - ), - } - - # Delete old chunks and add new ones - existing_document.chunks = chunks - existing_document.updated_at = get_current_timestamp() - - documents_indexed += 1 - logger.info( - "Successfully updated Teams message %s", message_id - ) - continue - - # Document doesn't exist - create new one - # Process chunks - chunks = await create_document_chunks( - combined_document_string - ) - doc_embedding = config.embedding_model_instance.embed( - combined_document_string - ) - - # Create and store new document - document = Document( - search_space_id=search_space_id, - title=f"Teams - {team_name} - {channel_name}", - document_type=DocumentType.TEAMS_CONNECTOR, - document_metadata={ - "team_name": team_name, - "team_id": team_id, - "channel_name": channel_name, - "channel_id": channel_id, - "start_date": start_date_str, - "end_date": end_date_str, - "message_count": len(messages), - "indexed_at": datetime.now().strftime( - "%Y-%m-%d %H:%M:%S" - ), - }, - content=combined_document_string, - embedding=doc_embedding, - chunks=chunks, - content_hash=content_hash, - unique_identifier_hash=unique_identifier_hash, - updated_at=get_current_timestamp(), - ) - - session.add(document) - documents_indexed += 1 - - # Batch commit every 10 documents - if 
documents_indexed % 10 == 0: - logger.info( - "Committing batch: %s Teams messages processed so far", - documents_indexed, - ) - await session.commit() - - logger.info( - "Successfully indexed channel %s in team %s with %s messages", - channel_name, - team_name, - len(messages), - ) - - except Exception as e: - logger.error( - "Error processing channel %s in team %s: %s", - channel_name, - team_name, - str(e), - ) - skipped_channels.append( - f"{team_name}/{channel_name} (processing error)" - ) - documents_skipped += 1 - continue - - except Exception as e: - logger.error("Error processing team %s: %s", team_name, str(e)) - continue - - # Update the last_indexed_at timestamp for the connector only if requested - # and if we successfully indexed at least one document - total_processed = documents_indexed - if total_processed > 0: - await update_connector_last_indexed(session, connector, update_last_indexed) - - # Final commit for any remaining documents not yet committed in batches - logger.info( - "Final commit: Total %s Teams messages processed", documents_indexed - ) - await session.commit() - - # Prepare result message - result_message = None - if skipped_channels: - result_message = f"Processed {total_processed} messages. Skipped {len(skipped_channels)} channels: {', '.join(skipped_channels)}" - else: - result_message = f"Processed {total_processed} messages." 
- - # Log success - await task_logger.log_task_success( - log_entry, - f"Successfully completed Teams indexing for connector {connector_id}", - { - "messages_processed": total_processed, - "documents_indexed": documents_indexed, - "documents_skipped": documents_skipped, - "skipped_channels_count": len(skipped_channels), - "result_message": result_message, - }, - ) - - logger.info( - "Teams indexing completed: %s new messages, %s skipped", - documents_indexed, - documents_skipped, - ) - return total_processed, result_message - - except SQLAlchemyError as db_error: - await session.rollback() - await task_logger.log_task_failure( - log_entry, - f"Database error during Teams indexing for connector {connector_id}", - str(db_error), - {"error_type": "SQLAlchemyError"}, - ) - logger.error("Database error: %s", str(db_error)) - return 0, f"Database error: {db_error!s}" - except Exception as e: - await session.rollback() - await task_logger.log_task_failure( - log_entry, - f"Failed to index Teams messages for connector {connector_id}", - str(e), - {"error_type": type(e).__name__}, - ) - logger.error("Failed to index Teams messages: %s", str(e)) - return 0, f"Failed to index Teams messages: {e!s}" diff --git a/surfsense_backend/app/utils/connector_naming.py b/surfsense_backend/app/utils/connector_naming.py index 731f419d6..f9f1fdd21 100644 --- a/surfsense_backend/app/utils/connector_naming.py +++ b/surfsense_backend/app/utils/connector_naming.py @@ -20,7 +20,6 @@ BASE_NAME_FOR_TYPE = { SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR: "Google Drive", SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR: "Google Calendar", SearchSourceConnectorType.SLACK_CONNECTOR: "Slack", - SearchSourceConnectorType.TEAMS_CONNECTOR: "Microsoft Teams", SearchSourceConnectorType.NOTION_CONNECTOR: "Notion", SearchSourceConnectorType.LINEAR_CONNECTOR: "Linear", SearchSourceConnectorType.JIRA_CONNECTOR: "Jira", @@ -54,9 +53,6 @@ def extract_identifier_from_credentials( if connector_type == 
SearchSourceConnectorType.SLACK_CONNECTOR: return credentials.get("team_name") - if connector_type == SearchSourceConnectorType.TEAMS_CONNECTOR: - return credentials.get("tenant_name") - if connector_type == SearchSourceConnectorType.NOTION_CONNECTOR: return credentials.get("workspace_name") diff --git a/surfsense_backend/app/utils/periodic_scheduler.py b/surfsense_backend/app/utils/periodic_scheduler.py index 219641933..c95f407a4 100644 --- a/surfsense_backend/app/utils/periodic_scheduler.py +++ b/surfsense_backend/app/utils/periodic_scheduler.py @@ -19,7 +19,6 @@ logger = logging.getLogger(__name__) # Mapping of connector types to their corresponding Celery task names CONNECTOR_TASK_MAP = { SearchSourceConnectorType.SLACK_CONNECTOR: "index_slack_messages", - SearchSourceConnectorType.TEAMS_CONNECTOR: "index_teams_messages", SearchSourceConnectorType.NOTION_CONNECTOR: "index_notion_pages", SearchSourceConnectorType.GITHUB_CONNECTOR: "index_github_repos", SearchSourceConnectorType.LINEAR_CONNECTOR: "index_linear_issues", diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx deleted file mode 100644 index ac08a6c03..000000000 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx +++ /dev/null @@ -1,29 +0,0 @@ -"use client"; - -import { Info } from "lucide-react"; -import type { FC } from "react"; -import type { ConnectorConfigProps } from "../index"; - -export interface TeamsConfigProps extends ConnectorConfigProps { - onNameChange?: (name: string) => void; -} - -export const TeamsConfig: FC = () => { - return ( -
-
-
- -
-
-

Microsoft Teams Access

-

- SurfSense will index messages from Teams channels that you have access to. The app can - only read messages from teams and channels where you are a member. Make sure you're a - member of the teams you want to index before connecting. -

-
-
-
- ); -}; diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/index.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/index.tsx index 267e85115..2575b3a69 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/index.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/index.tsx @@ -17,7 +17,6 @@ import { LumaConfig } from "./components/luma-config"; import { SearxngConfig } from "./components/searxng-config"; import { SlackConfig } from "./components/slack-config"; import { TavilyApiConfig } from "./components/tavily-api-config"; -import { TeamsConfig } from "./components/teams-config"; import { WebcrawlerConfig } from "./components/webcrawler-config"; export interface ConnectorConfigProps { @@ -53,8 +52,6 @@ export function getConnectorConfigComponent( return SlackConfig; case "DISCORD_CONNECTOR": return DiscordConfig; - case "TEAMS_CONNECTOR": - return TeamsConfig; case "CONFLUENCE_CONNECTOR": return ConfluenceConfig; case "BOOKSTACK_CONNECTOR": diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 23982e6f3..287bc30f4 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -51,13 +51,6 @@ export const OAUTH_CONNECTORS = [ connectorType: EnumConnectorName.SLACK_CONNECTOR, authEndpoint: "/api/v1/auth/slack/connector/add/", }, - { - id: "teams-connector", - title: "Microsoft Teams", - description: "Search Teams messages", - connectorType: EnumConnectorName.TEAMS_CONNECTOR, - authEndpoint: "/api/v1/auth/teams/connector/add/", - }, { id: "discord-connector", title: "Discord", diff --git 
a/surfsense_web/components/assistant-ui/connector-popup/utils/connector-document-mapping.ts b/surfsense_web/components/assistant-ui/connector-popup/utils/connector-document-mapping.ts index 433a51e8c..a0b271eb6 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/utils/connector-document-mapping.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/utils/connector-document-mapping.ts @@ -11,7 +11,6 @@ export const CONNECTOR_TO_DOCUMENT_TYPE: Record = { // Direct mappings (connector type matches document type) SLACK_CONNECTOR: "SLACK_CONNECTOR", - TEAMS_CONNECTOR: "TEAMS_CONNECTOR", NOTION_CONNECTOR: "NOTION_CONNECTOR", GITHUB_CONNECTOR: "GITHUB_CONNECTOR", LINEAR_CONNECTOR: "LINEAR_CONNECTOR", diff --git a/surfsense_web/contracts/enums/connector.ts b/surfsense_web/contracts/enums/connector.ts index fc65585e2..ae80cf871 100644 --- a/surfsense_web/contracts/enums/connector.ts +++ b/surfsense_web/contracts/enums/connector.ts @@ -4,7 +4,6 @@ export enum EnumConnectorName { LINKUP_API = "LINKUP_API", BAIDU_SEARCH_API = "BAIDU_SEARCH_API", SLACK_CONNECTOR = "SLACK_CONNECTOR", - TEAMS_CONNECTOR = "TEAMS_CONNECTOR", NOTION_CONNECTOR = "NOTION_CONNECTOR", GITHUB_CONNECTOR = "GITHUB_CONNECTOR", LINEAR_CONNECTOR = "LINEAR_CONNECTOR", diff --git a/surfsense_web/contracts/enums/connectorIcons.tsx b/surfsense_web/contracts/enums/connectorIcons.tsx index befe132f9..22bc734aa 100644 --- a/surfsense_web/contracts/enums/connectorIcons.tsx +++ b/surfsense_web/contracts/enums/connectorIcons.tsx @@ -31,8 +31,6 @@ export const getConnectorIcon = (connectorType: EnumConnectorName | string, clas return Baidu; case EnumConnectorName.SLACK_CONNECTOR: return Slack; - case EnumConnectorName.TEAMS_CONNECTOR: - return Microsoft Teams; case EnumConnectorName.NOTION_CONNECTOR: return Notion; case EnumConnectorName.DISCORD_CONNECTOR: diff --git a/surfsense_web/contracts/types/connector.types.ts b/surfsense_web/contracts/types/connector.types.ts index f864ae16f..5b67297ae 
100644 --- a/surfsense_web/contracts/types/connector.types.ts +++ b/surfsense_web/contracts/types/connector.types.ts @@ -8,7 +8,6 @@ export const searchSourceConnectorTypeEnum = z.enum([ "LINKUP_API", "BAIDU_SEARCH_API", "SLACK_CONNECTOR", - "TEAMS_CONNECTOR", "NOTION_CONNECTOR", "GITHUB_CONNECTOR", "LINEAR_CONNECTOR", From fa35b7152213b6f2c137272bb6c5bb84424a11b2 Mon Sep 17 00:00:00 2001 From: Manoj Aggarwal Date: Fri, 9 Jan 2026 13:20:30 -0800 Subject: [PATCH 21/28] Add teams connector similar to slack --- .vscode/settings.json | 3 +- surfsense_backend/.env.example | 5 + surfsense_backend/app/config/__init__.py | 5 + .../app/connectors/teams_connector.py | 323 ++++++++++++ .../app/connectors/teams_history.py | 254 ++++++++++ surfsense_backend/app/db.py | 2 + surfsense_backend/app/routes/__init__.py | 2 + .../routes/search_source_connectors_routes.py | 63 +++ .../app/routes/teams_add_connector_route.py | 473 ++++++++++++++++++ .../app/schemas/teams_auth_credentials.py | 79 +++ .../app/services/connector_service.py | 74 +++ .../app/tasks/celery_tasks/connector_tasks.py | 43 ++ .../tasks/connector_indexers/teams_indexer.py | 471 +++++++++++++++++ .../app/utils/connector_naming.py | 4 + .../app/utils/periodic_scheduler.py | 1 + .../components/teams-config.tsx | 29 ++ .../connector-configs/index.tsx | 3 + .../constants/connector-constants.ts | 7 + .../utils/connector-document-mapping.ts | 1 + surfsense_web/contracts/enums/connector.ts | 1 + .../contracts/enums/connectorIcons.tsx | 2 + 21 files changed, 1844 insertions(+), 1 deletion(-) create mode 100644 surfsense_backend/app/connectors/teams_connector.py create mode 100644 surfsense_backend/app/connectors/teams_history.py create mode 100644 surfsense_backend/app/routes/teams_add_connector_route.py create mode 100644 surfsense_backend/app/schemas/teams_auth_credentials.py create mode 100644 surfsense_backend/app/tasks/connector_indexers/teams_indexer.py create mode 100644 
surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx diff --git a/.vscode/settings.json b/.vscode/settings.json index f134660b6..42d09dcad 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,3 +1,4 @@ { - "biome.configurationPath": "./surfsense_web/biome.json" + "biome.configurationPath": "./surfsense_web/biome.json", + "python-envs.pythonProjects": [] } \ No newline at end of file diff --git a/surfsense_backend/.env.example b/surfsense_backend/.env.example index 2c2fec48b..6ac7c55de 100644 --- a/surfsense_backend/.env.example +++ b/surfsense_backend/.env.example @@ -76,6 +76,11 @@ SLACK_CLIENT_ID=your_slack_client_id_here SLACK_CLIENT_SECRET=your_slack_client_secret_here SLACK_REDIRECT_URI=http://localhost:8000/api/v1/auth/slack/connector/callback +# Teams OAuth Configuration +TEAMS_CLIENT_ID=your_teams_client_id_here +TEAMS_CLIENT_SECRET=your_teams_client_secret_here +TEAMS_REDIRECT_URI=http://localhost:8000/api/v1/auth/teams/connector/callback + # Embedding Model # Examples: # # Get sentence transformers embeddings diff --git a/surfsense_backend/app/config/__init__.py b/surfsense_backend/app/config/__init__.py index e76e69e94..448e2c253 100644 --- a/surfsense_backend/app/config/__init__.py +++ b/surfsense_backend/app/config/__init__.py @@ -117,6 +117,11 @@ class Config: DISCORD_REDIRECT_URI = os.getenv("DISCORD_REDIRECT_URI") DISCORD_BOT_TOKEN = os.getenv("DISCORD_BOT_TOKEN") + # Microsoft Teams OAuth + TEAMS_CLIENT_ID = os.getenv("TEAMS_CLIENT_ID") + TEAMS_CLIENT_SECRET = os.getenv("TEAMS_CLIENT_SECRET") + TEAMS_REDIRECT_URI = os.getenv("TEAMS_REDIRECT_URI") + # ClickUp OAuth CLICKUP_CLIENT_ID = os.getenv("CLICKUP_CLIENT_ID") CLICKUP_CLIENT_SECRET = os.getenv("CLICKUP_CLIENT_SECRET") diff --git a/surfsense_backend/app/connectors/teams_connector.py b/surfsense_backend/app/connectors/teams_connector.py new file mode 100644 index 000000000..e11a2aad0 --- /dev/null +++ 
b/surfsense_backend/app/connectors/teams_connector.py @@ -0,0 +1,323 @@ +""" +Microsoft Teams Connector + +A module for interacting with Microsoft Teams Graph API to retrieve teams, channels, and message history. + +Supports OAuth-based authentication with token refresh. +""" + +import logging +from datetime import datetime +from typing import Any + +import httpx +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.config import config +from app.db import SearchSourceConnector +from app.routes.teams_add_connector_route import refresh_teams_token +from app.schemas.teams_auth_credentials import TeamsAuthCredentialsBase +from app.utils.oauth_security import TokenEncryption + +logger = logging.getLogger(__name__) + + +class TeamsConnector: + """Class for retrieving teams, channels, and message history from Microsoft Teams.""" + + # Microsoft Graph API endpoints + GRAPH_API_BASE = "https://graph.microsoft.com/v1.0" + + def __init__( + self, + access_token: str | None = None, + session: AsyncSession | None = None, + connector_id: int | None = None, + credentials: TeamsAuthCredentialsBase | None = None, + ): + """ + Initialize the TeamsConnector with an access token or OAuth credentials. + + Args: + access_token: Microsoft Graph API access token (optional, for backward compatibility) + session: Database session for token refresh (optional) + connector_id: Connector ID for token refresh (optional) + credentials: Teams OAuth credentials (optional, will be loaded from DB if not provided) + """ + self._session = session + self._connector_id = connector_id + self._credentials = credentials + self._access_token = access_token + + async def _get_valid_token(self) -> str: + """ + Get valid Microsoft Teams access token, refreshing if needed. 
+ + Returns: + Valid access token + + Raises: + ValueError: If credentials are missing or invalid + Exception: If token refresh fails + """ + # If we have a direct token (backward compatibility), use it + if ( + self._access_token + and self._session is None + and self._connector_id is None + and self._credentials is None + ): + return self._access_token + + # Load credentials from DB if not provided + if self._credentials is None: + if not self._session or not self._connector_id: + raise ValueError( + "Cannot load credentials: session and connector_id required" + ) + + result = await self._session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == self._connector_id + ) + ) + connector = result.scalars().first() + + if not connector: + raise ValueError(f"Connector {self._connector_id} not found") + + config_data = connector.config.copy() + + # Decrypt credentials if they are encrypted + token_encrypted = config_data.get("_token_encrypted", False) + if token_encrypted and config.SECRET_KEY: + try: + token_encryption = TokenEncryption(config.SECRET_KEY) + + # Decrypt sensitive fields + if config_data.get("access_token"): + config_data["access_token"] = token_encryption.decrypt_token( + config_data["access_token"] + ) + if config_data.get("refresh_token"): + config_data["refresh_token"] = token_encryption.decrypt_token( + config_data["refresh_token"] + ) + + logger.info( + "Decrypted Teams credentials for connector %s", + self._connector_id, + ) + except Exception as e: + logger.error( + "Failed to decrypt Teams credentials for connector %s: %s", + self._connector_id, + str(e), + ) + raise ValueError( + f"Failed to decrypt Teams credentials: {e!s}" + ) from e + + try: + self._credentials = TeamsAuthCredentialsBase.from_dict(config_data) + except Exception as e: + raise ValueError(f"Invalid Teams credentials: {e!s}") from e + + # Check if token is expired and refreshable + if self._credentials.is_expired and 
self._credentials.is_refreshable: + try: + logger.info( + "Teams token expired for connector %s, refreshing...", + self._connector_id, + ) + + # Get connector for refresh + result = await self._session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == self._connector_id + ) + ) + connector = result.scalars().first() + + if not connector: + raise RuntimeError( + f"Connector {self._connector_id} not found; cannot refresh token." + ) + + # Refresh token + connector = await refresh_teams_token(self._session, connector) + + # Reload credentials after refresh + config_data = connector.config.copy() + token_encrypted = config_data.get("_token_encrypted", False) + if token_encrypted and config.SECRET_KEY: + token_encryption = TokenEncryption(config.SECRET_KEY) + if config_data.get("access_token"): + config_data["access_token"] = token_encryption.decrypt_token( + config_data["access_token"] + ) + if config_data.get("refresh_token"): + config_data["refresh_token"] = token_encryption.decrypt_token( + config_data["refresh_token"] + ) + + self._credentials = TeamsAuthCredentialsBase.from_dict(config_data) + + logger.info( + "Successfully refreshed Teams token for connector %s", + self._connector_id, + ) + except Exception as e: + logger.error( + "Failed to refresh Teams token for connector %s: %s", + self._connector_id, + str(e), + ) + raise ValueError( + f"Failed to refresh Teams OAuth credentials: {e!s}" + ) from e + + return self._credentials.access_token + + async def get_joined_teams(self) -> list[dict[str, Any]]: + """ + Get list of all teams the user is a member of. + + Returns: + List of team objects with id, display_name, etc. 
+ """ + access_token = await self._get_valid_token() + + async with httpx.AsyncClient() as client: + response = await client.get( + f"{self.GRAPH_API_BASE}/me/joinedTeams", + headers={"Authorization": f"Bearer {access_token}"}, + timeout=30.0, + ) + + if response.status_code != 200: + raise ValueError( + f"Failed to get joined teams: {response.status_code} - {response.text}" + ) + + data = response.json() + return data.get("value", []) + + async def get_team_channels(self, team_id: str) -> list[dict[str, Any]]: + """ + Get list of all channels in a team. + + Args: + team_id: The team ID + + Returns: + List of channel objects + """ + access_token = await self._get_valid_token() + + async with httpx.AsyncClient() as client: + response = await client.get( + f"{self.GRAPH_API_BASE}/teams/{team_id}/channels", + headers={"Authorization": f"Bearer {access_token}"}, + timeout=30.0, + ) + + if response.status_code != 200: + raise ValueError( + f"Failed to get channels for team {team_id}: {response.status_code} - {response.text}" + ) + + data = response.json() + return data.get("value", []) + + async def get_channel_messages( + self, + team_id: str, + channel_id: str, + start_date: datetime | None = None, + end_date: datetime | None = None, + ) -> list[dict[str, Any]]: + """ + Get messages from a specific channel with optional date filtering. 
+ + Args: + team_id: The team ID + channel_id: The channel ID + start_date: Optional start date for filtering messages + end_date: Optional end date for filtering messages + + Returns: + List of message objects + """ + access_token = await self._get_valid_token() + + async with httpx.AsyncClient() as client: + url = f"{self.GRAPH_API_BASE}/teams/{team_id}/channels/{channel_id}/messages" + + # Build query parameters for date filtering if needed + params = {} + if start_date or end_date: + filter_parts = [] + if start_date: + filter_parts.append( + f"createdDateTime ge {start_date.strftime('%Y-%m-%dT%H:%M:%SZ')}" + ) + if end_date: + filter_parts.append( + f"createdDateTime le {end_date.strftime('%Y-%m-%dT%H:%M:%SZ')}" + ) + if filter_parts: + params["$filter"] = " and ".join(filter_parts) + + response = await client.get( + url, + headers={"Authorization": f"Bearer {access_token}"}, + params=params, + timeout=30.0, + ) + + if response.status_code != 200: + raise ValueError( + f"Failed to get messages from channel {channel_id}: {response.status_code} - {response.text}" + ) + + data = response.json() + return data.get("value", []) + + async def get_message_replies( + self, team_id: str, channel_id: str, message_id: str + ) -> list[dict[str, Any]]: + """ + Get replies to a specific message. 
+ + Args: + team_id: The team ID + channel_id: The channel ID + message_id: The message ID + + Returns: + List of reply message objects + """ + access_token = await self._get_valid_token() + + async with httpx.AsyncClient() as client: + url = f"{self.GRAPH_API_BASE}/teams/{team_id}/channels/{channel_id}/messages/{message_id}/replies" + + response = await client.get( + url, + headers={"Authorization": f"Bearer {access_token}"}, + timeout=30.0, + ) + + if response.status_code != 200: + logger.warning( + "Failed to get replies for message %s: %s - %s", + message_id, + response.status_code, + response.text, + ) + return [] + + data = response.json() + return data.get("value", []) diff --git a/surfsense_backend/app/connectors/teams_history.py b/surfsense_backend/app/connectors/teams_history.py new file mode 100644 index 000000000..314ee6304 --- /dev/null +++ b/surfsense_backend/app/connectors/teams_history.py @@ -0,0 +1,254 @@ +""" +Microsoft Teams History Module + +A module for retrieving conversation history from Microsoft Teams channels. +Allows fetching team lists, channel lists, and message history with date range filtering. +""" + +import logging +from datetime import datetime +from typing import Any + +from sqlalchemy.ext.asyncio import AsyncSession + +from app.connectors.teams_connector import TeamsConnector +from app.schemas.teams_auth_credentials import TeamsAuthCredentialsBase + +logger = logging.getLogger(__name__) + + +class TeamsHistory: + """Class for retrieving conversation history from Microsoft Teams channels.""" + + def __init__( + self, + access_token: str | None = None, + session: AsyncSession | None = None, + connector_id: int | None = None, + credentials: TeamsAuthCredentialsBase | None = None, + ): + """ + Initialize the TeamsHistory class. 
+ + Args: + access_token: Microsoft Graph API access token (optional, for backward compatibility) + session: Database session for token refresh (optional) + connector_id: Connector ID for token refresh (optional) + credentials: Teams OAuth credentials (optional, will be loaded from DB if not provided) + """ + self.connector = TeamsConnector( + access_token=access_token, + session=session, + connector_id=connector_id, + credentials=credentials, + ) + + async def get_all_teams(self) -> list[dict[str, Any]]: + """ + Get list of all teams the user has access to. + + Returns: + List of team objects containing team metadata. + """ + try: + teams = await self.connector.get_joined_teams() + logger.info("Retrieved %s teams", len(teams)) + return teams + except Exception as e: + logger.error("Error fetching teams: %s", str(e)) + raise + + async def get_channels_for_team(self, team_id: str) -> list[dict[str, Any]]: + """ + Get list of all channels in a specific team. + + Args: + team_id: The ID of the team + + Returns: + List of channel objects containing channel metadata. + """ + try: + channels = await self.connector.get_team_channels(team_id) + logger.info("Retrieved %s channels for team %s", len(channels), team_id) + return channels + except Exception as e: + logger.error("Error fetching channels for team %s: %s", team_id, str(e)) + raise + + async def get_messages_from_channel( + self, + team_id: str, + channel_id: str, + start_date: datetime | None = None, + end_date: datetime | None = None, + include_replies: bool = True, + ) -> list[dict[str, Any]]: + """ + Get messages from a specific channel with optional date filtering. + + Args: + team_id: The ID of the team + channel_id: The ID of the channel + start_date: Optional start date for filtering messages + end_date: Optional end date for filtering messages + include_replies: Whether to include reply messages (default: True) + + Returns: + List of message objects with content and metadata. 
+ """ + try: + messages = await self.connector.get_channel_messages( + team_id, channel_id, start_date, end_date + ) + + logger.info( + "Retrieved %s messages from channel %s in team %s", + len(messages), + channel_id, + team_id, + ) + + # Fetch replies if requested + if include_replies: + all_messages = [] + for message in messages: + all_messages.append(message) + # Get replies for this message + try: + replies = await self.connector.get_message_replies( + team_id, channel_id, message.get("id") + ) + all_messages.extend(replies) + except Exception: + logger.warning( + "Failed to get replies for message %s", + message.get("id"), + exc_info=True, + ) + # Continue without replies for this message + + logger.info( + "Total messages including replies: %s for channel %s", + len(all_messages), + channel_id, + ) + return all_messages + + return messages + + except Exception as e: + logger.error( + "Error fetching messages from channel %s in team %s: %s", + channel_id, + team_id, + str(e), + ) + raise + + async def get_all_messages_from_team( + self, + team_id: str, + start_date: datetime | None = None, + end_date: datetime | None = None, + include_replies: bool = True, + ) -> dict[str, list[dict[str, Any]]]: + """ + Get all messages from all channels in a team. + + Args: + team_id: The ID of the team + start_date: Optional start date for filtering messages + end_date: Optional end date for filtering messages + include_replies: Whether to include reply messages (default: True) + + Returns: + Dictionary mapping channel IDs to lists of messages. 
+ """ + try: + channels = await self.get_channels_for_team(team_id) + all_channel_messages = {} + + for channel in channels: + channel_id = channel.get("id") + channel_name = channel.get("displayName", "Unknown") + + try: + messages = await self.get_messages_from_channel( + team_id, channel_id, start_date, end_date, include_replies + ) + all_channel_messages[channel_id] = messages + logger.info( + "Fetched %s messages from channel '%s' (%s)", + len(messages), + channel_name, + channel_id, + ) + except Exception: + logger.error( + "Failed to fetch messages from channel '%s' (%s)", + channel_name, + channel_id, + exc_info=True, + ) + all_channel_messages[channel_id] = [] + + return all_channel_messages + + except Exception as e: + logger.error("Error fetching messages from team %s: %s", team_id, str(e)) + raise + + async def get_all_messages( + self, + start_date: datetime | None = None, + end_date: datetime | None = None, + include_replies: bool = True, + ) -> dict[str, dict[str, list[dict[str, Any]]]]: + """ + Get all messages from all teams and channels the user has access to. + + Args: + start_date: Optional start date for filtering messages + end_date: Optional end date for filtering messages + include_replies: Whether to include reply messages (default: True) + + Returns: + Nested dictionary: team_id -> channel_id -> list of messages. 
+ """ + try: + teams = await self.get_all_teams() + all_messages = {} + + for team in teams: + team_id = team.get("id") + team_name = team.get("displayName", "Unknown") + + try: + team_messages = await self.get_all_messages_from_team( + team_id, start_date, end_date, include_replies + ) + all_messages[team_id] = team_messages + total_messages = sum( + len(messages) for messages in team_messages.values() + ) + logger.info( + "Fetched %s total messages from team '%s' (%s)", + total_messages, + team_name, + team_id, + ) + except Exception: + logger.error( + "Failed to fetch messages from team '%s' (%s)", + team_name, + team_id, + exc_info=True, + ) + all_messages[team_id] = {} + + return all_messages + + except Exception as e: + logger.error("Error fetching all messages: %s", str(e)) + raise diff --git a/surfsense_backend/app/db.py b/surfsense_backend/app/db.py index fbd53bd06..d54254f9c 100644 --- a/surfsense_backend/app/db.py +++ b/surfsense_backend/app/db.py @@ -36,6 +36,7 @@ class DocumentType(str, Enum): CRAWLED_URL = "CRAWLED_URL" FILE = "FILE" SLACK_CONNECTOR = "SLACK_CONNECTOR" + TEAMS_CONNECTOR = "TEAMS_CONNECTOR" NOTION_CONNECTOR = "NOTION_CONNECTOR" YOUTUBE_VIDEO = "YOUTUBE_VIDEO" GITHUB_CONNECTOR = "GITHUB_CONNECTOR" @@ -62,6 +63,7 @@ class SearchSourceConnectorType(str, Enum): LINKUP_API = "LINKUP_API" BAIDU_SEARCH_API = "BAIDU_SEARCH_API" # Baidu AI Search API for Chinese web search SLACK_CONNECTOR = "SLACK_CONNECTOR" + TEAMS_CONNECTOR = "TEAMS_CONNECTOR" NOTION_CONNECTOR = "NOTION_CONNECTOR" GITHUB_CONNECTOR = "GITHUB_CONNECTOR" LINEAR_CONNECTOR = "LINEAR_CONNECTOR" diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py index 47d540e7d..b4e94c732 100644 --- a/surfsense_backend/app/routes/__init__.py +++ b/surfsense_backend/app/routes/__init__.py @@ -31,6 +31,7 @@ from .rbac_routes import router as rbac_router from .search_source_connectors_routes import router as search_source_connectors_router from 
.search_spaces_routes import router as search_spaces_router from .slack_add_connector_route import router as slack_add_connector_router +from .teams_add_connector_route import router as teams_add_connector_router router = APIRouter() @@ -50,6 +51,7 @@ router.include_router(linear_add_connector_router) router.include_router(luma_add_connector_router) router.include_router(notion_add_connector_router) router.include_router(slack_add_connector_router) +router.include_router(teams_add_connector_router) router.include_router(discord_add_connector_router) router.include_router(jira_add_connector_router) router.include_router(confluence_add_connector_router) diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py index 58a50a6f8..337e1af85 100644 --- a/surfsense_backend/app/routes/search_source_connectors_routes.py +++ b/surfsense_backend/app/routes/search_source_connectors_routes.py @@ -1188,6 +1188,69 @@ async def run_discord_indexing( logger.error(f"Error in background Discord indexing task: {e!s}") +async def run_teams_indexing_with_new_session( + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str, + end_date: str, +): + """ + Create a new session and run the Microsoft Teams indexing task. + This prevents session leaks by creating a dedicated session for the background task. + """ + async with async_session_maker() as session: + await run_teams_indexing( + session, connector_id, search_space_id, user_id, start_date, end_date + ) + + +async def run_teams_indexing( + session: AsyncSession, + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str, + end_date: str, +): + """ + Background task to run Microsoft Teams indexing. 
+ Args: + session: Database session + connector_id: ID of the Teams connector + search_space_id: ID of the search space + user_id: ID of the user + start_date: Start date for indexing + end_date: End date for indexing + """ + try: + from app.tasks.connector_indexers.teams_indexer import index_teams_messages + + # Index Teams messages without updating last_indexed_at (we'll do it separately) + documents_processed, error_or_warning = await index_teams_messages( + session=session, + connector_id=connector_id, + search_space_id=search_space_id, + user_id=user_id, + start_date=start_date, + end_date=end_date, + update_last_indexed=False, # Don't update timestamp in the indexing function + ) + + # Only update last_indexed_at if indexing was successful (either new docs or updated docs) + if documents_processed > 0: + await update_connector_last_indexed(session, connector_id) + logger.info( + f"Teams indexing completed successfully: {documents_processed} documents processed" + ) + else: + logger.error( + f"Teams indexing failed or no documents processed: {error_or_warning}" + ) + except Exception as e: + logger.error(f"Error in background Teams indexing task: {e!s}") + + # Add new helper functions for Jira indexing async def run_jira_indexing_with_new_session( connector_id: int, diff --git a/surfsense_backend/app/routes/teams_add_connector_route.py b/surfsense_backend/app/routes/teams_add_connector_route.py new file mode 100644 index 000000000..a84db47c9 --- /dev/null +++ b/surfsense_backend/app/routes/teams_add_connector_route.py @@ -0,0 +1,473 @@ +""" +Microsoft Teams Connector OAuth Routes. + +Handles OAuth 2.0 authentication flow for Microsoft Teams connector using Microsoft Graph API. 
+""" + +import logging +from datetime import UTC, datetime, timedelta +from uuid import UUID + +import httpx +from fastapi import APIRouter, Depends, HTTPException +from fastapi.responses import RedirectResponse +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncSession + +from app.config import config +from app.db import ( + SearchSourceConnector, + SearchSourceConnectorType, + User, + get_async_session, +) +from app.schemas.teams_auth_credentials import TeamsAuthCredentialsBase +from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + extract_identifier_from_credentials, + generate_unique_connector_name, +) +from app.utils.oauth_security import OAuthStateManager, TokenEncryption + +logger = logging.getLogger(__name__) + +router = APIRouter() + +# Microsoft identity platform endpoints +AUTHORIZATION_URL = "https://login.microsoftonline.com/common/oauth2/v2.0/authorize" +TOKEN_URL = "https://login.microsoftonline.com/common/oauth2/v2.0/token" + +# OAuth scopes for Microsoft Teams (Graph API) +SCOPES = [ + "offline_access", # Required for refresh tokens + "User.Read", # Read user profile + "Team.ReadBasic.All", # Read basic team information + "Channel.ReadBasic.All", # Read basic channel information + "ChannelMessage.Read.All", # Read messages in channels +] + +# Initialize security utilities +_state_manager = None +_token_encryption = None + + +def get_state_manager() -> OAuthStateManager: + """Get or create OAuth state manager instance.""" + global _state_manager + if _state_manager is None: + if not config.SECRET_KEY: + raise ValueError("SECRET_KEY must be set for OAuth security") + _state_manager = OAuthStateManager(config.SECRET_KEY) + return _state_manager + + +def get_token_encryption() -> TokenEncryption: + """Get or create token encryption instance.""" + global _token_encryption + if _token_encryption is None: + if not config.SECRET_KEY: + raise ValueError("SECRET_KEY 
must be set for token encryption") + _token_encryption = TokenEncryption(config.SECRET_KEY) + return _token_encryption + + +@router.get("/auth/teams/connector/add") +async def connect_teams(space_id: int, user: User = Depends(current_active_user)): + """ + Initiate Microsoft Teams OAuth flow. + + Args: + space_id: The search space ID + user: Current authenticated user + + Returns: + Authorization URL for redirect + """ + try: + if not space_id: + raise HTTPException(status_code=400, detail="space_id is required") + + if not config.TEAMS_CLIENT_ID: + raise HTTPException( + status_code=500, detail="Microsoft Teams OAuth not configured." + ) + + if not config.SECRET_KEY: + raise HTTPException( + status_code=500, detail="SECRET_KEY not configured for OAuth security." + ) + + # Generate secure state parameter with HMAC signature + state_manager = get_state_manager() + state_encoded = state_manager.generate_secure_state(space_id, user.id) + + # Build authorization URL + from urllib.parse import urlencode + + auth_params = { + "client_id": config.TEAMS_CLIENT_ID, + "response_type": "code", + "redirect_uri": config.TEAMS_REDIRECT_URI, + "response_mode": "query", + "scope": " ".join(SCOPES), + "state": state_encoded, + } + + auth_url = f"{AUTHORIZATION_URL}?{urlencode(auth_params)}" + + logger.info( + "Generated Microsoft Teams OAuth URL for user %s, space %s", + user.id, + space_id, + ) + return {"auth_url": auth_url} + + except Exception as e: + logger.error( + "Failed to initiate Microsoft Teams OAuth: %s", str(e), exc_info=True + ) + raise HTTPException( + status_code=500, + detail=f"Failed to initiate Microsoft Teams OAuth: {e!s}", + ) from e + + +@router.get("/auth/teams/connector/callback") +async def teams_callback( + code: str | None = None, + error: str | None = None, + error_description: str | None = None, + state: str | None = None, + session: AsyncSession = Depends(get_async_session), +): + """ + Handle Microsoft Teams OAuth callback. 
+ + Args: + code: Authorization code from Microsoft (if user granted access) + error: Error code from Microsoft (if user denied access or error occurred) + error_description: Human-readable error description + state: State parameter containing user/space info + session: Database session + + Returns: + Redirect response to frontend + """ + try: + # Handle OAuth errors (e.g., user denied access) + if error: + error_msg = error_description or error + logger.warning("Microsoft Teams OAuth error: %s", error_msg) + redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=teams_auth_failed&message={error_msg}" + return RedirectResponse(url=redirect_url) + + # Validate required parameters + if not code or not state: + raise HTTPException( + status_code=400, detail="Missing required OAuth parameters" + ) + + # Verify and decode state parameter + state_manager = get_state_manager() + try: + data = state_manager.validate_state(state) + space_id = data["space_id"] + user_id = UUID(data["user_id"]) + except (HTTPException, ValueError, KeyError) as e: + logger.error("Invalid OAuth state: %s", str(e)) + redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=invalid_state" + return RedirectResponse(url=redirect_url) + + # Exchange authorization code for access token + token_data = { + "client_id": config.TEAMS_CLIENT_ID, + "client_secret": config.TEAMS_CLIENT_SECRET, + "code": code, + "redirect_uri": config.TEAMS_REDIRECT_URI, + "grant_type": "authorization_code", + } + + async with httpx.AsyncClient() as client: + token_response = await client.post( + TOKEN_URL, + data=token_data, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + timeout=30.0, + ) + + if token_response.status_code != 200: + error_detail = token_response.text + try: + error_json = token_response.json() + error_detail = error_json.get("error_description", error_detail) + except Exception: + pass + raise HTTPException( + status_code=400, detail=f"Token exchange failed: {error_detail}" + ) + 
+ token_json = token_response.json() + + # Extract tokens from response + access_token = token_json.get("access_token") + refresh_token = token_json.get("refresh_token") + + if not access_token: + raise HTTPException( + status_code=400, detail="No access token received from Microsoft" + ) + + # Encrypt sensitive tokens before storing + token_encryption = get_token_encryption() + + # Calculate expiration time (UTC, tz-aware) + expires_at = None + if token_json.get("expires_in"): + now_utc = datetime.now(UTC) + expires_at = now_utc + timedelta(seconds=int(token_json["expires_in"])) + + # Fetch user info from Microsoft Graph API + user_info = {} + tenant_info = {} + try: + async with httpx.AsyncClient() as client: + # Get user profile + user_response = await client.get( + "https://graph.microsoft.com/v1.0/me", + headers={"Authorization": f"Bearer {access_token}"}, + timeout=30.0, + ) + if user_response.status_code == 200: + user_data = user_response.json() + user_info = { + "user_id": user_data.get("id"), + "user_name": user_data.get("displayName"), + "user_email": user_data.get("mail") + or user_data.get("userPrincipalName"), + } + + # Get organization/tenant info + org_response = await client.get( + "https://graph.microsoft.com/v1.0/organization", + headers={"Authorization": f"Bearer {access_token}"}, + timeout=30.0, + ) + if org_response.status_code == 200: + org_data = org_response.json() + if org_data.get("value") and len(org_data["value"]) > 0: + org = org_data["value"][0] + tenant_info = { + "tenant_id": org.get("id"), + "tenant_name": org.get("displayName"), + } + except Exception as e: + logger.warning( + "Failed to fetch user/tenant info from Microsoft Graph: %s", str(e) + ) + + # Store the encrypted tokens and user/tenant info in connector config + connector_config = { + "access_token": token_encryption.encrypt_token(access_token), + "refresh_token": token_encryption.encrypt_token(refresh_token) + if refresh_token + else None, + "token_type": 
token_json.get("token_type", "Bearer"), + "expires_in": token_json.get("expires_in"), + "expires_at": expires_at.isoformat() if expires_at else None, + "scope": token_json.get("scope"), + "tenant_id": tenant_info.get("tenant_id"), + "tenant_name": tenant_info.get("tenant_name"), + "user_id": user_info.get("user_id"), + # Mark that token is encrypted for backward compatibility + "_token_encrypted": True, + } + + # Extract unique identifier from connector credentials + connector_identifier = extract_identifier_from_credentials( + SearchSourceConnectorType.TEAMS_CONNECTOR, connector_config + ) + + # Check for duplicate connector (same tenant already connected) + is_duplicate = await check_duplicate_connector( + session, + SearchSourceConnectorType.TEAMS_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + + if is_duplicate: + logger.warning( + "Duplicate Microsoft Teams connector for user %s, space %s, tenant %s", + user_id, + space_id, + tenant_info.get("tenant_name"), + ) + redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=duplicate_connector&message=This Microsoft Teams tenant is already connected to this space" + return RedirectResponse(url=redirect_url) + + # Generate unique connector name + connector_name = await generate_unique_connector_name( + session, + SearchSourceConnectorType.TEAMS_CONNECTOR, + space_id, + connector_config, + ) + + # Create new connector + new_connector = SearchSourceConnector( + connector_type=SearchSourceConnectorType.TEAMS_CONNECTOR, + config=connector_config, + is_enabled=True, + search_space_id=space_id, + user_id=user_id, + connector_name=connector_name, + ) + + try: + session.add(new_connector) + await session.commit() + await session.refresh(new_connector) + + logger.info( + "Successfully created Microsoft Teams connector %s for user %s", + new_connector.id, + user_id, + ) + + # Redirect to frontend with success + redirect_url = 
f"{config.NEXT_FRONTEND_URL}/dashboard?success=teams_connected&connector_id={new_connector.id}" + return RedirectResponse(url=redirect_url) + + except IntegrityError as e: + await session.rollback() + logger.error("Database integrity error creating Teams connector: %s", str(e)) + redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=connector_creation_failed" + return RedirectResponse(url=redirect_url) + + except HTTPException: + raise + except (IntegrityError, ValueError) as e: + logger.error("Teams OAuth callback error: %s", str(e), exc_info=True) + redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=teams_auth_error" + return RedirectResponse(url=redirect_url) + + +async def refresh_teams_token( + session: AsyncSession, connector: SearchSourceConnector +) -> SearchSourceConnector: + """ + Refresh Microsoft Teams OAuth tokens. + + Args: + session: Database session + connector: The connector to refresh + + Returns: + Updated connector with refreshed tokens + + Raises: + HTTPException: If token refresh fails + """ + logger.info( + "Refreshing Microsoft Teams OAuth tokens for connector %s", connector.id + ) + + credentials = TeamsAuthCredentialsBase.from_dict(connector.config) + + # Decrypt tokens if they are encrypted + token_encryption = get_token_encryption() + is_encrypted = connector.config.get("_token_encrypted", False) + refresh_token = credentials.refresh_token + + if is_encrypted and refresh_token: + try: + refresh_token = token_encryption.decrypt_token(refresh_token) + except Exception as e: + logger.error("Failed to decrypt refresh token: %s", str(e)) + raise HTTPException( + status_code=500, detail="Failed to decrypt stored refresh token" + ) from e + + if not refresh_token: + raise HTTPException( + status_code=400, + detail=f"No refresh token available for connector {connector.id}", + ) + + # Microsoft uses oauth2/v2.0/token for token refresh + refresh_data = { + "client_id": config.TEAMS_CLIENT_ID, + "client_secret": 
config.TEAMS_CLIENT_SECRET, + "grant_type": "refresh_token", + "refresh_token": refresh_token, + "scope": " ".join(SCOPES), + } + + async with httpx.AsyncClient() as client: + token_response = await client.post( + TOKEN_URL, + data=refresh_data, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + timeout=30.0, + ) + + if token_response.status_code != 200: + error_detail = token_response.text + try: + error_json = token_response.json() + error_detail = error_json.get("error_description", error_detail) + except Exception: + pass + raise HTTPException( + status_code=400, detail=f"Token refresh failed: {error_detail}" + ) + + token_json = token_response.json() + + # Extract new tokens + access_token = token_json.get("access_token") + new_refresh_token = token_json.get("refresh_token") + + if not access_token: + raise HTTPException( + status_code=400, detail="No access token received from Microsoft refresh" + ) + + # Calculate expiration time (UTC, tz-aware) + expires_at = None + expires_in = token_json.get("expires_in") + if expires_in: + now_utc = datetime.now(UTC) + expires_at = now_utc + timedelta(seconds=int(expires_in)) + + # Update credentials object with encrypted tokens + credentials.access_token = token_encryption.encrypt_token(access_token) + if new_refresh_token: + credentials.refresh_token = token_encryption.encrypt_token(new_refresh_token) + credentials.expires_in = expires_in + credentials.expires_at = expires_at + credentials.scope = token_json.get("scope") + + # Preserve tenant/user info + if not credentials.tenant_id: + credentials.tenant_id = connector.config.get("tenant_id") + if not credentials.tenant_name: + credentials.tenant_name = connector.config.get("tenant_name") + if not credentials.user_id: + credentials.user_id = connector.config.get("user_id") + + # Update connector config with encrypted tokens + credentials_dict = credentials.to_dict() + credentials_dict["_token_encrypted"] = True + connector.config = credentials_dict + + 
await session.commit() + await session.refresh(connector) + + logger.info( + "Successfully refreshed Microsoft Teams tokens for connector %s", connector.id + ) + + return connector diff --git a/surfsense_backend/app/schemas/teams_auth_credentials.py b/surfsense_backend/app/schemas/teams_auth_credentials.py new file mode 100644 index 000000000..41688b102 --- /dev/null +++ b/surfsense_backend/app/schemas/teams_auth_credentials.py @@ -0,0 +1,79 @@ +""" +Microsoft Teams OAuth credentials schema. +""" + +from datetime import UTC, datetime + +from pydantic import BaseModel, field_validator + + +class TeamsAuthCredentialsBase(BaseModel): + """Microsoft Teams OAuth credentials.""" + + access_token: str + refresh_token: str | None = None + token_type: str = "Bearer" + expires_in: int | None = None + expires_at: datetime | None = None + scope: str | None = None + tenant_id: str | None = None + tenant_name: str | None = None + user_id: str | None = None + + @property + def is_expired(self) -> bool: + """Check if the credentials have expired.""" + if self.expires_at is None: + return False + return self.expires_at <= datetime.now(UTC) + + @property + def is_refreshable(self) -> bool: + """Check if the credentials can be refreshed.""" + return self.refresh_token is not None + + def to_dict(self) -> dict: + """Convert credentials to dictionary for storage.""" + return { + "access_token": self.access_token, + "refresh_token": self.refresh_token, + "token_type": self.token_type, + "expires_in": self.expires_in, + "expires_at": self.expires_at.isoformat() if self.expires_at else None, + "scope": self.scope, + "tenant_id": self.tenant_id, + "tenant_name": self.tenant_name, + "user_id": self.user_id, + } + + @classmethod + def from_dict(cls, data: dict) -> "TeamsAuthCredentialsBase": + """Create credentials from dictionary.""" + expires_at = None + if data.get("expires_at"): + expires_at = datetime.fromisoformat(data["expires_at"]) + + return cls( + 
access_token=data.get("access_token", ""), + refresh_token=data.get("refresh_token"), + token_type=data.get("token_type", "Bearer"), + expires_in=data.get("expires_in"), + expires_at=expires_at, + scope=data.get("scope"), + tenant_id=data.get("tenant_id"), + tenant_name=data.get("tenant_name"), + user_id=data.get("user_id"), + ) + + @field_validator("expires_at", mode="before") + @classmethod + def ensure_aware_utc(cls, v): + """Ensure datetime is timezone-aware (UTC).""" + if isinstance(v, str): + if v.endswith("Z"): + return datetime.fromisoformat(v.replace("Z", "+00:00")) + dt = datetime.fromisoformat(v) + return dt if dt.tzinfo else dt.replace(tzinfo=UTC) + if isinstance(v, datetime): + return v if v.tzinfo else v.replace(tzinfo=UTC) + return v diff --git a/surfsense_backend/app/services/connector_service.py b/surfsense_backend/app/services/connector_service.py index 4e874729c..832aee4cc 100644 --- a/surfsense_backend/app/services/connector_service.py +++ b/surfsense_backend/app/services/connector_service.py @@ -2269,6 +2269,80 @@ class ConnectorService: return result_object, discord_docs + async def search_teams( + self, + user_query: str, + search_space_id: int, + top_k: int = 20, + start_date: datetime | None = None, + end_date: datetime | None = None, + ) -> tuple: + """ + Search for Microsoft Teams messages and return both the source information and langchain documents. + + Uses combined chunk-level and document-level hybrid search with RRF fusion. 
+ + Args: + user_query: The user's query + search_space_id: The search space ID to search in + top_k: Maximum number of results to return + start_date: Optional start date for filtering documents by updated_at + end_date: Optional end date for filtering documents by updated_at + + Returns: + tuple: (sources_info, langchain_documents) + """ + teams_docs = await self._combined_rrf_search( + query_text=user_query, + search_space_id=search_space_id, + document_type="TEAMS_CONNECTOR", + top_k=top_k, + start_date=start_date, + end_date=end_date, + ) + + # Early return if no results + if not teams_docs: + return { + "id": 53, + "name": "Microsoft Teams", + "type": "TEAMS_CONNECTOR", + "sources": [], + }, [] + + def _title_fn(_doc_info: dict[str, Any], metadata: dict[str, Any]) -> str: + team_name = metadata.get("team_name", "Unknown Team") + channel_name = metadata.get("channel_name", "Unknown Channel") + message_date = metadata.get("start_date", "") + title = f"Teams: {team_name} - {channel_name}" + if message_date: + title += f" ({message_date})" + return title + + def _url_fn(_doc_info: dict[str, Any], metadata: dict[str, Any]) -> str: + team_id = metadata.get("team_id", "") + channel_id = metadata.get("channel_id", "") + if team_id and channel_id: + return f"https://teams.microsoft.com/l/channel/{channel_id}/General?groupId={team_id}" + return "" + + sources_list = self._build_chunk_sources_from_documents( + teams_docs, + title_fn=_title_fn, + url_fn=_url_fn, + description_fn=lambda chunk, _doc_info, _metadata: chunk.get("content", ""), + ) + + # Create result object + result_object = { + "id": 53, + "name": "Microsoft Teams", + "type": "TEAMS_CONNECTOR", + "sources": sources_list, + } + + return result_object, teams_docs + async def search_luma( self, user_query: str, diff --git a/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py b/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py index 3cae1bbdb..1d1cbe361 100644 --- 
a/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py +++ b/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py @@ -564,6 +564,49 @@ async def _index_discord_messages( ) +@celery_app.task(name="index_teams_messages", bind=True) +def index_teams_messages_task( + self, + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str, + end_date: str, +): + """Celery task to index Microsoft Teams messages.""" + import asyncio + + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + try: + loop.run_until_complete( + _index_teams_messages( + connector_id, search_space_id, user_id, start_date, end_date + ) + ) + finally: + loop.close() + + +async def _index_teams_messages( + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str, + end_date: str, +): + """Index Microsoft Teams messages with new session.""" + from app.routes.search_source_connectors_routes import ( + run_teams_indexing, + ) + + async with get_celery_session_maker()() as session: + await run_teams_indexing( + session, connector_id, search_space_id, user_id, start_date, end_date + ) + + @celery_app.task(name="index_luma_events", bind=True) def index_luma_events_task( self, diff --git a/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py b/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py new file mode 100644 index 000000000..4fb4d719d --- /dev/null +++ b/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py @@ -0,0 +1,471 @@ +""" +Microsoft Teams connector indexer. 
+""" + +from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy.ext.asyncio import AsyncSession + +from app.config import config +from app.connectors.teams_history import TeamsHistory +from app.db import Document, DocumentType, SearchSourceConnectorType +from app.services.task_logging_service import TaskLoggingService +from app.utils.document_converters import ( + create_document_chunks, + generate_content_hash, + generate_unique_identifier_hash, +) + +from .base import ( + build_document_metadata_markdown, + calculate_date_range, + check_document_by_unique_identifier, + get_connector_by_id, + get_current_timestamp, + logger, + update_connector_last_indexed, +) + + +async def index_teams_messages( + session: AsyncSession, + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str | None = None, + end_date: str | None = None, + update_last_indexed: bool = True, +) -> tuple[int, str | None]: + """ + Index Microsoft Teams messages from all accessible teams and channels. 
+ + Args: + session: Database session + connector_id: ID of the Teams connector + search_space_id: ID of the search space to store documents in + user_id: ID of the user + start_date: Start date for indexing (YYYY-MM-DD format) + end_date: End date for indexing (YYYY-MM-DD format) + update_last_indexed: Whether to update the last_indexed_at timestamp (default: True) + + Returns: + Tuple containing (number of documents indexed, error message or None) + """ + task_logger = TaskLoggingService(session, search_space_id) + + # Log task start + log_entry = await task_logger.log_task_start( + task_name="teams_messages_indexing", + source="connector_indexing_task", + message=f"Starting Microsoft Teams messages indexing for connector {connector_id}", + metadata={ + "connector_id": connector_id, + "user_id": str(user_id), + "start_date": start_date, + "end_date": end_date, + }, + ) + + try: + # Get the connector + await task_logger.log_task_progress( + log_entry, + f"Retrieving Teams connector {connector_id} from database", + {"stage": "connector_retrieval"}, + ) + + connector = await get_connector_by_id( + session, connector_id, SearchSourceConnectorType.TEAMS_CONNECTOR + ) + + if not connector: + await task_logger.log_task_failure( + log_entry, + f"Connector with ID {connector_id} not found or is not a Teams connector", + "Connector not found", + {"error_type": "ConnectorNotFound"}, + ) + return ( + 0, + f"Connector with ID {connector_id} not found or is not a Teams connector", + ) + + # Initialize Teams client with auto-refresh support + await task_logger.log_task_progress( + log_entry, + f"Initializing Teams client for connector {connector_id}", + {"stage": "client_initialization"}, + ) + + teams_client = TeamsHistory(session=session, connector_id=connector_id) + + # Handle 'undefined' string from frontend (treat as None) + if start_date == "undefined" or start_date == "": + start_date = None + if end_date == "undefined" or end_date == "": + end_date = None + + # 
Calculate date range + await task_logger.log_task_progress( + log_entry, + "Calculating date range for Teams indexing", + { + "stage": "date_calculation", + "provided_start_date": start_date, + "provided_end_date": end_date, + }, + ) + + start_date_str, end_date_str = calculate_date_range( + connector, start_date, end_date, default_days_back=365 + ) + + logger.info( + "Indexing Teams messages from %s to %s", start_date_str, end_date_str + ) + + await task_logger.log_task_progress( + log_entry, + f"Fetching Teams from {start_date_str} to {end_date_str}", + { + "stage": "fetch_teams", + "start_date": start_date_str, + "end_date": end_date_str, + }, + ) + + # Get all teams + try: + teams = await teams_client.get_all_teams() + except Exception as e: + await task_logger.log_task_failure( + log_entry, + f"Failed to get Teams for connector {connector_id}", + str(e), + {"error_type": "TeamsFetchError"}, + ) + return 0, f"Failed to get Teams: {e!s}" + + if not teams: + await task_logger.log_task_success( + log_entry, + f"No Teams found for connector {connector_id}", + {"teams_found": 0}, + ) + return 0, "No Teams found" + + # Track the number of documents indexed + documents_indexed = 0 + documents_skipped = 0 + skipped_channels = [] + + await task_logger.log_task_progress( + log_entry, + f"Starting to process {len(teams)} Teams", + {"stage": "process_teams", "total_teams": len(teams)}, + ) + + # Convert date strings to datetime objects for filtering + from datetime import datetime + + start_datetime = None + end_datetime = None + if start_date_str: + start_datetime = datetime.strptime(start_date_str, "%Y-%m-%d") + if end_date_str: + end_datetime = datetime.strptime(end_date_str, "%Y-%m-%d") + + # Process each team + for team in teams: + team_id = team.get("id") + team_name = team.get("displayName", "Unknown Team") + + try: + # Get channels for this team + channels = await teams_client.get_channels_for_team(team_id) + + if not channels: + logger.info("No channels found in 
team %s", team_name) + continue + + # Process each channel in the team + for channel in channels: + channel_id = channel.get("id") + channel_name = channel.get("displayName", "Unknown Channel") + + try: + # Get messages for this channel + messages = await teams_client.get_messages_from_channel( + team_id, + channel_id, + start_datetime, + end_datetime, + include_replies=True, + ) + + if not messages: + logger.info( + "No messages found in channel %s of team %s for the specified date range.", + channel_name, + team_name, + ) + documents_skipped += 1 + continue + + # Process each message + for msg in messages: + # Skip deleted messages or empty content + if msg.get("deletedDateTime"): + continue + + # Extract message details + message_id = msg.get("id", "") + created_datetime = msg.get("createdDateTime", "") + from_user = msg.get("from", {}) + user_name = from_user.get("user", {}).get( + "displayName", "Unknown User" + ) + user_email = from_user.get("user", {}).get( + "userPrincipalName", "Unknown Email" + ) + + # Extract message content + body = msg.get("body", {}) + content_type = body.get("contentType", "text") + msg_text = body.get("content", "") + + # Skip empty messages + if not msg_text or msg_text.strip() == "": + continue + + # Format document metadata + metadata_sections = [ + ( + "METADATA", + [ + f"TEAM_NAME: {team_name}", + f"TEAM_ID: {team_id}", + f"CHANNEL_NAME: {channel_name}", + f"CHANNEL_ID: {channel_id}", + f"MESSAGE_TIMESTAMP: {created_datetime}", + f"MESSAGE_USER_NAME: {user_name}", + f"MESSAGE_USER_EMAIL: {user_email}", + f"CONTENT_TYPE: {content_type}", + ], + ), + ( + "CONTENT", + [ + f"FORMAT: {content_type}", + "TEXT_START", + msg_text, + "TEXT_END", + ], + ), + ] + + # Build the document string + combined_document_string = build_document_metadata_markdown( + metadata_sections + ) + + # Generate unique identifier hash for this Teams message + unique_identifier = f"{team_id}_{channel_id}_{message_id}" + unique_identifier_hash = 
generate_unique_identifier_hash( + DocumentType.TEAMS_CONNECTOR, + unique_identifier, + search_space_id, + ) + + # Generate content hash + content_hash = generate_content_hash( + combined_document_string, search_space_id + ) + + # Check if document with this unique identifier already exists + existing_document = ( + await check_document_by_unique_identifier( + session, unique_identifier_hash + ) + ) + + if existing_document: + # Document exists - check if content has changed + if existing_document.content_hash == content_hash: + logger.info( + "Document for Teams message %s in channel %s unchanged. Skipping.", + message_id, + channel_name, + ) + documents_skipped += 1 + continue + else: + # Content has changed - update the existing document + logger.info( + "Content changed for Teams message %s in channel %s. Updating document.", + message_id, + channel_name, + ) + + # Update chunks and embedding + chunks = await create_document_chunks( + combined_document_string + ) + doc_embedding = config.embedding_model_instance.embed( + combined_document_string + ) + + # Update existing document + existing_document.content = combined_document_string + existing_document.content_hash = content_hash + existing_document.embedding = doc_embedding + existing_document.document_metadata = { + "team_name": team_name, + "team_id": team_id, + "channel_name": channel_name, + "channel_id": channel_id, + "start_date": start_date_str, + "end_date": end_date_str, + "message_count": len(messages), + "indexed_at": datetime.now().strftime( + "%Y-%m-%d %H:%M:%S" + ), + } + + # Delete old chunks and add new ones + existing_document.chunks = chunks + existing_document.updated_at = get_current_timestamp() + + documents_indexed += 1 + logger.info( + "Successfully updated Teams message %s", message_id + ) + continue + + # Document doesn't exist - create new one + # Process chunks + chunks = await create_document_chunks( + combined_document_string + ) + doc_embedding = 
config.embedding_model_instance.embed( + combined_document_string + ) + + # Create and store new document + document = Document( + search_space_id=search_space_id, + title=f"Teams - {team_name} - {channel_name}", + document_type=DocumentType.TEAMS_CONNECTOR, + document_metadata={ + "team_name": team_name, + "team_id": team_id, + "channel_name": channel_name, + "channel_id": channel_id, + "start_date": start_date_str, + "end_date": end_date_str, + "message_count": len(messages), + "indexed_at": datetime.now().strftime( + "%Y-%m-%d %H:%M:%S" + ), + }, + content=combined_document_string, + embedding=doc_embedding, + chunks=chunks, + content_hash=content_hash, + unique_identifier_hash=unique_identifier_hash, + updated_at=get_current_timestamp(), + ) + + session.add(document) + documents_indexed += 1 + + # Batch commit every 10 documents + if documents_indexed % 10 == 0: + logger.info( + "Committing batch: %s Teams messages processed so far", + documents_indexed, + ) + await session.commit() + + logger.info( + "Successfully indexed channel %s in team %s with %s messages", + channel_name, + team_name, + len(messages), + ) + + except Exception as e: + logger.error( + "Error processing channel %s in team %s: %s", + channel_name, + team_name, + str(e), + ) + skipped_channels.append( + f"{team_name}/{channel_name} (processing error)" + ) + documents_skipped += 1 + continue + + except Exception as e: + logger.error("Error processing team %s: %s", team_name, str(e)) + continue + + # Update the last_indexed_at timestamp for the connector only if requested + # and if we successfully indexed at least one document + total_processed = documents_indexed + if total_processed > 0: + await update_connector_last_indexed(session, connector, update_last_indexed) + + # Final commit for any remaining documents not yet committed in batches + logger.info( + "Final commit: Total %s Teams messages processed", documents_indexed + ) + await session.commit() + + # Prepare result message + 
result_message = None + if skipped_channels: + result_message = f"Processed {total_processed} messages. Skipped {len(skipped_channels)} channels: {', '.join(skipped_channels)}" + else: + result_message = f"Processed {total_processed} messages." + + # Log success + await task_logger.log_task_success( + log_entry, + f"Successfully completed Teams indexing for connector {connector_id}", + { + "messages_processed": total_processed, + "documents_indexed": documents_indexed, + "documents_skipped": documents_skipped, + "skipped_channels_count": len(skipped_channels), + "result_message": result_message, + }, + ) + + logger.info( + "Teams indexing completed: %s new messages, %s skipped", + documents_indexed, + documents_skipped, + ) + return total_processed, result_message + + except SQLAlchemyError as db_error: + await session.rollback() + await task_logger.log_task_failure( + log_entry, + f"Database error during Teams indexing for connector {connector_id}", + str(db_error), + {"error_type": "SQLAlchemyError"}, + ) + logger.error("Database error: %s", str(db_error)) + return 0, f"Database error: {db_error!s}" + except Exception as e: + await session.rollback() + await task_logger.log_task_failure( + log_entry, + f"Failed to index Teams messages for connector {connector_id}", + str(e), + {"error_type": type(e).__name__}, + ) + logger.error("Failed to index Teams messages: %s", str(e)) + return 0, f"Failed to index Teams messages: {e!s}" diff --git a/surfsense_backend/app/utils/connector_naming.py b/surfsense_backend/app/utils/connector_naming.py index f9f1fdd21..731f419d6 100644 --- a/surfsense_backend/app/utils/connector_naming.py +++ b/surfsense_backend/app/utils/connector_naming.py @@ -20,6 +20,7 @@ BASE_NAME_FOR_TYPE = { SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR: "Google Drive", SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR: "Google Calendar", SearchSourceConnectorType.SLACK_CONNECTOR: "Slack", + SearchSourceConnectorType.TEAMS_CONNECTOR: "Microsoft Teams", 
SearchSourceConnectorType.NOTION_CONNECTOR: "Notion", SearchSourceConnectorType.LINEAR_CONNECTOR: "Linear", SearchSourceConnectorType.JIRA_CONNECTOR: "Jira", @@ -53,6 +54,9 @@ def extract_identifier_from_credentials( if connector_type == SearchSourceConnectorType.SLACK_CONNECTOR: return credentials.get("team_name") + if connector_type == SearchSourceConnectorType.TEAMS_CONNECTOR: + return credentials.get("tenant_name") + if connector_type == SearchSourceConnectorType.NOTION_CONNECTOR: return credentials.get("workspace_name") diff --git a/surfsense_backend/app/utils/periodic_scheduler.py b/surfsense_backend/app/utils/periodic_scheduler.py index c95f407a4..219641933 100644 --- a/surfsense_backend/app/utils/periodic_scheduler.py +++ b/surfsense_backend/app/utils/periodic_scheduler.py @@ -19,6 +19,7 @@ logger = logging.getLogger(__name__) # Mapping of connector types to their corresponding Celery task names CONNECTOR_TASK_MAP = { SearchSourceConnectorType.SLACK_CONNECTOR: "index_slack_messages", + SearchSourceConnectorType.TEAMS_CONNECTOR: "index_teams_messages", SearchSourceConnectorType.NOTION_CONNECTOR: "index_notion_pages", SearchSourceConnectorType.GITHUB_CONNECTOR: "index_github_repos", SearchSourceConnectorType.LINEAR_CONNECTOR: "index_linear_issues", diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx new file mode 100644 index 000000000..ac08a6c03 --- /dev/null +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx @@ -0,0 +1,29 @@ +"use client"; + +import { Info } from "lucide-react"; +import type { FC } from "react"; +import type { ConnectorConfigProps } from "../index"; + +export interface TeamsConfigProps extends ConnectorConfigProps { + onNameChange?: (name: string) => void; +} + +export const TeamsConfig: FC = () => { + return ( +
+		<div className="rounded-md border border-border bg-muted/50 p-3">
+			<div className="flex items-start gap-2">
+				<Info className="mt-0.5 h-4 w-4 shrink-0 text-muted-foreground" />
+				<div className="space-y-1">
+					<h4 className="text-sm font-medium">
+						Microsoft Teams Access
+					</h4>
+					<p className="text-xs text-muted-foreground">
+						SurfSense will index messages from Teams channels that you have access to. The app can
+						only read messages from teams and channels where you are a member. Make sure you're a
+						member of the teams you want to index before connecting.
+					</p>
+				</div>
+			</div>
+		</div>
+ ); +}; diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/index.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/index.tsx index 2575b3a69..267e85115 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/index.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/index.tsx @@ -17,6 +17,7 @@ import { LumaConfig } from "./components/luma-config"; import { SearxngConfig } from "./components/searxng-config"; import { SlackConfig } from "./components/slack-config"; import { TavilyApiConfig } from "./components/tavily-api-config"; +import { TeamsConfig } from "./components/teams-config"; import { WebcrawlerConfig } from "./components/webcrawler-config"; export interface ConnectorConfigProps { @@ -52,6 +53,8 @@ export function getConnectorConfigComponent( return SlackConfig; case "DISCORD_CONNECTOR": return DiscordConfig; + case "TEAMS_CONNECTOR": + return TeamsConfig; case "CONFLUENCE_CONNECTOR": return ConfluenceConfig; case "BOOKSTACK_CONNECTOR": diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 287bc30f4..23982e6f3 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -51,6 +51,13 @@ export const OAUTH_CONNECTORS = [ connectorType: EnumConnectorName.SLACK_CONNECTOR, authEndpoint: "/api/v1/auth/slack/connector/add/", }, + { + id: "teams-connector", + title: "Microsoft Teams", + description: "Search Teams messages", + connectorType: EnumConnectorName.TEAMS_CONNECTOR, + authEndpoint: "/api/v1/auth/teams/connector/add/", + }, { id: "discord-connector", title: "Discord", diff --git 
a/surfsense_web/components/assistant-ui/connector-popup/utils/connector-document-mapping.ts b/surfsense_web/components/assistant-ui/connector-popup/utils/connector-document-mapping.ts index a0b271eb6..433a51e8c 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/utils/connector-document-mapping.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/utils/connector-document-mapping.ts @@ -11,6 +11,7 @@ export const CONNECTOR_TO_DOCUMENT_TYPE: Record = { // Direct mappings (connector type matches document type) SLACK_CONNECTOR: "SLACK_CONNECTOR", + TEAMS_CONNECTOR: "TEAMS_CONNECTOR", NOTION_CONNECTOR: "NOTION_CONNECTOR", GITHUB_CONNECTOR: "GITHUB_CONNECTOR", LINEAR_CONNECTOR: "LINEAR_CONNECTOR", diff --git a/surfsense_web/contracts/enums/connector.ts b/surfsense_web/contracts/enums/connector.ts index ae80cf871..fc65585e2 100644 --- a/surfsense_web/contracts/enums/connector.ts +++ b/surfsense_web/contracts/enums/connector.ts @@ -4,6 +4,7 @@ export enum EnumConnectorName { LINKUP_API = "LINKUP_API", BAIDU_SEARCH_API = "BAIDU_SEARCH_API", SLACK_CONNECTOR = "SLACK_CONNECTOR", + TEAMS_CONNECTOR = "TEAMS_CONNECTOR", NOTION_CONNECTOR = "NOTION_CONNECTOR", GITHUB_CONNECTOR = "GITHUB_CONNECTOR", LINEAR_CONNECTOR = "LINEAR_CONNECTOR", diff --git a/surfsense_web/contracts/enums/connectorIcons.tsx b/surfsense_web/contracts/enums/connectorIcons.tsx index 22bc734aa..befe132f9 100644 --- a/surfsense_web/contracts/enums/connectorIcons.tsx +++ b/surfsense_web/contracts/enums/connectorIcons.tsx @@ -31,6 +31,8 @@ export const getConnectorIcon = (connectorType: EnumConnectorName | string, clas return Baidu; case EnumConnectorName.SLACK_CONNECTOR: return Slack; + case EnumConnectorName.TEAMS_CONNECTOR: + return Microsoft Teams; case EnumConnectorName.NOTION_CONNECTOR: return Notion; case EnumConnectorName.DISCORD_CONNECTOR: From 18035b3728b472318fc135d12a962729d3a71081 Mon Sep 17 00:00:00 2001 From: Manoj Aggarwal Date: Fri, 9 Jan 2026 13:20:47 -0800 Subject: 
[PATCH 22/28] Add MS Teams connector --- .../versions/59_add_teams_connector_enums.py | 160 ++++++++++++++++++ .../agents/new_chat/tools/knowledge_base.py | 2 + .../app/connectors/teams_connector.py | 51 ++++-- .../routes/search_source_connectors_routes.py | 22 ++- .../app/routes/teams_add_connector_route.py | 7 +- .../tasks/connector_indexers/teams_indexer.py | 8 +- .../contracts/types/connector.types.ts | 1 + 7 files changed, 222 insertions(+), 29 deletions(-) create mode 100644 surfsense_backend/alembic/versions/59_add_teams_connector_enums.py diff --git a/surfsense_backend/alembic/versions/59_add_teams_connector_enums.py b/surfsense_backend/alembic/versions/59_add_teams_connector_enums.py new file mode 100644 index 000000000..f13fbe9e5 --- /dev/null +++ b/surfsense_backend/alembic/versions/59_add_teams_connector_enums.py @@ -0,0 +1,160 @@ +"""Add TEAMS_CONNECTOR to SearchSourceConnectorType and DocumentType enums + +Revision ID: 59 +Revises: 58 +""" + +from collections.abc import Sequence + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision: str = "59" +down_revision: str | None = "58" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None + +# Define the ENUM type name and the new value +CONNECTOR_ENUM = "searchsourceconnectortype" +CONNECTOR_NEW_VALUE = "TEAMS_CONNECTOR" +DOCUMENT_ENUM = "documenttype" +DOCUMENT_NEW_VALUE = "TEAMS_CONNECTOR" + + +def upgrade() -> None: + """Upgrade schema - add TEAMS_CONNECTOR to connector and document enum safely.""" + # Add TEAMS_CONNECTOR to searchsourceconnectortype only if not exists + op.execute( + f""" + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 FROM pg_enum + WHERE enumlabel = '{CONNECTOR_NEW_VALUE}' + AND enumtypid = (SELECT oid FROM pg_type WHERE typname = '{CONNECTOR_ENUM}') + ) THEN + ALTER TYPE {CONNECTOR_ENUM} ADD VALUE '{CONNECTOR_NEW_VALUE}'; + END IF; + END$$; + """ + ) + + # Add TEAMS_CONNECTOR to documenttype only if not exists + op.execute( + f""" + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 FROM pg_enum + WHERE enumlabel = '{DOCUMENT_NEW_VALUE}' + AND enumtypid = (SELECT oid FROM pg_type WHERE typname = '{DOCUMENT_ENUM}') + ) THEN + ALTER TYPE {DOCUMENT_ENUM} ADD VALUE '{DOCUMENT_NEW_VALUE}'; + END IF; + END$$; + """ + ) + + +def downgrade() -> None: + """Downgrade schema - remove TEAMS_CONNECTOR from connector and document enum.""" + + # Old enum name + old_connector_enum_name = f"{CONNECTOR_ENUM}_old" + old_document_enum_name = f"{DOCUMENT_ENUM}_old" + + # All connector values except TEAMS_CONNECTOR + old_connector_values = ( + "SERPER_API", + "TAVILY_API", + "SEARXNG_API", + "LINKUP_API", + "BAIDU_SEARCH_API", + "SLACK_CONNECTOR", + "NOTION_CONNECTOR", + "GITHUB_CONNECTOR", + "LINEAR_CONNECTOR", + "DISCORD_CONNECTOR", + "JIRA_CONNECTOR", + "CONFLUENCE_CONNECTOR", + "CLICKUP_CONNECTOR", + "GOOGLE_CALENDAR_CONNECTOR", + "GOOGLE_GMAIL_CONNECTOR", + "GOOGLE_DRIVE_CONNECTOR", + "AIRTABLE_CONNECTOR", + "LUMA_CONNECTOR", + "ELASTICSEARCH_CONNECTOR", + "WEBCRAWLER_CONNECTOR", + ) + + # All document 
values except TEAMS_CONNECTOR + old_document_values = ( + "EXTENSION", + "CRAWLED_URL", + "FILE", + "SLACK_CONNECTOR", + "NOTION_CONNECTOR", + "YOUTUBE_VIDEO", + "GITHUB_CONNECTOR", + "LINEAR_CONNECTOR", + "DISCORD_CONNECTOR", + "JIRA_CONNECTOR", + "CONFLUENCE_CONNECTOR", + "CLICKUP_CONNECTOR", + "GOOGLE_CALENDAR_CONNECTOR", + "GOOGLE_GMAIL_CONNECTOR", + "GOOGLE_DRIVE_FILE", + "AIRTABLE_CONNECTOR", + "LUMA_CONNECTOR", + "ELASTICSEARCH_CONNECTOR", + "BOOKSTACK_CONNECTOR", + "CIRCLEBACK", + "NOTE", + ) + + old_connector_values_sql = ", ".join([f"'{v}'" for v in old_connector_values]) + old_document_values_sql = ", ".join([f"'{v}'" for v in old_document_values]) + + # Table and column names + connector_table_name = "search_source_connectors" + connector_column_name = "connector_type" + document_table_name = "documents" + document_column_name = "document_type" + + # Connector Enum Downgrade Steps + # 1. Rename the current connector enum type + op.execute(f"ALTER TYPE {CONNECTOR_ENUM} RENAME TO {old_connector_enum_name}") + + # 2. Create the new connector enum type with the old values + op.execute(f"CREATE TYPE {CONNECTOR_ENUM} AS ENUM({old_connector_values_sql})") + + # 3. Alter the column to use the new connector enum type + op.execute( + f""" + ALTER TABLE {connector_table_name} + ALTER COLUMN {connector_column_name} TYPE {CONNECTOR_ENUM} + USING {connector_column_name}::text::{CONNECTOR_ENUM} + """ + ) + + # 4. Drop the old connector enum type + op.execute(f"DROP TYPE {old_connector_enum_name}") + + # Document Enum Downgrade Steps + # 1. Rename the current document enum type + op.execute(f"ALTER TYPE {DOCUMENT_ENUM} RENAME TO {old_document_enum_name}") + + # 2. Create the new document enum type with the old values + op.execute(f"CREATE TYPE {DOCUMENT_ENUM} AS ENUM({old_document_values_sql})") + + # 3. 
Alter the column to use the new document enum type + op.execute( + f""" + ALTER TABLE {document_table_name} + ALTER COLUMN {document_column_name} TYPE {DOCUMENT_ENUM} + USING {document_column_name}::text::{DOCUMENT_ENUM} + """ + ) + + # 4. Drop the old document enum type + op.execute(f"DROP TYPE {old_document_enum_name}") diff --git a/surfsense_backend/app/agents/new_chat/tools/knowledge_base.py b/surfsense_backend/app/agents/new_chat/tools/knowledge_base.py index a3cdad359..e91d865fa 100644 --- a/surfsense_backend/app/agents/new_chat/tools/knowledge_base.py +++ b/surfsense_backend/app/agents/new_chat/tools/knowledge_base.py @@ -26,6 +26,7 @@ _ALL_CONNECTORS: list[str] = [ "EXTENSION", "FILE", "SLACK_CONNECTOR", + "TEAMS_CONNECTOR", "NOTION_CONNECTOR", "YOUTUBE_VIDEO", "GITHUB_CONNECTOR", @@ -573,6 +574,7 @@ def create_search_knowledge_base_tool( - FILE: "User-uploaded documents (PDFs, Word, etc.)" (personal files) - NOTE: "SurfSense Notes" (notes created inside SurfSense) - SLACK_CONNECTOR: "Slack conversations and shared content" (personal workspace communications) + - TEAMS_CONNECTOR: "Microsoft Teams messages and conversations" (personal Teams communications) - NOTION_CONNECTOR: "Notion workspace pages and databases" (personal knowledge management) - YOUTUBE_VIDEO: "YouTube video transcripts and metadata" (personally saved videos) - GITHUB_CONNECTOR: "GitHub repository content and issues" (personal repositories and interactions) diff --git a/surfsense_backend/app/connectors/teams_connector.py b/surfsense_backend/app/connectors/teams_connector.py index e11a2aad0..29c2db127 100644 --- a/surfsense_backend/app/connectors/teams_connector.py +++ b/surfsense_backend/app/connectors/teams_connector.py @@ -7,7 +7,7 @@ Supports OAuth-based authentication with token refresh. 
""" import logging -from datetime import datetime +from datetime import datetime, timezone from typing import Any import httpx @@ -255,25 +255,11 @@ class TeamsConnector: async with httpx.AsyncClient() as client: url = f"{self.GRAPH_API_BASE}/teams/{team_id}/channels/{channel_id}/messages" - # Build query parameters for date filtering if needed - params = {} - if start_date or end_date: - filter_parts = [] - if start_date: - filter_parts.append( - f"createdDateTime ge {start_date.strftime('%Y-%m-%dT%H:%M:%SZ')}" - ) - if end_date: - filter_parts.append( - f"createdDateTime le {end_date.strftime('%Y-%m-%dT%H:%M:%SZ')}" - ) - if filter_parts: - params["$filter"] = " and ".join(filter_parts) - + # Note: The Graph API for channel messages doesn't support $filter parameter + # We fetch all messages and filter them client-side response = await client.get( url, headers={"Authorization": f"Bearer {access_token}"}, - params=params, timeout=30.0, ) @@ -283,7 +269,36 @@ class TeamsConnector: ) data = response.json() - return data.get("value", []) + messages = data.get("value", []) + + # Filter messages by date if needed (client-side filtering) + if start_date or end_date: + # Make sure comparison dates are timezone-aware (UTC) + if start_date and start_date.tzinfo is None: + start_date = start_date.replace(tzinfo=timezone.utc) + if end_date and end_date.tzinfo is None: + end_date = end_date.replace(tzinfo=timezone.utc) + + filtered_messages = [] + for message in messages: + created_at_str = message.get("createdDateTime") + if not created_at_str: + continue + + # Parse the ISO 8601 datetime string (already timezone-aware) + created_at = datetime.fromisoformat(created_at_str.replace('Z', '+00:00')) + + # Check if message is within date range + if start_date and created_at < start_date: + continue + if end_date and created_at > end_date: + continue + + filtered_messages.append(message) + + return filtered_messages + + return messages async def get_message_replies( self, team_id: 
str, channel_id: str, message_id: str diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py index 337e1af85..73a593186 100644 --- a/surfsense_backend/app/routes/search_source_connectors_routes.py +++ b/surfsense_backend/app/routes/search_source_connectors_routes.py @@ -558,6 +558,7 @@ async def index_connector_content( Currently supports: - SLACK_CONNECTOR: Indexes messages from all accessible Slack channels + - TEAMS_CONNECTOR: Indexes messages from all accessible Microsoft Teams channels - NOTION_CONNECTOR: Indexes pages from all accessible Notion pages - GITHUB_CONNECTOR: Indexes code and documentation from GitHub repositories - LINEAR_CONNECTOR: Indexes issues and comments from Linear @@ -631,6 +632,19 @@ async def index_connector_content( ) response_message = "Slack indexing started in the background." + elif connector.connector_type == SearchSourceConnectorType.TEAMS_CONNECTOR: + from app.tasks.celery_tasks.connector_tasks import ( + index_teams_messages_task, + ) + + logger.info( + f"Triggering Teams indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" + ) + index_teams_messages_task.delay( + connector_id, search_space_id, str(user.id), indexing_from, indexing_to + ) + response_message = "Teams indexing started in the background." 
+ elif connector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR: from app.tasks.celery_tasks.connector_tasks import index_notion_pages_task @@ -1237,16 +1251,14 @@ async def run_teams_indexing( update_last_indexed=False, # Don't update timestamp in the indexing function ) - # Only update last_indexed_at if indexing was successful (either new docs or updated docs) - if documents_processed > 0: + # Update last_indexed_at if indexing was successful (regardless of new/skipped docs) + if error_or_warning is None: await update_connector_last_indexed(session, connector_id) logger.info( f"Teams indexing completed successfully: {documents_processed} documents processed" ) else: - logger.error( - f"Teams indexing failed or no documents processed: {error_or_warning}" - ) + logger.error(f"Teams indexing failed: {error_or_warning}") except Exception as e: logger.error(f"Error in background Teams indexing task: {e!s}") diff --git a/surfsense_backend/app/routes/teams_add_connector_route.py b/surfsense_backend/app/routes/teams_add_connector_route.py index a84db47c9..ce014be0d 100644 --- a/surfsense_backend/app/routes/teams_add_connector_route.py +++ b/surfsense_backend/app/routes/teams_add_connector_route.py @@ -312,17 +312,18 @@ async def teams_callback( session, SearchSourceConnectorType.TEAMS_CONNECTOR, space_id, - connector_config, + user_id, + connector_identifier, ) # Create new connector new_connector = SearchSourceConnector( + name=connector_name, connector_type=SearchSourceConnectorType.TEAMS_CONNECTOR, + is_indexable=True, config=connector_config, - is_enabled=True, search_space_id=space_id, user_id=user_id, - connector_name=connector_name, ) try: diff --git a/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py b/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py index 4fb4d719d..c1e778768 100644 --- a/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py +++ 
b/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py @@ -165,14 +165,16 @@ async def index_teams_messages( ) # Convert date strings to datetime objects for filtering - from datetime import datetime + from datetime import datetime, timezone start_datetime = None end_datetime = None if start_date_str: - start_datetime = datetime.strptime(start_date_str, "%Y-%m-%d") + # Parse as naive datetime and make it timezone-aware (UTC) + start_datetime = datetime.strptime(start_date_str, "%Y-%m-%d").replace(tzinfo=timezone.utc) if end_date_str: - end_datetime = datetime.strptime(end_date_str, "%Y-%m-%d") + # Parse as naive datetime, set to end of day, and make it timezone-aware (UTC) + end_datetime = datetime.strptime(end_date_str, "%Y-%m-%d").replace(hour=23, minute=59, second=59, tzinfo=timezone.utc) # Process each team for team in teams: diff --git a/surfsense_web/contracts/types/connector.types.ts b/surfsense_web/contracts/types/connector.types.ts index 5b67297ae..f864ae16f 100644 --- a/surfsense_web/contracts/types/connector.types.ts +++ b/surfsense_web/contracts/types/connector.types.ts @@ -8,6 +8,7 @@ export const searchSourceConnectorTypeEnum = z.enum([ "LINKUP_API", "BAIDU_SEARCH_API", "SLACK_CONNECTOR", + "TEAMS_CONNECTOR", "NOTION_CONNECTOR", "GITHUB_CONNECTOR", "LINEAR_CONNECTOR", From 36c395ba72d3a6cc7bfb45e96e3cb3d8af6b0aa6 Mon Sep 17 00:00:00 2001 From: Manoj Aggarwal Date: Fri, 9 Jan 2026 13:20:54 -0800 Subject: [PATCH 23/28] nit --- .../app/routes/search_source_connectors_routes.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py index 73a593186..c9831484d 100644 --- a/surfsense_backend/app/routes/search_source_connectors_routes.py +++ b/surfsense_backend/app/routes/search_source_connectors_routes.py @@ -1251,14 +1251,11 @@ async def run_teams_indexing( update_last_indexed=False, # Don't 
update timestamp in the indexing function ) - # Update last_indexed_at if indexing was successful (regardless of new/skipped docs) - if error_or_warning is None: - await update_connector_last_indexed(session, connector_id) - logger.info( - f"Teams indexing completed successfully: {documents_processed} documents processed" - ) - else: - logger.error(f"Teams indexing failed: {error_or_warning}") + # Update last_indexed_at after successful indexing (even if 0 new docs - they were checked) + await update_connector_last_indexed(session, connector_id) + logger.info( + f"Teams indexing completed successfully: {documents_processed} documents processed. {error_or_warning or ''}" + ) except Exception as e: logger.error(f"Error in background Teams indexing task: {e!s}") From 4532cc356978e864eda474689539b01c927d8b06 Mon Sep 17 00:00:00 2001 From: Manoj Aggarwal Date: Fri, 9 Jan 2026 13:20:59 -0800 Subject: [PATCH 24/28] revert .vscode change --- .vscode/settings.json | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 42d09dcad..f134660b6 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,4 +1,3 @@ { - "biome.configurationPath": "./surfsense_web/biome.json", - "python-envs.pythonProjects": [] + "biome.configurationPath": "./surfsense_web/biome.json" } \ No newline at end of file From 8b650f4cf4e1220e33be3c2b6ddc6aeb87e170c0 Mon Sep 17 00:00:00 2001 From: Manoj Aggarwal Date: Fri, 9 Jan 2026 13:33:28 -0800 Subject: [PATCH 25/28] resolve vercel error --- .../app/routes/new_chat_routes.py | 43 +++++++++++++++---- .../routes/search_source_connectors_routes.py | 39 +++++++++-------- 2 files changed, 54 insertions(+), 28 deletions(-) diff --git a/surfsense_backend/app/routes/new_chat_routes.py b/surfsense_backend/app/routes/new_chat_routes.py index 476ff2935..b2a071cfd 100644 --- a/surfsense_backend/app/routes/new_chat_routes.py +++ b/surfsense_backend/app/routes/new_chat_routes.py @@ -278,16 
+278,41 @@ async def get_thread_messages( ) # Return messages in the format expected by assistant-ui - messages = [ - NewChatMessageRead( - id=msg.id, - thread_id=msg.thread_id, - role=msg.role, - content=msg.content, - created_at=msg.created_at, + messages = [] + for msg in thread.messages: + # Eagerly extract all data while in session context + msg_id = msg.id + msg_thread_id = msg.thread_id + msg_role = msg.role + msg_content = msg.content + msg_created_at = msg.created_at + msg_updated_at = msg.updated_at + msg_user_id = msg.user_id + msg_metadata = msg.message_metadata + + # Manually construct user info to avoid lazy loading + user_info = None + if msg_user_id and msg.user: + user_info = MessageUserInfo( + id=msg.user.id, + email=msg.user.email, + is_active=msg.user.is_active, + is_superuser=msg.user.is_superuser, + is_verified=msg.user.is_verified, + ) + + messages.append( + NewChatMessageRead( + id=msg_id, + thread_id=msg_thread_id, + role=msg_role, + content=msg_content, + created_at=msg_created_at, + updated_at=msg_updated_at, + user=user_info, + message_metadata=msg_metadata, + ) ) - for msg in thread.messages - ] return ThreadHistoryLoadResponse(messages=messages) diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py index c9831484d..f9190c616 100644 --- a/surfsense_backend/app/routes/search_source_connectors_routes.py +++ b/surfsense_backend/app/routes/search_source_connectors_routes.py @@ -871,9 +871,10 @@ async def index_connector_content( ) from e -async def update_connector_last_indexed(session: AsyncSession, connector_id: int): +async def _update_connector_timestamp_by_id(session: AsyncSession, connector_id: int): """ - Update the last_indexed_at timestamp for a connector. + Update the last_indexed_at timestamp for a connector by its ID. + Internal helper function for routes. 
Args: session: Database session @@ -948,7 +949,7 @@ async def run_slack_indexing( # Only update last_indexed_at if indexing was successful (either new docs or updated docs) if documents_processed > 0: - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) logger.info( f"Slack indexing completed successfully: {documents_processed} documents processed" ) @@ -1010,7 +1011,7 @@ async def run_notion_indexing( # Only update last_indexed_at if indexing was successful (either new docs or updated docs) if documents_processed > 0: - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) logger.info( f"Notion indexing completed successfully: {documents_processed} documents processed" ) @@ -1070,7 +1071,7 @@ async def run_github_indexing( f"GitHub indexing successful for connector {connector_id}. Indexed {indexed_count} documents." ) # Update the last indexed timestamp only on success - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) await session.commit() # Commit timestamp update except Exception as e: await session.rollback() @@ -1129,7 +1130,7 @@ async def run_linear_indexing( f"Linear indexing successful for connector {connector_id}. Indexed {indexed_count} documents." 
) # Update the last indexed timestamp only on success - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) await session.commit() # Commit timestamp update except Exception as e: await session.rollback() @@ -1190,7 +1191,7 @@ async def run_discord_indexing( # Only update last_indexed_at if indexing was successful (either new docs or updated docs) if documents_processed > 0: - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) logger.info( f"Discord indexing completed successfully: {documents_processed} documents processed" ) @@ -1252,7 +1253,7 @@ async def run_teams_indexing( ) # Update last_indexed_at after successful indexing (even if 0 new docs - they were checked) - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) logger.info( f"Teams indexing completed successfully: {documents_processed} documents processed. {error_or_warning or ''}" ) @@ -1308,7 +1309,7 @@ async def run_jira_indexing( f"Jira indexing successful for connector {connector_id}. Indexed {indexed_count} documents." ) # Update the last indexed timestamp only on success - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) await session.commit() # Commit timestamp update except Exception as e: logger.error( @@ -1368,7 +1369,7 @@ async def run_confluence_indexing( f"Confluence indexing successful for connector {connector_id}. Indexed {indexed_count} documents." 
) # Update the last indexed timestamp only on success - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) await session.commit() # Commit timestamp update except Exception as e: logger.error( @@ -1426,7 +1427,7 @@ async def run_clickup_indexing( f"ClickUp indexing successful for connector {connector_id}. Indexed {indexed_count} tasks." ) # Update the last indexed timestamp only on success - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) await session.commit() # Commit timestamp update except Exception as e: logger.error( @@ -1484,7 +1485,7 @@ async def run_airtable_indexing( f"Airtable indexing successful for connector {connector_id}. Indexed {indexed_count} records." ) # Update the last indexed timestamp only on success - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) await session.commit() # Commit timestamp update except Exception as e: logger.error( @@ -1544,7 +1545,7 @@ async def run_google_calendar_indexing( f"Google Calendar indexing successful for connector {connector_id}. Indexed {indexed_count} documents." ) # Update the last indexed timestamp only on success - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) await session.commit() # Commit timestamp update except Exception as e: logger.error( @@ -1611,7 +1612,7 @@ async def run_google_gmail_indexing( f"Google Gmail indexing successful for connector {connector_id}. Indexed {indexed_count} documents." 
) # Update the last indexed timestamp only on success - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) await session.commit() # Commit timestamp update except Exception as e: logger.error( @@ -1695,7 +1696,7 @@ async def run_google_drive_indexing( f"Google Drive indexing successful for connector {connector_id}. Indexed {total_indexed} documents from {len(items.folders)} folder(s) and {len(items.files)} file(s)." ) # Update the last indexed timestamp only on full success - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) await session.commit() # Commit timestamp update except Exception as e: logger.error( @@ -1755,7 +1756,7 @@ async def run_luma_indexing( # Only update last_indexed_at if indexing was successful (either new docs or updated docs) if documents_processed > 0: - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) logger.info( f"Luma indexing completed successfully: {documents_processed} documents processed" ) @@ -1815,7 +1816,7 @@ async def run_elasticsearch_indexing( f"Elasticsearch indexing successful for connector {connector_id}. Indexed {indexed_count} documents." 
) # Update the last indexed timestamp only on success - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) await session.commit() except Exception as e: await session.rollback() @@ -1874,7 +1875,7 @@ async def run_web_page_indexing( # Only update last_indexed_at if indexing was successful (either new docs or updated docs) if documents_processed > 0: - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) logger.info( f"Web page indexing completed successfully: {documents_processed} documents processed" ) @@ -1947,7 +1948,7 @@ async def run_bookstack_indexing( f"BookStack indexing successful for connector {connector_id}. Indexed {indexed_count} documents." ) # Update the last indexed timestamp only on success - await update_connector_last_indexed(session, connector_id) + await _update_connector_timestamp_by_id(session, connector_id) await session.commit() # Commit timestamp update except Exception as e: logger.error( From 29dadfd138168c6bb5b12fff4c86fc8885afb3e8 Mon Sep 17 00:00:00 2001 From: Manoj Aggarwal Date: Fri, 9 Jan 2026 13:34:36 -0800 Subject: [PATCH 26/28] nit --- .../app/routes/new_chat_routes.py | 43 ++++--------------- 1 file changed, 9 insertions(+), 34 deletions(-) diff --git a/surfsense_backend/app/routes/new_chat_routes.py b/surfsense_backend/app/routes/new_chat_routes.py index b2a071cfd..476ff2935 100644 --- a/surfsense_backend/app/routes/new_chat_routes.py +++ b/surfsense_backend/app/routes/new_chat_routes.py @@ -278,41 +278,16 @@ async def get_thread_messages( ) # Return messages in the format expected by assistant-ui - messages = [] - for msg in thread.messages: - # Eagerly extract all data while in session context - msg_id = msg.id - msg_thread_id = msg.thread_id - msg_role = msg.role - msg_content = msg.content - msg_created_at = msg.created_at - msg_updated_at = msg.updated_at - msg_user_id = 
msg.user_id - msg_metadata = msg.message_metadata - - # Manually construct user info to avoid lazy loading - user_info = None - if msg_user_id and msg.user: - user_info = MessageUserInfo( - id=msg.user.id, - email=msg.user.email, - is_active=msg.user.is_active, - is_superuser=msg.user.is_superuser, - is_verified=msg.user.is_verified, - ) - - messages.append( - NewChatMessageRead( - id=msg_id, - thread_id=msg_thread_id, - role=msg_role, - content=msg_content, - created_at=msg_created_at, - updated_at=msg_updated_at, - user=user_info, - message_metadata=msg_metadata, - ) + messages = [ + NewChatMessageRead( + id=msg.id, + thread_id=msg.thread_id, + role=msg.role, + content=msg.content, + created_at=msg.created_at, ) + for msg in thread.messages + ] return ThreadHistoryLoadResponse(messages=messages) From 62d0d8b6db9ff64e69757dbd7964c152ffcea116 Mon Sep 17 00:00:00 2001 From: Manoj Aggarwal Date: Fri, 9 Jan 2026 13:38:49 -0800 Subject: [PATCH 27/28] ruff lint --- surfsense_backend/app/connectors/teams_connector.py | 6 +++--- .../app/tasks/connector_indexers/teams_indexer.py | 8 +++++--- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/surfsense_backend/app/connectors/teams_connector.py b/surfsense_backend/app/connectors/teams_connector.py index 29c2db127..5603357e5 100644 --- a/surfsense_backend/app/connectors/teams_connector.py +++ b/surfsense_backend/app/connectors/teams_connector.py @@ -7,7 +7,7 @@ Supports OAuth-based authentication with token refresh. 
""" import logging -from datetime import datetime, timezone +from datetime import UTC, datetime from typing import Any import httpx @@ -275,9 +275,9 @@ class TeamsConnector: if start_date or end_date: # Make sure comparison dates are timezone-aware (UTC) if start_date and start_date.tzinfo is None: - start_date = start_date.replace(tzinfo=timezone.utc) + start_date = start_date.replace(tzinfo=UTC) if end_date and end_date.tzinfo is None: - end_date = end_date.replace(tzinfo=timezone.utc) + end_date = end_date.replace(tzinfo=UTC) filtered_messages = [] for message in messages: diff --git a/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py b/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py index c1e778768..3b28d4293 100644 --- a/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py +++ b/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py @@ -2,6 +2,8 @@ Microsoft Teams connector indexer. """ +from datetime import UTC + from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.ext.asyncio import AsyncSession @@ -165,16 +167,16 @@ async def index_teams_messages( ) # Convert date strings to datetime objects for filtering - from datetime import datetime, timezone + from datetime import datetime start_datetime = None end_datetime = None if start_date_str: # Parse as naive datetime and make it timezone-aware (UTC) - start_datetime = datetime.strptime(start_date_str, "%Y-%m-%d").replace(tzinfo=timezone.utc) + start_datetime = datetime.strptime(start_date_str, "%Y-%m-%d").replace(tzinfo=UTC) if end_date_str: # Parse as naive datetime, set to end of day, and make it timezone-aware (UTC) - end_datetime = datetime.strptime(end_date_str, "%Y-%m-%d").replace(hour=23, minute=59, second=59, tzinfo=timezone.utc) + end_datetime = datetime.strptime(end_date_str, "%Y-%m-%d").replace(hour=23, minute=59, second=59, tzinfo=UTC) # Process each team for team in teams: From 8b735a492a8ba1ea0fd10d3ac2576dcd32ef8e23 Mon Sep 17 00:00:00 2001 
From: Manoj Aggarwal Date: Fri, 9 Jan 2026 13:53:09 -0800 Subject: [PATCH 28/28] lint --- .../versions/59_add_teams_connector_enums.py | 2 +- .../app/connectors/teams_connector.py | 22 +++++++++++-------- .../app/routes/teams_add_connector_route.py | 8 +++++-- .../tasks/connector_indexers/teams_indexer.py | 21 +++++++++++++----- 4 files changed, 35 insertions(+), 18 deletions(-) diff --git a/surfsense_backend/alembic/versions/59_add_teams_connector_enums.py b/surfsense_backend/alembic/versions/59_add_teams_connector_enums.py index f13fbe9e5..d4f6629a7 100644 --- a/surfsense_backend/alembic/versions/59_add_teams_connector_enums.py +++ b/surfsense_backend/alembic/versions/59_add_teams_connector_enums.py @@ -86,7 +86,7 @@ def downgrade() -> None: "ELASTICSEARCH_CONNECTOR", "WEBCRAWLER_CONNECTOR", ) - + # All document values except TEAMS_CONNECTOR old_document_values = ( "EXTENSION", diff --git a/surfsense_backend/app/connectors/teams_connector.py b/surfsense_backend/app/connectors/teams_connector.py index 5603357e5..c639ab177 100644 --- a/surfsense_backend/app/connectors/teams_connector.py +++ b/surfsense_backend/app/connectors/teams_connector.py @@ -253,7 +253,9 @@ class TeamsConnector: access_token = await self._get_valid_token() async with httpx.AsyncClient() as client: - url = f"{self.GRAPH_API_BASE}/teams/{team_id}/channels/{channel_id}/messages" + url = ( + f"{self.GRAPH_API_BASE}/teams/{team_id}/channels/{channel_id}/messages" + ) # Note: The Graph API for channel messages doesn't support $filter parameter # We fetch all messages and filter them client-side @@ -270,7 +272,7 @@ class TeamsConnector: data = response.json() messages = data.get("value", []) - + # Filter messages by date if needed (client-side filtering) if start_date or end_date: # Make sure comparison dates are timezone-aware (UTC) @@ -278,26 +280,28 @@ class TeamsConnector: start_date = start_date.replace(tzinfo=UTC) if end_date and end_date.tzinfo is None: end_date = end_date.replace(tzinfo=UTC) 
- + filtered_messages = [] for message in messages: created_at_str = message.get("createdDateTime") if not created_at_str: continue - + # Parse the ISO 8601 datetime string (already timezone-aware) - created_at = datetime.fromisoformat(created_at_str.replace('Z', '+00:00')) - + created_at = datetime.fromisoformat( + created_at_str.replace("Z", "+00:00") + ) + # Check if message is within date range if start_date and created_at < start_date: continue if end_date and created_at > end_date: continue - + filtered_messages.append(message) - + return filtered_messages - + return messages async def get_message_replies( diff --git a/surfsense_backend/app/routes/teams_add_connector_route.py b/surfsense_backend/app/routes/teams_add_connector_route.py index ce014be0d..9ce84e171 100644 --- a/surfsense_backend/app/routes/teams_add_connector_route.py +++ b/surfsense_backend/app/routes/teams_add_connector_route.py @@ -343,8 +343,12 @@ async def teams_callback( except IntegrityError as e: await session.rollback() - logger.error("Database integrity error creating Teams connector: %s", str(e)) - redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=connector_creation_failed" + logger.error( + "Database integrity error creating Teams connector: %s", str(e) + ) + redirect_url = ( + f"{config.NEXT_FRONTEND_URL}/dashboard?error=connector_creation_failed" + ) return RedirectResponse(url=redirect_url) except HTTPException: diff --git a/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py b/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py index 3b28d4293..2709adaf1 100644 --- a/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py +++ b/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py @@ -173,10 +173,14 @@ async def index_teams_messages( end_datetime = None if start_date_str: # Parse as naive datetime and make it timezone-aware (UTC) - start_datetime = datetime.strptime(start_date_str, "%Y-%m-%d").replace(tzinfo=UTC) + start_datetime 
= datetime.strptime(start_date_str, "%Y-%m-%d").replace( + tzinfo=UTC + ) if end_date_str: # Parse as naive datetime, set to end of day, and make it timezone-aware (UTC) - end_datetime = datetime.strptime(end_date_str, "%Y-%m-%d").replace(hour=23, minute=59, second=59, tzinfo=UTC) + end_datetime = datetime.strptime(end_date_str, "%Y-%m-%d").replace( + hour=23, minute=59, second=59, tzinfo=UTC + ) # Process each team for team in teams: @@ -314,8 +318,10 @@ async def index_teams_messages( chunks = await create_document_chunks( combined_document_string ) - doc_embedding = config.embedding_model_instance.embed( - combined_document_string + doc_embedding = ( + config.embedding_model_instance.embed( + combined_document_string + ) ) # Update existing document @@ -337,11 +343,14 @@ async def index_teams_messages( # Delete old chunks and add new ones existing_document.chunks = chunks - existing_document.updated_at = get_current_timestamp() + existing_document.updated_at = ( + get_current_timestamp() + ) documents_indexed += 1 logger.info( - "Successfully updated Teams message %s", message_id + "Successfully updated Teams message %s", + message_id, ) continue