diff --git a/.vscode/launch.json b/.vscode/launch.json index dfe20d832..ad7f04bd0 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -5,7 +5,7 @@ "version": "0.2.0", "configurations": [ { - "name": "Python Debugger: UV Run with Reload", + "name": "Backend: FastAPI", "type": "debugpy", "request": "launch", "module": "uvicorn", @@ -25,7 +25,7 @@ "python": "${command:python.interpreterPath}" }, { - "name": "Python Debugger: main.py (direct)", + "name": "Backend: FastAPI (main.py)", "type": "debugpy", "request": "launch", "program": "${workspaceFolder}/surfsense_backend/main.py", @@ -34,17 +34,95 @@ "cwd": "${workspaceFolder}/surfsense_backend" }, { - "name": "Python Debugger: Chat DeepAgent", + "name": "Frontend: Next.js", + "type": "node", + "request": "launch", + "cwd": "${workspaceFolder}/surfsense_web", + "runtimeExecutable": "npm", + "runtimeArgs": ["run", "dev"], + "console": "integratedTerminal", + "serverReadyAction": { + "pattern": "- Local:.+(https?://.+)", + "uriFormat": "%s", + "action": "debugWithChrome" + } + }, + { + "name": "Frontend: Next.js (Server-Side Debug)", + "type": "node", + "request": "launch", + "cwd": "${workspaceFolder}/surfsense_web", + "runtimeExecutable": "npm", + "runtimeArgs": ["run", "debug:server"], + "console": "integratedTerminal", + "serverReadyAction": { + "pattern": "- Local:.+(https?://.+)", + "uriFormat": "%s", + "action": "debugWithChrome" + } + }, + { + "name": "Celery: Worker", "type": "debugpy", "request": "launch", - "module": "app.agents.new_chat.chat_deepagent", + "module": "celery", + "args": [ + "-A", + "app.celery_app:celery_app", + "worker", + "--loglevel=info", + "--pool=solo" + ], "console": "integratedTerminal", "justMyCode": false, "cwd": "${workspaceFolder}/surfsense_backend", - "python": "${command:python.interpreterPath}", - "env": { - "PYTHONPATH": "${workspaceFolder}/surfsense_backend" + "python": "${command:python.interpreterPath}" + }, + { + "name": "Celery: Beat Scheduler", + "type": 
"debugpy", + "request": "launch", + "module": "celery", + "args": [ + "-A", + "app.celery_app:celery_app", + "beat", + "--loglevel=info" + ], + "console": "integratedTerminal", + "justMyCode": false, + "cwd": "${workspaceFolder}/surfsense_backend", + "python": "${command:python.interpreterPath}" + } + ], + "compounds": [ + { + "name": "Full Stack: Backend + Frontend + Celery", + "configurations": [ + "Backend: FastAPI", + "Frontend: Next.js", + "Celery: Worker", + "Celery: Beat Scheduler" + ], + "stopAll": true, + "presentation": { + "hidden": false, + "group": "Full Stack", + "order": 1 + } + }, + { + "name": "Full Stack: Backend + Frontend", + "configurations": [ + "Backend: FastAPI", + "Frontend: Next.js" + ], + "stopAll": true, + "presentation": { + "hidden": false, + "group": "Full Stack", + "order": 2 } } ] -} \ No newline at end of file +} diff --git a/README.md b/README.md index acd900588..4f2ce4332 100644 --- a/README.md +++ b/README.md @@ -174,44 +174,27 @@ docker run -d -p 3000:3000 -p 8000:8000 ` ghcr.io/modsetter/surfsense:latest ``` -**With Custom Configuration (e.g., OpenAI Embeddings):** +**With Custom Configuration:** + +You can pass any environment variable using `-e` flags: ```bash docker run -d -p 3000:3000 -p 8000:8000 \ -v surfsense-data:/data \ -e EMBEDDING_MODEL=openai://text-embedding-ada-002 \ -e OPENAI_API_KEY=your_openai_api_key \ - --name surfsense \ - --restart unless-stopped \ - ghcr.io/modsetter/surfsense:latest -``` - -**With OAuth-based Connectors (Google Calendar, Gmail, Drive, Airtable):** - -To use OAuth-based connectors, you need to configure the respective client credentials: - -```bash -docker run -d -p 3000:3000 -p 8000:8000 \ - -v surfsense-data:/data \ - # Google Connectors (Calendar, Gmail, Drive) + -e AUTH_TYPE=GOOGLE \ -e GOOGLE_OAUTH_CLIENT_ID=your_google_client_id \ -e GOOGLE_OAUTH_CLIENT_SECRET=your_google_client_secret \ - -e 
GOOGLE_CALENDAR_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/calendar/connector/callback \ - -e GOOGLE_GMAIL_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/gmail/connector/callback \ - -e GOOGLE_DRIVE_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/drive/connector/callback \ - # Airtable Connector - -e AIRTABLE_CLIENT_ID=your_airtable_client_id \ - -e AIRTABLE_CLIENT_SECRET=your_airtable_client_secret \ - -e AIRTABLE_REDIRECT_URI=http://localhost:8000/api/v1/auth/airtable/connector/callback \ + -e ETL_SERVICE=LLAMACLOUD \ + -e LLAMA_CLOUD_API_KEY=your_llama_cloud_key \ --name surfsense \ --restart unless-stopped \ ghcr.io/modsetter/surfsense:latest ``` > [!NOTE] -> - For Google connectors, create OAuth 2.0 credentials in the [Google Cloud Console](https://console.cloud.google.com/apis/credentials) -> - For Airtable connector, create an OAuth integration in the [Airtable Developer Hub](https://airtable.com/create/oauth) -> - If deploying behind a reverse proxy with HTTPS, add `-e BACKEND_URL=https://api.yourdomain.com` and update the redirect URIs accordingly +> - If deploying behind a reverse proxy with HTTPS, add `-e BACKEND_URL=https://api.yourdomain.com` After starting, access SurfSense at: - **Frontend**: [http://localhost:3000](http://localhost:3000) diff --git a/README.zh-CN.md b/README.zh-CN.md index 4e4b0174b..fe6ec8e30 100644 --- a/README.zh-CN.md +++ b/README.zh-CN.md @@ -181,44 +181,27 @@ docker run -d -p 3000:3000 -p 8000:8000 ` ghcr.io/modsetter/surfsense:latest ``` -**使用自定义配置(例如 OpenAI 嵌入):** +**使用自定义配置:** + +您可以使用 `-e` 标志传递任何环境变量: ```bash docker run -d -p 3000:3000 -p 8000:8000 \ -v surfsense-data:/data \ -e EMBEDDING_MODEL=openai://text-embedding-ada-002 \ -e OPENAI_API_KEY=your_openai_api_key \ - --name surfsense \ - --restart unless-stopped \ - ghcr.io/modsetter/surfsense:latest -``` - -**使用 OAuth 连接器(Google 日历、Gmail、云端硬盘、Airtable):** - -要使用基于 OAuth 的连接器,您需要配置相应的客户端凭据: - -```bash -docker run -d -p 3000:3000 -p 8000:8000 \ - 
-v surfsense-data:/data \ - # Google 连接器(日历、Gmail、云端硬盘) + -e AUTH_TYPE=GOOGLE \ -e GOOGLE_OAUTH_CLIENT_ID=your_google_client_id \ -e GOOGLE_OAUTH_CLIENT_SECRET=your_google_client_secret \ - -e GOOGLE_CALENDAR_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/calendar/connector/callback \ - -e GOOGLE_GMAIL_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/gmail/connector/callback \ - -e GOOGLE_DRIVE_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/drive/connector/callback \ - # Airtable 连接器 - -e AIRTABLE_CLIENT_ID=your_airtable_client_id \ - -e AIRTABLE_CLIENT_SECRET=your_airtable_client_secret \ - -e AIRTABLE_REDIRECT_URI=http://localhost:8000/api/v1/auth/airtable/connector/callback \ + -e ETL_SERVICE=LLAMACLOUD \ + -e LLAMA_CLOUD_API_KEY=your_llama_cloud_key \ --name surfsense \ --restart unless-stopped \ ghcr.io/modsetter/surfsense:latest ``` > [!NOTE] -> - 对于 Google 连接器,请在 [Google Cloud Console](https://console.cloud.google.com/apis/credentials) 中创建 OAuth 2.0 凭据 -> - 对于 Airtable 连接器,请在 [Airtable 开发者中心](https://airtable.com/create/oauth) 中创建 OAuth 集成 -> - 如果部署在带有 HTTPS 的反向代理后面,请添加 `-e BACKEND_URL=https://api.yourdomain.com` 并相应地更新重定向 URI +> - 如果部署在带有 HTTPS 的反向代理后面,请添加 `-e BACKEND_URL=https://api.yourdomain.com` 启动后,访问 SurfSense: - **前端**: [http://localhost:3000](http://localhost:3000) diff --git a/surfsense_backend/.env.example b/surfsense_backend/.env.example index a2f662c23..6ac7c55de 100644 --- a/surfsense_backend/.env.example +++ b/surfsense_backend/.env.example @@ -34,42 +34,53 @@ REGISTRATION_ENABLED=TRUE or FALSE GOOGLE_OAUTH_CLIENT_ID=924507538m GOOGLE_OAUTH_CLIENT_SECRET=GOCSV -# Connector Specific Configs +# Google Connector Specific Configurations GOOGLE_CALENDAR_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/calendar/connector/callback GOOGLE_GMAIL_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/gmail/connector/callback GOOGLE_DRIVE_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/drive/connector/callback -# OAuth for 
Aitable Connector -AIRTABLE_CLIENT_ID=your_airtable_client_id -AIRTABLE_CLIENT_SECRET=your_airtable_client_secret +# Airtable OAuth Configuration +AIRTABLE_CLIENT_ID=your_airtable_client_id_here +AIRTABLE_CLIENT_SECRET=your_airtable_client_secret_here AIRTABLE_REDIRECT_URI=http://localhost:8000/api/v1/auth/airtable/connector/callback +# ClickUp OAuth Configuration +CLICKUP_CLIENT_ID=your_clickup_client_id_here +CLICKUP_CLIENT_SECRET=your_clickup_client_secret_here +CLICKUP_REDIRECT_URI=http://localhost:8000/api/v1/auth/clickup/connector/callback + # Discord OAuth Configuration DISCORD_CLIENT_ID=your_discord_client_id_here DISCORD_CLIENT_SECRET=your_discord_client_secret_here DISCORD_REDIRECT_URI=http://localhost:8000/api/v1/auth/discord/connector/callback DISCORD_BOT_TOKEN=your_bot_token_from_developer_portal -# Jira OAuth Configuration -JIRA_CLIENT_ID=our_jira_client_id -JIRA_CLIENT_SECRET=your_jira_client_secret +# Atlassian OAuth Configuration +ATLASSIAN_CLIENT_ID=your_atlassian_client_id_here +ATLASSIAN_CLIENT_SECRET=your_atlassian_client_secret_here JIRA_REDIRECT_URI=http://localhost:8000/api/v1/auth/jira/connector/callback +CONFLUENCE_REDIRECT_URI=http://localhost:8000/api/v1/auth/confluence/connector/callback -# OAuth for Linear Connector -LINEAR_CLIENT_ID=your_linear_client_id -LINEAR_CLIENT_SECRET=your_linear_client_secret +# Linear OAuth Configuration +LINEAR_CLIENT_ID=your_linear_client_id_here +LINEAR_CLIENT_SECRET=your_linear_client_secret_here LINEAR_REDIRECT_URI=http://localhost:8000/api/v1/auth/linear/connector/callback -# OAuth for Notion Connector -NOTION_CLIENT_ID=your_notion_client_id -NOTION_CLIENT_SECRET=your_notion_client_secret +# Notion OAuth Configuration +NOTION_CLIENT_ID=your_notion_client_id_here +NOTION_CLIENT_SECRET=your_notion_client_secret_here NOTION_REDIRECT_URI=http://localhost:8000/api/v1/auth/notion/connector/callback -# OAuth for Slack connector -SLACK_CLIENT_ID=1234567890.1234567890123 
-SLACK_CLIENT_SECRET=abcdefghijklmnopqrstuvwxyz1234567890 +# Slack OAuth Configuration +SLACK_CLIENT_ID=your_slack_client_id_here +SLACK_CLIENT_SECRET=your_slack_client_secret_here SLACK_REDIRECT_URI=http://localhost:8000/api/v1/auth/slack/connector/callback +# Teams OAuth Configuration +TEAMS_CLIENT_ID=your_teams_client_id_here +TEAMS_CLIENT_SECRET=your_teams_client_secret_here +TEAMS_REDIRECT_URI=http://localhost:8000/api/v1/auth/teams/connector/callback + # Embedding Model # Examples: # # Get sentence transformers embeddings diff --git a/surfsense_backend/alembic/versions/57_allow_multiple_connectors_per_type.py b/surfsense_backend/alembic/versions/57_allow_multiple_connectors_per_type.py new file mode 100644 index 000000000..a1482ee4b --- /dev/null +++ b/surfsense_backend/alembic/versions/57_allow_multiple_connectors_per_type.py @@ -0,0 +1,55 @@ +"""Allow multiple connectors of same type per search space + +Revision ID: 57 +Revises: 56 +Create Date: 2026-01-06 12:00:00.000000 + +""" + +from collections.abc import Sequence + +from sqlalchemy import text + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision: str = "57" +down_revision: str | None = "56" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None + + +def upgrade() -> None: + connection = op.get_bind() + constraint_exists = connection.execute( + text(""" + SELECT 1 FROM information_schema.table_constraints + WHERE table_name='search_source_connectors' + AND constraint_type='UNIQUE' + AND constraint_name='uq_searchspace_user_connector_type' + """) + ).scalar() + if constraint_exists: + op.drop_constraint( + "uq_searchspace_user_connector_type", + "search_source_connectors", + type_="unique", + ) + + +def downgrade() -> None: + connection = op.get_bind() + constraint_exists = connection.execute( + text(""" + SELECT 1 FROM information_schema.table_constraints + WHERE table_name='search_source_connectors' + AND constraint_type='UNIQUE' + AND constraint_name='uq_searchspace_user_connector_type' + """) + ).scalar() + if not constraint_exists: + op.create_unique_constraint( + "uq_searchspace_user_connector_type", + "search_source_connectors", + ["search_space_id", "user_id", "connector_type"], + ) diff --git a/surfsense_backend/alembic/versions/58_unique_connector_name_per_space_user.py b/surfsense_backend/alembic/versions/58_unique_connector_name_per_space_user.py new file mode 100644 index 000000000..4dd8d7b70 --- /dev/null +++ b/surfsense_backend/alembic/versions/58_unique_connector_name_per_space_user.py @@ -0,0 +1,55 @@ +""" +Add unique constraint for (search_space_id, user_id, name) on search_source_connectors. 
+ +Revision ID: 58 +Revises: 57 +Create Date: 2026-01-06 14:00:00.000000 + +""" + +from collections.abc import Sequence + +from sqlalchemy import text + +from alembic import op + +revision: str = "58" +down_revision: str | None = "57" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None + + +def upgrade() -> None: + connection = op.get_bind() + constraint_exists = connection.execute( + text(""" + SELECT 1 FROM information_schema.table_constraints + WHERE table_name='search_source_connectors' + AND constraint_type='UNIQUE' + AND constraint_name='uq_searchspace_user_connector_name' + """) + ).scalar() + if not constraint_exists: + op.create_unique_constraint( + "uq_searchspace_user_connector_name", + "search_source_connectors", + ["search_space_id", "user_id", "name"], + ) + + +def downgrade() -> None: + connection = op.get_bind() + constraint_exists = connection.execute( + text(""" + SELECT 1 FROM information_schema.table_constraints + WHERE table_name='search_source_connectors' + AND constraint_type='UNIQUE' + AND constraint_name='uq_searchspace_user_connector_name' + """) + ).scalar() + if constraint_exists: + op.drop_constraint( + "uq_searchspace_user_connector_name", + "search_source_connectors", + type_="unique", + ) diff --git a/surfsense_backend/alembic/versions/59_add_teams_connector_enums.py b/surfsense_backend/alembic/versions/59_add_teams_connector_enums.py new file mode 100644 index 000000000..f13fbe9e5 --- /dev/null +++ b/surfsense_backend/alembic/versions/59_add_teams_connector_enums.py @@ -0,0 +1,160 @@ +"""Add TEAMS_CONNECTOR to SearchSourceConnectorType and DocumentType enums + +Revision ID: 59 +Revises: 58 +""" + +from collections.abc import Sequence + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision: str = "59" +down_revision: str | None = "58" +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None + +# Define the ENUM type name and the new value +CONNECTOR_ENUM = "searchsourceconnectortype" +CONNECTOR_NEW_VALUE = "TEAMS_CONNECTOR" +DOCUMENT_ENUM = "documenttype" +DOCUMENT_NEW_VALUE = "TEAMS_CONNECTOR" + + +def upgrade() -> None: + """Upgrade schema - add TEAMS_CONNECTOR to connector and document enum safely.""" + # Add TEAMS_CONNECTOR to searchsourceconnectortype only if not exists + op.execute( + f""" + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 FROM pg_enum + WHERE enumlabel = '{CONNECTOR_NEW_VALUE}' + AND enumtypid = (SELECT oid FROM pg_type WHERE typname = '{CONNECTOR_ENUM}') + ) THEN + ALTER TYPE {CONNECTOR_ENUM} ADD VALUE '{CONNECTOR_NEW_VALUE}'; + END IF; + END$$; + """ + ) + + # Add TEAMS_CONNECTOR to documenttype only if not exists + op.execute( + f""" + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 FROM pg_enum + WHERE enumlabel = '{DOCUMENT_NEW_VALUE}' + AND enumtypid = (SELECT oid FROM pg_type WHERE typname = '{DOCUMENT_ENUM}') + ) THEN + ALTER TYPE {DOCUMENT_ENUM} ADD VALUE '{DOCUMENT_NEW_VALUE}'; + END IF; + END$$; + """ + ) + + +def downgrade() -> None: + """Downgrade schema - remove TEAMS_CONNECTOR from connector and document enum.""" + + # Old enum name + old_connector_enum_name = f"{CONNECTOR_ENUM}_old" + old_document_enum_name = f"{DOCUMENT_ENUM}_old" + + # All connector values except TEAMS_CONNECTOR + old_connector_values = ( + "SERPER_API", + "TAVILY_API", + "SEARXNG_API", + "LINKUP_API", + "BAIDU_SEARCH_API", + "SLACK_CONNECTOR", + "NOTION_CONNECTOR", + "GITHUB_CONNECTOR", + "LINEAR_CONNECTOR", + "DISCORD_CONNECTOR", + "JIRA_CONNECTOR", + "CONFLUENCE_CONNECTOR", + "CLICKUP_CONNECTOR", + "GOOGLE_CALENDAR_CONNECTOR", + "GOOGLE_GMAIL_CONNECTOR", + "GOOGLE_DRIVE_CONNECTOR", + "AIRTABLE_CONNECTOR", + "LUMA_CONNECTOR", + "ELASTICSEARCH_CONNECTOR", + "WEBCRAWLER_CONNECTOR", + ) + + # All document 
values except TEAMS_CONNECTOR + old_document_values = ( + "EXTENSION", + "CRAWLED_URL", + "FILE", + "SLACK_CONNECTOR", + "NOTION_CONNECTOR", + "YOUTUBE_VIDEO", + "GITHUB_CONNECTOR", + "LINEAR_CONNECTOR", + "DISCORD_CONNECTOR", + "JIRA_CONNECTOR", + "CONFLUENCE_CONNECTOR", + "CLICKUP_CONNECTOR", + "GOOGLE_CALENDAR_CONNECTOR", + "GOOGLE_GMAIL_CONNECTOR", + "GOOGLE_DRIVE_FILE", + "AIRTABLE_CONNECTOR", + "LUMA_CONNECTOR", + "ELASTICSEARCH_CONNECTOR", + "BOOKSTACK_CONNECTOR", + "CIRCLEBACK", + "NOTE", + ) + + old_connector_values_sql = ", ".join([f"'{v}'" for v in old_connector_values]) + old_document_values_sql = ", ".join([f"'{v}'" for v in old_document_values]) + + # Table and column names + connector_table_name = "search_source_connectors" + connector_column_name = "connector_type" + document_table_name = "documents" + document_column_name = "document_type" + + # Connector Enum Downgrade Steps + # 1. Rename the current connector enum type + op.execute(f"ALTER TYPE {CONNECTOR_ENUM} RENAME TO {old_connector_enum_name}") + + # 2. Create the new connector enum type with the old values + op.execute(f"CREATE TYPE {CONNECTOR_ENUM} AS ENUM({old_connector_values_sql})") + + # 3. Alter the column to use the new connector enum type + op.execute( + f""" + ALTER TABLE {connector_table_name} + ALTER COLUMN {connector_column_name} TYPE {CONNECTOR_ENUM} + USING {connector_column_name}::text::{CONNECTOR_ENUM} + """ + ) + + # 4. Drop the old connector enum type + op.execute(f"DROP TYPE {old_connector_enum_name}") + + # Document Enum Downgrade Steps + # 1. Rename the current document enum type + op.execute(f"ALTER TYPE {DOCUMENT_ENUM} RENAME TO {old_document_enum_name}") + + # 2. Create the new document enum type with the old values + op.execute(f"CREATE TYPE {DOCUMENT_ENUM} AS ENUM({old_document_values_sql})") + + # 3. 
Alter the column to use the new document enum type + op.execute( + f""" + ALTER TABLE {document_table_name} + ALTER COLUMN {document_column_name} TYPE {DOCUMENT_ENUM} + USING {document_column_name}::text::{DOCUMENT_ENUM} + """ + ) + + # 4. Drop the old document enum type + op.execute(f"DROP TYPE {old_document_enum_name}") diff --git a/surfsense_backend/app/agents/new_chat/tools/knowledge_base.py b/surfsense_backend/app/agents/new_chat/tools/knowledge_base.py index a3cdad359..e91d865fa 100644 --- a/surfsense_backend/app/agents/new_chat/tools/knowledge_base.py +++ b/surfsense_backend/app/agents/new_chat/tools/knowledge_base.py @@ -26,6 +26,7 @@ _ALL_CONNECTORS: list[str] = [ "EXTENSION", "FILE", "SLACK_CONNECTOR", + "TEAMS_CONNECTOR", "NOTION_CONNECTOR", "YOUTUBE_VIDEO", "GITHUB_CONNECTOR", @@ -573,6 +574,7 @@ def create_search_knowledge_base_tool( - FILE: "User-uploaded documents (PDFs, Word, etc.)" (personal files) - NOTE: "SurfSense Notes" (notes created inside SurfSense) - SLACK_CONNECTOR: "Slack conversations and shared content" (personal workspace communications) + - TEAMS_CONNECTOR: "Microsoft Teams messages and conversations" (personal Teams communications) - NOTION_CONNECTOR: "Notion workspace pages and databases" (personal knowledge management) - YOUTUBE_VIDEO: "YouTube video transcripts and metadata" (personally saved videos) - GITHUB_CONNECTOR: "GitHub repository content and issues" (personal repositories and interactions) diff --git a/surfsense_backend/app/config/__init__.py b/surfsense_backend/app/config/__init__.py index f227f3131..448e2c253 100644 --- a/surfsense_backend/app/config/__init__.py +++ b/surfsense_backend/app/config/__init__.py @@ -117,6 +117,16 @@ class Config: DISCORD_REDIRECT_URI = os.getenv("DISCORD_REDIRECT_URI") DISCORD_BOT_TOKEN = os.getenv("DISCORD_BOT_TOKEN") + # Microsoft Teams OAuth + TEAMS_CLIENT_ID = os.getenv("TEAMS_CLIENT_ID") + TEAMS_CLIENT_SECRET = os.getenv("TEAMS_CLIENT_SECRET") + TEAMS_REDIRECT_URI = 
os.getenv("TEAMS_REDIRECT_URI") + + # ClickUp OAuth + CLICKUP_CLIENT_ID = os.getenv("CLICKUP_CLIENT_ID") + CLICKUP_CLIENT_SECRET = os.getenv("CLICKUP_CLIENT_SECRET") + CLICKUP_REDIRECT_URI = os.getenv("CLICKUP_REDIRECT_URI") + # LLM instances are now managed per-user through the LLMConfig system # Legacy environment variables removed in favor of user-specific configurations diff --git a/surfsense_backend/app/connectors/airtable_connector.py b/surfsense_backend/app/connectors/airtable_connector.py index 840b2276c..ec873e3fe 100644 --- a/surfsense_backend/app/connectors/airtable_connector.py +++ b/surfsense_backend/app/connectors/airtable_connector.py @@ -294,6 +294,12 @@ class AirtableConnector: Tuple of (records, error_message) """ try: + # Validate date strings before parsing + if not start_date or start_date.lower() in ("undefined", "null", "none"): + return [], "Invalid start_date: date string is required" + if not end_date or end_date.lower() in ("undefined", "null", "none"): + return [], "Invalid end_date: date string is required" + # Parse and validate dates start_dt = isoparse(start_date) end_dt = isoparse(end_date) @@ -382,3 +388,43 @@ class AirtableConnector: markdown_parts.append("") return "\n".join(markdown_parts) + + +# --- OAuth User Info --- + +AIRTABLE_WHOAMI_URL = "https://api.airtable.com/v0/meta/whoami" + + +async def fetch_airtable_user_email(access_token: str) -> str | None: + """ + Fetch user email from Airtable whoami API. 
+ + Args: + access_token: The Airtable OAuth access token + + Returns: + User's email address or None if fetch fails + """ + try: + async with httpx.AsyncClient() as client: + response = await client.get( + AIRTABLE_WHOAMI_URL, + headers={"Authorization": f"Bearer {access_token}"}, + timeout=10.0, + ) + + if response.status_code == 200: + data = response.json() + email = data.get("email") + if email: + logger.debug(f"Fetched Airtable user email: {email}") + return email + + logger.warning( + f"Failed to fetch Airtable user info: {response.status_code}" + ) + return None + + except Exception as e: + logger.warning(f"Error fetching Airtable user email: {e!s}") + return None diff --git a/surfsense_backend/app/connectors/airtable_history.py b/surfsense_backend/app/connectors/airtable_history.py new file mode 100644 index 000000000..64f6465fe --- /dev/null +++ b/surfsense_backend/app/connectors/airtable_history.py @@ -0,0 +1,175 @@ +""" +Airtable OAuth Connector. + +Handles OAuth-based authentication and token refresh for Airtable API access. +""" + +import logging + +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.config import config +from app.connectors.airtable_connector import AirtableConnector +from app.db import SearchSourceConnector +from app.routes.airtable_add_connector_route import refresh_airtable_token +from app.schemas.airtable_auth_credentials import AirtableAuthCredentialsBase +from app.utils.oauth_security import TokenEncryption + +logger = logging.getLogger(__name__) + + +class AirtableHistoryConnector: + """ + Airtable connector with OAuth support and automatic token refresh. + + This connector uses OAuth 2.0 access tokens to authenticate with the + Airtable API. It automatically refreshes expired tokens when needed. 
+ """ + + def __init__( + self, + session: AsyncSession, + connector_id: int, + credentials: AirtableAuthCredentialsBase | None = None, + ): + """ + Initialize the AirtableHistoryConnector with auto-refresh capability. + + Args: + session: Database session for updating connector + connector_id: Connector ID for direct updates + credentials: Airtable OAuth credentials (optional, will be loaded from DB if not provided) + """ + self._session = session + self._connector_id = connector_id + self._credentials = credentials + self._airtable_connector: AirtableConnector | None = None + + async def _get_valid_token(self) -> str: + """ + Get valid Airtable access token, refreshing if needed. + + Returns: + Valid access token + + Raises: + ValueError: If credentials are missing or invalid + Exception: If token refresh fails + """ + # Load credentials from DB if not provided + if self._credentials is None: + result = await self._session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == self._connector_id + ) + ) + connector = result.scalars().first() + + if not connector: + raise ValueError(f"Connector {self._connector_id} not found") + + config_data = connector.config.copy() + + # Decrypt credentials if they are encrypted + token_encrypted = config_data.get("_token_encrypted", False) + if token_encrypted and config.SECRET_KEY: + try: + token_encryption = TokenEncryption(config.SECRET_KEY) + + # Decrypt sensitive fields + if config_data.get("access_token"): + config_data["access_token"] = token_encryption.decrypt_token( + config_data["access_token"] + ) + if config_data.get("refresh_token"): + config_data["refresh_token"] = token_encryption.decrypt_token( + config_data["refresh_token"] + ) + + logger.info( + f"Decrypted Airtable credentials for connector {self._connector_id}" + ) + except Exception as e: + logger.error( + f"Failed to decrypt Airtable credentials for connector {self._connector_id}: {e!s}" + ) + raise ValueError( + f"Failed to 
decrypt Airtable credentials: {e!s}" + ) from e + + try: + self._credentials = AirtableAuthCredentialsBase.from_dict(config_data) + except Exception as e: + raise ValueError(f"Invalid Airtable credentials: {e!s}") from e + + # Check if token is expired and refreshable + if self._credentials.is_expired and self._credentials.is_refreshable: + try: + logger.info( + f"Airtable token expired for connector {self._connector_id}, refreshing..." + ) + + # Get connector for refresh + result = await self._session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == self._connector_id + ) + ) + connector = result.scalars().first() + + if not connector: + raise RuntimeError( + f"Connector {self._connector_id} not found; cannot refresh token." + ) + + # Refresh token + connector = await refresh_airtable_token(self._session, connector) + + # Reload credentials after refresh + config_data = connector.config.copy() + token_encrypted = config_data.get("_token_encrypted", False) + if token_encrypted and config.SECRET_KEY: + token_encryption = TokenEncryption(config.SECRET_KEY) + if config_data.get("access_token"): + config_data["access_token"] = token_encryption.decrypt_token( + config_data["access_token"] + ) + if config_data.get("refresh_token"): + config_data["refresh_token"] = token_encryption.decrypt_token( + config_data["refresh_token"] + ) + + self._credentials = AirtableAuthCredentialsBase.from_dict(config_data) + + # Invalidate cached connector so it's recreated with new token + self._airtable_connector = None + + logger.info( + f"Successfully refreshed Airtable token for connector {self._connector_id}" + ) + except Exception as e: + logger.error( + f"Failed to refresh Airtable token for connector {self._connector_id}: {e!s}" + ) + raise Exception( + f"Failed to refresh Airtable OAuth credentials: {e!s}" + ) from e + + return self._credentials.access_token + + async def _get_connector(self) -> AirtableConnector: + """ + Get or create 
AirtableConnector with valid token. + + Returns: + AirtableConnector instance + """ + if self._airtable_connector is None: + # Ensure we have valid credentials (this will refresh if needed) + await self._get_valid_token() + # Use the credentials object which is now guaranteed to be valid + if not self._credentials: + raise ValueError("Credentials not loaded") + self._airtable_connector = AirtableConnector(self._credentials) + return self._airtable_connector diff --git a/surfsense_backend/app/connectors/clickup_history.py b/surfsense_backend/app/connectors/clickup_history.py new file mode 100644 index 000000000..70e90028b --- /dev/null +++ b/surfsense_backend/app/connectors/clickup_history.py @@ -0,0 +1,349 @@ +""" +ClickUp History Module + +A module for retrieving data from ClickUp with OAuth support and backward compatibility. +Allows fetching tasks from workspaces and lists with automatic token refresh. +""" + +import logging +from typing import Any + +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.config import config +from app.connectors.clickup_connector import ClickUpConnector +from app.db import SearchSourceConnector +from app.routes.clickup_add_connector_route import refresh_clickup_token +from app.schemas.clickup_auth_credentials import ClickUpAuthCredentialsBase +from app.utils.oauth_security import TokenEncryption + +logger = logging.getLogger(__name__) + + +class ClickUpHistoryConnector: + """ + Class for retrieving data from ClickUp with OAuth support and backward compatibility. + """ + + def __init__( + self, + session: AsyncSession, + connector_id: int, + credentials: ClickUpAuthCredentialsBase | None = None, + api_token: str | None = None, # For backward compatibility + ): + """ + Initialize the ClickUpHistoryConnector. 
+ + Args: + session: Database session for token refresh + connector_id: Connector ID for direct updates + credentials: ClickUp OAuth credentials (optional, will be loaded from DB if not provided) + api_token: Legacy API token for backward compatibility (optional) + """ + self._session = session + self._connector_id = connector_id + self._credentials = credentials + self._api_token = api_token # Legacy API token + self._use_oauth = False + self._use_legacy = api_token is not None + self._clickup_client: ClickUpConnector | None = None + + async def _get_valid_token(self) -> str: + """ + Get valid ClickUp access token, refreshing if needed. + For legacy API tokens, returns the token directly. + + Returns: + Valid access token or API token + + Raises: + ValueError: If credentials are missing or invalid + Exception: If token refresh fails + """ + # If using legacy API token, return it directly + if self._use_legacy and self._api_token: + return self._api_token + + # Load credentials from DB if not provided + if self._credentials is None: + result = await self._session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == self._connector_id + ) + ) + connector = result.scalars().first() + + if not connector: + raise ValueError(f"Connector {self._connector_id} not found") + + config_data = connector.config.copy() + + # Check if using OAuth or legacy API token + is_oauth = config_data.get("_token_encrypted", False) or config_data.get( + "access_token" + ) + has_legacy_token = config_data.get("CLICKUP_API_TOKEN") is not None + + if is_oauth: + # OAuth 2.0 authentication + self._use_oauth = True + # Decrypt credentials if they are encrypted + token_encrypted = config_data.get("_token_encrypted", False) + if token_encrypted and config.SECRET_KEY: + try: + token_encryption = TokenEncryption(config.SECRET_KEY) + + # Decrypt sensitive fields + if config_data.get("access_token"): + config_data["access_token"] = ( + token_encryption.decrypt_token( + 
config_data["access_token"] + ) + ) + if config_data.get("refresh_token"): + config_data["refresh_token"] = ( + token_encryption.decrypt_token( + config_data["refresh_token"] + ) + ) + + logger.info( + f"Decrypted ClickUp OAuth credentials for connector {self._connector_id}" + ) + except Exception as e: + logger.error( + f"Failed to decrypt ClickUp OAuth credentials for connector {self._connector_id}: {e!s}" + ) + raise ValueError( + f"Failed to decrypt ClickUp OAuth credentials: {e!s}" + ) from e + + try: + self._credentials = ClickUpAuthCredentialsBase.from_dict( + config_data + ) + except Exception as e: + raise ValueError(f"Invalid ClickUp OAuth credentials: {e!s}") from e + elif has_legacy_token: + # Legacy API token authentication (backward compatibility) + self._use_legacy = True + self._api_token = config_data.get("CLICKUP_API_TOKEN") + + # Decrypt token if it's encrypted (legacy tokens might be encrypted) + token_encrypted = config_data.get("_token_encrypted", False) + if token_encrypted and config.SECRET_KEY and self._api_token: + try: + token_encryption = TokenEncryption(config.SECRET_KEY) + self._api_token = token_encryption.decrypt_token( + self._api_token + ) + logger.info( + f"Decrypted legacy ClickUp API token for connector {self._connector_id}" + ) + except Exception as e: + logger.warning( + f"Failed to decrypt legacy ClickUp API token for connector {self._connector_id}: {e!s}. " + "Trying to use token as-is (might be unencrypted)." 
+ ) + # Continue with token as-is - might be unencrypted legacy token + + if not self._api_token: + raise ValueError("ClickUp API token not found in connector config") + + # Return legacy token directly (no refresh needed) + return self._api_token + else: + raise ValueError( + "ClickUp credentials not found in connector config (neither OAuth nor API token)" + ) + + # Check if token is expired and refreshable (only for OAuth) + if ( + self._use_oauth + and self._credentials.is_expired + and self._credentials.is_refreshable + ): + try: + logger.info( + f"ClickUp token expired for connector {self._connector_id}, refreshing..." + ) + + # Get connector for refresh + result = await self._session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == self._connector_id + ) + ) + connector = result.scalars().first() + + if not connector: + raise RuntimeError( + f"Connector {self._connector_id} not found; cannot refresh token." + ) + + # Refresh token + connector = await refresh_clickup_token(self._session, connector) + + # Reload credentials after refresh + config_data = connector.config.copy() + token_encrypted = config_data.get("_token_encrypted", False) + if token_encrypted and config.SECRET_KEY: + token_encryption = TokenEncryption(config.SECRET_KEY) + if config_data.get("access_token"): + config_data["access_token"] = token_encryption.decrypt_token( + config_data["access_token"] + ) + if config_data.get("refresh_token"): + config_data["refresh_token"] = token_encryption.decrypt_token( + config_data["refresh_token"] + ) + + self._credentials = ClickUpAuthCredentialsBase.from_dict(config_data) + + # Invalidate cached client so it's recreated with new token + self._clickup_client = None + + logger.info( + f"Successfully refreshed ClickUp token for connector {self._connector_id}" + ) + except Exception as e: + logger.error( + f"Failed to refresh ClickUp token for connector {self._connector_id}: {e!s}" + ) + raise Exception( + f"Failed to refresh 
ClickUp OAuth credentials: {e!s}" + ) from e + + if self._use_oauth: + return self._credentials.access_token + else: + return self._api_token + + async def _get_client(self) -> ClickUpConnector: + """ + Get or create ClickUpConnector with valid token. + + Returns: + ClickUpConnector instance + """ + if self._clickup_client is None: + token = await self._get_valid_token() + # ClickUp API uses Bearer token for OAuth, or direct token for legacy + if self._use_oauth: + # For OAuth, use Bearer token format (ClickUp OAuth expects "Bearer {token}") + self._clickup_client = ClickUpConnector(api_token=f"Bearer {token}") + else: + # For legacy API token, use token directly (format: "pk_...") + self._clickup_client = ClickUpConnector(api_token=token) + return self._clickup_client + + async def close(self): + """Close any open connections.""" + self._clickup_client = None + + async def __aenter__(self): + """Async context manager entry.""" + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + """Async context manager exit.""" + await self.close() + + async def get_authorized_workspaces(self) -> dict[str, Any]: + """ + Fetch authorized workspaces (teams) from ClickUp. + + Returns: + Dictionary containing teams data + + Raises: + ValueError: If credentials have not been set + Exception: If the API request fails + """ + client = await self._get_client() + return client.get_authorized_workspaces() + + async def get_workspace_tasks( + self, workspace_id: str, include_closed: bool = False + ) -> list[dict[str, Any]]: + """ + Fetch all tasks from a ClickUp workspace. 
+ + Args: + workspace_id: ClickUp workspace (team) ID + include_closed: Whether to include closed tasks (default: False) + + Returns: + List of task objects + + Raises: + ValueError: If credentials have not been set + Exception: If the API request fails + """ + client = await self._get_client() + return client.get_workspace_tasks( + workspace_id=workspace_id, include_closed=include_closed + ) + + async def get_tasks_in_date_range( + self, + workspace_id: str, + start_date: str, + end_date: str, + include_closed: bool = False, + ) -> tuple[list[dict[str, Any]], str | None]: + """ + Fetch tasks from ClickUp within a specific date range. + + Args: + workspace_id: ClickUp workspace (team) ID + start_date: Start date in YYYY-MM-DD format + end_date: End date in YYYY-MM-DD format + include_closed: Whether to include closed tasks (default: False) + + Returns: + Tuple containing (tasks list, error message or None) + """ + client = await self._get_client() + return client.get_tasks_in_date_range( + workspace_id=workspace_id, + start_date=start_date, + end_date=end_date, + include_closed=include_closed, + ) + + async def get_task_details(self, task_id: str) -> dict[str, Any]: + """ + Fetch detailed information about a specific task. + + Args: + task_id: ClickUp task ID + + Returns: + Task details + + Raises: + ValueError: If credentials have not been set + Exception: If the API request fails + """ + client = await self._get_client() + return client.get_task_details(task_id) + + async def get_task_comments(self, task_id: str) -> dict[str, Any]: + """ + Fetch comments for a specific task. 
+ + Args: + task_id: ClickUp task ID + + Returns: + Task comments + + Raises: + ValueError: If credentials have not been set + Exception: If the API request fails + """ + client = await self._get_client() + return client.get_task_comments(task_id) diff --git a/surfsense_backend/app/connectors/google_gmail_connector.py b/surfsense_backend/app/connectors/google_gmail_connector.py index 402337448..10008ad73 100644 --- a/surfsense_backend/app/connectors/google_gmail_connector.py +++ b/surfsense_backend/app/connectors/google_gmail_connector.py @@ -6,6 +6,7 @@ Allows fetching emails from Gmail mailbox using Google OAuth credentials. import base64 import json +import logging import re from typing import Any @@ -21,6 +22,34 @@ from app.db import ( SearchSourceConnectorType, ) +logger = logging.getLogger(__name__) + + +def fetch_google_user_email(credentials: Credentials) -> str | None: + """ + Fetch user email from Gmail API using Google credentials. + + Uses the Gmail users.getProfile endpoint which returns the authenticated + user's email address. 
+ + Args: + credentials: Google OAuth Credentials object (not encrypted) + + Returns: + User's email address or None if fetch fails + """ + try: + service = build("gmail", "v1", credentials=credentials) + profile = service.users().getProfile(userId="me").execute() + email = profile.get("emailAddress") + if email: + logger.debug(f"Fetched Google user email: {email}") + return email + return None + except Exception as e: + logger.warning(f"Error fetching Google user email: {e!s}") + return None + class GoogleGmailConnector: """Class for retrieving emails from Gmail using Google OAuth credentials.""" diff --git a/surfsense_backend/app/connectors/linear_connector.py b/surfsense_backend/app/connectors/linear_connector.py index 148aa4d0a..b8206a40d 100644 --- a/surfsense_backend/app/connectors/linear_connector.py +++ b/surfsense_backend/app/connectors/linear_connector.py @@ -9,18 +9,65 @@ import logging from datetime import datetime from typing import Any +import httpx import requests from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select from app.config import config from app.db import SearchSourceConnector -from app.routes.linear_add_connector_route import refresh_linear_token from app.schemas.linear_auth_credentials import LinearAuthCredentialsBase from app.utils.oauth_security import TokenEncryption logger = logging.getLogger(__name__) +LINEAR_GRAPHQL_URL = "https://api.linear.app/graphql" + +ORGANIZATION_QUERY = """ +query { + organization { + name + } +} +""" + + +async def fetch_linear_organization_name(access_token: str) -> str | None: + """ + Fetch organization/workspace name from Linear GraphQL API. 
+ + Args: + access_token: The Linear OAuth access token + + Returns: + Organization name or None if fetch fails + """ + try: + async with httpx.AsyncClient() as client: + response = await client.post( + LINEAR_GRAPHQL_URL, + headers={ + "Authorization": access_token, + "Content-Type": "application/json", + }, + json={"query": ORGANIZATION_QUERY}, + timeout=10.0, + ) + + if response.status_code == 200: + data = response.json() + org_name = data.get("data", {}).get("organization", {}).get("name") + if org_name: + logger.debug(f"Fetched Linear organization name: {org_name}") + return org_name + + logger.warning(f"Failed to fetch Linear org info: {response.status_code}") + return None + + except Exception as e: + logger.warning(f"Error fetching Linear organization name: {e!s}") + return None + class LinearConnector: """Class for retrieving issues and comments from Linear.""" @@ -121,6 +168,9 @@ class LinearConnector: f"Connector {self._connector_id} not found; cannot refresh token." ) + # Lazy import to avoid circular dependency + from app.routes.linear_add_connector_route import refresh_linear_token + # Refresh token connector = await refresh_linear_token(self._session, connector) diff --git a/surfsense_backend/app/connectors/slack_history.py b/surfsense_backend/app/connectors/slack_history.py index dbf43bb24..2b36b9f96 100644 --- a/surfsense_backend/app/connectors/slack_history.py +++ b/surfsense_backend/app/connectors/slack_history.py @@ -377,7 +377,7 @@ class SlackHistory: else: raise # Re-raise to outer handler for not_in_channel or other SlackApiErrors - if not current_api_call_successful: + if not current_api_call_successful or result is None: continue # Retry the current page fetch due to handled rate limit # Process result if successful diff --git a/surfsense_backend/app/connectors/teams_connector.py b/surfsense_backend/app/connectors/teams_connector.py new file mode 100644 index 000000000..29c2db127 --- /dev/null +++ 
b/surfsense_backend/app/connectors/teams_connector.py @@ -0,0 +1,338 @@ +""" +Microsoft Teams Connector + +A module for interacting with Microsoft Teams Graph API to retrieve teams, channels, and message history. + +Supports OAuth-based authentication with token refresh. +""" + +import logging +from datetime import datetime, timezone +from typing import Any + +import httpx +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.config import config +from app.db import SearchSourceConnector +from app.routes.teams_add_connector_route import refresh_teams_token +from app.schemas.teams_auth_credentials import TeamsAuthCredentialsBase +from app.utils.oauth_security import TokenEncryption + +logger = logging.getLogger(__name__) + + +class TeamsConnector: + """Class for retrieving teams, channels, and message history from Microsoft Teams.""" + + # Microsoft Graph API endpoints + GRAPH_API_BASE = "https://graph.microsoft.com/v1.0" + + def __init__( + self, + access_token: str | None = None, + session: AsyncSession | None = None, + connector_id: int | None = None, + credentials: TeamsAuthCredentialsBase | None = None, + ): + """ + Initialize the TeamsConnector with an access token or OAuth credentials. + + Args: + access_token: Microsoft Graph API access token (optional, for backward compatibility) + session: Database session for token refresh (optional) + connector_id: Connector ID for token refresh (optional) + credentials: Teams OAuth credentials (optional, will be loaded from DB if not provided) + """ + self._session = session + self._connector_id = connector_id + self._credentials = credentials + self._access_token = access_token + + async def _get_valid_token(self) -> str: + """ + Get valid Microsoft Teams access token, refreshing if needed. 
+ + Returns: + Valid access token + + Raises: + ValueError: If credentials are missing or invalid + Exception: If token refresh fails + """ + # If we have a direct token (backward compatibility), use it + if ( + self._access_token + and self._session is None + and self._connector_id is None + and self._credentials is None + ): + return self._access_token + + # Load credentials from DB if not provided + if self._credentials is None: + if not self._session or not self._connector_id: + raise ValueError( + "Cannot load credentials: session and connector_id required" + ) + + result = await self._session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == self._connector_id + ) + ) + connector = result.scalars().first() + + if not connector: + raise ValueError(f"Connector {self._connector_id} not found") + + config_data = connector.config.copy() + + # Decrypt credentials if they are encrypted + token_encrypted = config_data.get("_token_encrypted", False) + if token_encrypted and config.SECRET_KEY: + try: + token_encryption = TokenEncryption(config.SECRET_KEY) + + # Decrypt sensitive fields + if config_data.get("access_token"): + config_data["access_token"] = token_encryption.decrypt_token( + config_data["access_token"] + ) + if config_data.get("refresh_token"): + config_data["refresh_token"] = token_encryption.decrypt_token( + config_data["refresh_token"] + ) + + logger.info( + "Decrypted Teams credentials for connector %s", + self._connector_id, + ) + except Exception as e: + logger.error( + "Failed to decrypt Teams credentials for connector %s: %s", + self._connector_id, + str(e), + ) + raise ValueError( + f"Failed to decrypt Teams credentials: {e!s}" + ) from e + + try: + self._credentials = TeamsAuthCredentialsBase.from_dict(config_data) + except Exception as e: + raise ValueError(f"Invalid Teams credentials: {e!s}") from e + + # Check if token is expired and refreshable + if self._credentials.is_expired and 
self._credentials.is_refreshable: + try: + logger.info( + "Teams token expired for connector %s, refreshing...", + self._connector_id, + ) + + # Get connector for refresh + result = await self._session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.id == self._connector_id + ) + ) + connector = result.scalars().first() + + if not connector: + raise RuntimeError( + f"Connector {self._connector_id} not found; cannot refresh token." + ) + + # Refresh token + connector = await refresh_teams_token(self._session, connector) + + # Reload credentials after refresh + config_data = connector.config.copy() + token_encrypted = config_data.get("_token_encrypted", False) + if token_encrypted and config.SECRET_KEY: + token_encryption = TokenEncryption(config.SECRET_KEY) + if config_data.get("access_token"): + config_data["access_token"] = token_encryption.decrypt_token( + config_data["access_token"] + ) + if config_data.get("refresh_token"): + config_data["refresh_token"] = token_encryption.decrypt_token( + config_data["refresh_token"] + ) + + self._credentials = TeamsAuthCredentialsBase.from_dict(config_data) + + logger.info( + "Successfully refreshed Teams token for connector %s", + self._connector_id, + ) + except Exception as e: + logger.error( + "Failed to refresh Teams token for connector %s: %s", + self._connector_id, + str(e), + ) + raise ValueError( + f"Failed to refresh Teams OAuth credentials: {e!s}" + ) from e + + return self._credentials.access_token + + async def get_joined_teams(self) -> list[dict[str, Any]]: + """ + Get list of all teams the user is a member of. + + Returns: + List of team objects with id, display_name, etc. 
+ """ + access_token = await self._get_valid_token() + + async with httpx.AsyncClient() as client: + response = await client.get( + f"{self.GRAPH_API_BASE}/me/joinedTeams", + headers={"Authorization": f"Bearer {access_token}"}, + timeout=30.0, + ) + + if response.status_code != 200: + raise ValueError( + f"Failed to get joined teams: {response.status_code} - {response.text}" + ) + + data = response.json() + return data.get("value", []) + + async def get_team_channels(self, team_id: str) -> list[dict[str, Any]]: + """ + Get list of all channels in a team. + + Args: + team_id: The team ID + + Returns: + List of channel objects + """ + access_token = await self._get_valid_token() + + async with httpx.AsyncClient() as client: + response = await client.get( + f"{self.GRAPH_API_BASE}/teams/{team_id}/channels", + headers={"Authorization": f"Bearer {access_token}"}, + timeout=30.0, + ) + + if response.status_code != 200: + raise ValueError( + f"Failed to get channels for team {team_id}: {response.status_code} - {response.text}" + ) + + data = response.json() + return data.get("value", []) + + async def get_channel_messages( + self, + team_id: str, + channel_id: str, + start_date: datetime | None = None, + end_date: datetime | None = None, + ) -> list[dict[str, Any]]: + """ + Get messages from a specific channel with optional date filtering. 
+ + Args: + team_id: The team ID + channel_id: The channel ID + start_date: Optional start date for filtering messages + end_date: Optional end date for filtering messages + + Returns: + List of message objects + """ + access_token = await self._get_valid_token() + + async with httpx.AsyncClient() as client: + url = f"{self.GRAPH_API_BASE}/teams/{team_id}/channels/{channel_id}/messages" + + # Note: The Graph API for channel messages doesn't support $filter parameter + # We fetch all messages and filter them client-side + response = await client.get( + url, + headers={"Authorization": f"Bearer {access_token}"}, + timeout=30.0, + ) + + if response.status_code != 200: + raise ValueError( + f"Failed to get messages from channel {channel_id}: {response.status_code} - {response.text}" + ) + + data = response.json() + messages = data.get("value", []) + + # Filter messages by date if needed (client-side filtering) + if start_date or end_date: + # Make sure comparison dates are timezone-aware (UTC) + if start_date and start_date.tzinfo is None: + start_date = start_date.replace(tzinfo=timezone.utc) + if end_date and end_date.tzinfo is None: + end_date = end_date.replace(tzinfo=timezone.utc) + + filtered_messages = [] + for message in messages: + created_at_str = message.get("createdDateTime") + if not created_at_str: + continue + + # Parse the ISO 8601 datetime string (already timezone-aware) + created_at = datetime.fromisoformat(created_at_str.replace('Z', '+00:00')) + + # Check if message is within date range + if start_date and created_at < start_date: + continue + if end_date and created_at > end_date: + continue + + filtered_messages.append(message) + + return filtered_messages + + return messages + + async def get_message_replies( + self, team_id: str, channel_id: str, message_id: str + ) -> list[dict[str, Any]]: + """ + Get replies to a specific message. 
+ + Args: + team_id: The team ID + channel_id: The channel ID + message_id: The message ID + + Returns: + List of reply message objects + """ + access_token = await self._get_valid_token() + + async with httpx.AsyncClient() as client: + url = f"{self.GRAPH_API_BASE}/teams/{team_id}/channels/{channel_id}/messages/{message_id}/replies" + + response = await client.get( + url, + headers={"Authorization": f"Bearer {access_token}"}, + timeout=30.0, + ) + + if response.status_code != 200: + logger.warning( + "Failed to get replies for message %s: %s - %s", + message_id, + response.status_code, + response.text, + ) + return [] + + data = response.json() + return data.get("value", []) diff --git a/surfsense_backend/app/connectors/teams_history.py b/surfsense_backend/app/connectors/teams_history.py new file mode 100644 index 000000000..314ee6304 --- /dev/null +++ b/surfsense_backend/app/connectors/teams_history.py @@ -0,0 +1,254 @@ +""" +Microsoft Teams History Module + +A module for retrieving conversation history from Microsoft Teams channels. +Allows fetching team lists, channel lists, and message history with date range filtering. +""" + +import logging +from datetime import datetime +from typing import Any + +from sqlalchemy.ext.asyncio import AsyncSession + +from app.connectors.teams_connector import TeamsConnector +from app.schemas.teams_auth_credentials import TeamsAuthCredentialsBase + +logger = logging.getLogger(__name__) + + +class TeamsHistory: + """Class for retrieving conversation history from Microsoft Teams channels.""" + + def __init__( + self, + access_token: str | None = None, + session: AsyncSession | None = None, + connector_id: int | None = None, + credentials: TeamsAuthCredentialsBase | None = None, + ): + """ + Initialize the TeamsHistory class. 
+ + Args: + access_token: Microsoft Graph API access token (optional, for backward compatibility) + session: Database session for token refresh (optional) + connector_id: Connector ID for token refresh (optional) + credentials: Teams OAuth credentials (optional, will be loaded from DB if not provided) + """ + self.connector = TeamsConnector( + access_token=access_token, + session=session, + connector_id=connector_id, + credentials=credentials, + ) + + async def get_all_teams(self) -> list[dict[str, Any]]: + """ + Get list of all teams the user has access to. + + Returns: + List of team objects containing team metadata. + """ + try: + teams = await self.connector.get_joined_teams() + logger.info("Retrieved %s teams", len(teams)) + return teams + except Exception as e: + logger.error("Error fetching teams: %s", str(e)) + raise + + async def get_channels_for_team(self, team_id: str) -> list[dict[str, Any]]: + """ + Get list of all channels in a specific team. + + Args: + team_id: The ID of the team + + Returns: + List of channel objects containing channel metadata. + """ + try: + channels = await self.connector.get_team_channels(team_id) + logger.info("Retrieved %s channels for team %s", len(channels), team_id) + return channels + except Exception as e: + logger.error("Error fetching channels for team %s: %s", team_id, str(e)) + raise + + async def get_messages_from_channel( + self, + team_id: str, + channel_id: str, + start_date: datetime | None = None, + end_date: datetime | None = None, + include_replies: bool = True, + ) -> list[dict[str, Any]]: + """ + Get messages from a specific channel with optional date filtering. + + Args: + team_id: The ID of the team + channel_id: The ID of the channel + start_date: Optional start date for filtering messages + end_date: Optional end date for filtering messages + include_replies: Whether to include reply messages (default: True) + + Returns: + List of message objects with content and metadata. 
+ """ + try: + messages = await self.connector.get_channel_messages( + team_id, channel_id, start_date, end_date + ) + + logger.info( + "Retrieved %s messages from channel %s in team %s", + len(messages), + channel_id, + team_id, + ) + + # Fetch replies if requested + if include_replies: + all_messages = [] + for message in messages: + all_messages.append(message) + # Get replies for this message + try: + replies = await self.connector.get_message_replies( + team_id, channel_id, message.get("id") + ) + all_messages.extend(replies) + except Exception: + logger.warning( + "Failed to get replies for message %s", + message.get("id"), + exc_info=True, + ) + # Continue without replies for this message + + logger.info( + "Total messages including replies: %s for channel %s", + len(all_messages), + channel_id, + ) + return all_messages + + return messages + + except Exception as e: + logger.error( + "Error fetching messages from channel %s in team %s: %s", + channel_id, + team_id, + str(e), + ) + raise + + async def get_all_messages_from_team( + self, + team_id: str, + start_date: datetime | None = None, + end_date: datetime | None = None, + include_replies: bool = True, + ) -> dict[str, list[dict[str, Any]]]: + """ + Get all messages from all channels in a team. + + Args: + team_id: The ID of the team + start_date: Optional start date for filtering messages + end_date: Optional end date for filtering messages + include_replies: Whether to include reply messages (default: True) + + Returns: + Dictionary mapping channel IDs to lists of messages. 
+ """ + try: + channels = await self.get_channels_for_team(team_id) + all_channel_messages = {} + + for channel in channels: + channel_id = channel.get("id") + channel_name = channel.get("displayName", "Unknown") + + try: + messages = await self.get_messages_from_channel( + team_id, channel_id, start_date, end_date, include_replies + ) + all_channel_messages[channel_id] = messages + logger.info( + "Fetched %s messages from channel '%s' (%s)", + len(messages), + channel_name, + channel_id, + ) + except Exception: + logger.error( + "Failed to fetch messages from channel '%s' (%s)", + channel_name, + channel_id, + exc_info=True, + ) + all_channel_messages[channel_id] = [] + + return all_channel_messages + + except Exception as e: + logger.error("Error fetching messages from team %s: %s", team_id, str(e)) + raise + + async def get_all_messages( + self, + start_date: datetime | None = None, + end_date: datetime | None = None, + include_replies: bool = True, + ) -> dict[str, dict[str, list[dict[str, Any]]]]: + """ + Get all messages from all teams and channels the user has access to. + + Args: + start_date: Optional start date for filtering messages + end_date: Optional end date for filtering messages + include_replies: Whether to include reply messages (default: True) + + Returns: + Nested dictionary: team_id -> channel_id -> list of messages. 
+ """ + try: + teams = await self.get_all_teams() + all_messages = {} + + for team in teams: + team_id = team.get("id") + team_name = team.get("displayName", "Unknown") + + try: + team_messages = await self.get_all_messages_from_team( + team_id, start_date, end_date, include_replies + ) + all_messages[team_id] = team_messages + total_messages = sum( + len(messages) for messages in team_messages.values() + ) + logger.info( + "Fetched %s total messages from team '%s' (%s)", + total_messages, + team_name, + team_id, + ) + except Exception: + logger.error( + "Failed to fetch messages from team '%s' (%s)", + team_name, + team_id, + exc_info=True, + ) + all_messages[team_id] = {} + + return all_messages + + except Exception as e: + logger.error("Error fetching all messages: %s", str(e)) + raise diff --git a/surfsense_backend/app/db.py b/surfsense_backend/app/db.py index fbd53bd06..d54254f9c 100644 --- a/surfsense_backend/app/db.py +++ b/surfsense_backend/app/db.py @@ -36,6 +36,7 @@ class DocumentType(str, Enum): CRAWLED_URL = "CRAWLED_URL" FILE = "FILE" SLACK_CONNECTOR = "SLACK_CONNECTOR" + TEAMS_CONNECTOR = "TEAMS_CONNECTOR" NOTION_CONNECTOR = "NOTION_CONNECTOR" YOUTUBE_VIDEO = "YOUTUBE_VIDEO" GITHUB_CONNECTOR = "GITHUB_CONNECTOR" @@ -62,6 +63,7 @@ class SearchSourceConnectorType(str, Enum): LINKUP_API = "LINKUP_API" BAIDU_SEARCH_API = "BAIDU_SEARCH_API" # Baidu AI Search API for Chinese web search SLACK_CONNECTOR = "SLACK_CONNECTOR" + TEAMS_CONNECTOR = "TEAMS_CONNECTOR" NOTION_CONNECTOR = "NOTION_CONNECTOR" GITHUB_CONNECTOR = "GITHUB_CONNECTOR" LINEAR_CONNECTOR = "LINEAR_CONNECTOR" diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py index 5015b80c2..b4e94c732 100644 --- a/surfsense_backend/app/routes/__init__.py +++ b/surfsense_backend/app/routes/__init__.py @@ -4,6 +4,7 @@ from .airtable_add_connector_route import ( router as airtable_add_connector_router, ) from .circleback_webhook_route import router as 
circleback_webhook_router +from .clickup_add_connector_route import router as clickup_add_connector_router from .confluence_add_connector_route import router as confluence_add_connector_router from .discord_add_connector_route import router as discord_add_connector_router from .documents_routes import router as documents_router @@ -30,6 +31,7 @@ from .rbac_routes import router as rbac_router from .search_source_connectors_routes import router as search_source_connectors_router from .search_spaces_routes import router as search_spaces_router from .slack_add_connector_route import router as slack_add_connector_router +from .teams_add_connector_route import router as teams_add_connector_router router = APIRouter() @@ -49,9 +51,11 @@ router.include_router(linear_add_connector_router) router.include_router(luma_add_connector_router) router.include_router(notion_add_connector_router) router.include_router(slack_add_connector_router) +router.include_router(teams_add_connector_router) router.include_router(discord_add_connector_router) router.include_router(jira_add_connector_router) router.include_router(confluence_add_connector_router) +router.include_router(clickup_add_connector_router) router.include_router(new_llm_config_router) # LLM configs with prompt configuration router.include_router(logs_router) router.include_router(circleback_webhook_router) # Circleback meeting webhooks diff --git a/surfsense_backend/app/routes/airtable_add_connector_route.py b/surfsense_backend/app/routes/airtable_add_connector_route.py index 9284d89e8..64fa104d8 100644 --- a/surfsense_backend/app/routes/airtable_add_connector_route.py +++ b/surfsense_backend/app/routes/airtable_add_connector_route.py @@ -11,9 +11,9 @@ from fastapi.responses import RedirectResponse from pydantic import ValidationError from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.future import select from app.config import config +from 
app.connectors.airtable_connector import fetch_airtable_user_email from app.db import ( SearchSourceConnector, SearchSourceConnectorType, @@ -22,6 +22,10 @@ from app.db import ( ) from app.schemas.airtable_auth_credentials import AirtableAuthCredentialsBase from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + generate_unique_connector_name, +) from app.utils.oauth_security import OAuthStateManager, TokenEncryption logger = logging.getLogger(__name__) @@ -275,6 +279,8 @@ async def airtable_callback( status_code=400, detail="No access token received from Airtable" ) + user_email = await fetch_airtable_user_email(access_token) + # Calculate expiration time (UTC, tz-aware) expires_at = None if token_json.get("expires_in"): @@ -297,39 +303,43 @@ async def airtable_callback( credentials_dict = credentials.to_dict() credentials_dict["_token_encrypted"] = True - # Check if connector already exists for this search space and user - existing_connector_result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type - == SearchSourceConnectorType.AIRTABLE_CONNECTOR, - ) + # Check for duplicate connector (same account already connected) + is_duplicate = await check_duplicate_connector( + session, + SearchSourceConnectorType.AIRTABLE_CONNECTOR, + space_id, + user_id, + user_email, ) - existing_connector = existing_connector_result.scalars().first() + if is_duplicate: + logger.warning( + f"Duplicate Airtable connector detected for user {user_id} with email {user_email}" + ) + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=airtable-connector" + ) - if existing_connector: - # Update existing connector - existing_connector.config = credentials_dict - existing_connector.name = 
"Airtable Connector" - existing_connector.is_indexable = True - logger.info( - f"Updated existing Airtable connector for user {user_id} in space {space_id}" - ) - else: - # Create new connector - new_connector = SearchSourceConnector( - name="Airtable Connector", - connector_type=SearchSourceConnectorType.AIRTABLE_CONNECTOR, - is_indexable=True, - config=credentials_dict, - search_space_id=space_id, - user_id=user_id, - ) - session.add(new_connector) - logger.info( - f"Created new Airtable connector for user {user_id} in space {space_id}" - ) + # Generate a unique, user-friendly connector name + connector_name = await generate_unique_connector_name( + session, + SearchSourceConnectorType.AIRTABLE_CONNECTOR, + space_id, + user_id, + user_email, + ) + # Create new connector + new_connector = SearchSourceConnector( + name=connector_name, + connector_type=SearchSourceConnectorType.AIRTABLE_CONNECTOR, + is_indexable=True, + config=credentials_dict, + search_space_id=space_id, + user_id=user_id, + ) + session.add(new_connector) + logger.info( + f"Created new Airtable connector for user {user_id} in space {space_id}" + ) try: await session.commit() @@ -338,7 +348,7 @@ async def airtable_callback( # Redirect to the frontend with success params for indexing config # Using query params to auto-open the popup with config view on new-chat page return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=airtable-connector" + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=airtable-connector&connectorId={new_connector.id}" ) except ValidationError as e: @@ -350,7 +360,7 @@ async def airtable_callback( await session.rollback() raise HTTPException( status_code=409, - detail=f"Integrity error: A connector with this type already exists. 
{e!s}", + detail=f"Database integrity error: {e!s}", ) from e except Exception as e: logger.error(f"Failed to create search source connector: {e!s}") @@ -371,7 +381,7 @@ async def airtable_callback( async def refresh_airtable_token( session: AsyncSession, connector: SearchSourceConnector -): +) -> SearchSourceConnector: """ Refresh the Airtable access token for a connector. @@ -401,6 +411,12 @@ async def refresh_airtable_token( status_code=500, detail="Failed to decrypt stored refresh token" ) from e + if not refresh_token: + raise HTTPException( + status_code=400, + detail="No refresh token available. Please re-authenticate.", + ) + auth_header = make_basic_auth_header( config.AIRTABLE_CLIENT_ID, config.AIRTABLE_CLIENT_SECRET ) @@ -425,8 +441,14 @@ async def refresh_airtable_token( ) if token_response.status_code != 200: + error_detail = token_response.text + try: + error_json = token_response.json() + error_detail = error_json.get("error_description", error_detail) + except Exception: + pass raise HTTPException( - status_code=400, detail="Token refresh failed: {token_response.text}" + status_code=400, detail=f"Token refresh failed: {error_detail}" ) token_json = token_response.json() @@ -468,6 +490,8 @@ async def refresh_airtable_token( ) return connector + except HTTPException: + raise except Exception as e: raise HTTPException( status_code=500, detail=f"Failed to refresh Airtable token: {e!s}" diff --git a/surfsense_backend/app/routes/clickup_add_connector_route.py b/surfsense_backend/app/routes/clickup_add_connector_route.py new file mode 100644 index 000000000..f962f65fb --- /dev/null +++ b/surfsense_backend/app/routes/clickup_add_connector_route.py @@ -0,0 +1,481 @@ +""" +ClickUp Connector OAuth Routes. + +Handles OAuth 2.0 authentication flow for ClickUp connector. 
+""" + +import logging +from datetime import UTC, datetime, timedelta +from uuid import UUID + +import httpx +from fastapi import APIRouter, Depends, HTTPException, Request +from fastapi.responses import RedirectResponse +from pydantic import ValidationError +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.config import config +from app.db import ( + SearchSourceConnector, + SearchSourceConnectorType, + User, + get_async_session, +) +from app.schemas.clickup_auth_credentials import ClickUpAuthCredentialsBase +from app.users import current_active_user +from app.utils.oauth_security import OAuthStateManager, TokenEncryption + +logger = logging.getLogger(__name__) + +router = APIRouter() + +# ClickUp OAuth endpoints +AUTHORIZATION_URL = "https://app.clickup.com/api" +TOKEN_URL = "https://api.clickup.com/api/v2/oauth/token" + +# Initialize security utilities +_state_manager = None +_token_encryption = None + + +def get_state_manager() -> OAuthStateManager: + """Get or create OAuth state manager instance.""" + global _state_manager + if _state_manager is None: + if not config.SECRET_KEY: + raise ValueError("SECRET_KEY must be set for OAuth security") + _state_manager = OAuthStateManager(config.SECRET_KEY) + return _state_manager + + +def get_token_encryption() -> TokenEncryption: + """Get or create token encryption instance.""" + global _token_encryption + if _token_encryption is None: + if not config.SECRET_KEY: + raise ValueError("SECRET_KEY must be set for token encryption") + _token_encryption = TokenEncryption(config.SECRET_KEY) + return _token_encryption + + +@router.get("/auth/clickup/connector/add") +async def connect_clickup(space_id: int, user: User = Depends(current_active_user)): + """ + Initiate ClickUp OAuth flow. 
+ + Args: + space_id: The search space ID + user: Current authenticated user + + Returns: + Authorization URL for redirect + """ + try: + if not space_id: + raise HTTPException(status_code=400, detail="space_id is required") + + if not config.CLICKUP_CLIENT_ID: + raise HTTPException(status_code=500, detail="ClickUp OAuth not configured.") + + if not config.SECRET_KEY: + raise HTTPException( + status_code=500, detail="SECRET_KEY not configured for OAuth security." + ) + + # Generate secure state parameter with HMAC signature + state_manager = get_state_manager() + state_encoded = state_manager.generate_secure_state(space_id, user.id) + + # Build authorization URL + from urllib.parse import urlencode + + auth_params = { + "client_id": config.CLICKUP_CLIENT_ID, + "redirect_uri": config.CLICKUP_REDIRECT_URI, + "state": state_encoded, + } + + auth_url = f"{AUTHORIZATION_URL}?{urlencode(auth_params)}" + + logger.info(f"Generated ClickUp OAuth URL for user {user.id}, space {space_id}") + return {"auth_url": auth_url} + + except Exception as e: + logger.error(f"Failed to initiate ClickUp OAuth: {e!s}", exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to initiate ClickUp OAuth: {e!s}" + ) from e + + +@router.get("/auth/clickup/connector/callback") +async def clickup_callback( + request: Request, + code: str | None = None, + error: str | None = None, + state: str | None = None, + session: AsyncSession = Depends(get_async_session), +): + """ + Handle ClickUp OAuth callback. 
+ + Args: + request: FastAPI request object + code: Authorization code from ClickUp (if user granted access) + error: Error code from ClickUp (if user denied access or error occurred) + state: State parameter containing user/space info + session: Database session + + Returns: + Redirect response to frontend + """ + try: + # Handle OAuth errors (e.g., user denied access) + if error: + logger.warning(f"ClickUp OAuth error: {error}") + # Try to decode state to get space_id for redirect, but don't fail if it's invalid + space_id = None + if state: + try: + state_manager = get_state_manager() + data = state_manager.validate_state(state) + space_id = data.get("space_id") + except Exception: + # If state is invalid, we'll redirect without space_id + logger.warning("Failed to validate state in error handler") + + # Redirect to frontend with error parameter + if space_id: + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=clickup_oauth_denied" + ) + else: + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=clickup_oauth_denied" + ) + + # Validate required parameters for successful flow + if not code: + raise HTTPException(status_code=400, detail="Missing authorization code") + if not state: + raise HTTPException(status_code=400, detail="Missing state parameter") + + # Validate and decode state with signature verification + state_manager = get_state_manager() + try: + data = state_manager.validate_state(state) + except HTTPException: + raise + except Exception as e: + raise HTTPException( + status_code=400, detail=f"Invalid state parameter: {e!s}" + ) from e + + user_id = UUID(data["user_id"]) + space_id = data["space_id"] + + # Validate redirect URI (security: ensure it matches configured value) + if not config.CLICKUP_REDIRECT_URI: + raise HTTPException( + status_code=500, detail="CLICKUP_REDIRECT_URI not configured" + ) + + # Exchange authorization code for access token + 
token_data = { + "client_id": config.CLICKUP_CLIENT_ID, + "client_secret": config.CLICKUP_CLIENT_SECRET, + "code": code, + } + + async with httpx.AsyncClient() as client: + token_response = await client.post( + TOKEN_URL, + json=token_data, + headers={"Content-Type": "application/json"}, + timeout=30.0, + ) + + if token_response.status_code != 200: + error_detail = token_response.text + try: + error_json = token_response.json() + error_detail = error_json.get("error", error_detail) + except Exception: + pass + raise HTTPException( + status_code=400, detail=f"Token exchange failed: {error_detail}" + ) + + token_json = token_response.json() + + # Extract access token + access_token = token_json.get("access_token") + if not access_token: + raise HTTPException( + status_code=400, detail="No access token received from ClickUp" + ) + + # Extract refresh token if available + refresh_token = token_json.get("refresh_token") + + # Encrypt sensitive tokens before storing + token_encryption = get_token_encryption() + + # Calculate expiration time (UTC, tz-aware) + expires_at = None + expires_in = token_json.get("expires_in") + if expires_in: + now_utc = datetime.now(UTC) + expires_at = now_utc + timedelta(seconds=int(expires_in)) + + # Get user information and workspace information from ClickUp API + user_info = {} + workspace_info = {} + try: + async with httpx.AsyncClient() as client: + # Get user info + user_response = await client.get( + "https://api.clickup.com/api/v2/user", + headers={"Authorization": f"Bearer {access_token}"}, + timeout=30.0, + ) + if user_response.status_code == 200: + user_data = user_response.json().get("user", {}) + user_info = { + "user_id": str(user_data.get("id")) + if user_data.get("id") is not None + else None, + "user_email": user_data.get("email"), + "user_name": user_data.get("username"), + } + + # Get workspace (team) info - get the first workspace + team_response = await client.get( + "https://api.clickup.com/api/v2/team", + 
headers={"Authorization": f"Bearer {access_token}"}, + timeout=30.0, + ) + if team_response.status_code == 200: + teams_data = team_response.json().get("teams", []) + if teams_data and len(teams_data) > 0: + first_team = teams_data[0] + workspace_info = { + "workspace_id": str(first_team.get("id")) + if first_team.get("id") is not None + else None, + "workspace_name": first_team.get("name"), + } + except Exception as e: + logger.warning(f"Failed to fetch user/workspace info from ClickUp: {e!s}") + + # Store the encrypted tokens and user/workspace info in connector config + connector_config = { + "access_token": token_encryption.encrypt_token(access_token), + "refresh_token": token_encryption.encrypt_token(refresh_token) + if refresh_token + else None, + "expires_in": expires_in, + "expires_at": expires_at.isoformat() if expires_at else None, + "user_id": user_info.get("user_id"), + "user_email": user_info.get("user_email"), + "user_name": user_info.get("user_name"), + "workspace_id": workspace_info.get("workspace_id"), + "workspace_name": workspace_info.get("workspace_name"), + # Mark that token is encrypted for backward compatibility + "_token_encrypted": True, + } + + # Check if connector already exists for this search space and user + existing_connector_result = await session.execute( + select(SearchSourceConnector).filter( + SearchSourceConnector.search_space_id == space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.connector_type + == SearchSourceConnectorType.CLICKUP_CONNECTOR, + ) + ) + existing_connector = existing_connector_result.scalars().first() + + if existing_connector: + # Update existing connector + existing_connector.config = connector_config + existing_connector.name = "ClickUp Connector" + existing_connector.is_indexable = True + logger.info( + f"Updated existing ClickUp connector for user {user_id} in space {space_id}" + ) + else: + # Create new connector + new_connector = SearchSourceConnector( + name="ClickUp 
Connector", + connector_type=SearchSourceConnectorType.CLICKUP_CONNECTOR, + is_indexable=True, + config=connector_config, + search_space_id=space_id, + user_id=user_id, + ) + session.add(new_connector) + logger.info( + f"Created new ClickUp connector for user {user_id} in space {space_id}" + ) + + try: + await session.commit() + logger.info(f"Successfully saved ClickUp connector for user {user_id}") + + # Redirect to the frontend with success params + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=clickup-connector" + ) + + except ValidationError as e: + await session.rollback() + raise HTTPException( + status_code=422, detail=f"Validation error: {e!s}" + ) from e + except IntegrityError as e: + await session.rollback() + raise HTTPException( + status_code=409, + detail=f"Integrity error: A connector with this type already exists. {e!s}", + ) from e + except Exception as e: + logger.error(f"Failed to create search source connector: {e!s}") + await session.rollback() + raise HTTPException( + status_code=500, + detail=f"Failed to create search source connector: {e!s}", + ) from e + + except HTTPException: + raise + except Exception as e: + logger.error(f"Failed to complete ClickUp OAuth: {e!s}", exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to complete ClickUp OAuth: {e!s}" + ) from e + + +async def refresh_clickup_token( + session: AsyncSession, connector: SearchSourceConnector +) -> SearchSourceConnector: + """ + Refresh the ClickUp access token for a connector. 
+ + Args: + session: Database session + connector: ClickUp connector to refresh + + Returns: + Updated connector object + """ + try: + logger.info(f"Refreshing ClickUp token for connector {connector.id}") + + credentials = ClickUpAuthCredentialsBase.from_dict(connector.config) + + # Decrypt tokens if they are encrypted + token_encryption = get_token_encryption() + is_encrypted = connector.config.get("_token_encrypted", False) + + refresh_token = credentials.refresh_token + if is_encrypted and refresh_token: + try: + refresh_token = token_encryption.decrypt_token(refresh_token) + except Exception as e: + logger.error(f"Failed to decrypt refresh token: {e!s}") + raise HTTPException( + status_code=500, detail="Failed to decrypt stored refresh token" + ) from e + + if not refresh_token: + raise HTTPException( + status_code=400, + detail="No refresh token available. Please re-authenticate.", + ) + + # Prepare token refresh data + refresh_data = { + "client_id": config.CLICKUP_CLIENT_ID, + "client_secret": config.CLICKUP_CLIENT_SECRET, + "refresh_token": refresh_token, + } + + async with httpx.AsyncClient() as client: + token_response = await client.post( + TOKEN_URL, + json=refresh_data, + headers={"Content-Type": "application/json"}, + timeout=30.0, + ) + + if token_response.status_code != 200: + error_detail = token_response.text + try: + error_json = token_response.json() + error_detail = error_json.get("error", error_detail) + except Exception: + pass + raise HTTPException( + status_code=400, detail=f"Token refresh failed: {error_detail}" + ) + + token_json = token_response.json() + + # Calculate expiration time (UTC, tz-aware) + expires_at = None + expires_in = token_json.get("expires_in") + if expires_in: + now_utc = datetime.now(UTC) + expires_at = now_utc + timedelta(seconds=int(expires_in)) + + # Encrypt new tokens before storing + access_token = token_json.get("access_token") + new_refresh_token = token_json.get("refresh_token") + + if not access_token: + 
raise HTTPException( + status_code=400, detail="No access token received from ClickUp refresh" + ) + + # Update credentials object with encrypted tokens + credentials.access_token = token_encryption.encrypt_token(access_token) + if new_refresh_token: + credentials.refresh_token = token_encryption.encrypt_token( + new_refresh_token + ) + credentials.expires_in = expires_in + credentials.expires_at = expires_at + + # Preserve user and workspace info + if not credentials.user_id: + credentials.user_id = connector.config.get("user_id") + if not credentials.user_email: + credentials.user_email = connector.config.get("user_email") + if not credentials.user_name: + credentials.user_name = connector.config.get("user_name") + if not credentials.workspace_id: + credentials.workspace_id = connector.config.get("workspace_id") + if not credentials.workspace_name: + credentials.workspace_name = connector.config.get("workspace_name") + + # Update connector config with encrypted tokens + credentials_dict = credentials.to_dict() + credentials_dict["_token_encrypted"] = True + connector.config = credentials_dict + await session.commit() + await session.refresh(connector) + + logger.info( + f"Successfully refreshed ClickUp token for connector {connector.id}" + ) + + return connector + except HTTPException: + raise + except Exception as e: + logger.error(f"Failed to refresh ClickUp token: {e!s}", exc_info=True) + raise HTTPException( + status_code=500, detail=f"Failed to refresh ClickUp token: {e!s}" + ) from e diff --git a/surfsense_backend/app/routes/confluence_add_connector_route.py b/surfsense_backend/app/routes/confluence_add_connector_route.py index e86d411b6..6c5830b17 100644 --- a/surfsense_backend/app/routes/confluence_add_connector_route.py +++ b/surfsense_backend/app/routes/confluence_add_connector_route.py @@ -14,7 +14,6 @@ from fastapi.responses import RedirectResponse from pydantic import ValidationError from sqlalchemy.exc import IntegrityError from 
sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.future import select from app.config import config from app.db import ( @@ -25,6 +24,11 @@ from app.db import ( ) from app.schemas.atlassian_auth_credentials import AtlassianAuthCredentialsBase from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + extract_identifier_from_credentials, + generate_unique_connector_name, +) from app.utils.oauth_security import OAuthStateManager, TokenEncryption logger = logging.getLogger(__name__) @@ -288,47 +292,56 @@ async def confluence_callback( "_token_encrypted": True, } - # Check if connector already exists for this search space and user - existing_connector_result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type - == SearchSourceConnectorType.CONFLUENCE_CONNECTOR, - ) + # Extract unique identifier from connector credentials + connector_identifier = extract_identifier_from_credentials( + SearchSourceConnectorType.CONFLUENCE_CONNECTOR, connector_config ) - existing_connector = existing_connector_result.scalars().first() - if existing_connector: - # Update existing connector - existing_connector.config = connector_config - existing_connector.name = "Confluence Connector" - existing_connector.is_indexable = True - logger.info( - f"Updated existing Confluence connector for user {user_id} in space {space_id}" + # Check for duplicate connector (same Confluence instance already connected) + is_duplicate = await check_duplicate_connector( + session, + SearchSourceConnectorType.CONFLUENCE_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + if is_duplicate: + logger.warning( + f"Duplicate Confluence connector detected for user {user_id} with instance {connector_identifier}" ) - else: - # Create new connector - new_connector = SearchSourceConnector( - 
name="Confluence Connector", - connector_type=SearchSourceConnectorType.CONFLUENCE_CONNECTOR, - is_indexable=True, - config=connector_config, - search_space_id=space_id, - user_id=user_id, - ) - session.add(new_connector) - logger.info( - f"Created new Confluence connector for user {user_id} in space {space_id}" + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=confluence-connector" ) + # Generate a unique, user-friendly connector name + connector_name = await generate_unique_connector_name( + session, + SearchSourceConnectorType.CONFLUENCE_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + # Create new connector + new_connector = SearchSourceConnector( + name=connector_name, + connector_type=SearchSourceConnectorType.CONFLUENCE_CONNECTOR, + is_indexable=True, + config=connector_config, + search_space_id=space_id, + user_id=user_id, + ) + session.add(new_connector) + logger.info( + f"Created new Confluence connector for user {user_id} in space {space_id}" + ) + try: await session.commit() logger.info(f"Successfully saved Confluence connector for user {user_id}") # Redirect to the frontend with success params return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=confluence-connector" + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=confluence-connector&connectorId={new_connector.id}" ) except ValidationError as e: @@ -340,7 +353,7 @@ async def confluence_callback( await session.rollback() raise HTTPException( status_code=409, - detail=f"Integrity error: A connector with this type already exists. 
{e!s}", + detail=f"Database integrity error: {e!s}", ) from e except Exception as e: logger.error(f"Failed to create search source connector: {e!s}") diff --git a/surfsense_backend/app/routes/discord_add_connector_route.py b/surfsense_backend/app/routes/discord_add_connector_route.py index 6bebac718..1d8b40fcf 100644 --- a/surfsense_backend/app/routes/discord_add_connector_route.py +++ b/surfsense_backend/app/routes/discord_add_connector_route.py @@ -14,7 +14,6 @@ from fastapi.responses import RedirectResponse from pydantic import ValidationError from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.future import select from app.config import config from app.db import ( @@ -25,6 +24,11 @@ from app.db import ( ) from app.schemas.discord_auth_credentials import DiscordAuthCredentialsBase from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + extract_identifier_from_credentials, + generate_unique_connector_name, +) from app.utils.oauth_security import OAuthStateManager, TokenEncryption logger = logging.getLogger(__name__) @@ -284,47 +288,56 @@ async def discord_callback( "_token_encrypted": True, } - # Check if connector already exists for this search space and user - existing_connector_result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type - == SearchSourceConnectorType.DISCORD_CONNECTOR, - ) + # Extract unique identifier from connector credentials + connector_identifier = extract_identifier_from_credentials( + SearchSourceConnectorType.DISCORD_CONNECTOR, connector_config ) - existing_connector = existing_connector_result.scalars().first() - if existing_connector: - # Update existing connector - existing_connector.config = connector_config - existing_connector.name = "Discord Connector" - 
existing_connector.is_indexable = True - logger.info( - f"Updated existing Discord connector for user {user_id} in space {space_id}" + # Check for duplicate connector (same server already connected) + is_duplicate = await check_duplicate_connector( + session, + SearchSourceConnectorType.DISCORD_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + if is_duplicate: + logger.warning( + f"Duplicate Discord connector detected for user {user_id} with server {connector_identifier}" ) - else: - # Create new connector - new_connector = SearchSourceConnector( - name="Discord Connector", - connector_type=SearchSourceConnectorType.DISCORD_CONNECTOR, - is_indexable=True, - config=connector_config, - search_space_id=space_id, - user_id=user_id, - ) - session.add(new_connector) - logger.info( - f"Created new Discord connector for user {user_id} in space {space_id}" + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=discord-connector" ) + # Generate a unique, user-friendly connector name + connector_name = await generate_unique_connector_name( + session, + SearchSourceConnectorType.DISCORD_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + # Create new connector + new_connector = SearchSourceConnector( + name=connector_name, + connector_type=SearchSourceConnectorType.DISCORD_CONNECTOR, + is_indexable=True, + config=connector_config, + search_space_id=space_id, + user_id=user_id, + ) + session.add(new_connector) + logger.info( + f"Created new Discord connector for user {user_id} in space {space_id}" + ) + try: await session.commit() logger.info(f"Successfully saved Discord connector for user {user_id}") # Redirect to the frontend with success params return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=discord-connector" + 
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=discord-connector&connectorId={new_connector.id}" ) except ValidationError as e: @@ -336,7 +349,7 @@ async def discord_callback( await session.rollback() raise HTTPException( status_code=409, - detail=f"Integrity error: A connector with this type already exists. {e!s}", + detail=f"Database integrity error: {e!s}", ) from e except Exception as e: logger.error(f"Failed to create search source connector: {e!s}") diff --git a/surfsense_backend/app/routes/google_calendar_add_connector_route.py b/surfsense_backend/app/routes/google_calendar_add_connector_route.py index 6c6ae4e40..08e5c2f04 100644 --- a/surfsense_backend/app/routes/google_calendar_add_connector_route.py +++ b/surfsense_backend/app/routes/google_calendar_add_connector_route.py @@ -12,9 +12,9 @@ from google_auth_oauthlib.flow import Flow from pydantic import ValidationError from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.future import select from app.config import config +from app.connectors.google_gmail_connector import fetch_google_user_email from app.db import ( SearchSourceConnector, SearchSourceConnectorType, @@ -22,6 +22,10 @@ from app.db import ( get_async_session, ) from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + generate_unique_connector_name, +) from app.utils.oauth_security import OAuthStateManager, TokenEncryption logger = logging.getLogger(__name__) @@ -172,6 +176,9 @@ async def calendar_callback( creds = flow.credentials creds_dict = json.loads(creds.to_json()) + # Fetch user email + user_email = fetch_google_user_email(creds) + # Encrypt sensitive credentials before storing token_encryption = get_token_encryption() @@ -190,24 +197,33 @@ async def calendar_callback( # Mark that credentials are encrypted for backward compatibility creds_dict["_token_encrypted"] 
= True - try: - # Check if a connector with the same type already exists for this search space and user - result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type - == SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR, - ) + # Check for duplicate connector (same account already connected) + is_duplicate = await check_duplicate_connector( + session, + SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR, + space_id, + user_id, + user_email, + ) + if is_duplicate: + logger.warning( + f"Duplicate Google Calendar connector detected for user {user_id} with email {user_email}" + ) + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=google-calendar-connector" + ) + + try: + # Generate a unique, user-friendly connector name + connector_name = await generate_unique_connector_name( + session, + SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR, + space_id, + user_id, + user_email, ) - existing_connector = result.scalars().first() - if existing_connector: - raise HTTPException( - status_code=409, - detail="A GOOGLE_CALENDAR_CONNECTOR connector already exists in this search space. 
Each search space can have only one connector of each type per user.", - ) db_connector = SearchSourceConnector( - name="Google Calendar Connector", + name=connector_name, connector_type=SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR, config=creds_dict, search_space_id=space_id, @@ -220,7 +236,7 @@ async def calendar_callback( # Redirect to the frontend with success params for indexing config # Using query params to auto-open the popup with config view on new-chat page return RedirectResponse( - f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-calendar-connector" + f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-calendar-connector&connectorId={db_connector.id}" ) except ValidationError as e: await session.rollback() @@ -231,7 +247,7 @@ async def calendar_callback( await session.rollback() raise HTTPException( status_code=409, - detail=f"Integrity error: A connector with this type already exists. 
{e!s}", + detail=f"Database integrity error: {e!s}", ) from e except HTTPException: await session.rollback() diff --git a/surfsense_backend/app/routes/google_drive_add_connector_route.py b/surfsense_backend/app/routes/google_drive_add_connector_route.py index 6caf3f204..e15aed762 100644 --- a/surfsense_backend/app/routes/google_drive_add_connector_route.py +++ b/surfsense_backend/app/routes/google_drive_add_connector_route.py @@ -29,6 +29,7 @@ from app.connectors.google_drive import ( get_start_page_token, list_folder_contents, ) +from app.connectors.google_gmail_connector import fetch_google_user_email from app.db import ( SearchSourceConnector, SearchSourceConnectorType, @@ -36,6 +37,10 @@ from app.db import ( get_async_session, ) from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + generate_unique_connector_name, +) from app.utils.oauth_security import OAuthStateManager, TokenEncryption # Relax token scope validation for Google OAuth @@ -227,6 +232,9 @@ async def drive_callback( creds = flow.credentials creds_dict = json.loads(creds.to_json()) + # Fetch user email + user_email = fetch_google_user_email(creds) + # Encrypt sensitive credentials before storing token_encryption = get_token_encryption() @@ -245,26 +253,33 @@ async def drive_callback( # Mark that credentials are encrypted for backward compatibility creds_dict["_token_encrypted"] = True - # Check if connector already exists for this space/user - result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type - == SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR, - ) + # Check for duplicate connector (same account already connected) + is_duplicate = await check_duplicate_connector( + session, + SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR, + space_id, + user_id, + user_email, ) - 
existing_connector = result.scalars().first() - - if existing_connector: - raise HTTPException( - status_code=409, - detail="A GOOGLE_DRIVE_CONNECTOR already exists in this search space. Each search space can have only one connector of each type per user.", + if is_duplicate: + logger.warning( + f"Duplicate Google Drive connector detected for user {user_id} with email {user_email}" + ) + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=google-drive-connector" ) - # Create new connector (NO folder selection here - happens at index time) + # Generate a unique, user-friendly connector name + connector_name = await generate_unique_connector_name( + session, + SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR, + space_id, + user_id, + user_email, + ) + db_connector = SearchSourceConnector( - name="Google Drive Connector", + name=connector_name, connector_type=SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR, config={ **creds_dict, @@ -301,7 +316,7 @@ async def drive_callback( ) return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-drive-connector" + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-drive-connector&connectorId={db_connector.id}" ) except HTTPException: @@ -318,7 +333,7 @@ async def drive_callback( logger.error(f"Database integrity error: {e!s}", exc_info=True) raise HTTPException( status_code=409, - detail="A connector with this configuration already exists.", + detail=f"Database integrity error: {e!s}", ) from e except Exception as e: await session.rollback() diff --git a/surfsense_backend/app/routes/google_gmail_add_connector_route.py b/surfsense_backend/app/routes/google_gmail_add_connector_route.py index 20a51c1a1..19fa019ce 100644 --- 
a/surfsense_backend/app/routes/google_gmail_add_connector_route.py +++ b/surfsense_backend/app/routes/google_gmail_add_connector_route.py @@ -12,9 +12,9 @@ from google_auth_oauthlib.flow import Flow from pydantic import ValidationError from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.future import select from app.config import config +from app.connectors.google_gmail_connector import fetch_google_user_email from app.db import ( SearchSourceConnector, SearchSourceConnectorType, @@ -22,6 +22,10 @@ from app.db import ( get_async_session, ) from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + generate_unique_connector_name, +) from app.utils.oauth_security import OAuthStateManager, TokenEncryption logger = logging.getLogger(__name__) @@ -203,6 +207,9 @@ async def gmail_callback( creds = flow.credentials creds_dict = json.loads(creds.to_json()) + # Fetch user email + user_email = fetch_google_user_email(creds) + # Encrypt sensitive credentials before storing token_encryption = get_token_encryption() @@ -221,24 +228,33 @@ async def gmail_callback( # Mark that credentials are encrypted for backward compatibility creds_dict["_token_encrypted"] = True - try: - # Check if a connector with the same type already exists for this search space and user - result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type - == SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR, - ) + # Check for duplicate connector (same account already connected) + is_duplicate = await check_duplicate_connector( + session, + SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR, + space_id, + user_id, + user_email, + ) + if is_duplicate: + logger.warning( + f"Duplicate Gmail connector detected for user {user_id} with email {user_email}" + ) + 
return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=google-gmail-connector" + ) + + try: + # Generate a unique, user-friendly connector name + connector_name = await generate_unique_connector_name( + session, + SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR, + space_id, + user_id, + user_email, ) - existing_connector = result.scalars().first() - if existing_connector: - raise HTTPException( - status_code=409, - detail="A GOOGLE_GMAIL_CONNECTOR connector already exists in this search space. Each search space can have only one connector of each type per user.", - ) db_connector = SearchSourceConnector( - name="Google Gmail Connector", + name=connector_name, connector_type=SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR, config=creds_dict, search_space_id=space_id, @@ -256,7 +272,7 @@ async def gmail_callback( # Redirect to the frontend with success params for indexing config # Using query params to auto-open the popup with config view on new-chat page return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-gmail-connector" + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-gmail-connector&connectorId={db_connector.id}" ) except IntegrityError as e: @@ -264,7 +280,7 @@ async def gmail_callback( logger.error(f"Database integrity error: {e!s}") raise HTTPException( status_code=409, - detail="A connector with this configuration already exists.", + detail=f"Database integrity error: {e!s}", ) from e except ValidationError as e: await session.rollback() diff --git a/surfsense_backend/app/routes/jira_add_connector_route.py b/surfsense_backend/app/routes/jira_add_connector_route.py index 740c30300..fb66f4da7 100644 --- a/surfsense_backend/app/routes/jira_add_connector_route.py +++ 
b/surfsense_backend/app/routes/jira_add_connector_route.py @@ -15,7 +15,6 @@ from fastapi.responses import RedirectResponse from pydantic import ValidationError from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.future import select from app.config import config from app.db import ( @@ -26,6 +25,11 @@ from app.db import ( ) from app.schemas.atlassian_auth_credentials import AtlassianAuthCredentialsBase from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + extract_identifier_from_credentials, + generate_unique_connector_name, +) from app.utils.oauth_security import OAuthStateManager, TokenEncryption logger = logging.getLogger(__name__) @@ -306,47 +310,56 @@ async def jira_callback( "_token_encrypted": True, } - # Check if connector already exists for this search space and user - existing_connector_result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type - == SearchSourceConnectorType.JIRA_CONNECTOR, - ) + # Extract unique identifier from connector credentials + connector_identifier = extract_identifier_from_credentials( + SearchSourceConnectorType.JIRA_CONNECTOR, connector_config ) - existing_connector = existing_connector_result.scalars().first() - if existing_connector: - # Update existing connector - existing_connector.config = connector_config - existing_connector.name = "Jira Connector" - existing_connector.is_indexable = True - logger.info( - f"Updated existing Jira connector for user {user_id} in space {space_id}" + # Check for duplicate connector (same Jira instance already connected) + is_duplicate = await check_duplicate_connector( + session, + SearchSourceConnectorType.JIRA_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + if is_duplicate: + logger.warning( + f"Duplicate Jira 
connector detected for user {user_id} with instance {connector_identifier}" ) - else: - # Create new connector - new_connector = SearchSourceConnector( - name="Jira Connector", - connector_type=SearchSourceConnectorType.JIRA_CONNECTOR, - is_indexable=True, - config=connector_config, - search_space_id=space_id, - user_id=user_id, - ) - session.add(new_connector) - logger.info( - f"Created new Jira connector for user {user_id} in space {space_id}" + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=jira-connector" ) + # Generate a unique, user-friendly connector name + connector_name = await generate_unique_connector_name( + session, + SearchSourceConnectorType.JIRA_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + # Create new connector + new_connector = SearchSourceConnector( + name=connector_name, + connector_type=SearchSourceConnectorType.JIRA_CONNECTOR, + is_indexable=True, + config=connector_config, + search_space_id=space_id, + user_id=user_id, + ) + session.add(new_connector) + logger.info( + f"Created new Jira connector for user {user_id} in space {space_id}" + ) + try: await session.commit() logger.info(f"Successfully saved Jira connector for user {user_id}") # Redirect to the frontend with success params return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=jira-connector" + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=jira-connector&connectorId={new_connector.id}" ) except ValidationError as e: @@ -358,7 +371,7 @@ async def jira_callback( await session.rollback() raise HTTPException( status_code=409, - detail=f"Integrity error: A connector with this type already exists. 
{e!s}", + detail=f"Database integrity error: {e!s}", ) from e except Exception as e: logger.error(f"Failed to create search source connector: {e!s}") diff --git a/surfsense_backend/app/routes/linear_add_connector_route.py b/surfsense_backend/app/routes/linear_add_connector_route.py index 7a7fc196a..fc9501bfb 100644 --- a/surfsense_backend/app/routes/linear_add_connector_route.py +++ b/surfsense_backend/app/routes/linear_add_connector_route.py @@ -14,9 +14,9 @@ from fastapi.responses import RedirectResponse from pydantic import ValidationError from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.future import select from app.config import config +from app.connectors.linear_connector import fetch_linear_organization_name from app.db import ( SearchSourceConnector, SearchSourceConnectorType, @@ -25,6 +25,10 @@ from app.db import ( ) from app.schemas.linear_auth_credentials import LinearAuthCredentialsBase from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + generate_unique_connector_name, +) from app.utils.oauth_security import OAuthStateManager, TokenEncryption logger = logging.getLogger(__name__) @@ -240,6 +244,9 @@ async def linear_callback( status_code=400, detail="No access token received from Linear" ) + # Fetch organization name + org_name = await fetch_linear_organization_name(access_token) + # Calculate expiration time (UTC, tz-aware) expires_at = None if token_json.get("expires_in"): @@ -260,39 +267,43 @@ async def linear_callback( "_token_encrypted": True, } - # Check if connector already exists for this search space and user - existing_connector_result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type - == SearchSourceConnectorType.LINEAR_CONNECTOR, - ) + # Check for duplicate connector (same 
organization already connected) + is_duplicate = await check_duplicate_connector( + session, + SearchSourceConnectorType.LINEAR_CONNECTOR, + space_id, + user_id, + org_name, ) - existing_connector = existing_connector_result.scalars().first() + if is_duplicate: + logger.warning( + f"Duplicate Linear connector detected for user {user_id} with org {org_name}" + ) + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=linear-connector" + ) - if existing_connector: - # Update existing connector - existing_connector.config = connector_config - existing_connector.name = "Linear Connector" - existing_connector.is_indexable = True - logger.info( - f"Updated existing Linear connector for user {user_id} in space {space_id}" - ) - else: - # Create new connector - new_connector = SearchSourceConnector( - name="Linear Connector", - connector_type=SearchSourceConnectorType.LINEAR_CONNECTOR, - is_indexable=True, - config=connector_config, - search_space_id=space_id, - user_id=user_id, - ) - session.add(new_connector) - logger.info( - f"Created new Linear connector for user {user_id} in space {space_id}" - ) + # Generate a unique, user-friendly connector name + connector_name = await generate_unique_connector_name( + session, + SearchSourceConnectorType.LINEAR_CONNECTOR, + space_id, + user_id, + org_name, + ) + # Create new connector + new_connector = SearchSourceConnector( + name=connector_name, + connector_type=SearchSourceConnectorType.LINEAR_CONNECTOR, + is_indexable=True, + config=connector_config, + search_space_id=space_id, + user_id=user_id, + ) + session.add(new_connector) + logger.info( + f"Created new Linear connector for user {user_id} in space {space_id}" + ) try: await session.commit() @@ -300,7 +311,7 @@ async def linear_callback( # Redirect to the frontend with success params return RedirectResponse( - 
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=linear-connector" + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=linear-connector&connectorId={new_connector.id}" ) except ValidationError as e: @@ -312,7 +323,7 @@ async def linear_callback( await session.rollback() raise HTTPException( status_code=409, - detail=f"Integrity error: A connector with this type already exists. {e!s}", + detail=f"Database integrity error: {e!s}", ) from e except Exception as e: logger.error(f"Failed to create search source connector: {e!s}") diff --git a/surfsense_backend/app/routes/notion_add_connector_route.py b/surfsense_backend/app/routes/notion_add_connector_route.py index 462ac398c..aac821793 100644 --- a/surfsense_backend/app/routes/notion_add_connector_route.py +++ b/surfsense_backend/app/routes/notion_add_connector_route.py @@ -14,7 +14,6 @@ from fastapi.responses import RedirectResponse from pydantic import ValidationError from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.future import select from app.config import config from app.db import ( @@ -25,6 +24,11 @@ from app.db import ( ) from app.schemas.notion_auth_credentials import NotionAuthCredentialsBase from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + extract_identifier_from_credentials, + generate_unique_connector_name, +) from app.utils.oauth_security import OAuthStateManager, TokenEncryption logger = logging.getLogger(__name__) @@ -262,47 +266,56 @@ async def notion_callback( "_token_encrypted": True, } - # Check if connector already exists for this search space and user - existing_connector_result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.user_id == user_id, - 
SearchSourceConnector.connector_type - == SearchSourceConnectorType.NOTION_CONNECTOR, - ) + # Extract unique identifier from connector credentials + connector_identifier = extract_identifier_from_credentials( + SearchSourceConnectorType.NOTION_CONNECTOR, connector_config ) - existing_connector = existing_connector_result.scalars().first() - if existing_connector: - # Update existing connector - existing_connector.config = connector_config - existing_connector.name = "Notion Connector" - existing_connector.is_indexable = True - logger.info( - f"Updated existing Notion connector for user {user_id} in space {space_id}" + # Check for duplicate connector (same workspace already connected) + is_duplicate = await check_duplicate_connector( + session, + SearchSourceConnectorType.NOTION_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + if is_duplicate: + logger.warning( + f"Duplicate Notion connector detected for user {user_id} with workspace {connector_identifier}" ) - else: - # Create new connector - new_connector = SearchSourceConnector( - name="Notion Connector", - connector_type=SearchSourceConnectorType.NOTION_CONNECTOR, - is_indexable=True, - config=connector_config, - search_space_id=space_id, - user_id=user_id, - ) - session.add(new_connector) - logger.info( - f"Created new Notion connector for user {user_id} in space {space_id}" + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=notion-connector" ) + # Generate a unique, user-friendly connector name + connector_name = await generate_unique_connector_name( + session, + SearchSourceConnectorType.NOTION_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + # Create new connector + new_connector = SearchSourceConnector( + name=connector_name, + connector_type=SearchSourceConnectorType.NOTION_CONNECTOR, + is_indexable=True, + config=connector_config, + search_space_id=space_id, + user_id=user_id, 
+ ) + session.add(new_connector) + logger.info( + f"Created new Notion connector for user {user_id} in space {space_id}" + ) + try: await session.commit() logger.info(f"Successfully saved Notion connector for user {user_id}") # Redirect to the frontend with success params return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=notion-connector" + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=notion-connector&connectorId={new_connector.id}" ) except ValidationError as e: @@ -314,7 +327,7 @@ async def notion_callback( await session.rollback() raise HTTPException( status_code=409, - detail=f"Integrity error: A connector with this type already exists. {e!s}", + detail=f"Database integrity error: {e!s}", ) from e except Exception as e: logger.error(f"Failed to create search source connector: {e!s}") diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py index d6fdedd7c..c9831484d 100644 --- a/surfsense_backend/app/routes/search_source_connectors_routes.py +++ b/surfsense_backend/app/routes/search_source_connectors_routes.py @@ -7,7 +7,8 @@ PUT /search-source-connectors/{connector_id} - Update a specific connector DELETE /search-source-connectors/{connector_id} - Delete a specific connector POST /search-source-connectors/{connector_id}/index - Index content from a connector to a search space -Note: Each search space can have only one connector of each type per user (based on search_space_id, user_id, and connector_type). +Note: OAuth connectors (Gmail, Drive, Slack, etc.) support multiple accounts per search space. +Non-OAuth connectors (BookStack, GitHub, etc.) are limited to one per search space. 
""" import logging @@ -125,6 +126,7 @@ async def create_search_source_connector( ) # Check if a connector with the same type already exists for this search space + # (for non-OAuth connectors that don't support multiple accounts) result = await session.execute( select(SearchSourceConnector).filter( SearchSourceConnector.search_space_id == search_space_id, @@ -556,6 +558,7 @@ async def index_connector_content( Currently supports: - SLACK_CONNECTOR: Indexes messages from all accessible Slack channels + - TEAMS_CONNECTOR: Indexes messages from all accessible Microsoft Teams channels - NOTION_CONNECTOR: Indexes pages from all accessible Notion pages - GITHUB_CONNECTOR: Indexes code and documentation from GitHub repositories - LINEAR_CONNECTOR: Indexes issues and comments from Linear @@ -629,6 +632,19 @@ async def index_connector_content( ) response_message = "Slack indexing started in the background." + elif connector.connector_type == SearchSourceConnectorType.TEAMS_CONNECTOR: + from app.tasks.celery_tasks.connector_tasks import ( + index_teams_messages_task, + ) + + logger.info( + f"Triggering Teams indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" + ) + index_teams_messages_task.delay( + connector_id, search_space_id, str(user.id), indexing_from, indexing_to + ) + response_message = "Teams indexing started in the background." + elif connector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR: from app.tasks.celery_tasks.connector_tasks import index_notion_pages_task @@ -1186,6 +1202,64 @@ async def run_discord_indexing( logger.error(f"Error in background Discord indexing task: {e!s}") +async def run_teams_indexing_with_new_session( + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str, + end_date: str, +): + """ + Create a new session and run the Microsoft Teams indexing task. + This prevents session leaks by creating a dedicated session for the background task. 
+ """ + async with async_session_maker() as session: + await run_teams_indexing( + session, connector_id, search_space_id, user_id, start_date, end_date + ) + + +async def run_teams_indexing( + session: AsyncSession, + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str, + end_date: str, +): + """ + Background task to run Microsoft Teams indexing. + Args: + session: Database session + connector_id: ID of the Teams connector + search_space_id: ID of the search space + user_id: ID of the user + start_date: Start date for indexing + end_date: End date for indexing + """ + try: + from app.tasks.connector_indexers.teams_indexer import index_teams_messages + + # Index Teams messages without updating last_indexed_at (we'll do it separately) + documents_processed, error_or_warning = await index_teams_messages( + session=session, + connector_id=connector_id, + search_space_id=search_space_id, + user_id=user_id, + start_date=start_date, + end_date=end_date, + update_last_indexed=False, # Don't update timestamp in the indexing function + ) + + # Update last_indexed_at after successful indexing (even if 0 new docs - they were checked) + await update_connector_last_indexed(session, connector_id) + logger.info( + f"Teams indexing completed successfully: {documents_processed} documents processed. 
{error_or_warning or ''}" + ) + except Exception as e: + logger.error(f"Error in background Teams indexing task: {e!s}") + + # Add new helper functions for Jira indexing async def run_jira_indexing_with_new_session( connector_id: int, diff --git a/surfsense_backend/app/routes/slack_add_connector_route.py b/surfsense_backend/app/routes/slack_add_connector_route.py index 71a362119..62d2ccaaa 100644 --- a/surfsense_backend/app/routes/slack_add_connector_route.py +++ b/surfsense_backend/app/routes/slack_add_connector_route.py @@ -14,7 +14,6 @@ from fastapi.responses import RedirectResponse from pydantic import ValidationError from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.future import select from app.config import config from app.db import ( @@ -25,6 +24,11 @@ from app.db import ( ) from app.schemas.slack_auth_credentials import SlackAuthCredentialsBase from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + extract_identifier_from_credentials, + generate_unique_connector_name, +) from app.utils.oauth_security import OAuthStateManager, TokenEncryption logger = logging.getLogger(__name__) @@ -272,47 +276,57 @@ async def slack_callback( "_token_encrypted": True, } - # Check if connector already exists for this search space and user - existing_connector_result = await session.execute( - select(SearchSourceConnector).filter( - SearchSourceConnector.search_space_id == space_id, - SearchSourceConnector.user_id == user_id, - SearchSourceConnector.connector_type - == SearchSourceConnectorType.SLACK_CONNECTOR, - ) + # Extract unique identifier from connector credentials + connector_identifier = extract_identifier_from_credentials( + SearchSourceConnectorType.SLACK_CONNECTOR, connector_config ) - existing_connector = existing_connector_result.scalars().first() - if existing_connector: - # Update existing connector - existing_connector.config = connector_config 
- existing_connector.name = "Slack Connector" - existing_connector.is_indexable = True - logger.info( - f"Updated existing Slack connector for user {user_id} in space {space_id}" + # Check for duplicate connector (same workspace already connected) + is_duplicate = await check_duplicate_connector( + session, + SearchSourceConnectorType.SLACK_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + if is_duplicate: + logger.warning( + f"Duplicate Slack connector detected for user {user_id} with workspace {connector_identifier}" ) - else: - # Create new connector - new_connector = SearchSourceConnector( - name="Slack Connector", - connector_type=SearchSourceConnectorType.SLACK_CONNECTOR, - is_indexable=True, - config=connector_config, - search_space_id=space_id, - user_id=user_id, - ) - session.add(new_connector) - logger.info( - f"Created new Slack connector for user {user_id} in space {space_id}" + return RedirectResponse( + url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=slack-connector" ) + # Generate a unique, user-friendly connector name + connector_name = await generate_unique_connector_name( + session, + SearchSourceConnectorType.SLACK_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + + # Create new connector + new_connector = SearchSourceConnector( + name=connector_name, + connector_type=SearchSourceConnectorType.SLACK_CONNECTOR, + is_indexable=True, + config=connector_config, + search_space_id=space_id, + user_id=user_id, + ) + session.add(new_connector) + logger.info( + f"Created new Slack connector for user {user_id} in space {space_id}" + ) + try: await session.commit() logger.info(f"Successfully saved Slack connector for user {user_id}") # Redirect to the frontend with success params return RedirectResponse( - url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=slack-connector" + 
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=slack-connector&connectorId={new_connector.id}" ) except ValidationError as e: @@ -324,7 +338,7 @@ async def slack_callback( await session.rollback() raise HTTPException( status_code=409, - detail=f"Integrity error: A connector with this type already exists. {e!s}", + detail=f"Database integrity error: {e!s}", ) from e except Exception as e: logger.error(f"Failed to create search source connector: {e!s}") diff --git a/surfsense_backend/app/routes/teams_add_connector_route.py b/surfsense_backend/app/routes/teams_add_connector_route.py new file mode 100644 index 000000000..ce014be0d --- /dev/null +++ b/surfsense_backend/app/routes/teams_add_connector_route.py @@ -0,0 +1,474 @@ +""" +Microsoft Teams Connector OAuth Routes. + +Handles OAuth 2.0 authentication flow for Microsoft Teams connector using Microsoft Graph API. +""" + +import logging +from datetime import UTC, datetime, timedelta +from uuid import UUID + +import httpx +from fastapi import APIRouter, Depends, HTTPException +from fastapi.responses import RedirectResponse +from sqlalchemy.exc import IntegrityError +from sqlalchemy.ext.asyncio import AsyncSession + +from app.config import config +from app.db import ( + SearchSourceConnector, + SearchSourceConnectorType, + User, + get_async_session, +) +from app.schemas.teams_auth_credentials import TeamsAuthCredentialsBase +from app.users import current_active_user +from app.utils.connector_naming import ( + check_duplicate_connector, + extract_identifier_from_credentials, + generate_unique_connector_name, +) +from app.utils.oauth_security import OAuthStateManager, TokenEncryption + +logger = logging.getLogger(__name__) + +router = APIRouter() + +# Microsoft identity platform endpoints +AUTHORIZATION_URL = "https://login.microsoftonline.com/common/oauth2/v2.0/authorize" +TOKEN_URL = "https://login.microsoftonline.com/common/oauth2/v2.0/token" + +# OAuth 
scopes for Microsoft Teams (Graph API) +SCOPES = [ + "offline_access", # Required for refresh tokens + "User.Read", # Read user profile + "Team.ReadBasic.All", # Read basic team information + "Channel.ReadBasic.All", # Read basic channel information + "ChannelMessage.Read.All", # Read messages in channels +] + +# Initialize security utilities +_state_manager = None +_token_encryption = None + + +def get_state_manager() -> OAuthStateManager: + """Get or create OAuth state manager instance.""" + global _state_manager + if _state_manager is None: + if not config.SECRET_KEY: + raise ValueError("SECRET_KEY must be set for OAuth security") + _state_manager = OAuthStateManager(config.SECRET_KEY) + return _state_manager + + +def get_token_encryption() -> TokenEncryption: + """Get or create token encryption instance.""" + global _token_encryption + if _token_encryption is None: + if not config.SECRET_KEY: + raise ValueError("SECRET_KEY must be set for token encryption") + _token_encryption = TokenEncryption(config.SECRET_KEY) + return _token_encryption + + +@router.get("/auth/teams/connector/add") +async def connect_teams(space_id: int, user: User = Depends(current_active_user)): + """ + Initiate Microsoft Teams OAuth flow. + + Args: + space_id: The search space ID + user: Current authenticated user + + Returns: + Authorization URL for redirect + """ + try: + if not space_id: + raise HTTPException(status_code=400, detail="space_id is required") + + if not config.TEAMS_CLIENT_ID: + raise HTTPException( + status_code=500, detail="Microsoft Teams OAuth not configured." + ) + + if not config.SECRET_KEY: + raise HTTPException( + status_code=500, detail="SECRET_KEY not configured for OAuth security." 
+ ) + + # Generate secure state parameter with HMAC signature + state_manager = get_state_manager() + state_encoded = state_manager.generate_secure_state(space_id, user.id) + + # Build authorization URL + from urllib.parse import urlencode + + auth_params = { + "client_id": config.TEAMS_CLIENT_ID, + "response_type": "code", + "redirect_uri": config.TEAMS_REDIRECT_URI, + "response_mode": "query", + "scope": " ".join(SCOPES), + "state": state_encoded, + } + + auth_url = f"{AUTHORIZATION_URL}?{urlencode(auth_params)}" + + logger.info( + "Generated Microsoft Teams OAuth URL for user %s, space %s", + user.id, + space_id, + ) + return {"auth_url": auth_url} + + except Exception as e: + logger.error( + "Failed to initiate Microsoft Teams OAuth: %s", str(e), exc_info=True + ) + raise HTTPException( + status_code=500, + detail=f"Failed to initiate Microsoft Teams OAuth: {e!s}", + ) from e + + +@router.get("/auth/teams/connector/callback") +async def teams_callback( + code: str | None = None, + error: str | None = None, + error_description: str | None = None, + state: str | None = None, + session: AsyncSession = Depends(get_async_session), +): + """ + Handle Microsoft Teams OAuth callback. 
+ + Args: + code: Authorization code from Microsoft (if user granted access) + error: Error code from Microsoft (if user denied access or error occurred) + error_description: Human-readable error description + state: State parameter containing user/space info + session: Database session + + Returns: + Redirect response to frontend + """ + try: + # Handle OAuth errors (e.g., user denied access) + if error: + error_msg = error_description or error + logger.warning("Microsoft Teams OAuth error: %s", error_msg) + redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=teams_auth_failed&message={error_msg}" + return RedirectResponse(url=redirect_url) + + # Validate required parameters + if not code or not state: + raise HTTPException( + status_code=400, detail="Missing required OAuth parameters" + ) + + # Verify and decode state parameter + state_manager = get_state_manager() + try: + data = state_manager.validate_state(state) + space_id = data["space_id"] + user_id = UUID(data["user_id"]) + except (HTTPException, ValueError, KeyError) as e: + logger.error("Invalid OAuth state: %s", str(e)) + redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=invalid_state" + return RedirectResponse(url=redirect_url) + + # Exchange authorization code for access token + token_data = { + "client_id": config.TEAMS_CLIENT_ID, + "client_secret": config.TEAMS_CLIENT_SECRET, + "code": code, + "redirect_uri": config.TEAMS_REDIRECT_URI, + "grant_type": "authorization_code", + } + + async with httpx.AsyncClient() as client: + token_response = await client.post( + TOKEN_URL, + data=token_data, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + timeout=30.0, + ) + + if token_response.status_code != 200: + error_detail = token_response.text + try: + error_json = token_response.json() + error_detail = error_json.get("error_description", error_detail) + except Exception: + pass + raise HTTPException( + status_code=400, detail=f"Token exchange failed: {error_detail}" + ) + 
+ token_json = token_response.json() + + # Extract tokens from response + access_token = token_json.get("access_token") + refresh_token = token_json.get("refresh_token") + + if not access_token: + raise HTTPException( + status_code=400, detail="No access token received from Microsoft" + ) + + # Encrypt sensitive tokens before storing + token_encryption = get_token_encryption() + + # Calculate expiration time (UTC, tz-aware) + expires_at = None + if token_json.get("expires_in"): + now_utc = datetime.now(UTC) + expires_at = now_utc + timedelta(seconds=int(token_json["expires_in"])) + + # Fetch user info from Microsoft Graph API + user_info = {} + tenant_info = {} + try: + async with httpx.AsyncClient() as client: + # Get user profile + user_response = await client.get( + "https://graph.microsoft.com/v1.0/me", + headers={"Authorization": f"Bearer {access_token}"}, + timeout=30.0, + ) + if user_response.status_code == 200: + user_data = user_response.json() + user_info = { + "user_id": user_data.get("id"), + "user_name": user_data.get("displayName"), + "user_email": user_data.get("mail") + or user_data.get("userPrincipalName"), + } + + # Get organization/tenant info + org_response = await client.get( + "https://graph.microsoft.com/v1.0/organization", + headers={"Authorization": f"Bearer {access_token}"}, + timeout=30.0, + ) + if org_response.status_code == 200: + org_data = org_response.json() + if org_data.get("value") and len(org_data["value"]) > 0: + org = org_data["value"][0] + tenant_info = { + "tenant_id": org.get("id"), + "tenant_name": org.get("displayName"), + } + except Exception as e: + logger.warning( + "Failed to fetch user/tenant info from Microsoft Graph: %s", str(e) + ) + + # Store the encrypted tokens and user/tenant info in connector config + connector_config = { + "access_token": token_encryption.encrypt_token(access_token), + "refresh_token": token_encryption.encrypt_token(refresh_token) + if refresh_token + else None, + "token_type": 
token_json.get("token_type", "Bearer"), + "expires_in": token_json.get("expires_in"), + "expires_at": expires_at.isoformat() if expires_at else None, + "scope": token_json.get("scope"), + "tenant_id": tenant_info.get("tenant_id"), + "tenant_name": tenant_info.get("tenant_name"), + "user_id": user_info.get("user_id"), + # Mark that token is encrypted for backward compatibility + "_token_encrypted": True, + } + + # Extract unique identifier from connector credentials + connector_identifier = extract_identifier_from_credentials( + SearchSourceConnectorType.TEAMS_CONNECTOR, connector_config + ) + + # Check for duplicate connector (same tenant already connected) + is_duplicate = await check_duplicate_connector( + session, + SearchSourceConnectorType.TEAMS_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + + if is_duplicate: + logger.warning( + "Duplicate Microsoft Teams connector for user %s, space %s, tenant %s", + user_id, + space_id, + tenant_info.get("tenant_name"), + ) + redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=duplicate_connector&message=This Microsoft Teams tenant is already connected to this space" + return RedirectResponse(url=redirect_url) + + # Generate unique connector name + connector_name = await generate_unique_connector_name( + session, + SearchSourceConnectorType.TEAMS_CONNECTOR, + space_id, + user_id, + connector_identifier, + ) + + # Create new connector + new_connector = SearchSourceConnector( + name=connector_name, + connector_type=SearchSourceConnectorType.TEAMS_CONNECTOR, + is_indexable=True, + config=connector_config, + search_space_id=space_id, + user_id=user_id, + ) + + try: + session.add(new_connector) + await session.commit() + await session.refresh(new_connector) + + logger.info( + "Successfully created Microsoft Teams connector %s for user %s", + new_connector.id, + user_id, + ) + + # Redirect to frontend with success + redirect_url = 
f"{config.NEXT_FRONTEND_URL}/dashboard?success=teams_connected&connector_id={new_connector.id}" + return RedirectResponse(url=redirect_url) + + except IntegrityError as e: + await session.rollback() + logger.error("Database integrity error creating Teams connector: %s", str(e)) + redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=connector_creation_failed" + return RedirectResponse(url=redirect_url) + + except HTTPException: + raise + except (IntegrityError, ValueError) as e: + logger.error("Teams OAuth callback error: %s", str(e), exc_info=True) + redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=teams_auth_error" + return RedirectResponse(url=redirect_url) + + +async def refresh_teams_token( + session: AsyncSession, connector: SearchSourceConnector +) -> SearchSourceConnector: + """ + Refresh Microsoft Teams OAuth tokens. + + Args: + session: Database session + connector: The connector to refresh + + Returns: + Updated connector with refreshed tokens + + Raises: + HTTPException: If token refresh fails + """ + logger.info( + "Refreshing Microsoft Teams OAuth tokens for connector %s", connector.id + ) + + credentials = TeamsAuthCredentialsBase.from_dict(connector.config) + + # Decrypt tokens if they are encrypted + token_encryption = get_token_encryption() + is_encrypted = connector.config.get("_token_encrypted", False) + refresh_token = credentials.refresh_token + + if is_encrypted and refresh_token: + try: + refresh_token = token_encryption.decrypt_token(refresh_token) + except Exception as e: + logger.error("Failed to decrypt refresh token: %s", str(e)) + raise HTTPException( + status_code=500, detail="Failed to decrypt stored refresh token" + ) from e + + if not refresh_token: + raise HTTPException( + status_code=400, + detail=f"No refresh token available for connector {connector.id}", + ) + + # Microsoft uses oauth2/v2.0/token for token refresh + refresh_data = { + "client_id": config.TEAMS_CLIENT_ID, + "client_secret": 
config.TEAMS_CLIENT_SECRET, + "grant_type": "refresh_token", + "refresh_token": refresh_token, + "scope": " ".join(SCOPES), + } + + async with httpx.AsyncClient() as client: + token_response = await client.post( + TOKEN_URL, + data=refresh_data, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + timeout=30.0, + ) + + if token_response.status_code != 200: + error_detail = token_response.text + try: + error_json = token_response.json() + error_detail = error_json.get("error_description", error_detail) + except Exception: + pass + raise HTTPException( + status_code=400, detail=f"Token refresh failed: {error_detail}" + ) + + token_json = token_response.json() + + # Extract new tokens + access_token = token_json.get("access_token") + new_refresh_token = token_json.get("refresh_token") + + if not access_token: + raise HTTPException( + status_code=400, detail="No access token received from Microsoft refresh" + ) + + # Calculate expiration time (UTC, tz-aware) + expires_at = None + expires_in = token_json.get("expires_in") + if expires_in: + now_utc = datetime.now(UTC) + expires_at = now_utc + timedelta(seconds=int(expires_in)) + + # Update credentials object with encrypted tokens + credentials.access_token = token_encryption.encrypt_token(access_token) + if new_refresh_token: + credentials.refresh_token = token_encryption.encrypt_token(new_refresh_token) + credentials.expires_in = expires_in + credentials.expires_at = expires_at + credentials.scope = token_json.get("scope") + + # Preserve tenant/user info + if not credentials.tenant_id: + credentials.tenant_id = connector.config.get("tenant_id") + if not credentials.tenant_name: + credentials.tenant_name = connector.config.get("tenant_name") + if not credentials.user_id: + credentials.user_id = connector.config.get("user_id") + + # Update connector config with encrypted tokens + credentials_dict = credentials.to_dict() + credentials_dict["_token_encrypted"] = True + connector.config = credentials_dict + + 
await session.commit() + await session.refresh(connector) + + logger.info( + "Successfully refreshed Microsoft Teams tokens for connector %s", connector.id + ) + + return connector diff --git a/surfsense_backend/app/schemas/clickup_auth_credentials.py b/surfsense_backend/app/schemas/clickup_auth_credentials.py new file mode 100644 index 000000000..d116ca92b --- /dev/null +++ b/surfsense_backend/app/schemas/clickup_auth_credentials.py @@ -0,0 +1,85 @@ +from datetime import UTC, datetime + +from pydantic import BaseModel, field_validator + + +class ClickUpAuthCredentialsBase(BaseModel): + access_token: str + refresh_token: str | None = None + expires_in: int | None = None + expires_at: datetime | None = None + user_id: str | None = None + user_email: str | None = None + user_name: str | None = None + workspace_id: str | None = None + workspace_name: str | None = None + + @property + def is_expired(self) -> bool: + """Check if the credentials have expired.""" + if self.expires_at is None: + return False # Long-lived token, treat as not expired + return self.expires_at <= datetime.now(UTC) + + @property + def is_refreshable(self) -> bool: + """Check if the credentials can be refreshed.""" + return self.refresh_token is not None + + def to_dict(self) -> dict: + """Convert credentials to dictionary for storage.""" + return { + "access_token": self.access_token, + "refresh_token": self.refresh_token, + "expires_in": self.expires_in, + "expires_at": self.expires_at.isoformat() if self.expires_at else None, + "user_id": self.user_id, + "user_email": self.user_email, + "user_name": self.user_name, + "workspace_id": self.workspace_id, + "workspace_name": self.workspace_name, + } + + @classmethod + def from_dict(cls, data: dict) -> "ClickUpAuthCredentialsBase": + """Create credentials from dictionary.""" + expires_at = None + if data.get("expires_at"): + expires_at = datetime.fromisoformat(data["expires_at"]) + + # Convert user_id to string if it's an integer (for backward 
compatibility) + user_id = data.get("user_id") + if user_id is not None and not isinstance(user_id, str): + user_id = str(user_id) + + # Convert workspace_id to string if it's an integer (for backward compatibility) + workspace_id = data.get("workspace_id") + if workspace_id is not None and not isinstance(workspace_id, str): + workspace_id = str(workspace_id) + + return cls( + access_token=data.get("access_token", ""), + refresh_token=data.get("refresh_token"), + expires_in=data.get("expires_in"), + expires_at=expires_at, + user_id=user_id, + user_email=data.get("user_email"), + user_name=data.get("user_name"), + workspace_id=workspace_id, + workspace_name=data.get("workspace_name"), + ) + + @field_validator("expires_at", mode="before") + @classmethod + def ensure_aware_utc(cls, v): + # Strings like "2025-08-26T14:46:57.367184" + if isinstance(v, str): + # add +00:00 if missing tz info + if v.endswith("Z"): + return datetime.fromisoformat(v.replace("Z", "+00:00")) + dt = datetime.fromisoformat(v) + return dt if dt.tzinfo else dt.replace(tzinfo=UTC) + # datetime objects + if isinstance(v, datetime): + return v if v.tzinfo else v.replace(tzinfo=UTC) + return v diff --git a/surfsense_backend/app/schemas/teams_auth_credentials.py b/surfsense_backend/app/schemas/teams_auth_credentials.py new file mode 100644 index 000000000..41688b102 --- /dev/null +++ b/surfsense_backend/app/schemas/teams_auth_credentials.py @@ -0,0 +1,79 @@ +""" +Microsoft Teams OAuth credentials schema. 
+""" + +from datetime import UTC, datetime + +from pydantic import BaseModel, field_validator + + +class TeamsAuthCredentialsBase(BaseModel): + """Microsoft Teams OAuth credentials.""" + + access_token: str + refresh_token: str | None = None + token_type: str = "Bearer" + expires_in: int | None = None + expires_at: datetime | None = None + scope: str | None = None + tenant_id: str | None = None + tenant_name: str | None = None + user_id: str | None = None + + @property + def is_expired(self) -> bool: + """Check if the credentials have expired.""" + if self.expires_at is None: + return False + return self.expires_at <= datetime.now(UTC) + + @property + def is_refreshable(self) -> bool: + """Check if the credentials can be refreshed.""" + return self.refresh_token is not None + + def to_dict(self) -> dict: + """Convert credentials to dictionary for storage.""" + return { + "access_token": self.access_token, + "refresh_token": self.refresh_token, + "token_type": self.token_type, + "expires_in": self.expires_in, + "expires_at": self.expires_at.isoformat() if self.expires_at else None, + "scope": self.scope, + "tenant_id": self.tenant_id, + "tenant_name": self.tenant_name, + "user_id": self.user_id, + } + + @classmethod + def from_dict(cls, data: dict) -> "TeamsAuthCredentialsBase": + """Create credentials from dictionary.""" + expires_at = None + if data.get("expires_at"): + expires_at = datetime.fromisoformat(data["expires_at"]) + + return cls( + access_token=data.get("access_token", ""), + refresh_token=data.get("refresh_token"), + token_type=data.get("token_type", "Bearer"), + expires_in=data.get("expires_in"), + expires_at=expires_at, + scope=data.get("scope"), + tenant_id=data.get("tenant_id"), + tenant_name=data.get("tenant_name"), + user_id=data.get("user_id"), + ) + + @field_validator("expires_at", mode="before") + @classmethod + def ensure_aware_utc(cls, v): + """Ensure datetime is timezone-aware (UTC).""" + if isinstance(v, str): + if v.endswith("Z"): + 
return datetime.fromisoformat(v.replace("Z", "+00:00")) + dt = datetime.fromisoformat(v) + return dt if dt.tzinfo else dt.replace(tzinfo=UTC) + if isinstance(v, datetime): + return v if v.tzinfo else v.replace(tzinfo=UTC) + return v diff --git a/surfsense_backend/app/services/connector_service.py b/surfsense_backend/app/services/connector_service.py index 4e874729c..832aee4cc 100644 --- a/surfsense_backend/app/services/connector_service.py +++ b/surfsense_backend/app/services/connector_service.py @@ -2269,6 +2269,80 @@ class ConnectorService: return result_object, discord_docs + async def search_teams( + self, + user_query: str, + search_space_id: int, + top_k: int = 20, + start_date: datetime | None = None, + end_date: datetime | None = None, + ) -> tuple: + """ + Search for Microsoft Teams messages and return both the source information and langchain documents. + + Uses combined chunk-level and document-level hybrid search with RRF fusion. + + Args: + user_query: The user's query + search_space_id: The search space ID to search in + top_k: Maximum number of results to return + start_date: Optional start date for filtering documents by updated_at + end_date: Optional end date for filtering documents by updated_at + + Returns: + tuple: (sources_info, langchain_documents) + """ + teams_docs = await self._combined_rrf_search( + query_text=user_query, + search_space_id=search_space_id, + document_type="TEAMS_CONNECTOR", + top_k=top_k, + start_date=start_date, + end_date=end_date, + ) + + # Early return if no results + if not teams_docs: + return { + "id": 53, + "name": "Microsoft Teams", + "type": "TEAMS_CONNECTOR", + "sources": [], + }, [] + + def _title_fn(_doc_info: dict[str, Any], metadata: dict[str, Any]) -> str: + team_name = metadata.get("team_name", "Unknown Team") + channel_name = metadata.get("channel_name", "Unknown Channel") + message_date = metadata.get("start_date", "") + title = f"Teams: {team_name} - {channel_name}" + if message_date: + title += f" 
({message_date})" + return title + + def _url_fn(_doc_info: dict[str, Any], metadata: dict[str, Any]) -> str: + team_id = metadata.get("team_id", "") + channel_id = metadata.get("channel_id", "") + if team_id and channel_id: + return f"https://teams.microsoft.com/l/channel/{channel_id}/General?groupId={team_id}" + return "" + + sources_list = self._build_chunk_sources_from_documents( + teams_docs, + title_fn=_title_fn, + url_fn=_url_fn, + description_fn=lambda chunk, _doc_info, _metadata: chunk.get("content", ""), + ) + + # Create result object + result_object = { + "id": 53, + "name": "Microsoft Teams", + "type": "TEAMS_CONNECTOR", + "sources": sources_list, + } + + return result_object, teams_docs + async def search_luma( self, user_query: str, diff --git a/surfsense_backend/app/services/docling_service.py b/surfsense_backend/app/services/docling_service.py index a61148c6d..82eaf7f74 100644 --- a/surfsense_backend/app/services/docling_service.py +++ b/surfsense_backend/app/services/docling_service.py @@ -128,42 +128,6 @@ class DoclingService: logger.error(f"❌ Docling initialization failed: {e}") raise RuntimeError(f"Docling initialization failed: {e}") from e - def _configure_easyocr_local_models(self): - """Configure EasyOCR to use pre-downloaded local models.""" - try: - import os - - import easyocr - - # Set SSL environment for EasyOCR downloads - os.environ["CURL_CA_BUNDLE"] = "" - os.environ["REQUESTS_CA_BUNDLE"] = "" - - # Try to use local models first, fallback to download if needed - try: - reader = easyocr.Reader( - ["en"], - download_enabled=False, - model_storage_directory="/root/.EasyOCR/model", - ) - logger.info("✅ EasyOCR configured for local models") - return reader - except Exception: - # If local models fail, allow download with SSL bypass - logger.info( - "🔄 Local models failed, attempting download with SSL bypass..." 
- ) - reader = easyocr.Reader( - ["en"], - download_enabled=True, - model_storage_directory="/root/.EasyOCR/model", - ) - logger.info("✅ EasyOCR configured with downloaded models") - return reader - except Exception as e: - logger.warning(f"⚠️ EasyOCR configuration failed: {e}") - return None - async def process_document( self, file_path: str, filename: str | None = None ) -> dict[str, Any]: diff --git a/surfsense_backend/app/services/llm_service.py b/surfsense_backend/app/services/llm_service.py index 68dd167b5..33f073d61 100644 --- a/surfsense_backend/app/services/llm_service.py +++ b/surfsense_backend/app/services/llm_service.py @@ -342,40 +342,7 @@ async def get_document_summary_llm( ) -# Backward-compatible aliases (deprecated - will be removed in future versions) -async def get_user_llm_instance( - session: AsyncSession, user_id: str, search_space_id: int, role: str -) -> ChatLiteLLM | None: - """ - Deprecated: Use get_search_space_llm_instance instead. - LLM preferences are now stored at the search space level, not per-user. 
- """ - return await get_search_space_llm_instance(session, search_space_id, role) - - -# Legacy aliases for backward compatibility -async def get_long_context_llm( - session: AsyncSession, search_space_id: int -) -> ChatLiteLLM | None: - """Deprecated: Use get_document_summary_llm instead.""" - return await get_document_summary_llm(session, search_space_id) - - -async def get_fast_llm( - session: AsyncSession, search_space_id: int -) -> ChatLiteLLM | None: - """Deprecated: Use get_agent_llm instead.""" - return await get_agent_llm(session, search_space_id) - - -async def get_strategic_llm( - session: AsyncSession, search_space_id: int -) -> ChatLiteLLM | None: - """Deprecated: Use get_document_summary_llm instead.""" - return await get_document_summary_llm(session, search_space_id) - - -# User-based legacy aliases (LLM preferences are now per-search-space, not per-user) +# Backward-compatible alias (LLM preferences are now per-search-space, not per-user) async def get_user_long_context_llm( session: AsyncSession, user_id: str, search_space_id: int ) -> ChatLiteLLM | None: @@ -384,23 +351,3 @@ async def get_user_long_context_llm( The user_id parameter is ignored as LLM preferences are now per-search-space. """ return await get_document_summary_llm(session, search_space_id) - - -async def get_user_fast_llm( - session: AsyncSession, user_id: str, search_space_id: int -) -> ChatLiteLLM | None: - """ - Deprecated: Use get_agent_llm instead. - The user_id parameter is ignored as LLM preferences are now per-search-space. - """ - return await get_agent_llm(session, search_space_id) - - -async def get_user_strategic_llm( - session: AsyncSession, user_id: str, search_space_id: int -) -> ChatLiteLLM | None: - """ - Deprecated: Use get_document_summary_llm instead. - The user_id parameter is ignored as LLM preferences are now per-search-space. 
- """ - return await get_document_summary_llm(session, search_space_id) diff --git a/surfsense_backend/app/services/query_service.py b/surfsense_backend/app/services/query_service.py deleted file mode 100644 index 863ff58a4..000000000 --- a/surfsense_backend/app/services/query_service.py +++ /dev/null @@ -1,114 +0,0 @@ -import datetime -from typing import Any - -from langchain_core.messages import AIMessage, HumanMessage, SystemMessage -from sqlalchemy.ext.asyncio import AsyncSession - -from app.services.llm_service import get_document_summary_llm - - -class QueryService: - """ - Service for query-related operations, including reformulation and processing. - """ - - @staticmethod - async def reformulate_query_with_chat_history( - user_query: str, - session: AsyncSession, - search_space_id: int, - chat_history_str: str | None = None, - ) -> str: - """ - Reformulate the user query using the search space's document summary LLM to make it more - effective for information retrieval and research purposes. - - Args: - user_query: The original user query - session: Database session for accessing LLM configs - search_space_id: Search Space ID to get LLM preferences - chat_history_str: Optional chat history string - - Returns: - str: The reformulated query - """ - if not user_query or not user_query.strip(): - return user_query - - try: - # Get the search space's document summary LLM instance - llm = await get_document_summary_llm(session, search_space_id) - if not llm: - print( - f"Warning: No document summary LLM configured for search space {search_space_id}. Using original query." - ) - return user_query - - # Create system message with instructions - system_message = SystemMessage( - content=f""" - Today's date: {datetime.datetime.now().strftime("%Y-%m-%d")} - You are a highly skilled AI assistant specializing in query optimization for advanced research. - Your primary objective is to transform a user's initial query into a highly effective search query. 
- This reformulated query will be used to retrieve information from diverse data sources. - - **Chat History Context:** - {chat_history_str if chat_history_str else "No prior conversation history is available."} - If chat history is provided, analyze it to understand the user's evolving information needs and the broader context of their request. Use this understanding to refine the current query, ensuring it builds upon or clarifies previous interactions. - - **Query Reformulation Guidelines:** - Your reformulated query should: - 1. **Enhance Specificity and Detail:** Add precision to narrow the search focus effectively, making the query less ambiguous and more targeted. - 2. **Resolve Ambiguities:** Identify and clarify vague terms or phrases. If a term has multiple meanings, orient the query towards the most likely one given the context. - 3. **Expand Key Concepts:** Incorporate relevant synonyms, related terms, and alternative phrasings for core concepts. This helps capture a wider range of relevant documents. - 4. **Deconstruct Complex Questions:** If the original query is multifaceted, break it down into its core searchable components or rephrase it to address each aspect clearly. The final output must still be a single, coherent query string. - 5. **Optimize for Comprehensiveness:** Ensure the query is structured to uncover all essential facets of the original request, aiming for thorough information retrieval suitable for research. - 6. **Maintain User Intent:** The reformulated query must stay true to the original intent of the user's query. Do not introduce new topics or shift the focus significantly. - - **Crucial Constraints:** - * **Conciseness and Effectiveness:** While aiming for comprehensiveness, the reformulated query MUST be as concise as possible. Eliminate all unnecessary verbosity. Focus on essential keywords, entities, and concepts that directly contribute to effective retrieval. 
- * **Single, Direct Output:** Return ONLY the reformulated query itself. Do NOT include any explanations, introductory phrases (e.g., "Reformulated query:", "Here is the optimized query:"), or any other surrounding text or markdown formatting. - - Your output should be a single, optimized query string, ready for immediate use in a search system. - """ - ) - - # Create human message with the user query - human_message = HumanMessage( - content=f"Reformulate this query for better research results: {user_query}" - ) - - # Get the response from the LLM - response = await llm.agenerate(messages=[[system_message, human_message]]) - - # Extract the reformulated query from the response - reformulated_query = response.generations[0][0].text.strip() - - # Return the original query if the reformulation is empty - if not reformulated_query: - return user_query - - return reformulated_query - - except Exception as e: - # Log the error and return the original query - print(f"Error reformulating query: {e}") - return user_query - - @staticmethod - async def langchain_chat_history_to_str(chat_history: list[Any]) -> str: - """ - Convert a list of chat history messages to a string. 
- """ - chat_history_str = "\n" - - for chat_message in chat_history: - if isinstance(chat_message, HumanMessage): - chat_history_str += f"{chat_message.content}\n" - elif isinstance(chat_message, AIMessage): - chat_history_str += f"{chat_message.content}\n" - elif isinstance(chat_message, SystemMessage): - chat_history_str += f"{chat_message.content}\n" - - chat_history_str += "" - return chat_history_str diff --git a/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py b/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py index 3cae1bbdb..1d1cbe361 100644 --- a/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py +++ b/surfsense_backend/app/tasks/celery_tasks/connector_tasks.py @@ -564,6 +564,49 @@ async def _index_discord_messages( ) +@celery_app.task(name="index_teams_messages", bind=True) +def index_teams_messages_task( + self, + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str, + end_date: str, +): + """Celery task to index Microsoft Teams messages.""" + import asyncio + + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + try: + loop.run_until_complete( + _index_teams_messages( + connector_id, search_space_id, user_id, start_date, end_date + ) + ) + finally: + loop.close() + + +async def _index_teams_messages( + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str, + end_date: str, +): + """Index Microsoft Teams messages with new session.""" + from app.routes.search_source_connectors_routes import ( + run_teams_indexing, + ) + + async with get_celery_session_maker()() as session: + await run_teams_indexing( + session, connector_id, search_space_id, user_id, start_date, end_date + ) + + @celery_app.task(name="index_luma_events", bind=True) def index_luma_events_task( self, diff --git a/surfsense_backend/app/tasks/connector_indexers/airtable_indexer.py b/surfsense_backend/app/tasks/connector_indexers/airtable_indexer.py index 3ea6dccc9..4d5a33b79 100644 --- 
a/surfsense_backend/app/tasks/connector_indexers/airtable_indexer.py +++ b/surfsense_backend/app/tasks/connector_indexers/airtable_indexer.py @@ -6,10 +6,8 @@ from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.ext.asyncio import AsyncSession from app.config import config -from app.connectors.airtable_connector import AirtableConnector +from app.connectors.airtable_history import AirtableHistoryConnector from app.db import Document, DocumentType, SearchSourceConnectorType -from app.routes.airtable_add_connector_route import refresh_airtable_token -from app.schemas.airtable_auth_credentials import AirtableAuthCredentialsBase from app.services.llm_service import get_user_long_context_llm from app.services.task_logging_service import TaskLoggingService from app.utils.document_converters import ( @@ -18,7 +16,6 @@ from app.utils.document_converters import ( generate_document_summary, generate_unique_identifier_hash, ) -from app.utils.oauth_security import TokenEncryption from .base import ( calculate_date_range, @@ -85,76 +82,11 @@ async def index_airtable_records( ) return 0, f"Connector with ID {connector_id} not found" - # Create credentials from connector config - config_data = ( - connector.config.copy() - ) # Work with a copy to avoid modifying original - - # Decrypt tokens if they are encrypted (only when explicitly marked) - token_encrypted = config_data.get("_token_encrypted", False) - if token_encrypted: - # Tokens are explicitly marked as encrypted, attempt decryption - if not config.SECRET_KEY: - await task_logger.log_task_failure( - log_entry, - f"SECRET_KEY not configured but tokens are marked as encrypted for connector {connector_id}", - "Missing SECRET_KEY for token decryption", - {"error_type": "MissingSecretKey"}, - ) - return 0, "SECRET_KEY not configured but tokens are marked as encrypted" - try: - token_encryption = TokenEncryption(config.SECRET_KEY) - - # Decrypt access_token - if config_data.get("access_token"): - 
config_data["access_token"] = token_encryption.decrypt_token( - config_data["access_token"] - ) - logger.info( - f"Decrypted Airtable access token for connector {connector_id}" - ) - - # Decrypt refresh_token if present - if config_data.get("refresh_token"): - config_data["refresh_token"] = token_encryption.decrypt_token( - config_data["refresh_token"] - ) - logger.info( - f"Decrypted Airtable refresh token for connector {connector_id}" - ) - except Exception as e: - await task_logger.log_task_failure( - log_entry, - f"Failed to decrypt Airtable tokens for connector {connector_id}: {e!s}", - "Token decryption failed", - {"error_type": "TokenDecryptionError"}, - ) - return 0, f"Failed to decrypt Airtable tokens: {e!s}" - # If _token_encrypted is False or not set, treat tokens as plaintext - - try: - credentials = AirtableAuthCredentialsBase.from_dict(config_data) - except Exception as e: - await task_logger.log_task_failure( - log_entry, - f"Invalid Airtable credentials in connector {connector_id}", - str(e), - {"error_type": "InvalidCredentials"}, - ) - return 0, f"Invalid Airtable credentials: {e!s}" - - # Check if credentials are expired - if credentials.is_expired: - await task_logger.log_task_failure( - log_entry, - f"Airtable credentials expired for connector {connector_id}", - "Credentials expired", - {"error_type": "ExpiredCredentials"}, - ) - - connector = await refresh_airtable_token(session, connector) - - # return 0, "Airtable credentials have expired. Please re-authenticate." 
+ # Normalize "undefined" strings to None (from frontend) + if start_date == "undefined" or start_date == "": + start_date = None + if end_date == "undefined" or end_date == "": + end_date = None # Calculate date range for indexing start_date_str, end_date_str = calculate_date_range( @@ -166,8 +98,9 @@ async def index_airtable_records( f"from {start_date_str} to {end_date_str}" ) - # Initialize Airtable connector - airtable_connector = AirtableConnector(credentials) + # Initialize Airtable history connector with auto-refresh capability + airtable_history = AirtableHistoryConnector(session, connector_id) + airtable_connector = await airtable_history._get_connector() total_processed = 0 try: @@ -459,47 +392,56 @@ async def index_airtable_records( documents_skipped += 1 continue # Skip this message and continue with others - # Update the last_indexed_at timestamp for the connector only if requested - total_processed = documents_indexed - if total_processed > 0: - await update_connector_last_indexed( - session, connector, update_last_indexed - ) + # Accumulate total processed across all tables + total_processed += documents_indexed # Final commit for any remaining documents not yet committed in batches - logger.info( - f"Final commit: Total {documents_indexed} Airtable records processed" - ) - await session.commit() - logger.info( - "Successfully committed all Airtable document changes to database" - ) + if documents_indexed > 0: + logger.info( + f"Final commit for table {table_name}: {documents_indexed} Airtable records processed" + ) + await session.commit() + logger.info( + f"Successfully committed all Airtable document changes for table {table_name}" + ) - # Log success - await task_logger.log_task_success( - log_entry, - f"Successfully completed Airtable indexing for connector {connector_id}", - { - "events_processed": total_processed, - "documents_indexed": documents_indexed, - "documents_skipped": documents_skipped, - "skipped_messages_count": 
len(skipped_messages), - }, - ) + # Update the last_indexed_at timestamp for the connector only if requested + # (after all tables in all bases are processed) + if total_processed > 0: + await update_connector_last_indexed( + session, connector, update_last_indexed + ) - logger.info( - f"Airtable indexing completed: {documents_indexed} new records, {documents_skipped} skipped" - ) - return ( - total_processed, - None, - ) # Return None as the error message to indicate success + # Log success after processing all bases and tables + await task_logger.log_task_success( + log_entry, + f"Successfully completed Airtable indexing for connector {connector_id}", + { + "events_processed": total_processed, + "documents_indexed": total_processed, + }, + ) + + logger.info( + f"Airtable indexing completed: {total_processed} total records processed" + ) + return ( + total_processed, + None, + ) # Return None as the error message to indicate success except Exception as e: logger.error( f"Fetching Airtable bases for connector {connector_id} failed: {e!s}", exc_info=True, ) + await task_logger.log_task_failure( + log_entry, + f"Failed to fetch Airtable bases for connector {connector_id}", + str(e), + {"error_type": type(e).__name__}, + ) + return 0, f"Failed to fetch Airtable bases: {e!s}" except SQLAlchemyError as db_error: await session.rollback() diff --git a/surfsense_backend/app/tasks/connector_indexers/clickup_indexer.py b/surfsense_backend/app/tasks/connector_indexers/clickup_indexer.py index b4a349163..e459584f8 100644 --- a/surfsense_backend/app/tasks/connector_indexers/clickup_indexer.py +++ b/surfsense_backend/app/tasks/connector_indexers/clickup_indexer.py @@ -2,13 +2,14 @@ ClickUp connector indexer. 
""" +import contextlib from datetime import datetime from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.ext.asyncio import AsyncSession from app.config import config -from app.connectors.clickup_connector import ClickUpConnector +from app.connectors.clickup_history import ClickUpHistoryConnector from app.db import Document, DocumentType, SearchSourceConnectorType from app.services.llm_service import get_user_long_context_llm from app.services.task_logging_service import TaskLoggingService @@ -82,26 +83,30 @@ async def index_clickup_tasks( ) return 0, error_msg - # Extract ClickUp configuration - clickup_api_token = connector.config.get("CLICKUP_API_TOKEN") + # Check if using OAuth (has access_token in config) or legacy (has CLICKUP_API_TOKEN) + has_oauth = connector.config.get("access_token") is not None + has_legacy = connector.config.get("CLICKUP_API_TOKEN") is not None - if not clickup_api_token: - error_msg = "ClickUp API token not found in connector configuration" + if not has_oauth and not has_legacy: + error_msg = "ClickUp credentials not found in connector configuration (neither OAuth nor API token)" await task_logger.log_task_failure( log_entry, - f"ClickUp API token not found in connector config for connector {connector_id}", - "Missing ClickUp token", - {"error_type": "MissingToken"}, + f"ClickUp credentials not found in connector config for connector {connector_id}", + "Missing ClickUp credentials", + {"error_type": "MissingCredentials"}, ) return 0, error_msg await task_logger.log_task_progress( log_entry, - f"Initializing ClickUp client for connector {connector_id}", + f"Initializing ClickUp client for connector {connector_id} ({'OAuth' if has_oauth else 'API Token'})", {"stage": "client_initialization"}, ) - clickup_client = ClickUpConnector(api_token=clickup_api_token) + # Use history connector which supports both OAuth and legacy API tokens + clickup_client = ClickUpHistoryConnector( + session=session, connector_id=connector_id + ) # Get 
authorized workspaces await task_logger.log_task_progress( @@ -110,7 +115,7 @@ async def index_clickup_tasks( {"stage": "workspace_fetching"}, ) - workspaces_response = clickup_client.get_authorized_workspaces() + workspaces_response = await clickup_client.get_authorized_workspaces() workspaces = workspaces_response.get("teams", []) if not workspaces: @@ -141,7 +146,7 @@ async def index_clickup_tasks( # Fetch tasks for date range if provided if start_date and end_date: - tasks, error = clickup_client.get_tasks_in_date_range( + tasks, error = await clickup_client.get_tasks_in_date_range( workspace_id=workspace_id, start_date=start_date, end_date=end_date, @@ -153,7 +158,7 @@ async def index_clickup_tasks( ) continue else: - tasks = clickup_client.get_workspace_tasks( + tasks = await clickup_client.get_workspace_tasks( workspace_id=workspace_id, include_closed=True ) @@ -393,10 +398,21 @@ async def index_clickup_tasks( logger.info( f"clickup indexing completed: {documents_indexed} new tasks, {documents_skipped} skipped" ) + + # Close client connection + try: + await clickup_client.close() + except Exception as e: + logger.warning(f"Error closing ClickUp client: {e!s}") + return total_processed, None except SQLAlchemyError as db_error: await session.rollback() + # Clean up the connector in case of error + if "clickup_client" in locals(): + with contextlib.suppress(Exception): + await clickup_client.close() await task_logger.log_task_failure( log_entry, f"Database error during ClickUp indexing for connector {connector_id}", @@ -407,6 +423,10 @@ async def index_clickup_tasks( return 0, f"Database error: {db_error!s}" except Exception as e: await session.rollback() + # Clean up the connector in case of error + if "clickup_client" in locals(): + with contextlib.suppress(Exception): + await clickup_client.close() await task_logger.log_task_failure( log_entry, f"Failed to index ClickUp tasks for connector {connector_id}", diff --git 
a/surfsense_backend/app/tasks/connector_indexers/confluence_indexer.py b/surfsense_backend/app/tasks/connector_indexers/confluence_indexer.py index 09022a30b..7289b0ccd 100644 --- a/surfsense_backend/app/tasks/connector_indexers/confluence_indexer.py +++ b/surfsense_backend/app/tasks/connector_indexers/confluence_indexer.py @@ -2,6 +2,7 @@ Confluence connector indexer. """ +import contextlib from datetime import datetime from sqlalchemy.exc import SQLAlchemyError @@ -142,10 +143,8 @@ async def index_confluence_pages( ) # Close client before returning if confluence_client: - try: + with contextlib.suppress(Exception): await confluence_client.close() - except Exception: - pass return 0, None else: await task_logger.log_task_failure( @@ -156,10 +155,8 @@ async def index_confluence_pages( ) # Close client on error if confluence_client: - try: + with contextlib.suppress(Exception): await confluence_client.close() - except Exception: - pass return 0, f"Failed to get Confluence pages: {error}" logger.info(f"Retrieved {len(pages)} pages from Confluence API") @@ -168,10 +165,8 @@ async def index_confluence_pages( logger.error(f"Error fetching Confluence pages: {e!s}", exc_info=True) # Close client on error if confluence_client: - try: + with contextlib.suppress(Exception): await confluence_client.close() - except Exception: - pass return 0, f"Error fetching Confluence pages: {e!s}" # Process and index each page @@ -437,10 +432,8 @@ async def index_confluence_pages( await session.rollback() # Close client if it exists if confluence_client: - try: + with contextlib.suppress(Exception): await confluence_client.close() - except Exception: - pass await task_logger.log_task_failure( log_entry, f"Database error during Confluence indexing for connector {connector_id}", @@ -453,10 +446,8 @@ async def index_confluence_pages( await session.rollback() # Close client if it exists if confluence_client: - try: + with contextlib.suppress(Exception): await confluence_client.close() - except 
Exception: - pass await task_logger.log_task_failure( log_entry, f"Failed to index Confluence pages for connector {connector_id}", diff --git a/surfsense_backend/app/tasks/connector_indexers/jira_indexer.py b/surfsense_backend/app/tasks/connector_indexers/jira_indexer.py index 7209deb49..fdbeb93b0 100644 --- a/surfsense_backend/app/tasks/connector_indexers/jira_indexer.py +++ b/surfsense_backend/app/tasks/connector_indexers/jira_indexer.py @@ -2,6 +2,7 @@ Jira connector indexer. """ +import contextlib from datetime import datetime from sqlalchemy.exc import SQLAlchemyError @@ -413,10 +414,8 @@ async def index_jira_issues( logger.error(f"Database error: {db_error!s}", exc_info=True) # Clean up the connector in case of error if "jira_client" in locals(): - try: + with contextlib.suppress(Exception): await jira_client.close() - except Exception: - pass return 0, f"Database error: {db_error!s}" except Exception as e: await session.rollback() @@ -429,8 +428,6 @@ async def index_jira_issues( logger.error(f"Failed to index JIRA issues: {e!s}", exc_info=True) # Clean up the connector in case of error if "jira_client" in locals(): - try: + with contextlib.suppress(Exception): await jira_client.close() - except Exception: - pass return 0, f"Failed to index JIRA issues: {e!s}" diff --git a/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py b/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py new file mode 100644 index 000000000..c1e778768 --- /dev/null +++ b/surfsense_backend/app/tasks/connector_indexers/teams_indexer.py @@ -0,0 +1,473 @@ +""" +Microsoft Teams connector indexer. 
+""" + +from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy.ext.asyncio import AsyncSession + +from app.config import config +from app.connectors.teams_history import TeamsHistory +from app.db import Document, DocumentType, SearchSourceConnectorType +from app.services.task_logging_service import TaskLoggingService +from app.utils.document_converters import ( + create_document_chunks, + generate_content_hash, + generate_unique_identifier_hash, +) + +from .base import ( + build_document_metadata_markdown, + calculate_date_range, + check_document_by_unique_identifier, + get_connector_by_id, + get_current_timestamp, + logger, + update_connector_last_indexed, +) + + +async def index_teams_messages( + session: AsyncSession, + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str | None = None, + end_date: str | None = None, + update_last_indexed: bool = True, +) -> tuple[int, str | None]: + """ + Index Microsoft Teams messages from all accessible teams and channels. 
+ + Args: + session: Database session + connector_id: ID of the Teams connector + search_space_id: ID of the search space to store documents in + user_id: ID of the user + start_date: Start date for indexing (YYYY-MM-DD format) + end_date: End date for indexing (YYYY-MM-DD format) + update_last_indexed: Whether to update the last_indexed_at timestamp (default: True) + + Returns: + Tuple containing (number of documents indexed, error message or None) + """ + task_logger = TaskLoggingService(session, search_space_id) + + # Log task start + log_entry = await task_logger.log_task_start( + task_name="teams_messages_indexing", + source="connector_indexing_task", + message=f"Starting Microsoft Teams messages indexing for connector {connector_id}", + metadata={ + "connector_id": connector_id, + "user_id": str(user_id), + "start_date": start_date, + "end_date": end_date, + }, + ) + + try: + # Get the connector + await task_logger.log_task_progress( + log_entry, + f"Retrieving Teams connector {connector_id} from database", + {"stage": "connector_retrieval"}, + ) + + connector = await get_connector_by_id( + session, connector_id, SearchSourceConnectorType.TEAMS_CONNECTOR + ) + + if not connector: + await task_logger.log_task_failure( + log_entry, + f"Connector with ID {connector_id} not found or is not a Teams connector", + "Connector not found", + {"error_type": "ConnectorNotFound"}, + ) + return ( + 0, + f"Connector with ID {connector_id} not found or is not a Teams connector", + ) + + # Initialize Teams client with auto-refresh support + await task_logger.log_task_progress( + log_entry, + f"Initializing Teams client for connector {connector_id}", + {"stage": "client_initialization"}, + ) + + teams_client = TeamsHistory(session=session, connector_id=connector_id) + + # Handle 'undefined' string from frontend (treat as None) + if start_date == "undefined" or start_date == "": + start_date = None + if end_date == "undefined" or end_date == "": + end_date = None + + # 
Calculate date range + await task_logger.log_task_progress( + log_entry, + "Calculating date range for Teams indexing", + { + "stage": "date_calculation", + "provided_start_date": start_date, + "provided_end_date": end_date, + }, + ) + + start_date_str, end_date_str = calculate_date_range( + connector, start_date, end_date, default_days_back=365 + ) + + logger.info( + "Indexing Teams messages from %s to %s", start_date_str, end_date_str + ) + + await task_logger.log_task_progress( + log_entry, + f"Fetching Teams from {start_date_str} to {end_date_str}", + { + "stage": "fetch_teams", + "start_date": start_date_str, + "end_date": end_date_str, + }, + ) + + # Get all teams + try: + teams = await teams_client.get_all_teams() + except Exception as e: + await task_logger.log_task_failure( + log_entry, + f"Failed to get Teams for connector {connector_id}", + str(e), + {"error_type": "TeamsFetchError"}, + ) + return 0, f"Failed to get Teams: {e!s}" + + if not teams: + await task_logger.log_task_success( + log_entry, + f"No Teams found for connector {connector_id}", + {"teams_found": 0}, + ) + return 0, "No Teams found" + + # Track the number of documents indexed + documents_indexed = 0 + documents_skipped = 0 + skipped_channels = [] + + await task_logger.log_task_progress( + log_entry, + f"Starting to process {len(teams)} Teams", + {"stage": "process_teams", "total_teams": len(teams)}, + ) + + # Convert date strings to datetime objects for filtering + from datetime import datetime, timezone + + start_datetime = None + end_datetime = None + if start_date_str: + # Parse as naive datetime and make it timezone-aware (UTC) + start_datetime = datetime.strptime(start_date_str, "%Y-%m-%d").replace(tzinfo=timezone.utc) + if end_date_str: + # Parse as naive datetime, set to end of day, and make it timezone-aware (UTC) + end_datetime = datetime.strptime(end_date_str, "%Y-%m-%d").replace(hour=23, minute=59, second=59, tzinfo=timezone.utc) + + # Process each team + for team in teams: + 
team_id = team.get("id") + team_name = team.get("displayName", "Unknown Team") + + try: + # Get channels for this team + channels = await teams_client.get_channels_for_team(team_id) + + if not channels: + logger.info("No channels found in team %s", team_name) + continue + + # Process each channel in the team + for channel in channels: + channel_id = channel.get("id") + channel_name = channel.get("displayName", "Unknown Channel") + + try: + # Get messages for this channel + messages = await teams_client.get_messages_from_channel( + team_id, + channel_id, + start_datetime, + end_datetime, + include_replies=True, + ) + + if not messages: + logger.info( + "No messages found in channel %s of team %s for the specified date range.", + channel_name, + team_name, + ) + documents_skipped += 1 + continue + + # Process each message + for msg in messages: + # Skip deleted messages or empty content + if msg.get("deletedDateTime"): + continue + + # Extract message details + message_id = msg.get("id", "") + created_datetime = msg.get("createdDateTime", "") + from_user = msg.get("from", {}) + user_name = from_user.get("user", {}).get( + "displayName", "Unknown User" + ) + user_email = from_user.get("user", {}).get( + "userPrincipalName", "Unknown Email" + ) + + # Extract message content + body = msg.get("body", {}) + content_type = body.get("contentType", "text") + msg_text = body.get("content", "") + + # Skip empty messages + if not msg_text or msg_text.strip() == "": + continue + + # Format document metadata + metadata_sections = [ + ( + "METADATA", + [ + f"TEAM_NAME: {team_name}", + f"TEAM_ID: {team_id}", + f"CHANNEL_NAME: {channel_name}", + f"CHANNEL_ID: {channel_id}", + f"MESSAGE_TIMESTAMP: {created_datetime}", + f"MESSAGE_USER_NAME: {user_name}", + f"MESSAGE_USER_EMAIL: {user_email}", + f"CONTENT_TYPE: {content_type}", + ], + ), + ( + "CONTENT", + [ + f"FORMAT: {content_type}", + "TEXT_START", + msg_text, + "TEXT_END", + ], + ), + ] + + # Build the document string + 
combined_document_string = build_document_metadata_markdown( + metadata_sections + ) + + # Generate unique identifier hash for this Teams message + unique_identifier = f"{team_id}_{channel_id}_{message_id}" + unique_identifier_hash = generate_unique_identifier_hash( + DocumentType.TEAMS_CONNECTOR, + unique_identifier, + search_space_id, + ) + + # Generate content hash + content_hash = generate_content_hash( + combined_document_string, search_space_id + ) + + # Check if document with this unique identifier already exists + existing_document = ( + await check_document_by_unique_identifier( + session, unique_identifier_hash + ) + ) + + if existing_document: + # Document exists - check if content has changed + if existing_document.content_hash == content_hash: + logger.info( + "Document for Teams message %s in channel %s unchanged. Skipping.", + message_id, + channel_name, + ) + documents_skipped += 1 + continue + else: + # Content has changed - update the existing document + logger.info( + "Content changed for Teams message %s in channel %s. 
Updating document.", + message_id, + channel_name, + ) + + # Update chunks and embedding + chunks = await create_document_chunks( + combined_document_string + ) + doc_embedding = config.embedding_model_instance.embed( + combined_document_string + ) + + # Update existing document + existing_document.content = combined_document_string + existing_document.content_hash = content_hash + existing_document.embedding = doc_embedding + existing_document.document_metadata = { + "team_name": team_name, + "team_id": team_id, + "channel_name": channel_name, + "channel_id": channel_id, + "start_date": start_date_str, + "end_date": end_date_str, + "message_count": len(messages), + "indexed_at": datetime.now().strftime( + "%Y-%m-%d %H:%M:%S" + ), + } + + # Delete old chunks and add new ones + existing_document.chunks = chunks + existing_document.updated_at = get_current_timestamp() + + documents_indexed += 1 + logger.info( + "Successfully updated Teams message %s", message_id + ) + continue + + # Document doesn't exist - create new one + # Process chunks + chunks = await create_document_chunks( + combined_document_string + ) + doc_embedding = config.embedding_model_instance.embed( + combined_document_string + ) + + # Create and store new document + document = Document( + search_space_id=search_space_id, + title=f"Teams - {team_name} - {channel_name}", + document_type=DocumentType.TEAMS_CONNECTOR, + document_metadata={ + "team_name": team_name, + "team_id": team_id, + "channel_name": channel_name, + "channel_id": channel_id, + "start_date": start_date_str, + "end_date": end_date_str, + "message_count": len(messages), + "indexed_at": datetime.now().strftime( + "%Y-%m-%d %H:%M:%S" + ), + }, + content=combined_document_string, + embedding=doc_embedding, + chunks=chunks, + content_hash=content_hash, + unique_identifier_hash=unique_identifier_hash, + updated_at=get_current_timestamp(), + ) + + session.add(document) + documents_indexed += 1 + + # Batch commit every 10 documents + if 
documents_indexed % 10 == 0: + logger.info( + "Committing batch: %s Teams messages processed so far", + documents_indexed, + ) + await session.commit() + + logger.info( + "Successfully indexed channel %s in team %s with %s messages", + channel_name, + team_name, + len(messages), + ) + + except Exception as e: + logger.error( + "Error processing channel %s in team %s: %s", + channel_name, + team_name, + str(e), + ) + skipped_channels.append( + f"{team_name}/{channel_name} (processing error)" + ) + documents_skipped += 1 + continue + + except Exception as e: + logger.error("Error processing team %s: %s", team_name, str(e)) + continue + + # Update the last_indexed_at timestamp for the connector only if requested + # and if we successfully indexed at least one document + total_processed = documents_indexed + if total_processed > 0: + await update_connector_last_indexed(session, connector, update_last_indexed) + + # Final commit for any remaining documents not yet committed in batches + logger.info( + "Final commit: Total %s Teams messages processed", documents_indexed + ) + await session.commit() + + # Prepare result message + result_message = None + if skipped_channels: + result_message = f"Processed {total_processed} messages. Skipped {len(skipped_channels)} channels: {', '.join(skipped_channels)}" + else: + result_message = f"Processed {total_processed} messages." 
+ + # Log success + await task_logger.log_task_success( + log_entry, + f"Successfully completed Teams indexing for connector {connector_id}", + { + "messages_processed": total_processed, + "documents_indexed": documents_indexed, + "documents_skipped": documents_skipped, + "skipped_channels_count": len(skipped_channels), + "result_message": result_message, + }, + ) + + logger.info( + "Teams indexing completed: %s new messages, %s skipped", + documents_indexed, + documents_skipped, + ) + return total_processed, result_message + + except SQLAlchemyError as db_error: + await session.rollback() + await task_logger.log_task_failure( + log_entry, + f"Database error during Teams indexing for connector {connector_id}", + str(db_error), + {"error_type": "SQLAlchemyError"}, + ) + logger.error("Database error: %s", str(db_error)) + return 0, f"Database error: {db_error!s}" + except Exception as e: + await session.rollback() + await task_logger.log_task_failure( + log_entry, + f"Failed to index Teams messages for connector {connector_id}", + str(e), + {"error_type": type(e).__name__}, + ) + logger.error("Failed to index Teams messages: %s", str(e)) + return 0, f"Failed to index Teams messages: {e!s}" diff --git a/surfsense_backend/app/utils/connector_naming.py b/surfsense_backend/app/utils/connector_naming.py new file mode 100644 index 000000000..731f419d6 --- /dev/null +++ b/surfsense_backend/app/utils/connector_naming.py @@ -0,0 +1,193 @@ +""" +Connector Naming Utilities. + +Provides functions for generating unique, user-friendly connector names. 
+""" + +from typing import Any +from urllib.parse import urlparse +from uuid import UUID + +from sqlalchemy import func +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.future import select + +from app.db import SearchSourceConnector, SearchSourceConnectorType + +# Friendly display names for connector types +BASE_NAME_FOR_TYPE = { + SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR: "Gmail", + SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR: "Google Drive", + SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR: "Google Calendar", + SearchSourceConnectorType.SLACK_CONNECTOR: "Slack", + SearchSourceConnectorType.TEAMS_CONNECTOR: "Microsoft Teams", + SearchSourceConnectorType.NOTION_CONNECTOR: "Notion", + SearchSourceConnectorType.LINEAR_CONNECTOR: "Linear", + SearchSourceConnectorType.JIRA_CONNECTOR: "Jira", + SearchSourceConnectorType.DISCORD_CONNECTOR: "Discord", + SearchSourceConnectorType.CONFLUENCE_CONNECTOR: "Confluence", + SearchSourceConnectorType.AIRTABLE_CONNECTOR: "Airtable", +} + + +def get_base_name_for_type(connector_type: SearchSourceConnectorType) -> str: + """Get a friendly display name for a connector type.""" + return BASE_NAME_FOR_TYPE.get( + connector_type, connector_type.replace("_", " ").title() + ) + + +def extract_identifier_from_credentials( + connector_type: SearchSourceConnectorType, + credentials: dict[str, Any], +) -> str | None: + """ + Extract a unique identifier from connector credentials. + + Args: + connector_type: The type of connector + credentials: The connector credentials dict + + Returns: + Identifier string (workspace name, email, etc.) 
or None + """ + if connector_type == SearchSourceConnectorType.SLACK_CONNECTOR: + return credentials.get("team_name") + + if connector_type == SearchSourceConnectorType.TEAMS_CONNECTOR: + return credentials.get("tenant_name") + + if connector_type == SearchSourceConnectorType.NOTION_CONNECTOR: + return credentials.get("workspace_name") + + if connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR: + return credentials.get("guild_name") + + if connector_type in ( + SearchSourceConnectorType.JIRA_CONNECTOR, + SearchSourceConnectorType.CONFLUENCE_CONNECTOR, + ): + base_url = credentials.get("base_url", "") + if base_url: + try: + parsed = urlparse(base_url) + hostname = parsed.netloc or parsed.path + if ".atlassian.net" in hostname: + return hostname.replace(".atlassian.net", "") + return hostname + except Exception: + pass + return None + + # Google, Linear, Airtable require API calls - return None + return None + + +def generate_connector_name_with_identifier( + connector_type: SearchSourceConnectorType, + identifier: str | None, +) -> str: + """ + Generate a connector name with an identifier. + + Args: + connector_type: The type of connector + identifier: User identifier (email, workspace name, etc.) 
+ + Returns: + Name like "Gmail - john@example.com" or just "Gmail" if no identifier + """ + base = get_base_name_for_type(connector_type) + if identifier: + return f"{base} - {identifier}" + return base + + +async def count_connectors_of_type( + session: AsyncSession, + connector_type: SearchSourceConnectorType, + search_space_id: int, + user_id: UUID, +) -> int: + """Count existing connectors of a type for a user in a search space.""" + result = await session.execute( + select(func.count(SearchSourceConnector.id)).where( + SearchSourceConnector.connector_type == connector_type, + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + ) + ) + return result.scalar() or 0 + + +async def check_duplicate_connector( + session: AsyncSession, + connector_type: SearchSourceConnectorType, + search_space_id: int, + user_id: UUID, + identifier: str | None, +) -> bool: + """ + Check if a connector with the same identifier already exists. + + Args: + session: Database session + connector_type: The type of connector + search_space_id: The search space ID + user_id: The user ID + identifier: User identifier (email, workspace name, etc.) + + Returns: + True if a duplicate exists, False otherwise + """ + if not identifier: + return False + + expected_name = f"{get_base_name_for_type(connector_type)} - {identifier}" + result = await session.execute( + select(func.count(SearchSourceConnector.id)).where( + SearchSourceConnector.connector_type == connector_type, + SearchSourceConnector.search_space_id == search_space_id, + SearchSourceConnector.user_id == user_id, + SearchSourceConnector.name == expected_name, + ) + ) + return (result.scalar() or 0) > 0 + + +async def generate_unique_connector_name( + session: AsyncSession, + connector_type: SearchSourceConnectorType, + search_space_id: int, + user_id: UUID, + identifier: str | None = None, +) -> str: + """ + Generate a unique connector name. 
+ + If an identifier is provided (email, workspace name, etc.), uses it with base name. + Otherwise, falls back to counting existing connectors for uniqueness. + + Args: + session: Database session + connector_type: The type of connector + search_space_id: The search space ID + user_id: The user ID + identifier: Optional user identifier (email, workspace name, etc.) + + Returns: + Unique name like "Gmail - john@example.com" or "Gmail (2)" + """ + base = get_base_name_for_type(connector_type) + + if identifier: + return f"{base} - {identifier}" + + # Fallback: use counter for uniqueness + count = await count_connectors_of_type( + session, connector_type, search_space_id, user_id + ) + + if count == 0: + return base + return f"{base} ({count + 1})" diff --git a/surfsense_backend/app/utils/document_converters.py b/surfsense_backend/app/utils/document_converters.py index 9883a74ed..279b1dbf6 100644 --- a/surfsense_backend/app/utils/document_converters.py +++ b/surfsense_backend/app/utils/document_converters.py @@ -222,88 +222,6 @@ async def convert_document_to_markdown(elements): return "".join(markdown_parts) -def convert_chunks_to_langchain_documents(chunks): - """ - Convert chunks from hybrid search results to LangChain Document objects. - - Args: - chunks: List of chunk dictionaries from hybrid search results - - Returns: - List of LangChain Document objects - """ - try: - from langchain_core.documents import Document as LangChainDocument - except ImportError: - raise ImportError( - "LangChain is not installed. 
Please install it with `pip install langchain langchain-core`" - ) from None - - langchain_docs = [] - - for chunk in chunks: - # Extract content from the chunk - content = chunk.get("content", "") - - # Create metadata dictionary - metadata = { - "chunk_id": chunk.get("chunk_id"), - "score": chunk.get("score"), - "rank": chunk.get("rank") if "rank" in chunk else None, - } - - # Add document information to metadata - if "document" in chunk: - doc = chunk["document"] - metadata.update( - { - "document_id": doc.get("id"), - "document_title": doc.get("title"), - "document_type": doc.get("document_type"), - } - ) - - # Add document metadata if available - if "metadata" in doc: - # Prefix document metadata keys to avoid conflicts - doc_metadata = { - f"doc_meta_{k}": v for k, v in doc.get("metadata", {}).items() - } - metadata.update(doc_metadata) - - # Add source URL if available in metadata - if "url" in doc.get("metadata", {}): - metadata["source"] = doc["metadata"]["url"] - elif "sourceURL" in doc.get("metadata", {}): - metadata["source"] = doc["metadata"]["sourceURL"] - - # Ensure source_id is set for citation purposes - # Use document_id as the source_id if available - if "document_id" in metadata: - metadata["source_id"] = metadata["document_id"] - - # Update content for citation mode - format as XML with explicit source_id - new_content = f""" - - - {metadata.get("source_id", metadata.get("document_id", "unknown"))} - - - - {content} - - - - """ - - # Create LangChain Document - langchain_doc = LangChainDocument(page_content=new_content, metadata=metadata) - - langchain_docs.append(langchain_doc) - - return langchain_docs - - def generate_content_hash(content: str, search_space_id: int) -> str: """Generate SHA-256 hash for the given content combined with search space ID.""" combined_data = f"{search_space_id}:{content}" diff --git a/surfsense_backend/app/utils/periodic_scheduler.py b/surfsense_backend/app/utils/periodic_scheduler.py index c95f407a4..219641933 
100644 --- a/surfsense_backend/app/utils/periodic_scheduler.py +++ b/surfsense_backend/app/utils/periodic_scheduler.py @@ -19,6 +19,7 @@ logger = logging.getLogger(__name__) # Mapping of connector types to their corresponding Celery task names CONNECTOR_TASK_MAP = { SearchSourceConnectorType.SLACK_CONNECTOR: "index_slack_messages", + SearchSourceConnectorType.TEAMS_CONNECTOR: "index_teams_messages", SearchSourceConnectorType.NOTION_CONNECTOR: "index_notion_pages", SearchSourceConnectorType.GITHUB_CONNECTOR: "index_github_repos", SearchSourceConnectorType.LINEAR_CONNECTOR: "index_linear_issues", diff --git a/surfsense_backend/app/utils/validators.py b/surfsense_backend/app/utils/validators.py index adc8f9ee7..54e681518 100644 --- a/surfsense_backend/app/utils/validators.py +++ b/surfsense_backend/app/utils/validators.py @@ -551,7 +551,7 @@ def validate_connector_config( # ], # "validators": {}, # }, - "CLICKUP_CONNECTOR": {"required": ["CLICKUP_API_TOKEN"], "validators": {}}, + # "CLICKUP_CONNECTOR": {"required": ["CLICKUP_API_TOKEN"], "validators": {}}, # "GOOGLE_CALENDAR_CONNECTOR": { # "required": ["token", "refresh_token", "token_uri", "client_id", "expiry", "scopes", "client_secret"], # "validators": {}, diff --git a/surfsense_backend/pyproject.toml b/surfsense_backend/pyproject.toml index ba1d69939..e3e7583f8 100644 --- a/surfsense_backend/pyproject.toml +++ b/surfsense_backend/pyproject.toml @@ -1,8 +1,7 @@ [project] name = "surf-new-backend" -version = "0.0.10" +version = "0.0.11" description = "SurfSense Backend" -readme = "README.md" requires-python = ">=3.12" dependencies = [ "alembic>=1.13.0", @@ -153,3 +152,11 @@ line-ending = "auto" known-first-party = ["app"] force-single-line = false combine-as-imports = true + +[tool.setuptools.packages.find] +where = ["."] +include = ["app*", "alembic*"] + +[build-system] +requires = ["setuptools>=61.0", "wheel"] +build-backend = "setuptools.build_meta" diff --git a/surfsense_backend/uv.lock 
b/surfsense_backend/uv.lock index a6ef20cca..8ec09ddd9 100644 --- a/surfsense_backend/uv.lock +++ b/surfsense_backend/uv.lock @@ -6409,8 +6409,8 @@ wheels = [ [[package]] name = "surf-new-backend" -version = "0.0.10" -source = { virtual = "." } +version = "0.0.11" +source = { editable = "." } dependencies = [ { name = "alembic" }, { name = "asyncpg" }, diff --git a/surfsense_browser_extension/package.json b/surfsense_browser_extension/package.json index d7edcc95b..b225bc206 100644 --- a/surfsense_browser_extension/package.json +++ b/surfsense_browser_extension/package.json @@ -1,7 +1,7 @@ { "name": "surfsense_browser_extension", "displayName": "Surfsense Browser Extension", - "version": "0.0.10", + "version": "0.0.11", "description": "Extension to collect Browsing History for SurfSense.", "author": "https://github.com/MODSetter", "engines": { diff --git a/surfsense_web/app/(home)/layout.tsx b/surfsense_web/app/(home)/layout.tsx index f6a9e5d42..9488ee875 100644 --- a/surfsense_web/app/(home)/layout.tsx +++ b/surfsense_web/app/(home)/layout.tsx @@ -5,7 +5,7 @@ import { Navbar } from "@/components/homepage/navbar"; export default function HomePageLayout({ children }: { children: React.ReactNode }) { return ( -
+
{children} diff --git a/surfsense_web/app/dashboard/[search_space_id]/team/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/team/page.tsx index 80f477001..13124d756 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/team/page.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/team/page.tsx @@ -28,7 +28,7 @@ import { } from "lucide-react"; import { motion } from "motion/react"; import { useParams, useRouter } from "next/navigation"; -import { useCallback, useMemo, useState } from "react"; +import { useCallback, useEffect, useMemo, useState } from "react"; import { toast } from "sonner"; import { createInviteMutationAtom, @@ -116,6 +116,7 @@ import type { } from "@/contracts/types/roles.types"; import { invitesApiService } from "@/lib/apis/invites-api.service"; import { rolesApiService } from "@/lib/apis/roles-api.service"; +import { trackSearchSpaceInviteSent, trackSearchSpaceUsersViewed } from "@/lib/posthog/events"; import { cacheKeys } from "@/lib/query-client/cache-keys"; import { cn } from "@/lib/utils"; @@ -297,6 +298,14 @@ export default function TeamManagementPage() { toast.success("Team data refreshed"); }, [fetchMembers, fetchRoles, fetchInvites]); + // Track users per search space when team page is viewed + useEffect(() => { + if (members.length > 0 && !membersLoading) { + const ownerCount = members.filter((m) => m.is_owner).length; + trackSearchSpaceUsersViewed(searchSpaceId, members.length, ownerCount); + } + }, [members, membersLoading, searchSpaceId]); + if (accessLoading) { return (
@@ -1088,10 +1097,12 @@ function InvitesTab({ function CreateInviteDialog({ roles, onCreateInvite, + searchSpaceId, className, }: { roles: Role[]; onCreateInvite: (data: CreateInviteRequest["data"]) => Promise; + searchSpaceId: number; className?: string; }) { const [open, setOpen] = useState(false); @@ -1114,6 +1125,17 @@ function CreateInviteDialog({ const invite = await onCreateInvite(data); setCreatedInvite(invite); + + // Track invite sent event + const roleName = + roleId && roleId !== "default" + ? roles.find((r) => r.id.toString() === roleId)?.name + : undefined; + trackSearchSpaceInviteSent(searchSpaceId, { + roleName, + hasExpiry: !!expiresAt, + hasMaxUses: !!maxUses, + }); } catch (error) { console.error("Failed to create invite:", error); } finally { diff --git a/surfsense_web/app/dashboard/page.tsx b/surfsense_web/app/dashboard/page.tsx index 951e17a8c..ad1c6ad9d 100644 --- a/surfsense_web/app/dashboard/page.tsx +++ b/surfsense_web/app/dashboard/page.tsx @@ -181,7 +181,7 @@ const DashboardPage = () => { email: user?.email || (isLoadingUser ? "Loading..." : userError ? 
"Error loading user" : "Unknown User"), - avatar: "/icon-128.png", // Default avatar + avatar: "/icon-128.svg", // Default avatar }; // Show loading while loading or auto-redirecting (single search space) diff --git a/surfsense_web/app/favicon.ico b/surfsense_web/app/favicon.ico index 171ab2c49..ae733d09c 100644 Binary files a/surfsense_web/app/favicon.ico and b/surfsense_web/app/favicon.ico differ diff --git a/surfsense_web/app/invite/[invite_code]/page.tsx b/surfsense_web/app/invite/[invite_code]/page.tsx index 30e93c022..1f2a786a5 100644 --- a/surfsense_web/app/invite/[invite_code]/page.tsx +++ b/surfsense_web/app/invite/[invite_code]/page.tsx @@ -33,6 +33,11 @@ import { import type { AcceptInviteResponse } from "@/contracts/types/invites.types"; import { invitesApiService } from "@/lib/apis/invites-api.service"; import { getBearerToken } from "@/lib/auth-utils"; +import { + trackSearchSpaceInviteAccepted, + trackSearchSpaceInviteDeclined, + trackSearchSpaceUserAdded, +} from "@/lib/posthog/events"; import { cacheKeys } from "@/lib/query-client/cache-keys"; export default function InviteAcceptPage() { @@ -91,6 +96,18 @@ export default function InviteAcceptPage() { if (result) { setAccepted(true); setAcceptedData(result); + + // Track invite accepted and user added events + trackSearchSpaceInviteAccepted( + result.search_space_id, + result.search_space_name, + result.role_name + ); + trackSearchSpaceUserAdded( + result.search_space_id, + result.search_space_name, + result.role_name + ); } } catch (err: any) { setError(err.message || "Failed to accept invite"); @@ -99,6 +116,12 @@ export default function InviteAcceptPage() { } }; + const handleDecline = () => { + // Track invite declined event + trackSearchSpaceInviteDeclined(inviteInfo?.search_space_name); + router.push("/dashboard"); + }; + const handleLoginRedirect = () => { // Store the invite code to redirect back after login localStorage.setItem("pending_invite_code", inviteCode); @@ -324,11 +347,7 @@ export 
default function InviteAcceptPage() { )} -
diff --git a/surfsense_web/components/assistant-ui/connector-popup/components/connector-card.tsx b/surfsense_web/components/assistant-ui/connector-popup/components/connector-card.tsx index 855be95a2..e8fe6da33 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/components/connector-card.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/components/connector-card.tsx @@ -17,6 +17,7 @@ interface ConnectorCardProps { isConnected?: boolean; isConnecting?: boolean; documentCount?: number; + accountCount?: number; lastIndexedAt?: string | null; isIndexing?: boolean; activeTask?: LogActiveTask; @@ -96,6 +97,7 @@ export const ConnectorCard: FC = ({ isConnected = false, isConnecting = false, documentCount, + accountCount, lastIndexedAt, isIndexing = false, activeTask, @@ -139,7 +141,7 @@ export const ConnectorCard: FC = ({ return (
-
+
{connectorType ? ( getConnectorIcon(connectorType, "size-6") ) : id === "youtube-crawler" ? ( @@ -150,12 +152,20 @@ export const ConnectorCard: FC = ({
- {title} + {title}
{getStatusContent()}
{isConnected && documentCount !== undefined && ( -

- {formatDocumentCount(documentCount)} +

+ {formatDocumentCount(documentCount)} + {accountCount !== undefined && accountCount > 0 && ( + <> + + + {accountCount} {accountCount === 1 ? "Account" : "Accounts"} + + + )}

)}
@@ -163,7 +173,7 @@ export const ConnectorCard: FC = ({ size="sm" variant={isConnected ? "secondary" : "default"} className={cn( - "h-8 text-[11px] px-3 rounded-lg flex-shrink-0 font-medium", + "h-8 text-[11px] px-3 rounded-lg shrink-0 font-medium", isConnected && "bg-white text-slate-700 hover:bg-slate-50 border-0 shadow-xs dark:bg-secondary dark:text-secondary-foreground dark:hover:bg-secondary/80", !isConnected && "shadow-xs" diff --git a/surfsense_web/components/assistant-ui/connector-popup/connect-forms/components/clickup-connect-form.tsx b/surfsense_web/components/assistant-ui/connector-popup/connect-forms/components/clickup-connect-form.tsx deleted file mode 100644 index 9f33c6ed9..000000000 --- a/surfsense_web/components/assistant-ui/connector-popup/connect-forms/components/clickup-connect-form.tsx +++ /dev/null @@ -1,385 +0,0 @@ -"use client"; - -import { zodResolver } from "@hookform/resolvers/zod"; -import { Info } from "lucide-react"; -import type { FC } from "react"; -import { useRef, useState } from "react"; -import { useForm } from "react-hook-form"; -import * as z from "zod"; -import { - Accordion, - AccordionContent, - AccordionItem, - AccordionTrigger, -} from "@/components/ui/accordion"; -import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert"; -import { - Form, - FormControl, - FormDescription, - FormField, - FormItem, - FormLabel, - FormMessage, -} from "@/components/ui/form"; -import { Input } from "@/components/ui/input"; -import { Label } from "@/components/ui/label"; -import { - Select, - SelectContent, - SelectItem, - SelectTrigger, - SelectValue, -} from "@/components/ui/select"; -import { Switch } from "@/components/ui/switch"; -import { EnumConnectorName } from "@/contracts/enums/connector"; -import { DateRangeSelector } from "../../components/date-range-selector"; -import { getConnectorBenefits } from "../connector-benefits"; -import type { ConnectFormProps } from "../index"; - -const clickupConnectorFormSchema = 
z.object({ - name: z.string().min(3, { - message: "Connector name must be at least 3 characters.", - }), - api_token: z.string().min(10, { - message: "ClickUp API Token is required and must be valid.", - }), -}); - -type ClickUpConnectorFormValues = z.infer; - -export const ClickUpConnectForm: FC = ({ onSubmit, isSubmitting }) => { - const isSubmittingRef = useRef(false); - const [startDate, setStartDate] = useState(undefined); - const [endDate, setEndDate] = useState(undefined); - const [periodicEnabled, setPeriodicEnabled] = useState(false); - const [frequencyMinutes, setFrequencyMinutes] = useState("1440"); - const form = useForm({ - resolver: zodResolver(clickupConnectorFormSchema), - defaultValues: { - name: "ClickUp Connector", - api_token: "", - }, - }); - - const handleSubmit = async (values: ClickUpConnectorFormValues) => { - // Prevent multiple submissions - if (isSubmittingRef.current || isSubmitting) { - return; - } - - isSubmittingRef.current = true; - try { - await onSubmit({ - name: values.name, - connector_type: EnumConnectorName.CLICKUP_CONNECTOR, - config: { - CLICKUP_API_TOKEN: values.api_token, - }, - is_indexable: true, - last_indexed_at: null, - periodic_indexing_enabled: periodicEnabled, - indexing_frequency_minutes: periodicEnabled ? parseInt(frequencyMinutes, 10) : null, - next_scheduled_at: null, - startDate, - endDate, - periodicEnabled, - frequencyMinutes, - }); - } finally { - isSubmittingRef.current = false; - } - }; - - return ( -
- - -
- API Token Required - - You'll need a ClickUp API Token to use this connector. You can create one from{" "} - - ClickUp Settings - - -
-
- -
-
- - ( - - Connector Name - - - - - A friendly name to identify this connector. - - - - )} - /> - - ( - - ClickUp API Token - - - - - Your ClickUp API Token will be encrypted and stored securely. - - - - )} - /> - - {/* Indexing Configuration */} -
-

Indexing Configuration

- - {/* Date Range Selector */} - - - {/* Periodic Sync Config */} -
-
-
-

Enable Periodic Sync

-

- Automatically re-index at regular intervals -

-
- -
- - {periodicEnabled && ( -
-
- - -
-
- )} -
-
- - -
- - {/* What you get section */} - {getConnectorBenefits(EnumConnectorName.CLICKUP_CONNECTOR) && ( -
-

What you get with ClickUp integration:

-
    - {getConnectorBenefits(EnumConnectorName.CLICKUP_CONNECTOR)?.map((benefit) => ( -
  • {benefit}
  • - ))} -
-
- )} - - {/* Documentation Section */} - - - - Documentation - - -
-

How it works

-

- The ClickUp connector uses the ClickUp API to fetch all tasks and projects that your - API token has access to within your workspace. -

-
    -
  • - For follow up indexing runs, the connector retrieves tasks that have been updated - since the last indexing attempt. -
  • -
  • - Indexing is configured to run periodically, so updates should appear in your - search results within minutes. -
  • -
-
- -
-
-

Authorization

- - - API Token Required - - You need a ClickUp personal API token to use this connector. The token will be - used to read your ClickUp data. - - - -
-
-

- Step 1: Get Your API Token -

-
    -
  1. Log in to your ClickUp account
  2. -
  3. Click your avatar in the upper-right corner and select "Settings"
  4. -
  5. In the sidebar, click "Apps"
  6. -
  7. - Under "API Token", click Generate or{" "} - Regenerate -
  8. -
  9. Copy the generated token (it typically starts with "pk_")
  10. -
  11. - Paste it in the form above. You can also visit{" "} - - ClickUp API Settings - {" "} - directly. -
  12. -
-
- -
-

- Step 2: Grant necessary access -

-

- The API Token will have access to all tasks and projects that your user - account can see. Make sure your account has appropriate permissions for the - workspaces you want to index. -

- - - Data Privacy - - Only tasks, comments, and basic metadata will be indexed. ClickUp - attachments and linked files are not indexed by this connector. - - -
-
-
-
- -
-
-

Indexing

-
    -
  1. - Navigate to the Connector Dashboard and select the ClickUp{" "} - Connector. -
  2. -
  3. - Place your API Token in the form field. -
  4. -
  5. - Click Connect to establish the connection. -
  6. -
  7. Once connected, your ClickUp tasks will be indexed automatically.
  8. -
- - - - What Gets Indexed - -

The ClickUp connector indexes the following data:

-
    -
  • Task names and descriptions
  • -
  • Task comments and discussion threads
  • -
  • Task status, priority, and assignee information
  • -
  • Project and workspace information
  • -
-
-
-
-
-
-
-
-
- ); -}; diff --git a/surfsense_web/components/assistant-ui/connector-popup/connect-forms/index.tsx b/surfsense_web/components/assistant-ui/connector-popup/connect-forms/index.tsx index 86a70b5bf..024e0dc04 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connect-forms/index.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connect-forms/index.tsx @@ -2,7 +2,6 @@ import type { FC } from "react"; import { BaiduSearchApiConnectForm } from "./components/baidu-search-api-connect-form"; import { BookStackConnectForm } from "./components/bookstack-connect-form"; import { CirclebackConnectForm } from "./components/circleback-connect-form"; -import { ClickUpConnectForm } from "./components/clickup-connect-form"; import { ElasticsearchConnectForm } from "./components/elasticsearch-connect-form"; import { GithubConnectForm } from "./components/github-connect-form"; import { LinkupApiConnectForm } from "./components/linkup-api-connect-form"; @@ -51,8 +50,6 @@ export function getConnectFormComponent(connectorType: string): ConnectFormCompo return BookStackConnectForm; case "GITHUB_CONNECTOR": return GithubConnectForm; - case "CLICKUP_CONNECTOR": - return ClickUpConnectForm; case "LUMA_CONNECTOR": return LumaConnectForm; case "CIRCLEBACK_CONNECTOR": diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/clickup-config.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/clickup-config.tsx index 7355d1c0c..5b7ddaeb8 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/clickup-config.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/clickup-config.tsx @@ -1,6 +1,6 @@ "use client"; -import { KeyRound } from "lucide-react"; +import { Info, KeyRound } from "lucide-react"; import type { FC } from "react"; import { useEffect, useState } from "react"; import { Input } from 
"@/components/ui/input"; @@ -16,17 +16,22 @@ export const ClickUpConfig: FC = ({ onConfigChange, onNameChange, }) => { + // Check if this is an OAuth connector (has access_token or _token_encrypted flag) + const isOAuth = !!(connector.config?.access_token || connector.config?._token_encrypted); + const [apiToken, setApiToken] = useState( (connector.config?.CLICKUP_API_TOKEN as string) || "" ); const [name, setName] = useState(connector.name || ""); - // Update API token and name when connector changes + // Update values when connector changes (only for legacy connectors) useEffect(() => { - const token = (connector.config?.CLICKUP_API_TOKEN as string) || ""; - setApiToken(token); + if (!isOAuth) { + const token = (connector.config?.CLICKUP_API_TOKEN as string) || ""; + setApiToken(token); + } setName(connector.name || ""); - }, [connector.config, connector.name]); + }, [connector.config, connector.name, isOAuth]); const handleApiTokenChange = (value: string) => { setApiToken(value); @@ -45,6 +50,32 @@ export const ClickUpConfig: FC = ({ } }; + // For OAuth connectors, show simple info message + if (isOAuth) { + const workspaceName = (connector.config?.workspace_name as string) || "Unknown Workspace"; + return ( +
+ {/* OAuth Info */} +
+
+ +
+
+

Connected via OAuth

+

+ Workspace:{" "} + {workspaceName} +

+

+ To update your connection, reconnect this connector. +

+
+
+
+ ); + } + + // For legacy API token connectors, show the form return (
{/* Connector Name */} @@ -82,7 +113,8 @@ export const ClickUpConfig: FC = ({ className="border-slate-400/20 focus-visible:border-slate-400/40" />

- Update your ClickUp API Token if needed. + Update your ClickUp API Token if needed. For better security and automatic token + refresh, consider disconnecting and reconnecting using OAuth 2.0.

diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx new file mode 100644 index 000000000..ac08a6c03 --- /dev/null +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/components/teams-config.tsx @@ -0,0 +1,29 @@ +"use client"; + +import { Info } from "lucide-react"; +import type { FC } from "react"; +import type { ConnectorConfigProps } from "../index"; + +export interface TeamsConfigProps extends ConnectorConfigProps { + onNameChange?: (name: string) => void; +} + +export const TeamsConfig: FC = () => { + return ( +
+
+
+ +
+
+

Microsoft Teams Access

+

+ SurfSense will index messages from Teams channels that you have access to. The app can + only read messages from teams and channels where you are a member. Make sure you're a + member of the teams you want to index before connecting. +

+
+
+
+ ); +}; diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/index.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/index.tsx index 2575b3a69..267e85115 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/index.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/index.tsx @@ -17,6 +17,7 @@ import { LumaConfig } from "./components/luma-config"; import { SearxngConfig } from "./components/searxng-config"; import { SlackConfig } from "./components/slack-config"; import { TavilyApiConfig } from "./components/tavily-api-config"; +import { TeamsConfig } from "./components/teams-config"; import { WebcrawlerConfig } from "./components/webcrawler-config"; export interface ConnectorConfigProps { @@ -52,6 +53,8 @@ export function getConnectorConfigComponent( return SlackConfig; case "DISCORD_CONNECTOR": return DiscordConfig; + case "TEAMS_CONNECTOR": + return TeamsConfig; case "CONFLUENCE_CONNECTOR": return ConfluenceConfig; case "BOOKSTACK_CONNECTOR": diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-connect-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-connect-view.tsx index 22dff4322..e3941367b 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-connect-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-connect-view.tsx @@ -54,7 +54,6 @@ export const ConnectorConnectView: FC = ({ ELASTICSEARCH_CONNECTOR: "elasticsearch-connect-form", BOOKSTACK_CONNECTOR: "bookstack-connect-form", GITHUB_CONNECTOR: "github-connect-form", - CLICKUP_CONNECTOR: "clickup-connect-form", LUMA_CONNECTOR: "luma-connect-form", CIRCLEBACK_CONNECTOR: "circleback-connect-form", }; diff --git 
a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx index 7776c9a9d..bdfe9af77 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/connector-edit-view.tsx @@ -143,12 +143,14 @@ export const ConnectorEditView: FC = ({ {/* Connector header */}
-
-
+
+
{getConnectorIcon(connector.connector_type, "size-7")}
-

{connector.name}

+

+ {connector.name} +

Manage your connector settings and sync configuration

diff --git a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/indexing-configuration-view.tsx b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/indexing-configuration-view.tsx index d479dda8d..8f4a29e61 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/indexing-configuration-view.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/connector-configs/views/indexing-configuration-view.tsx @@ -1,14 +1,16 @@ "use client"; import { ArrowLeft, Check, Info, Loader2 } from "lucide-react"; -import { type FC, useCallback, useEffect, useMemo, useRef, useState } from "react"; import { useSearchParams } from "next/navigation"; +import { type FC, useCallback, useEffect, useMemo, useRef, useState } from "react"; import { Button } from "@/components/ui/button"; import type { SearchSourceConnector } from "@/contracts/types/connector.types"; +import { getConnectorTypeDisplay } from "@/lib/connectors/utils"; import { cn } from "@/lib/utils"; import { DateRangeSelector } from "../../components/date-range-selector"; import { PeriodicSyncConfig } from "../../components/periodic-sync-config"; -import type { IndexingConfigState } from "../../constants/connector-constants"; +import { type IndexingConfigState, OAUTH_CONNECTORS } from "../../constants/connector-constants"; +import { getConnectorDisplayName } from "../../tabs/all-connectors-tab"; import { getConnectorConfigComponent } from "../index"; interface IndexingConfigurationViewProps { @@ -89,12 +91,14 @@ export const IndexingConfigurationView: FC = ({ }; }, [checkScrollState]); + const authConnector = OAUTH_CONNECTORS.find((c) => c.connectorType === connector?.connector_type); + return (
{/* Fixed Header */}
@@ -111,14 +115,19 @@ export const IndexingConfigurationView: FC = ({ )} {/* Success header */} -
+
-

- {config.connectorTitle} Connected! -

+
+ + {getConnectorTypeDisplay(connector?.connector_type || "")} Connected ! + {" "} + + {getConnectorDisplayName(connector?.name || "")} + +

Configure when to start syncing your data

diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts index 4d15d0989..23982e6f3 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-constants.ts @@ -51,6 +51,13 @@ export const OAUTH_CONNECTORS = [ connectorType: EnumConnectorName.SLACK_CONNECTOR, authEndpoint: "/api/v1/auth/slack/connector/add/", }, + { + id: "teams-connector", + title: "Microsoft Teams", + description: "Search Teams messages", + connectorType: EnumConnectorName.TEAMS_CONNECTOR, + authEndpoint: "/api/v1/auth/teams/connector/add/", + }, { id: "discord-connector", title: "Discord", @@ -72,6 +79,13 @@ export const OAUTH_CONNECTORS = [ connectorType: EnumConnectorName.CONFLUENCE_CONNECTOR, authEndpoint: "/api/v1/auth/confluence/connector/add/", }, + { + id: "clickup-connector", + title: "ClickUp", + description: "Search ClickUp tasks", + connectorType: EnumConnectorName.CLICKUP_CONNECTOR, + authEndpoint: "/api/v1/auth/clickup/connector/add/", + }, ] as const; // Content Sources (tools that extract and import content from external sources) @@ -104,12 +118,6 @@ export const OTHER_CONNECTORS = [ description: "Search repositories", connectorType: EnumConnectorName.GITHUB_CONNECTOR, }, - { - id: "clickup-connector", - title: "ClickUp", - description: "Search ClickUp tasks", - connectorType: EnumConnectorName.CLICKUP_CONNECTOR, - }, { id: "luma-connector", title: "Luma", diff --git a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-popup.schemas.ts b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-popup.schemas.ts index 65456689c..a1b303163 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/constants/connector-popup.schemas.ts +++ 
b/surfsense_web/components/assistant-ui/connector-popup/constants/connector-popup.schemas.ts @@ -7,11 +7,12 @@ import { searchSourceConnectorTypeEnum } from "@/contracts/types/connector.types export const connectorPopupQueryParamsSchema = z.object({ modal: z.enum(["connectors"]).optional(), tab: z.enum(["all", "active"]).optional(), - view: z.enum(["configure", "edit", "connect", "youtube"]).optional(), + view: z.enum(["configure", "edit", "connect", "youtube", "accounts"]).optional(), connector: z.string().optional(), connectorId: z.string().optional(), connectorType: z.string().optional(), success: z.enum(["true", "false"]).optional(), + error: z.string().optional(), }); export type ConnectorPopupQueryParams = z.infer; diff --git a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts index 8ddaa973a..2693013ef 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts +++ b/surfsense_web/components/assistant-ui/connector-popup/hooks/use-connector-dialog.ts @@ -15,6 +15,14 @@ import { EnumConnectorName } from "@/contracts/enums/connector"; import type { SearchSourceConnector } from "@/contracts/types/connector.types"; import { searchSourceConnector } from "@/contracts/types/connector.types"; import { authenticatedFetch } from "@/lib/auth-utils"; +import { + trackConnectorConnected, + trackConnectorDeleted, + trackIndexWithDateRangeOpened, + trackIndexWithDateRangeStarted, + trackPeriodicIndexingStarted, + trackQuickIndexClicked, +} from "@/lib/posthog/events"; import { cacheKeys } from "@/lib/query-client/cache-keys"; import { queryClient } from "@/lib/query-client/client"; import type { IndexingConfigState } from "../constants/connector-constants"; @@ -66,6 +74,12 @@ export const useConnectorDialog = () => { const [isCreatingConnector, setIsCreatingConnector] = useState(false); const isCreatingConnectorRef = 
useRef(false); + // Accounts list view state (for OAuth connectors with multiple accounts) + const [viewingAccountsType, setViewingAccountsType] = useState<{ + connectorType: string; + connectorTitle: string; + } | null>(null); + // Helper function to get frequency label const getFrequencyLabel = useCallback((minutes: string): string => { switch (minutes) { @@ -114,24 +128,50 @@ export const useConnectorDialog = () => { setConnectingConnectorType(null); } + // Clear viewing accounts type if view is not "accounts" anymore + if (params.view !== "accounts" && viewingAccountsType) { + setViewingAccountsType(null); + } + // Handle connect view if (params.view === "connect" && params.connectorType && !connectingConnectorType) { setConnectingConnectorType(params.connectorType); } + // Handle accounts view + if (params.view === "accounts" && params.connectorType && !viewingAccountsType) { + const oauthConnector = OAUTH_CONNECTORS.find( + (c) => c.connectorType === params.connectorType + ); + if (oauthConnector) { + setViewingAccountsType({ + connectorType: oauthConnector.connectorType, + connectorTitle: oauthConnector.title, + }); + } + } + // Handle YouTube view if (params.view === "youtube") { // YouTube view is active - no additional state needed } - if (params.view === "configure" && params.connector && !indexingConfig) { + // Handle configure view (for page refresh support) + if (params.view === "configure" && params.connector && !indexingConfig && allConnectors) { const oauthConnector = OAUTH_CONNECTORS.find((c) => c.id === params.connector); - if (oauthConnector && allConnectors) { - const existingConnector = allConnectors.find( - (c: SearchSourceConnector) => c.connector_type === oauthConnector.connectorType - ); + if (oauthConnector) { + let existingConnector: SearchSourceConnector | undefined; + if (params.connectorId) { + const connectorId = parseInt(params.connectorId, 10); + existingConnector = allConnectors.find( + (c: SearchSourceConnector) => c.id === 
connectorId + ); + } else { + existingConnector = allConnectors.find( + (c: SearchSourceConnector) => c.connector_type === oauthConnector.connectorType + ); + } if (existingConnector) { - // Validate connector data before setting state const connectorValidation = searchSourceConnector.safeParse(existingConnector); if (connectorValidation.success) { const config = validateIndexingConfigState({ @@ -200,6 +240,10 @@ export const useConnectorDialog = () => { if (connectingConnectorType) { setConnectingConnectorType(null); } + // Clear viewing accounts type when modal is closed + if (viewingAccountsType) { + setViewingAccountsType(null); + } // Clear YouTube view when modal is closed (handled by view param check) } } catch (error) { @@ -207,13 +251,48 @@ export const useConnectorDialog = () => { console.warn("Invalid connector popup query params:", error); } // eslint-disable-next-line react-hooks/exhaustive-deps - }, [searchParams, allConnectors, editingConnector, indexingConfig, connectingConnectorType]); + }, [ + searchParams, + allConnectors, + editingConnector, + indexingConfig, + connectingConnectorType, + viewingAccountsType, + ]); - // Detect OAuth success and transition to config view + // Detect OAuth success / Failure and transition to config view useEffect(() => { try { const params = parseConnectorPopupQueryParams(searchParams); + // Handle OAuth errors (e.g., duplicate account) + if (params.error && params.modal === "connectors") { + const oauthConnector = params.connector + ? 
OAUTH_CONNECTORS.find((c) => c.id === params.connector) + : null; + const connectorName = oauthConnector?.title || "connector"; + + if (params.error === "duplicate_account") { + toast.error(`This ${connectorName} account is already connected`, { + description: "Please use a different account or manage the existing connection.", + }); + } else { + toast.error(`Failed to connect ${connectorName}`, { + description: params.error.replace(/_/g, " "), + }); + } + + // Clean up error params from URL + const url = new URL(window.location.href); + url.searchParams.delete("error"); + url.searchParams.delete("connector"); + window.history.replaceState({}, "", url.toString()); + + // Open the popup to show the connectors + setIsOpen(true); + return; + } + if ( params.success === "true" && params.connector && @@ -225,13 +304,26 @@ export const useConnectorDialog = () => { refetchAllConnectors().then((result) => { if (!result.data) return; - const newConnector = result.data.find( - (c: SearchSourceConnector) => c.connector_type === oauthConnector.connectorType - ); + let newConnector: SearchSourceConnector | undefined; + if (params.connectorId) { + const connectorId = parseInt(params.connectorId, 10); + newConnector = result.data.find((c: SearchSourceConnector) => c.id === connectorId); + } else { + newConnector = result.data.find( + (c: SearchSourceConnector) => c.connector_type === oauthConnector.connectorType + ); + } + if (newConnector) { - // Validate connector data before setting state const connectorValidation = searchSourceConnector.safeParse(newConnector); if (connectorValidation.success) { + // Track connector connected event for OAuth connectors + trackConnectorConnected( + Number(searchSpaceId), + oauthConnector.connectorType, + newConnector.id + ); + const config = validateIndexingConfigState({ connectorType: oauthConnector.connectorType, connectorId: newConnector.id, @@ -243,6 +335,7 @@ export const useConnectorDialog = () => { setIsOpen(true); const url = new 
URL(window.location.href); url.searchParams.delete("success"); + url.searchParams.set("connectorId", newConnector.id.toString()); url.searchParams.set("view", "configure"); window.history.replaceState({}, "", url.toString()); } else { @@ -341,6 +434,13 @@ export const useConnectorDialog = () => { if (connector) { const connectorValidation = searchSourceConnector.safeParse(connector); if (connectorValidation.success) { + // Track webcrawler connector connected + trackConnectorConnected( + Number(searchSpaceId), + EnumConnectorName.WEBCRAWLER_CONNECTOR, + connector.id + ); + const config = validateIndexingConfigState({ connectorType: EnumConnectorName.WEBCRAWLER_CONNECTOR, connectorId: connector.id, @@ -436,6 +536,9 @@ export const useConnectorDialog = () => { // Store connectingConnectorType before clearing it const currentConnectorType = connectingConnectorType; + // Track connector connected event for non-OAuth connectors + trackConnectorConnected(Number(searchSpaceId), currentConnectorType, connector.id); + // Find connector title from constants const connectorInfo = OTHER_CONNECTORS.find( (c) => c.connectorType === currentConnectorType @@ -632,6 +735,38 @@ export const useConnectorDialog = () => { router.replace(url.pathname + url.search, { scroll: false }); }, [router]); + // Handle viewing accounts list for OAuth connector type + const handleViewAccountsList = useCallback( + (connectorType: string, connectorTitle: string) => { + if (!searchSpaceId) return; + + setViewingAccountsType({ + connectorType, + connectorTitle, + }); + + // Update URL to show accounts view, preserving current tab + const url = new URL(window.location.href); + url.searchParams.set("modal", "connectors"); + url.searchParams.set("view", "accounts"); + url.searchParams.set("connectorType", connectorType); + // Keep the current tab in URL so we can go back to it + window.history.pushState({ modal: true }, "", url.toString()); + }, + [searchSpaceId] + ); + + // Handle going back from 
accounts list view + const handleBackFromAccountsList = useCallback(() => { + setViewingAccountsType(null); + const url = new URL(window.location.href); + url.searchParams.set("modal", "connectors"); + // Keep the current tab (don't change it) - just remove view-specific params + url.searchParams.delete("view"); + url.searchParams.delete("connectorType"); + router.replace(url.pathname + url.search, { scroll: false }); + }, [router]); + // Handle starting indexing const handleStartIndexing = useCallback( async (refreshConnectors: () => void) => { @@ -738,6 +873,27 @@ export const useConnectorDialog = () => { }); } + // Track index with date range started event + trackIndexWithDateRangeStarted( + Number(searchSpaceId), + indexingConfig.connectorType, + indexingConfig.connectorId, + { + hasStartDate: !!startDate, + hasEndDate: !!endDate, + } + ); + + // Track periodic indexing started if enabled + if (periodicEnabled && indexingConfig.connectorType !== "GOOGLE_DRIVE_CONNECTOR") { + trackPeriodicIndexingStarted( + Number(searchSpaceId), + indexingConfig.connectorType, + indexingConfig.connectorId, + parseInt(frequencyMinutes, 10) + ); + } + toast.success(`${indexingConfig.connectorTitle} indexing started`, { description: periodicEnabled ? 
`Periodic sync enabled every ${getFrequencyLabel(frequencyMinutes)}.` @@ -804,6 +960,15 @@ export const useConnectorDialog = () => { return; } + // Track index with date range opened event + if (connector.is_indexable) { + trackIndexWithDateRangeOpened( + Number(searchSpaceId), + connector.connector_type, + connector.id + ); + } + setEditingConnector(connector); setConnectorName(connector.name); // Load existing periodic sync settings (disabled for Google Drive and non-indexable connectors) @@ -939,6 +1104,36 @@ export const useConnectorDialog = () => { indexingDescription = "Re-indexing started with new date range."; } + // Track indexing started if re-indexing was performed + if ( + editingConnector.is_indexable && + (indexingDescription.includes("Re-indexing") || indexingDescription.includes("indexing")) + ) { + trackIndexWithDateRangeStarted( + Number(searchSpaceId), + editingConnector.connector_type, + editingConnector.id, + { + hasStartDate: !!startDateStr, + hasEndDate: !!endDateStr, + } + ); + } + + // Track periodic indexing if enabled (for non-Google Drive connectors) + if ( + periodicEnabled && + editingConnector.is_indexable && + editingConnector.connector_type !== "GOOGLE_DRIVE_CONNECTOR" + ) { + trackPeriodicIndexingStarted( + Number(searchSpaceId), + editingConnector.connector_type, + editingConnector.id, + frequency || parseInt(frequencyMinutes, 10) + ); + } + toast.success(`${editingConnector.name} updated successfully`, { description: periodicEnabled ? `Periodic sync ${frequency ? `enabled every ${getFrequencyLabel(frequencyMinutes)}` : "enabled"}. 
${indexingDescription}` @@ -991,6 +1186,13 @@ export const useConnectorDialog = () => { id: editingConnector.id, }); + // Track connector deleted event + trackConnectorDeleted( + Number(searchSpaceId), + editingConnector.connector_type, + editingConnector.id + ); + toast.success(`${editingConnector.name} disconnected successfully`); // Update URL - the effect will handle closing the modal and clearing state @@ -1017,9 +1219,14 @@ export const useConnectorDialog = () => { // Handle quick index (index without date picker, uses backend defaults) const handleQuickIndexConnector = useCallback( - async (connectorId: number) => { + async (connectorId: number, connectorType?: string) => { if (!searchSpaceId) return; + // Track quick index clicked event + if (connectorType) { + trackQuickIndexClicked(Number(searchSpaceId), connectorType, connectorId); + } + try { await indexConnector({ connector_id: connectorId, @@ -1081,6 +1288,7 @@ export const useConnectorDialog = () => { setConnectorName(null); setConnectorConfig(null); setConnectingConnectorType(null); + setViewingAccountsType(null); setStartDate(undefined); setEndDate(undefined); setPeriodicEnabled(false); @@ -1126,6 +1334,7 @@ export const useConnectorDialog = () => { frequencyMinutes, searchSpaceId, allConnectors, + viewingAccountsType, // Setters setSearchQuery, @@ -1152,6 +1361,8 @@ export const useConnectorDialog = () => { handleBackFromEdit, handleBackFromConnect, handleBackFromYouTube, + handleViewAccountsList, + handleBackFromAccountsList, handleQuickIndexConnector, connectorConfig, setConnectorConfig, diff --git a/surfsense_web/components/assistant-ui/connector-popup/tabs/active-connectors-tab.tsx b/surfsense_web/components/assistant-ui/connector-popup/tabs/active-connectors-tab.tsx index 3dd4fd1d0..7f1bd28f0 100644 --- a/surfsense_web/components/assistant-ui/connector-popup/tabs/active-connectors-tab.tsx +++ b/surfsense_web/components/assistant-ui/connector-popup/tabs/active-connectors-tab.tsx @@ -11,6 +11,7 
@@ import { getConnectorIcon } from "@/contracts/enums/connectorIcons"; import type { SearchSourceConnector } from "@/contracts/types/connector.types"; import type { LogActiveTask, LogSummary } from "@/contracts/types/log.types"; import { cn } from "@/lib/utils"; +import { OAUTH_CONNECTORS } from "../constants/connector-constants"; import { getDocumentCountForConnector } from "../utils/connector-document-mapping"; interface ActiveConnectorsTabProps { @@ -24,6 +25,7 @@ interface ActiveConnectorsTabProps { searchSpaceId: string; onTabChange: (value: string) => void; onManage?: (connector: SearchSourceConnector) => void; + onViewAccountsList?: (connectorType: string, connectorTitle: string) => void; } export const ActiveConnectorsTab: FC = ({ @@ -36,6 +38,7 @@ export const ActiveConnectorsTab: FC = ({ searchSpaceId, onTabChange, onManage, + onViewAccountsList, }) => { const router = useRouter(); @@ -71,38 +74,26 @@ export const ActiveConnectorsTab: FC = ({ const minutesAgo = differenceInMinutes(now, date); const daysAgo = differenceInDays(now, date); - // Just now (within last minute) - if (minutesAgo < 1) { - return "Just now"; - } - - // X minutes ago (less than 1 hour) - if (minutesAgo < 60) { - return `${minutesAgo} ${minutesAgo === 1 ? "minute" : "minutes"} ago`; - } - - // Today at [time] - if (isToday(date)) { - return `Today at ${format(date, "h:mm a")}`; - } - - // Yesterday at [time] - if (isYesterday(date)) { - return `Yesterday at ${format(date, "h:mm a")}`; - } - - // X days ago (less than 7 days) - if (daysAgo < 7) { - return `${daysAgo} ${daysAgo === 1 ? "day" : "days"} ago`; - } - - // Full date for older entries + if (minutesAgo < 1) return "Just now"; + if (minutesAgo < 60) return `${minutesAgo} ${minutesAgo === 1 ? "minute" : "minutes"} ago`; + if (isToday(date)) return `Today at ${format(date, "h:mm a")}`; + if (isYesterday(date)) return `Yesterday at ${format(date, "h:mm a")}`; + if (daysAgo < 7) return `${daysAgo} ${daysAgo === 1 ? 
"day" : "days"} ago`; return format(date, "MMM d, yyyy"); }; - // Document types that should be shown as cards (not from connectors) - // These are: EXTENSION (browser extension), FILE (uploaded files), NOTE (editor notes), - // YOUTUBE_VIDEO (YouTube videos), and CRAWLED_URL (web pages - shown separately even though it can come from WEBCRAWLER_CONNECTOR) + // Get most recent last indexed date from a list of connectors + const getMostRecentLastIndexed = ( + connectorsList: SearchSourceConnector[] + ): string | undefined => { + return connectorsList.reduce((latest, c) => { + if (!c.last_indexed_at) return latest; + if (!latest) return c.last_indexed_at; + return new Date(c.last_indexed_at) > new Date(latest) ? c.last_indexed_at : latest; + }, undefined); + }; + + // Document types that should be shown as standalone cards (not from connectors) const standaloneDocumentTypes = ["EXTENSION", "FILE", "NOTE", "YOUTUBE_VIDEO", "CRAWLED_URL"]; // Filter to only show standalone document types that have documents (count > 0) @@ -118,8 +109,54 @@ export const ActiveConnectorsTab: FC = ({ return doc.label.toLowerCase().includes(searchQuery.toLowerCase()); }); - // Filter connectors based on search query - const filteredConnectors = connectors.filter((connector) => { + // Get OAuth connector types set for quick lookup + const oauthConnectorTypes = new Set(OAUTH_CONNECTORS.map((c) => c.connectorType)); + + // Separate OAuth and non-OAuth connectors + const oauthConnectors = connectors.filter((c) => oauthConnectorTypes.has(c.connector_type)); + const nonOauthConnectors = connectors.filter((c) => !oauthConnectorTypes.has(c.connector_type)); + + // Group OAuth connectors by type + const oauthConnectorsByType = oauthConnectors.reduce( + (acc, connector) => { + const type = connector.connector_type; + if (!acc[type]) { + acc[type] = []; + } + acc[type].push(connector); + return acc; + }, + {} as Record + ); + + // Get display info for OAuth connector type + const 
getOAuthConnectorTypeInfo = (connectorType: string) => { + const oauthConnector = OAUTH_CONNECTORS.find((c) => c.connectorType === connectorType); + return { + title: + oauthConnector?.title || + connectorType + .replace(/_/g, " ") + .replace(/connector/gi, "") + .trim(), + }; + }; + + // Filter OAuth connector types based on search query + const filteredOAuthConnectorTypes = Object.entries(oauthConnectorsByType).filter( + ([connectorType]) => { + if (!searchQuery) return true; + const searchLower = searchQuery.toLowerCase(); + const { title } = getOAuthConnectorTypeInfo(connectorType); + return ( + title.toLowerCase().includes(searchLower) || + connectorType.toLowerCase().includes(searchLower) + ); + } + ); + + // Filter non-OAuth connectors based on search query + const filteredNonOAuthConnectors = nonOauthConnectors.filter((connector) => { if (!searchQuery) return true; const searchLower = searchQuery.toLowerCase(); return ( @@ -128,18 +165,97 @@ export const ActiveConnectorsTab: FC = ({ ); }); + const hasActiveConnectors = + filteredOAuthConnectorTypes.length > 0 || filteredNonOAuthConnectors.length > 0; + return ( {hasSources ? (
{/* Active Connectors Section */} - {filteredConnectors.length > 0 && ( + {hasActiveConnectors && (

Active Connectors

- {filteredConnectors.map((connector) => { + {/* OAuth Connectors - Grouped by Type */} + {filteredOAuthConnectorTypes.map(([connectorType, typeConnectors]) => { + const { title } = getOAuthConnectorTypeInfo(connectorType); + const isAnyIndexing = typeConnectors.some((c: SearchSourceConnector) => + indexingConnectorIds.has(c.id) + ); + const documentCount = getDocumentCountForConnector( + connectorType, + documentTypeCounts + ); + const accountCount = typeConnectors.length; + const mostRecentLastIndexed = getMostRecentLastIndexed(typeConnectors); + + const handleManageClick = () => { + if (onViewAccountsList) { + onViewAccountsList(connectorType, title); + } else if (onManage && typeConnectors[0]) { + onManage(typeConnectors[0]); + } + }; + + return ( +
+
+ {getConnectorIcon(connectorType, "size-6")} +
+
+

{title}

+ {isAnyIndexing ? ( +

+ + Indexing... +

+ ) : ( +

+ {mostRecentLastIndexed + ? `Last indexed: ${formatLastIndexedDate(mostRecentLastIndexed)}` + : "Never indexed"} +

+ )} +

+ {formatDocumentCount(documentCount)} + + + {accountCount} {accountCount === 1 ? "Account" : "Accounts"} + +

+
+ +
+ ); + })} + + {/* Non-OAuth Connectors - Individual Cards */} + {filteredNonOAuthConnectors.map((connector) => { const isIndexing = indexingConnectorIds.has(connector.id); const activeTask = logsSummary?.active_tasks?.find( (task: LogActiveTask) => task.connector_id === connector.id @@ -161,7 +277,7 @@ export const ActiveConnectorsTab: FC = ({ >
= ({ +
+
+ {getConnectorIcon(connectorType, "size-5")} +
+
+

{connectorTitle} Accounts

+

+ {typeConnectors.length} connected account{typeConnectors.length !== 1 ? "s" : ""} +

+
+
+
+ {/* Add Account Button with dashed border */} + +
+
+ + {/* Content */} +
+ {/* Connected Accounts Grid */} +
+ {typeConnectors.map((connector) => { + const isIndexing = indexingConnectorIds.has(connector.id); + const activeTask = logsSummary?.active_tasks?.find( + (task: LogActiveTask) => task.connector_id === connector.id + ); + + return ( +
+
+ {getConnectorIcon(connector.connector_type, "size-6")} +
+
+

+ {getConnectorDisplayName(connector.name)} +

+ {isIndexing ? ( +

+ + Indexing... + {activeTask?.message && ( + + • {activeTask.message} + + )} +

+ ) : ( +

+ {connector.last_indexed_at + ? `Last indexed: ${formatLastIndexedDate(connector.last_indexed_at)}` + : "Never indexed"} +

+ )} +
+ +
+ ); + })} +
+
+
+ ); +}; diff --git a/surfsense_web/components/assistant-ui/document-upload-popup.tsx b/surfsense_web/components/assistant-ui/document-upload-popup.tsx index da3b820e5..6ac1ec979 100644 --- a/surfsense_web/components/assistant-ui/document-upload-popup.tsx +++ b/surfsense_web/components/assistant-ui/document-upload-popup.tsx @@ -1,7 +1,7 @@ "use client"; -import { Upload } from "lucide-react"; import { useAtomValue } from "jotai"; +import { Upload } from "lucide-react"; import { useRouter } from "next/navigation"; import { createContext, diff --git a/surfsense_web/components/copy-button.tsx b/surfsense_web/components/copy-button.tsx deleted file mode 100644 index c1a752997..000000000 --- a/surfsense_web/components/copy-button.tsx +++ /dev/null @@ -1,38 +0,0 @@ -"use client"; -import { Copy, CopyCheck } from "lucide-react"; -import type { RefObject } from "react"; -import { useEffect, useRef, useState } from "react"; -import { Button } from "./ui/button"; - -export default function CopyButton({ ref }: { ref: RefObject }) { - const [copy, setCopy] = useState(false); - const timeoutRef = useRef(null); - - useEffect(() => { - return () => { - if (timeoutRef.current) { - clearTimeout(timeoutRef.current); - } - }; - }, []); - - const handleClick = () => { - if (ref.current) { - const text = ref.current.innerText; - navigator.clipboard.writeText(text); - - setCopy(true); - timeoutRef.current = setTimeout(() => { - setCopy(false); - }, 2000); - } - }; - - return ( -
- -
- ); -} diff --git a/surfsense_web/components/editConnector/EditConnectorLoadingSkeleton.tsx b/surfsense_web/components/editConnector/EditConnectorLoadingSkeleton.tsx deleted file mode 100644 index 4b9965632..000000000 --- a/surfsense_web/components/editConnector/EditConnectorLoadingSkeleton.tsx +++ /dev/null @@ -1,22 +0,0 @@ -"use client"; - -import { Card, CardContent, CardHeader } from "@/components/ui/card"; -import { Skeleton } from "@/components/ui/skeleton"; - -export function EditConnectorLoadingSkeleton() { - return ( -
- - - - - - - - - - - -
- ); -} diff --git a/surfsense_web/components/editConnector/EditConnectorNameForm.tsx b/surfsense_web/components/editConnector/EditConnectorNameForm.tsx deleted file mode 100644 index 0dae174db..000000000 --- a/surfsense_web/components/editConnector/EditConnectorNameForm.tsx +++ /dev/null @@ -1,28 +0,0 @@ -"use client"; - -import type { Control } from "react-hook-form"; -import { FormControl, FormField, FormItem, FormLabel, FormMessage } from "@/components/ui/form"; -import { Input } from "@/components/ui/input"; - -// Assuming EditConnectorFormValues is defined elsewhere or passed as generic -interface EditConnectorNameFormProps { - control: Control; // Use Control if type is available -} - -export function EditConnectorNameForm({ control }: EditConnectorNameFormProps) { - return ( - ( - - Connector Name - - - - - - )} - /> - ); -} diff --git a/surfsense_web/components/editConnector/EditGitHubConnectorConfig.tsx b/surfsense_web/components/editConnector/EditGitHubConnectorConfig.tsx deleted file mode 100644 index aa3eb1404..000000000 --- a/surfsense_web/components/editConnector/EditGitHubConnectorConfig.tsx +++ /dev/null @@ -1,189 +0,0 @@ -import { CircleAlert, Edit, KeyRound, Loader2 } from "lucide-react"; -import type React from "react"; -import type { UseFormReturn } from "react-hook-form"; -import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert"; -import { Button } from "@/components/ui/button"; -import { Checkbox } from "@/components/ui/checkbox"; -import { - FormControl, - FormDescription, - FormField, - FormItem, - FormLabel, - FormMessage, -} from "@/components/ui/form"; -import { Input } from "@/components/ui/input"; -import { Skeleton } from "@/components/ui/skeleton"; - -// Types needed from parent -interface GithubRepo { - id: number; - name: string; - full_name: string; - private: boolean; - url: string; - description: string | null; - last_updated: string | null; -} -type GithubPatFormValues = { github_pat: string }; -type EditMode 
= "viewing" | "editing_repos"; - -interface EditGitHubConnectorConfigProps { - // State from parent - editMode: EditMode; - originalPat: string; - currentSelectedRepos: string[]; - fetchedRepos: GithubRepo[] | null; - newSelectedRepos: string[]; - isFetchingRepos: boolean; - // Forms from parent - patForm: UseFormReturn; - // Handlers from parent - setEditMode: (mode: EditMode) => void; - handleFetchRepositories: (values: GithubPatFormValues) => Promise; - handleRepoSelectionChange: (repoFullName: string, checked: boolean) => void; - setNewSelectedRepos: React.Dispatch>; - setFetchedRepos: React.Dispatch>; -} - -export function EditGitHubConnectorConfig({ - editMode, - originalPat, - currentSelectedRepos, - fetchedRepos, - newSelectedRepos, - isFetchingRepos, - patForm, - setEditMode, - handleFetchRepositories, - handleRepoSelectionChange, - setNewSelectedRepos, - setFetchedRepos, -}: EditGitHubConnectorConfigProps) { - return ( -
-

Repository Selection & Access

- - {/* Viewing Mode */} - {editMode === "viewing" && ( -
- Currently Indexed Repositories: - {currentSelectedRepos.length > 0 ? ( -
    - {currentSelectedRepos.map((repo) => ( -
  • {repo}
  • - ))} -
- ) : ( -

(No repositories currently selected)

- )} - - - To change repo selections or update the PAT, click above. - -
- )} - - {/* Editing Mode */} - {editMode === "editing_repos" && ( -
- {/* PAT Input */} -
- ( - - - GitHub PAT - - - - - - Enter PAT to fetch/update repos or if you need to update the stored token. - - - - )} - /> - -
- - {/* Repo List */} - {isFetchingRepos && } - {!isFetchingRepos && - fetchedRepos !== null && - (fetchedRepos.length === 0 ? ( - - - No Repositories Found - Check PAT & permissions. - - ) : ( -
- - Select Repositories to Index ({newSelectedRepos.length} selected): - -
- {fetchedRepos.map((repo) => ( -
- - handleRepoSelectionChange(repo.full_name, !!checked) - } - /> - -
- ))} -
-
- ))} - -
- )} -
- ); -} diff --git a/surfsense_web/components/editConnector/EditSimpleTokenForm.tsx b/surfsense_web/components/editConnector/EditSimpleTokenForm.tsx deleted file mode 100644 index 4ad654045..000000000 --- a/surfsense_web/components/editConnector/EditSimpleTokenForm.tsx +++ /dev/null @@ -1,49 +0,0 @@ -"use client"; - -import { KeyRound } from "lucide-react"; -import type { Control } from "react-hook-form"; -import { - FormControl, - FormDescription, - FormField, - FormItem, - FormLabel, - FormMessage, -} from "@/components/ui/form"; -import { Input } from "@/components/ui/input"; - -// Assuming EditConnectorFormValues is defined elsewhere or passed as generic -interface EditSimpleTokenFormProps { - control: Control; - fieldName: string; // e.g., "SLACK_BOT_TOKEN" - fieldLabel: string; // e.g., "Slack Bot Token" - fieldDescription: string; - placeholder?: string; -} - -export function EditSimpleTokenForm({ - control, - fieldName, - fieldLabel, - fieldDescription, - placeholder, -}: EditSimpleTokenFormProps) { - return ( - ( - - - {fieldLabel} - - - - - {fieldDescription} - - - )} - /> - ); -} diff --git a/surfsense_web/components/editConnector/types.ts b/surfsense_web/components/editConnector/types.ts deleted file mode 100644 index 43fab23e0..000000000 --- a/surfsense_web/components/editConnector/types.ts +++ /dev/null @@ -1,59 +0,0 @@ -import * as z from "zod"; - -// Types -export interface GithubRepo { - id: number; - name: string; - full_name: string; - private: boolean; - url: string; - description: string | null; - last_updated: string | null; -} - -export type EditMode = "viewing" | "editing_repos"; - -// Schemas -export const githubPatSchema = z.object({ - github_pat: z - .string() - .min(20, { message: "GitHub Personal Access Token seems too short." 
}) - .refine((pat) => pat.startsWith("ghp_") || pat.startsWith("github_pat_"), { - message: "GitHub PAT should start with 'ghp_' or 'github_pat_'", - }), -}); -export type GithubPatFormValues = z.infer; - -export const editConnectorSchema = z.object({ - name: z.string().min(3, { message: "Connector name must be at least 3 characters." }), - SLACK_BOT_TOKEN: z.string().optional(), - NOTION_INTEGRATION_TOKEN: z.string().optional(), - TAVILY_API_KEY: z.string().optional(), - SEARXNG_HOST: z.string().optional(), - SEARXNG_API_KEY: z.string().optional(), - SEARXNG_ENGINES: z.string().optional(), - SEARXNG_CATEGORIES: z.string().optional(), - SEARXNG_LANGUAGE: z.string().optional(), - SEARXNG_SAFESEARCH: z.string().optional(), - SEARXNG_VERIFY_SSL: z.string().optional(), - LINKUP_API_KEY: z.string().optional(), - DISCORD_BOT_TOKEN: z.string().optional(), - CONFLUENCE_BASE_URL: z.string().optional(), - CONFLUENCE_EMAIL: z.string().optional(), - CONFLUENCE_API_TOKEN: z.string().optional(), - BOOKSTACK_BASE_URL: z.string().optional(), - BOOKSTACK_TOKEN_ID: z.string().optional(), - BOOKSTACK_TOKEN_SECRET: z.string().optional(), - JIRA_BASE_URL: z.string().optional(), - JIRA_EMAIL: z.string().optional(), - JIRA_API_TOKEN: z.string().optional(), - GOOGLE_CALENDAR_CLIENT_ID: z.string().optional(), - GOOGLE_CALENDAR_CLIENT_SECRET: z.string().optional(), - GOOGLE_CALENDAR_REFRESH_TOKEN: z.string().optional(), - GOOGLE_CALENDAR_CALENDAR_IDS: z.string().optional(), - LUMA_API_KEY: z.string().optional(), - ELASTICSEARCH_API_KEY: z.string().optional(), - FIRECRAWL_API_KEY: z.string().optional(), - INITIAL_URLS: z.string().optional(), -}); -export type EditConnectorFormValues = z.infer; diff --git a/surfsense_web/components/homepage/hero-section.tsx b/surfsense_web/components/homepage/hero-section.tsx index db7525881..a9cfdeba2 100644 --- a/surfsense_web/components/homepage/hero-section.tsx +++ b/surfsense_web/components/homepage/hero-section.tsx @@ -4,8 +4,31 @@ import Image from 
"next/image"; import Link from "next/link"; import React, { useEffect, useRef, useState } from "react"; import Balancer from "react-wrap-balancer"; +import { trackLoginAttempt } from "@/lib/posthog/events"; import { cn } from "@/lib/utils"; +// Official Google "G" logo with brand colors +const GoogleLogo = ({ className }: { className?: string }) => ( + + + + + + +); + export function HeroSection() { const containerRef = useRef(null); const parentRef = useRef(null); @@ -60,7 +83,7 @@ export function HeroSection() {

The AI Workspace{" "} -
+
Built for Teams
@@ -73,12 +96,7 @@ export function HeroSection() { your team.

- - Get Started - + {/* { + trackLoginAttempt("google"); + window.location.href = `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/auth/google/authorize-redirect`; + }; + + if (isGoogleAuth) { + return ( + + {/* Animated gradient background on hover */} + + {/* Google logo with subtle animation */} + + + + Continue with Google + + ); + } + + return ( + + + Get Started + + + ); +} + const BackgroundGrids = () => { return (
@@ -126,7 +203,7 @@ const BackgroundGrids = () => {
-
+
@@ -237,7 +314,7 @@ const CollisionMechanism = React.forwardRef< repeatDelay: beamOptions.repeatDelay || 0, }} className={cn( - "absolute left-96 top-20 m-auto h-14 w-px rounded-full bg-gradient-to-t from-orange-500 via-yellow-500 to-transparent", + "absolute left-96 top-20 m-auto h-14 w-px rounded-full bg-linear-to-t from-orange-500 via-yellow-500 to-transparent", beamOptions.className )} /> @@ -276,7 +353,7 @@ const Explosion = ({ ...props }: React.HTMLProps) => { animate={{ opacity: [0, 1, 0] }} exit={{ opacity: 0 }} transition={{ duration: 1, ease: "easeOut" }} - className="absolute -inset-x-10 top-0 m-auto h-[4px] w-10 rounded-full bg-gradient-to-r from-transparent via-orange-500 to-transparent blur-sm" + className="absolute -inset-x-10 top-0 m-auto h-[4px] w-10 rounded-full bg-linear-to-r from-transparent via-orange-500 to-transparent blur-sm" > {spans.map((span) => ( ) => { initial={{ x: span.initialX, y: span.initialY, opacity: 1 }} animate={{ x: span.directionX, y: span.directionY, opacity: 0 }} transition={{ duration: Math.random() * 1.5 + 0.5, ease: "easeOut" }} - className="absolute h-1 w-1 rounded-full bg-gradient-to-b from-orange-500 to-yellow-500" + className="absolute h-1 w-1 rounded-full bg-linear-to-b from-orange-500 to-yellow-500" /> ))}
@@ -307,11 +384,11 @@ const GridLineVertical = ({ className, offset }: { className?: string; offset?: } as React.CSSProperties } className={cn( - "absolute top-[calc(var(--offset)/2*-1)] h-[calc(100%+var(--offset))] w-[var(--width)]", + "absolute top-[calc(var(--offset)/2*-1)] h-[calc(100%+var(--offset))] w-(--width)", "bg-[linear-gradient(to_bottom,var(--color),var(--color)_50%,transparent_0,transparent)]", - "[background-size:var(--width)_var(--height)]", - "[mask:linear-gradient(to_top,var(--background)_var(--fade-stop),transparent),_linear-gradient(to_bottom,var(--background)_var(--fade-stop),transparent),_linear-gradient(black,black)]", - "[mask-composite:exclude]", + "bg-size-[var(--width)_var(--height)]", + "[mask:linear-gradient(to_top,var(--background)_var(--fade-stop),transparent),linear-gradient(to_bottom,var(--background)_var(--fade-stop),transparent),linear-gradient(black,black)]", + "mask-exclude", "z-30", "dark:bg-[linear-gradient(to_bottom,var(--color-dark),var(--color-dark)_50%,transparent_0,transparent)]", className diff --git a/surfsense_web/components/homepage/navbar.tsx b/surfsense_web/components/homepage/navbar.tsx index 0b060e548..4d71b0041 100644 --- a/surfsense_web/components/homepage/navbar.tsx +++ b/surfsense_web/components/homepage/navbar.tsx @@ -54,7 +54,7 @@ const DesktopNav = ({ navItems, isScrolled }: any) => { : "bg-transparent border border-transparent" )} > -
+
SurfSense
diff --git a/surfsense_web/components/markdown-viewer.tsx b/surfsense_web/components/markdown-viewer.tsx index 407adba7a..93e3f26e1 100644 --- a/surfsense_web/components/markdown-viewer.tsx +++ b/surfsense_web/components/markdown-viewer.tsx @@ -1,5 +1,5 @@ import Image from "next/image"; -import { type StreamdownProps, Streamdown } from "streamdown"; +import { Streamdown, type StreamdownProps } from "streamdown"; import { cn } from "@/lib/utils"; interface MarkdownViewerProps { diff --git a/surfsense_web/components/onboarding-tour.tsx b/surfsense_web/components/onboarding-tour.tsx index 0fc43160a..958bb43b0 100644 --- a/surfsense_web/components/onboarding-tour.tsx +++ b/surfsense_web/components/onboarding-tour.tsx @@ -1,15 +1,15 @@ "use client"; -import { useAtomValue } from "jotai"; import { useQuery } from "@tanstack/react-query"; +import { useAtomValue } from "jotai"; import { usePathname } from "next/navigation"; import { useTheme } from "next-themes"; import { useCallback, useEffect, useRef, useState } from "react"; import { createPortal } from "react-dom"; -import { currentUserAtom } from "@/atoms/user/user-query.atoms"; -import { activeSearchSpaceIdAtom } from "@/atoms/search-spaces/search-space-query.atoms"; -import { documentTypeCountsAtom } from "@/atoms/documents/document-query.atoms"; import { connectorsAtom } from "@/atoms/connectors/connector-query.atoms"; +import { documentTypeCountsAtom } from "@/atoms/documents/document-query.atoms"; +import { activeSearchSpaceIdAtom } from "@/atoms/search-spaces/search-space-query.atoms"; +import { currentUserAtom } from "@/atoms/user/user-query.atoms"; import { fetchThreads } from "@/lib/chat/thread-persistence"; interface TourStep { diff --git a/surfsense_web/components/settings/llm-role-manager.tsx b/surfsense_web/components/settings/llm-role-manager.tsx index 1bf7a3629..ba4c4970c 100644 --- a/surfsense_web/components/settings/llm-role-manager.tsx +++ b/surfsense_web/components/settings/llm-role-manager.tsx @@ 
-45,7 +45,7 @@ const ROLE_DESCRIPTIONS = { document_summary: { icon: FileText, title: "Document Summary LLM", - description: "Handles document summarization, long context analysis, and query reformulation", + description: "Handles document summarization", color: "bg-purple-100 text-purple-800 border-purple-200", examples: "Document analysis, podcasts, research synthesis", characteristics: ["Large context window", "Deep reasoning", "Summarization"], @@ -74,7 +74,6 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) { data: preferences = {}, isFetching: preferencesLoading, error: preferencesError, - refetch: refreshPreferences, } = useAtomValue(llmPreferencesAtom); const { mutateAsync: updatePreferences } = useAtomValue(updateLLMPreferencesMutationAtom); @@ -187,19 +186,6 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) { Refresh Configs Configs -
diff --git a/surfsense_web/components/sidebar/app-sidebar.tsx b/surfsense_web/components/sidebar/app-sidebar.tsx index 8030cb9d2..97d7fa9dd 100644 --- a/surfsense_web/components/sidebar/app-sidebar.tsx +++ b/surfsense_web/components/sidebar/app-sidebar.tsx @@ -160,7 +160,7 @@ const defaultData = { user: { name: "Surf", email: "m@example.com", - avatar: "/icon-128.png", + avatar: "/icon-128.svg", }, navMain: [ { diff --git a/surfsense_web/components/sources/types.ts b/surfsense_web/components/sources/types.ts deleted file mode 100644 index 230af7503..000000000 --- a/surfsense_web/components/sources/types.ts +++ /dev/null @@ -1,13 +0,0 @@ -export interface Connector { - id: string; - title: string; - description: string; - icon: React.ReactNode; - status: "available" | "coming-soon" | "connected"; -} - -export interface ConnectorCategory { - id: string; - title: string; - connectors: Connector[]; -} diff --git a/surfsense_web/components/tool-ui/shared/action-buttons.tsx b/surfsense_web/components/tool-ui/shared/action-buttons.tsx deleted file mode 100644 index 4ed280559..000000000 --- a/surfsense_web/components/tool-ui/shared/action-buttons.tsx +++ /dev/null @@ -1,41 +0,0 @@ -"use client"; - -import type { FC } from "react"; -import { Button } from "@/components/ui/button"; -import type { Action, ActionsConfig } from "./schema"; - -interface ActionButtonsProps { - actions?: Action[] | ActionsConfig; - onAction?: (actionId: string) => void; - disabled?: boolean; -} - -export const ActionButtons: FC = ({ actions, onAction, disabled }) => { - if (!actions) return null; - - // Normalize actions to array format - const actionArray: Action[] = Array.isArray(actions) - ? actions - : ([ - actions.confirm && { ...actions.confirm, id: "confirm" }, - actions.cancel && { ...actions.cancel, id: "cancel" }, - ].filter(Boolean) as Action[]); - - if (actionArray.length === 0) return null; - - return ( -
- {actionArray.map((action) => ( - - ))} -
- ); -}; diff --git a/surfsense_web/components/tool-ui/shared/index.ts b/surfsense_web/components/tool-ui/shared/index.ts deleted file mode 100644 index 23f5a27dd..000000000 --- a/surfsense_web/components/tool-ui/shared/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from "./action-buttons"; -export * from "./schema"; diff --git a/surfsense_web/components/tool-ui/shared/schema.ts b/surfsense_web/components/tool-ui/shared/schema.ts deleted file mode 100644 index 8076a8e45..000000000 --- a/surfsense_web/components/tool-ui/shared/schema.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { z } from "zod"; - -/** - * Shared action schema for tool UI components - */ -export const ActionSchema = z.object({ - id: z.string(), - label: z.string(), - variant: z.enum(["default", "secondary", "destructive", "outline", "ghost", "link"]).optional(), - disabled: z.boolean().optional(), -}); - -export type Action = z.infer; - -/** - * Actions configuration schema - */ -export const ActionsConfigSchema = z.object({ - confirm: ActionSchema.optional(), - cancel: ActionSchema.optional(), -}); - -export type ActionsConfig = z.infer; diff --git a/surfsense_web/content/docs/connectors/airtable.mdx b/surfsense_web/content/docs/connectors/airtable.mdx index 1fbe427ec..366a6e8e5 100644 --- a/surfsense_web/content/docs/connectors/airtable.mdx +++ b/surfsense_web/content/docs/connectors/airtable.mdx @@ -3,4 +3,99 @@ title: Airtable description: Connect your Airtable bases to SurfSense --- -# Documentation in progress +# Airtable OAuth Integration Setup Guide + +This guide walks you through setting up an Airtable OAuth integration for SurfSense. + +## Step 1: Access Airtable OAuth Integrations + +1. Navigate to [airtable.com/create/oauth](https://airtable.com/create/oauth) +2. In the **Builder Hub**, under **Developers**, click **"OAuth integrations"** +3. 
Click **"Register an OAuth integration"** + +![Airtable OAuth Integrations Page](/docs/connectors/airtable/airtable-oauth-integrations.png) + +## Step 2: Register an Integration + +Fill in the basic integration details: + +| Field | Value | +|-------|-------| +| **Name** | `SurfSense` | +| **OAuth redirect URL** | `http://localhost:8000/api/v1/auth/airtable/connector/callback` | + +Click **"Register integration"** + +![Register Integration Form](/docs/connectors/airtable/airtable-register-integration.png) + +## Step 3: Configure Scopes + +After registration, configure the required scopes (permissions) for your integration: + +### Record data and comments + +| Scope | Description | +|-------|-------------| +| ✅ `data.recordComments:read` | See comments in records | +| ✅ `data.records:read` | See the data in records | + +### Base schema + +| Scope | Description | +|-------|-------------| +| ✅ `schema.bases:read` | See the structure of a base, like table names or field types | + +### User metadata + +| Scope | Description | +|-------|-------------| +| ✅ `user.email:read` | See the user's email address | + +![Scopes Configuration](/docs/connectors/airtable/airtable-scopes.png) + +## Step 4: Configure Support Information + +Scroll down to configure the support information and authorization preview: + +| Field | Value | +|-------|-------| +| **Support email** | Your support email address | +| **Privacy policy URL** | Your privacy policy URL | +| **Terms of service URL** | Your terms of service URL | + +The preview shows what users will see when authorizing SurfSense: +- The data in your records +- Comments in your records +- The structure of your base, like table names or field types +- Your email address + +Click **"Save changes"** + +![Support Information & Preview](/docs/connectors/airtable/airtable-support-info.png) + +## Step 5: Get OAuth Credentials + +After saving, you'll find your OAuth credentials on the integration page: + +1. Copy your **Client ID** +2. 
Copy your **Client Secret** + +> ⚠️ Never share your client secret publicly. + +--- + +## Running SurfSense with Airtable Connector + +Add the Airtable environment variables to your Docker run command: + +```bash +docker run -d -p 3000:3000 -p 8000:8000 \ + -v surfsense-data:/data \ + # Airtable Connector + -e AIRTABLE_CLIENT_ID=your_airtable_client_id \ + -e AIRTABLE_CLIENT_SECRET=your_airtable_client_secret \ + -e AIRTABLE_REDIRECT_URI=http://localhost:8000/api/v1/auth/airtable/connector/callback \ + --name surfsense \ + --restart unless-stopped \ + ghcr.io/modsetter/surfsense:latest +``` \ No newline at end of file diff --git a/surfsense_web/content/docs/connectors/clickup.mdx b/surfsense_web/content/docs/connectors/clickup.mdx index f59030788..1b732c968 100644 --- a/surfsense_web/content/docs/connectors/clickup.mdx +++ b/surfsense_web/content/docs/connectors/clickup.mdx @@ -3,4 +3,55 @@ title: ClickUp description: Connect your ClickUp workspace to SurfSense --- -# Documentation in progress \ No newline at end of file +# ClickUp OAuth Integration Setup Guide + +This guide walks you through setting up a ClickUp OAuth integration for SurfSense. + +## Step 1: Access ClickUp API Settings + +1. Open your ClickUp workspace +2. Navigate to **Settings** (gear icon) → **ClickUp API** +3. You'll see the **ClickUp API Settings** page + +![ClickUp API Settings Page](/docs/connectors/clickup/clickup-api-settings.png) + +## Step 2: Create an App + +1. Click **"+ Create an App"** in the top-right corner +2. Fill in the app details: + +| Field | Value | +|-------|-------| +| **App Name** | `SurfSense` | +| **Redirect URL(s)** | `localhost:8000` | + +3. Click **"Save"** to create the app + +![App Created with Credentials](/docs/connectors/clickup/clickup-app-credentials.png) + +## Step 3: Get OAuth Credentials + +After creating the app, you'll see your credentials: + +1. Copy your **Client ID** +2. 
Copy your **Client Secret** (click "Show" to reveal, or "Regenerate" if needed) + +> ⚠️ Never share your client secret publicly. + +--- + +## Running SurfSense with ClickUp Connector + +Add the ClickUp environment variables to your Docker run command: + +```bash +docker run -d -p 3000:3000 -p 8000:8000 \ + -v surfsense-data:/data \ + # ClickUp Connector + -e CLICKUP_CLIENT_ID=your_clickup_client_id \ + -e CLICKUP_CLIENT_SECRET=your_clickup_client_secret \ + -e CLICKUP_REDIRECT_URI=http://localhost:8000/api/v1/auth/clickup/connector/callback \ + --name surfsense \ + --restart unless-stopped \ + ghcr.io/modsetter/surfsense:latest +``` \ No newline at end of file diff --git a/surfsense_web/content/docs/connectors/confluence.mdx b/surfsense_web/content/docs/connectors/confluence.mdx index aa220fcbe..fad9f3e3d 100644 --- a/surfsense_web/content/docs/connectors/confluence.mdx +++ b/surfsense_web/content/docs/connectors/confluence.mdx @@ -85,7 +85,7 @@ Select the **"Granular scopes"** tab and enable: 1. In the left sidebar, click **"Settings"** 2. Copy your **Client ID** and **Client Secret** -> ⚠️ Never share your client secret publicly or include it in code repositories. +> ⚠️ Never share your client secret publicly. --- diff --git a/surfsense_web/content/docs/connectors/gmail.mdx b/surfsense_web/content/docs/connectors/gmail.mdx index 6c08804fc..434e6ae4d 100644 --- a/surfsense_web/content/docs/connectors/gmail.mdx +++ b/surfsense_web/content/docs/connectors/gmail.mdx @@ -60,7 +60,7 @@ This guide walks you through setting up a Google OAuth 2.0 integration for SurfS 1. After creating the OAuth client, you'll see a dialog with your credentials 2. Copy your **Client ID** and **Client Secret** -> ⚠️ Never share your client secret publicly or include it in code repositories. +> ⚠️ Never share your client secret publicly. 
![Google Developer Console Config](/docs/connectors/google/google_oauth_config.png) diff --git a/surfsense_web/content/docs/connectors/google-calendar.mdx b/surfsense_web/content/docs/connectors/google-calendar.mdx index e6ae4d593..cc1eae545 100644 --- a/surfsense_web/content/docs/connectors/google-calendar.mdx +++ b/surfsense_web/content/docs/connectors/google-calendar.mdx @@ -59,7 +59,7 @@ This guide walks you through setting up a Google OAuth 2.0 integration for SurfS 1. After creating the OAuth client, you'll see a dialog with your credentials 2. Copy your **Client ID** and **Client Secret** -> ⚠️ Never share your client secret publicly or include it in code repositories. +> ⚠️ Never share your client secret publicly. ![Google Developer Console Config](/docs/connectors/google/google_oauth_config.png) diff --git a/surfsense_web/content/docs/connectors/google-drive.mdx b/surfsense_web/content/docs/connectors/google-drive.mdx index f2b0105fc..00ea2f610 100644 --- a/surfsense_web/content/docs/connectors/google-drive.mdx +++ b/surfsense_web/content/docs/connectors/google-drive.mdx @@ -60,7 +60,7 @@ This guide walks you through setting up a Google OAuth 2.0 integration for SurfS 1. After creating the OAuth client, you'll see a dialog with your credentials 2. Copy your **Client ID** and **Client Secret** -> ⚠️ Never share your client secret publicly or include it in code repositories. +> ⚠️ Never share your client secret publicly. ![Google Developer Console Config](/docs/connectors/google/google_oauth_config.png) diff --git a/surfsense_web/content/docs/connectors/jira.mdx b/surfsense_web/content/docs/connectors/jira.mdx index 9d00a56af..ebe639d6d 100644 --- a/surfsense_web/content/docs/connectors/jira.mdx +++ b/surfsense_web/content/docs/connectors/jira.mdx @@ -72,7 +72,7 @@ This guide walks you through setting up an Atlassian OAuth 2.0 (3LO) integration 1. In the left sidebar, click **"Settings"** 2. 
Copy your **Client ID** and **Client Secret** -> ⚠️ Never share your client secret publicly or include it in code repositories. +> ⚠️ Never share your client secret publicly. --- diff --git a/surfsense_web/content/docs/connectors/meta.json b/surfsense_web/content/docs/connectors/meta.json index 70635c6b3..9b416afdd 100644 --- a/surfsense_web/content/docs/connectors/meta.json +++ b/surfsense_web/content/docs/connectors/meta.json @@ -9,6 +9,7 @@ "discord", "jira", "linear", + "microsoft-teams", "confluence", "airtable", "clickup", @@ -20,4 +21,3 @@ ], "defaultOpen": true } - diff --git a/surfsense_web/content/docs/connectors/microsoft-teams.mdx b/surfsense_web/content/docs/connectors/microsoft-teams.mdx new file mode 100644 index 000000000..daa6eb375 --- /dev/null +++ b/surfsense_web/content/docs/connectors/microsoft-teams.mdx @@ -0,0 +1,101 @@ +--- +title: Microsoft Teams +description: Connect your Microsoft Teams to SurfSense +--- + +# Microsoft Teams OAuth Integration Setup Guide + +This guide walks you through setting up a Microsoft Teams OAuth integration for SurfSense using Azure App Registration. + +## Step 1: Access Azure App Registrations + +1. Navigate to [portal.azure.com](https://portal.azure.com) +2. In the search bar, type **"app reg"** +3. Select **"App registrations"** from the Services results + +![Azure Portal Search](/docs/connectors/microsoft-teams/azure-search-app-reg.png) + +## Step 2: Create New Registration + +1. 
On the **App registrations** page, click **"+ New registration"** + +![App Registrations Page](/docs/connectors/microsoft-teams/azure-app-registrations.png) + +## Step 3: Register the Application + +Fill in the application details: + +| Field | Value | +|-------|-------| +| **Name** | `SurfSense` | +| **Supported account types** | Select **"Accounts in any organizational directory (Any Microsoft Entra ID tenant - Multitenant) and personal Microsoft accounts"** | +| **Redirect URI** | Platform: `Web`, URI: `http://localhost:8000/api/v1/auth/teams/connector/callback` | + +Click **"Register"** + +![Register Application Form](/docs/connectors/microsoft-teams/azure-register-app.png) + +## Step 4: Get Application (Client) ID + +After registration, you'll be taken to the app's **Overview** page. Here you'll find: + +1. Copy the **Application (client) ID** - this is your Client ID +2. Note the **Directory (tenant) ID** if needed + +![Application Overview](/docs/connectors/microsoft-teams/azure-app-overview.png) + +## Step 5: Create Client Secret + +1. In the left sidebar under **Manage**, click **"Certificates & secrets"** +2. Select the **"Client secrets"** tab +3. Click **"+ New client secret"** +4. Enter a description (e.g., `SurfSense`) and select an expiration period +5. Click **"Add"** + +![Certificates & Secrets - Empty](/docs/connectors/microsoft-teams/azure-certificates-empty.png) + +6. **Important**: Copy the secret **Value** immediately - it won't be shown again! + +![Certificates & Secrets - Created](/docs/connectors/microsoft-teams/azure-certificates-created.png) + +> ⚠️ Never share your client secret publicly or include it in code repositories. + +## Step 6: Configure API Permissions + +1. In the left sidebar under **Manage**, click **"API permissions"** +2. Click **"+ Add a permission"** +3. Select **"Microsoft Graph"** +4. Select **"Delegated permissions"** +5. 
Add the following permissions: + +| Permission | Type | Description | Admin Consent | +|------------|------|-------------|---------------| +| `Channel.ReadBasic.All` | Delegated | Read the names and descriptions of channels | No | +| `ChannelMessage.Read.All` | Delegated | Read user channel messages | Yes | +| `offline_access` | Delegated | Maintain access to data you have given it access to | No | +| `Team.ReadBasic.All` | Delegated | Read the names and descriptions of teams | No | +| `User.Read` | Delegated | Sign in and read user profile | No | + +6. Click **"Add permissions"** + +> ⚠️ The `ChannelMessage.Read.All` permission requires admin consent. An admin will need to click **"Grant admin consent for [Directory]"** for full functionality. + +![API Permissions](/docs/connectors/microsoft-teams/azure-api-permissions.png) + +--- + +## Running SurfSense with Microsoft Teams Connector + +Add the Microsoft Teams environment variables to your Docker run command: + +```bash +docker run -d -p 3000:3000 -p 8000:8000 \ + -v surfsense-data:/data \ + # Microsoft Teams Connector + -e TEAMS_CLIENT_ID=your_microsoft_client_id \ + -e TEAMS_CLIENT_SECRET=your_microsoft_client_secret \ + -e TEAMS_REDIRECT_URI=http://localhost:8000/api/v1/auth/teams/connector/callback \ + --name surfsense \ + --restart unless-stopped \ + ghcr.io/modsetter/surfsense:latest +``` diff --git a/surfsense_web/content/docs/connectors/slack.mdx b/surfsense_web/content/docs/connectors/slack.mdx index 838408cd7..ccabe6f9e 100644 --- a/surfsense_web/content/docs/connectors/slack.mdx +++ b/surfsense_web/content/docs/connectors/slack.mdx @@ -32,7 +32,7 @@ After creating the app, you'll be taken to the **Basic Information** page. Here 1. Copy your **Client ID** 2. Copy your **Client Secret** (click Show to reveal) -> ⚠️ Never share your app credentials publicly or include them in code repositories. +> ⚠️ Never share your app credentials publicly. 
![Basic Information - App Credentials](/docs/connectors/slack/slack-app-credentials.png) diff --git a/surfsense_web/content/docs/docker-installation.mdx b/surfsense_web/content/docs/docker-installation.mdx index d61aa3bc8..6501c7783 100644 --- a/surfsense_web/content/docs/docker-installation.mdx +++ b/surfsense_web/content/docs/docker-installation.mdx @@ -47,31 +47,29 @@ docker run -d -p 3000:3000 -p 8000:8000 ` ### With Custom Configuration -**Using OpenAI Embeddings:** +You can pass any [environment variable](/docs/manual-installation#backend-environment-variables) using `-e` flags: ```bash docker run -d -p 3000:3000 -p 8000:8000 \ -v surfsense-data:/data \ -e EMBEDDING_MODEL=openai://text-embedding-ada-002 \ -e OPENAI_API_KEY=your_openai_api_key \ - --name surfsense \ - --restart unless-stopped \ - ghcr.io/modsetter/surfsense:latest -``` - -**With Google OAuth:** - -```bash -docker run -d -p 3000:3000 -p 8000:8000 \ - -v surfsense-data:/data \ -e AUTH_TYPE=GOOGLE \ - -e GOOGLE_OAUTH_CLIENT_ID=your_client_id \ - -e GOOGLE_OAUTH_CLIENT_SECRET=your_client_secret \ + -e GOOGLE_OAUTH_CLIENT_ID=your_google_client_id \ + -e GOOGLE_OAUTH_CLIENT_SECRET=your_google_client_secret \ + -e ETL_SERVICE=LLAMACLOUD \ + -e LLAMA_CLOUD_API_KEY=your_llama_cloud_key \ --name surfsense \ --restart unless-stopped \ ghcr.io/modsetter/surfsense:latest ``` + +- For Google OAuth, create credentials in the [Google Cloud Console](https://console.cloud.google.com/apis/credentials) +- For Airtable connector, create an OAuth integration in the [Airtable Developer Hub](https://airtable.com/create/oauth) +- If deploying behind a reverse proxy with HTTPS, add `-e BACKEND_URL=https://api.yourdomain.com` + + ### Quick Start with Docker Compose For easier management with environment files: diff --git a/surfsense_web/content/docs/manual-installation.mdx b/surfsense_web/content/docs/manual-installation.mdx index 3a0ee11e1..0dd703758 100644 --- a/surfsense_web/content/docs/manual-installation.mdx +++ 
b/surfsense_web/content/docs/manual-installation.mdx @@ -233,7 +233,7 @@ redis-cli ping In a new terminal window, start the Celery worker to handle background tasks: -**Linux/macOS/Windows:** +**If using uv:** ```bash # Make sure you're in the surfsense_backend directory @@ -243,13 +243,31 @@ cd surfsense_backend uv run celery -A celery_worker.celery_app worker --loglevel=info --concurrency=1 --pool=solo ``` +**If using pip/venv:** + +```bash +# Make sure you're in the surfsense_backend directory +cd surfsense_backend + +# Activate virtual environment +source .venv/bin/activate # Linux/macOS +# OR +.venv\Scripts\activate # Windows + +# Start Celery worker +celery -A celery_worker.celery_app worker --loglevel=info --concurrency=1 --pool=solo +``` + **Optional: Start Flower for monitoring Celery tasks:** In another terminal window: ```bash -# Start Flower (Celery monitoring tool) +# If using uv uv run celery -A celery_worker.celery_app flower --port=5555 + +# If using pip/venv (activate venv first) +celery -A celery_worker.celery_app flower --port=5555 ``` Access Flower at [http://localhost:5555](http://localhost:5555) to monitor your Celery tasks. @@ -258,7 +276,7 @@ Access Flower at [http://localhost:5555](http://localhost:5555) to monitor your In another new terminal window, start Celery Beat to enable periodic tasks (like scheduled connector indexing): -**Linux/macOS/Windows:** +**If using uv:** ```bash # Make sure you're in the surfsense_backend directory @@ -268,13 +286,28 @@ cd surfsense_backend uv run celery -A celery_worker.celery_app beat --loglevel=info ``` +**If using pip/venv:** + +```bash +# Make sure you're in the surfsense_backend directory +cd surfsense_backend + +# Activate virtual environment +source .venv/bin/activate # Linux/macOS +# OR +.venv\Scripts\activate # Windows + +# Start Celery Beat +celery -A celery_worker.celery_app beat --loglevel=info +``` + **Important**: Celery Beat is required for the periodic indexing functionality to work. 
Without it, scheduled connector tasks won't run automatically. The schedule interval can be configured using the `SCHEDULE_CHECKER_INTERVAL` environment variable. ### 6. Run the Backend Start the backend server: -**Linux/macOS/Windows:** +**If using uv:** ```bash # Run without hot reloading @@ -284,6 +317,21 @@ uv run main.py uv run main.py --reload ``` +**If using pip/venv:** + +```bash +# Activate virtual environment if not already activated +source .venv/bin/activate # Linux/macOS +# OR +.venv\Scripts\activate # Windows + +# Run without hot reloading +python main.py + +# Or with hot reloading for development +python main.py --reload +``` + If everything is set up correctly, you should see output indicating the server is running on `http://localhost:8000`. ## Frontend Setup diff --git a/surfsense_web/contracts/enums/connector.ts b/surfsense_web/contracts/enums/connector.ts index ae80cf871..fc65585e2 100644 --- a/surfsense_web/contracts/enums/connector.ts +++ b/surfsense_web/contracts/enums/connector.ts @@ -4,6 +4,7 @@ export enum EnumConnectorName { LINKUP_API = "LINKUP_API", BAIDU_SEARCH_API = "BAIDU_SEARCH_API", SLACK_CONNECTOR = "SLACK_CONNECTOR", + TEAMS_CONNECTOR = "TEAMS_CONNECTOR", NOTION_CONNECTOR = "NOTION_CONNECTOR", GITHUB_CONNECTOR = "GITHUB_CONNECTOR", LINEAR_CONNECTOR = "LINEAR_CONNECTOR", diff --git a/surfsense_web/contracts/enums/connectorIcons.tsx b/surfsense_web/contracts/enums/connectorIcons.tsx index 22bc734aa..befe132f9 100644 --- a/surfsense_web/contracts/enums/connectorIcons.tsx +++ b/surfsense_web/contracts/enums/connectorIcons.tsx @@ -31,6 +31,8 @@ export const getConnectorIcon = (connectorType: EnumConnectorName | string, clas return Baidu; case EnumConnectorName.SLACK_CONNECTOR: return Slack; + case EnumConnectorName.TEAMS_CONNECTOR: + return Microsoft Teams; case EnumConnectorName.NOTION_CONNECTOR: return Notion; case EnumConnectorName.DISCORD_CONNECTOR: diff --git a/surfsense_web/contracts/types/connector.types.ts 
b/surfsense_web/contracts/types/connector.types.ts index 5b67297ae..f864ae16f 100644 --- a/surfsense_web/contracts/types/connector.types.ts +++ b/surfsense_web/contracts/types/connector.types.ts @@ -8,6 +8,7 @@ export const searchSourceConnectorTypeEnum = z.enum([ "LINKUP_API", "BAIDU_SEARCH_API", "SLACK_CONNECTOR", + "TEAMS_CONNECTOR", "NOTION_CONNECTOR", "GITHUB_CONNECTOR", "LINEAR_CONNECTOR", diff --git a/surfsense_web/hooks/use-chat.ts b/surfsense_web/hooks/use-chat.ts deleted file mode 100644 index c31097e11..000000000 --- a/surfsense_web/hooks/use-chat.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { useEffect, useState } from "react"; -import type { ResearchMode } from "@/components/chat"; -import type { Document } from "@/contracts/types/document.types"; -import { getBearerToken } from "@/lib/auth-utils"; - -interface UseChatStateProps { - search_space_id: string; - chat_id?: string; -} - -export function useChatState({ chat_id }: UseChatStateProps) { - const [token, setToken] = useState(null); - const [isLoading, setIsLoading] = useState(false); - const [currentChatId, setCurrentChatId] = useState(chat_id || null); - - // Chat configuration state - const [researchMode, setResearchMode] = useState("QNA"); - const [selectedConnectors, setSelectedConnectors] = useState([]); - const [selectedDocuments, setSelectedDocuments] = useState([]); - const [topK, setTopK] = useState(5); - - useEffect(() => { - const bearerToken = getBearerToken(); - setToken(bearerToken); - }, []); - - return { - token, - setToken, - isLoading, - setIsLoading, - currentChatId, - setCurrentChatId, - researchMode, - setResearchMode, - selectedConnectors, - setSelectedConnectors, - selectedDocuments, - setSelectedDocuments, - topK, - setTopK, - }; -} diff --git a/surfsense_web/hooks/use-connector-edit-page.ts b/surfsense_web/hooks/use-connector-edit-page.ts deleted file mode 100644 index a1a3c88f4..000000000 --- a/surfsense_web/hooks/use-connector-edit-page.ts +++ /dev/null @@ -1,680 +0,0 @@ 
-import { zodResolver } from "@hookform/resolvers/zod"; -import { useAtomValue } from "jotai"; -import { useRouter } from "next/navigation"; -import { useCallback, useEffect, useState } from "react"; -import { useForm } from "react-hook-form"; -import { toast } from "sonner"; -import { updateConnectorMutationAtom } from "@/atoms/connectors/connector-mutation.atoms"; -import { connectorsAtom } from "@/atoms/connectors/connector-query.atoms"; -import { - type EditConnectorFormValues, - type EditMode, - editConnectorSchema, - type GithubPatFormValues, - type GithubRepo, - githubPatSchema, -} from "@/components/editConnector/types"; -import type { EnumConnectorName } from "@/contracts/enums/connector"; -import type { UpdateConnectorResponse } from "@/contracts/types/connector.types"; -import type { SearchSourceConnector } from "@/hooks/use-search-source-connectors"; -import { authenticatedFetch } from "@/lib/auth-utils"; - -const normalizeListInput = (value: unknown): string[] => { - if (Array.isArray(value)) { - return value.map((item) => String(item).trim()).filter((item) => item.length > 0); - } - if (typeof value === "string") { - return value - .split(",") - .map((item) => item.trim()) - .filter((item) => item.length > 0); - } - return []; -}; - -const arraysEqual = (a: string[], b: string[]): boolean => { - if (a.length !== b.length) return false; - return a.every((value, index) => value === b[index]); -}; - -const normalizeBoolean = (value: unknown): boolean | null => { - if (typeof value === "boolean") return value; - if (typeof value === "string") { - const lowered = value.trim().toLowerCase(); - if (["true", "1", "yes", "on"].includes(lowered)) return true; - if (["false", "0", "no", "off"].includes(lowered)) return false; - } - if (typeof value === "number") { - if (value === 1) return true; - if (value === 0) return false; - } - return null; -}; - -export function useConnectorEditPage(connectorId: number, searchSpaceId: string) { - const router = 
useRouter(); - const { data: connectors = [], isLoading: connectorsLoading } = useAtomValue(connectorsAtom); - const { mutateAsync: updateConnector } = useAtomValue(updateConnectorMutationAtom); - - // State managed by the hook - const [connector, setConnector] = useState(null); - const [originalConfig, setOriginalConfig] = useState | null>(null); - const [isSaving, setIsSaving] = useState(false); - const [currentSelectedRepos, setCurrentSelectedRepos] = useState([]); - const [originalPat, setOriginalPat] = useState(""); - const [editMode, setEditMode] = useState("viewing"); - const [fetchedRepos, setFetchedRepos] = useState(null); - const [newSelectedRepos, setNewSelectedRepos] = useState([]); - const [isFetchingRepos, setIsFetchingRepos] = useState(false); - - // Forms managed by the hook - const patForm = useForm({ - resolver: zodResolver(githubPatSchema), - defaultValues: { github_pat: "" }, - }); - const editForm = useForm({ - resolver: zodResolver(editConnectorSchema), - defaultValues: { - name: "", - SLACK_BOT_TOKEN: "", - NOTION_INTEGRATION_TOKEN: "", - TAVILY_API_KEY: "", - SEARXNG_HOST: "", - SEARXNG_API_KEY: "", - SEARXNG_ENGINES: "", - SEARXNG_CATEGORIES: "", - SEARXNG_LANGUAGE: "", - SEARXNG_SAFESEARCH: "", - SEARXNG_VERIFY_SSL: "", - DISCORD_BOT_TOKEN: "", - CONFLUENCE_BASE_URL: "", - CONFLUENCE_EMAIL: "", - CONFLUENCE_API_TOKEN: "", - BOOKSTACK_BASE_URL: "", - BOOKSTACK_TOKEN_ID: "", - BOOKSTACK_TOKEN_SECRET: "", - JIRA_BASE_URL: "", - JIRA_EMAIL: "", - JIRA_API_TOKEN: "", - LUMA_API_KEY: "", - ELASTICSEARCH_API_KEY: "", - FIRECRAWL_API_KEY: "", - INITIAL_URLS: "", - }, - }); - - // Effect to load initial data - useEffect(() => { - if (!connectorsLoading && connectors.length > 0 && !connector) { - const currentConnector = connectors.find((c) => c.id === connectorId); - if (currentConnector) { - setConnector(currentConnector); - const config = currentConnector.config || {}; - setOriginalConfig(config); - editForm.reset({ - name: currentConnector.name, 
- SLACK_BOT_TOKEN: config.SLACK_BOT_TOKEN || "", - NOTION_INTEGRATION_TOKEN: config.NOTION_INTEGRATION_TOKEN || "", - TAVILY_API_KEY: config.TAVILY_API_KEY || "", - SEARXNG_HOST: config.SEARXNG_HOST || "", - SEARXNG_API_KEY: config.SEARXNG_API_KEY || "", - SEARXNG_ENGINES: Array.isArray(config.SEARXNG_ENGINES) - ? config.SEARXNG_ENGINES.join(", ") - : config.SEARXNG_ENGINES || "", - SEARXNG_CATEGORIES: Array.isArray(config.SEARXNG_CATEGORIES) - ? config.SEARXNG_CATEGORIES.join(", ") - : config.SEARXNG_CATEGORIES || "", - SEARXNG_LANGUAGE: config.SEARXNG_LANGUAGE || "", - SEARXNG_SAFESEARCH: - config.SEARXNG_SAFESEARCH !== undefined && config.SEARXNG_SAFESEARCH !== null - ? String(config.SEARXNG_SAFESEARCH) - : "", - SEARXNG_VERIFY_SSL: - config.SEARXNG_VERIFY_SSL !== undefined && config.SEARXNG_VERIFY_SSL !== null - ? String(config.SEARXNG_VERIFY_SSL) - : "", - LINKUP_API_KEY: config.LINKUP_API_KEY || "", - DISCORD_BOT_TOKEN: config.DISCORD_BOT_TOKEN || "", - CONFLUENCE_BASE_URL: config.CONFLUENCE_BASE_URL || "", - CONFLUENCE_EMAIL: config.CONFLUENCE_EMAIL || "", - CONFLUENCE_API_TOKEN: config.CONFLUENCE_API_TOKEN || "", - BOOKSTACK_BASE_URL: config.BOOKSTACK_BASE_URL || "", - BOOKSTACK_TOKEN_ID: config.BOOKSTACK_TOKEN_ID || "", - BOOKSTACK_TOKEN_SECRET: config.BOOKSTACK_TOKEN_SECRET || "", - JIRA_BASE_URL: config.JIRA_BASE_URL || "", - JIRA_EMAIL: config.JIRA_EMAIL || "", - JIRA_API_TOKEN: config.JIRA_API_TOKEN || "", - LUMA_API_KEY: config.LUMA_API_KEY || "", - ELASTICSEARCH_API_KEY: config.ELASTICSEARCH_API_KEY || "", - FIRECRAWL_API_KEY: config.FIRECRAWL_API_KEY || "", - INITIAL_URLS: config.INITIAL_URLS || "", - }); - if (currentConnector.connector_type === "GITHUB_CONNECTOR") { - const savedRepos = config.repo_full_names || []; - const savedPat = config.GITHUB_PAT || ""; - setCurrentSelectedRepos(savedRepos); - setNewSelectedRepos(savedRepos); - setOriginalPat(savedPat); - patForm.reset({ github_pat: savedPat }); - setEditMode("viewing"); - } - } else { - 
toast.error("Connector not found."); - router.push(`/dashboard/${searchSpaceId}`); - } - } - }, [ - connectorId, - connectors, - connectorsLoading, - router, - searchSpaceId, - connector, - editForm.reset, - patForm.reset, - // Note: editForm and patForm are intentionally excluded from dependencies - // to prevent infinite loops. They are stable form objects from react-hook-form. - ]); - - // Handlers managed by the hook - const handleFetchRepositories = useCallback( - async (values: GithubPatFormValues) => { - setIsFetchingRepos(true); - setFetchedRepos(null); - try { - const response = await authenticatedFetch( - `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/github/repositories`, - { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ github_pat: values.github_pat }), - } - ); - if (!response.ok) { - const err = await response.json(); - throw new Error(err.detail || "Fetch failed"); - } - const data: GithubRepo[] = await response.json(); - setFetchedRepos(data); - setNewSelectedRepos(currentSelectedRepos); - toast.success(`Found ${data.length} repos.`); - } catch (error) { - console.error("Error fetching GitHub repositories:", error); - toast.error(error instanceof Error ? error.message : "Failed to fetch repositories."); - } finally { - setIsFetchingRepos(false); - } - }, - [currentSelectedRepos] - ); // Added dependency - - const handleRepoSelectionChange = useCallback((repoFullName: string, checked: boolean) => { - setNewSelectedRepos((prev) => - checked ? 
[...prev, repoFullName] : prev.filter((name) => name !== repoFullName) - ); - }, []); - - const handleSaveChanges = useCallback( - async (formData: EditConnectorFormValues) => { - if (!connector || !originalConfig) return; - setIsSaving(true); - const updatePayload: Partial = {}; - let configChanged = false; - let newConfig: Record | null = null; - - if (formData.name !== connector.name) { - updatePayload.name = formData.name; - } - - switch (connector.connector_type) { - case "GITHUB_CONNECTOR": { - const currentPatInForm = patForm.getValues("github_pat"); - const patChanged = currentPatInForm !== originalPat; - const initialRepoSet = new Set(currentSelectedRepos); - const newRepoSet = new Set(newSelectedRepos); - const reposChanged = - initialRepoSet.size !== newRepoSet.size || - ![...initialRepoSet].every((repo) => newRepoSet.has(repo)); - if ( - patChanged || - (editMode === "editing_repos" && reposChanged && fetchedRepos !== null) - ) { - if ( - !currentPatInForm || - !(currentPatInForm.startsWith("ghp_") || currentPatInForm.startsWith("github_pat_")) - ) { - toast.error("Invalid GitHub PAT format. 
Cannot save."); - setIsSaving(false); - return; - } - newConfig = { - GITHUB_PAT: currentPatInForm, - repo_full_names: newSelectedRepos, - }; - if (reposChanged && newSelectedRepos.length === 0) { - toast.warning("Warning: No repositories selected."); - } - } - break; - } - case "SLACK_CONNECTOR": - if (formData.SLACK_BOT_TOKEN !== originalConfig.SLACK_BOT_TOKEN) { - if (!formData.SLACK_BOT_TOKEN) { - toast.error("Slack Token empty."); - setIsSaving(false); - return; - } - newConfig = { SLACK_BOT_TOKEN: formData.SLACK_BOT_TOKEN }; - } - break; - case "NOTION_CONNECTOR": - if (formData.NOTION_INTEGRATION_TOKEN !== originalConfig.NOTION_INTEGRATION_TOKEN) { - if (!formData.NOTION_INTEGRATION_TOKEN) { - toast.error("Notion Token empty."); - setIsSaving(false); - return; - } - newConfig = { - NOTION_INTEGRATION_TOKEN: formData.NOTION_INTEGRATION_TOKEN, - }; - } - break; - case "TAVILY_API": - if (formData.TAVILY_API_KEY !== originalConfig.TAVILY_API_KEY) { - if (!formData.TAVILY_API_KEY) { - toast.error("Tavily Key empty."); - setIsSaving(false); - return; - } - newConfig = { TAVILY_API_KEY: formData.TAVILY_API_KEY }; - } - break; - case "SEARXNG_API": { - const host = (formData.SEARXNG_HOST || "").trim(); - if (!host) { - toast.error("SearxNG host is required."); - setIsSaving(false); - return; - } - - const candidateConfig: Record = { SEARXNG_HOST: host }; - const originalHost = - typeof originalConfig.SEARXNG_HOST === "string" ? originalConfig.SEARXNG_HOST : ""; - let hasChanges = host !== originalHost.trim(); - - const apiKey = (formData.SEARXNG_API_KEY || "").trim(); - const originalApiKey = - typeof originalConfig.SEARXNG_API_KEY === "string" - ? 
originalConfig.SEARXNG_API_KEY - : ""; - const originalApiKeyTrimmed = originalApiKey.trim(); - if (apiKey !== originalApiKeyTrimmed) { - candidateConfig.SEARXNG_API_KEY = apiKey || null; - hasChanges = true; - } - - const newEngines = normalizeListInput(formData.SEARXNG_ENGINES || ""); - const originalEngines = normalizeListInput(originalConfig.SEARXNG_ENGINES); - if (!arraysEqual(newEngines, originalEngines)) { - candidateConfig.SEARXNG_ENGINES = newEngines; - hasChanges = true; - } - - const newCategories = normalizeListInput(formData.SEARXNG_CATEGORIES || ""); - const originalCategories = normalizeListInput(originalConfig.SEARXNG_CATEGORIES); - if (!arraysEqual(newCategories, originalCategories)) { - candidateConfig.SEARXNG_CATEGORIES = newCategories; - hasChanges = true; - } - - const language = (formData.SEARXNG_LANGUAGE || "").trim(); - const originalLanguage = - typeof originalConfig.SEARXNG_LANGUAGE === "string" - ? originalConfig.SEARXNG_LANGUAGE - : ""; - const originalLanguageTrimmed = originalLanguage.trim(); - if (language !== originalLanguageTrimmed) { - candidateConfig.SEARXNG_LANGUAGE = language || null; - hasChanges = true; - } - - const safesearchRaw = (formData.SEARXNG_SAFESEARCH || "").trim(); - const originalSafesearch = originalConfig.SEARXNG_SAFESEARCH; - if (safesearchRaw) { - const parsed = Number(safesearchRaw); - if (Number.isNaN(parsed) || !Number.isInteger(parsed) || parsed < 0 || parsed > 2) { - toast.error("SearxNG SafeSearch must be 0, 1, or 2."); - setIsSaving(false); - return; - } - if (parsed !== Number(originalSafesearch)) { - candidateConfig.SEARXNG_SAFESEARCH = parsed; - hasChanges = true; - } - } else if (originalSafesearch !== undefined && originalSafesearch !== null) { - candidateConfig.SEARXNG_SAFESEARCH = null; - hasChanges = true; - } - - const verifyRaw = (formData.SEARXNG_VERIFY_SSL || "").trim().toLowerCase(); - const originalVerifyBool = normalizeBoolean(originalConfig.SEARXNG_VERIFY_SSL); - if (verifyRaw) { - let 
parsedBool: boolean | null = null; - if (["true", "1", "yes", "on"].includes(verifyRaw)) parsedBool = true; - else if (["false", "0", "no", "off"].includes(verifyRaw)) parsedBool = false; - if (parsedBool === null) { - toast.error("SearxNG SSL verification must be true or false."); - setIsSaving(false); - return; - } - if (parsedBool !== originalVerifyBool) { - candidateConfig.SEARXNG_VERIFY_SSL = parsedBool; - hasChanges = true; - } - } else if (originalVerifyBool !== null) { - candidateConfig.SEARXNG_VERIFY_SSL = null; - hasChanges = true; - } - - if (hasChanges) { - newConfig = candidateConfig; - } - break; - } - - case "LINKUP_API": - if (formData.LINKUP_API_KEY !== originalConfig.LINKUP_API_KEY) { - if (!formData.LINKUP_API_KEY) { - toast.error("Linkup API Key cannot be empty."); - setIsSaving(false); - return; - } - newConfig = { LINKUP_API_KEY: formData.LINKUP_API_KEY }; - } - break; - case "DISCORD_CONNECTOR": - if (formData.DISCORD_BOT_TOKEN !== originalConfig.DISCORD_BOT_TOKEN) { - if (!formData.DISCORD_BOT_TOKEN) { - toast.error("Discord Bot Token cannot be empty."); - setIsSaving(false); - return; - } - newConfig = { DISCORD_BOT_TOKEN: formData.DISCORD_BOT_TOKEN }; - } - break; - case "CONFLUENCE_CONNECTOR": - if ( - formData.CONFLUENCE_BASE_URL !== originalConfig.CONFLUENCE_BASE_URL || - formData.CONFLUENCE_EMAIL !== originalConfig.CONFLUENCE_EMAIL || - formData.CONFLUENCE_API_TOKEN !== originalConfig.CONFLUENCE_API_TOKEN - ) { - if ( - !formData.CONFLUENCE_BASE_URL || - !formData.CONFLUENCE_EMAIL || - !formData.CONFLUENCE_API_TOKEN - ) { - toast.error("All Confluence fields are required."); - setIsSaving(false); - return; - } - newConfig = { - CONFLUENCE_BASE_URL: formData.CONFLUENCE_BASE_URL, - CONFLUENCE_EMAIL: formData.CONFLUENCE_EMAIL, - CONFLUENCE_API_TOKEN: formData.CONFLUENCE_API_TOKEN, - }; - } - break; - case "BOOKSTACK_CONNECTOR": - if ( - formData.BOOKSTACK_BASE_URL !== originalConfig.BOOKSTACK_BASE_URL || - formData.BOOKSTACK_TOKEN_ID !== 
originalConfig.BOOKSTACK_TOKEN_ID || - formData.BOOKSTACK_TOKEN_SECRET !== originalConfig.BOOKSTACK_TOKEN_SECRET - ) { - if ( - !formData.BOOKSTACK_BASE_URL || - !formData.BOOKSTACK_TOKEN_ID || - !formData.BOOKSTACK_TOKEN_SECRET - ) { - toast.error("All BookStack fields are required."); - setIsSaving(false); - return; - } - newConfig = { - BOOKSTACK_BASE_URL: formData.BOOKSTACK_BASE_URL, - BOOKSTACK_TOKEN_ID: formData.BOOKSTACK_TOKEN_ID, - BOOKSTACK_TOKEN_SECRET: formData.BOOKSTACK_TOKEN_SECRET, - }; - } - break; - case "JIRA_CONNECTOR": { - // Check if this is an OAuth connector (has access_token or _token_encrypted flag) - const isJiraOAuth = !!(originalConfig.access_token || originalConfig._token_encrypted); - - if (isJiraOAuth) { - // OAuth connectors don't allow editing credentials through the form - // Only allow name changes, which are handled separately - break; - } - - // Legacy API token connector - allow editing credentials - if ( - formData.JIRA_BASE_URL !== originalConfig.JIRA_BASE_URL || - formData.JIRA_EMAIL !== originalConfig.JIRA_EMAIL || - formData.JIRA_API_TOKEN !== originalConfig.JIRA_API_TOKEN - ) { - if (!formData.JIRA_BASE_URL || !formData.JIRA_EMAIL || !formData.JIRA_API_TOKEN) { - toast.error("All Jira fields are required."); - setIsSaving(false); - return; - } - newConfig = { - JIRA_BASE_URL: formData.JIRA_BASE_URL, - JIRA_EMAIL: formData.JIRA_EMAIL, - JIRA_API_TOKEN: formData.JIRA_API_TOKEN, - }; - } - break; - } - case "LUMA_CONNECTOR": - if (formData.LUMA_API_KEY !== originalConfig.LUMA_API_KEY) { - if (!formData.LUMA_API_KEY) { - toast.error("Luma API Key cannot be empty."); - setIsSaving(false); - return; - } - newConfig = { LUMA_API_KEY: formData.LUMA_API_KEY }; - } - break; - case "ELASTICSEARCH_CONNECTOR": - if (formData.ELASTICSEARCH_API_KEY !== originalConfig.ELASTICSEARCH_API_KEY) { - if (!formData.ELASTICSEARCH_API_KEY) { - toast.error("Elasticsearch API Key cannot be empty."); - setIsSaving(false); - return; - } - newConfig = 
{ ELASTICSEARCH_API_KEY: formData.ELASTICSEARCH_API_KEY }; - } - break; - case "WEBCRAWLER_CONNECTOR": - if ( - formData.FIRECRAWL_API_KEY !== originalConfig.FIRECRAWL_API_KEY || - formData.INITIAL_URLS !== originalConfig.INITIAL_URLS - ) { - newConfig = {}; - - if (formData.FIRECRAWL_API_KEY?.trim()) { - if (!formData.FIRECRAWL_API_KEY.startsWith("fc-")) { - toast.warning( - "Firecrawl API keys typically start with 'fc-'. Please verify your key." - ); - } - newConfig.FIRECRAWL_API_KEY = formData.FIRECRAWL_API_KEY.trim(); - } else if (originalConfig.FIRECRAWL_API_KEY) { - toast.info( - "Firecrawl API key removed. Web crawler will use AsyncChromiumLoader as fallback." - ); - } - - if (formData.INITIAL_URLS !== undefined) { - if (formData.INITIAL_URLS?.trim()) { - newConfig.INITIAL_URLS = formData.INITIAL_URLS.trim(); - } else if (originalConfig.INITIAL_URLS) { - toast.info("URLs removed from crawler configuration."); - } - } - } - break; - } - - if (newConfig !== null) { - updatePayload.config = newConfig; - configChanged = true; - } - - if (Object.keys(updatePayload).length === 0) { - toast.info("No changes detected."); - setIsSaving(false); - if (connector.connector_type === "GITHUB_CONNECTOR") { - setEditMode("viewing"); - patForm.reset({ github_pat: originalPat }); - } - return; - } - - try { - const updatedConnector = (await updateConnector({ - id: connectorId, - data: { - ...updatePayload, - connector_type: connector.connector_type as EnumConnectorName, - }, - })) as UpdateConnectorResponse; - toast.success("Connector updated!"); - // Use the response from the API which has the full merged config - const newlySavedConfig = updatedConnector.config || originalConfig; - setOriginalConfig(newlySavedConfig); - // Update connector state with the full updated connector from the API - setConnector(updatedConnector); - if (configChanged) { - if (connector.connector_type === "GITHUB_CONNECTOR") { - const savedGitHubConfig = newlySavedConfig as { - GITHUB_PAT?: string; - 
repo_full_names?: string[]; - }; - setCurrentSelectedRepos(savedGitHubConfig.repo_full_names || []); - setOriginalPat(savedGitHubConfig.GITHUB_PAT || ""); - setNewSelectedRepos(savedGitHubConfig.repo_full_names || []); - patForm.reset({ github_pat: savedGitHubConfig.GITHUB_PAT || "" }); - } else if (connector.connector_type === "SLACK_CONNECTOR") { - editForm.setValue("SLACK_BOT_TOKEN", newlySavedConfig.SLACK_BOT_TOKEN || ""); - } else if (connector.connector_type === "NOTION_CONNECTOR") { - editForm.setValue( - "NOTION_INTEGRATION_TOKEN", - newlySavedConfig.NOTION_INTEGRATION_TOKEN || "" - ); - } else if (connector.connector_type === "TAVILY_API") { - editForm.setValue("TAVILY_API_KEY", newlySavedConfig.TAVILY_API_KEY || ""); - } else if (connector.connector_type === "SEARXNG_API") { - editForm.setValue("SEARXNG_HOST", newlySavedConfig.SEARXNG_HOST || ""); - editForm.setValue("SEARXNG_API_KEY", newlySavedConfig.SEARXNG_API_KEY || ""); - editForm.setValue( - "SEARXNG_ENGINES", - normalizeListInput(newlySavedConfig.SEARXNG_ENGINES).join(", ") - ); - editForm.setValue( - "SEARXNG_CATEGORIES", - normalizeListInput(newlySavedConfig.SEARXNG_CATEGORIES).join(", ") - ); - editForm.setValue("SEARXNG_LANGUAGE", newlySavedConfig.SEARXNG_LANGUAGE || ""); - editForm.setValue( - "SEARXNG_SAFESEARCH", - newlySavedConfig.SEARXNG_SAFESEARCH === null || - newlySavedConfig.SEARXNG_SAFESEARCH === undefined - ? "" - : String(newlySavedConfig.SEARXNG_SAFESEARCH) - ); - const verifyValue = normalizeBoolean(newlySavedConfig.SEARXNG_VERIFY_SSL); - editForm.setValue( - "SEARXNG_VERIFY_SSL", - verifyValue === null ? 
"" : String(verifyValue) - ); - } else if (connector.connector_type === "LINKUP_API") { - editForm.setValue("LINKUP_API_KEY", newlySavedConfig.LINKUP_API_KEY || ""); - } else if (connector.connector_type === "DISCORD_CONNECTOR") { - editForm.setValue("DISCORD_BOT_TOKEN", newlySavedConfig.DISCORD_BOT_TOKEN || ""); - } else if (connector.connector_type === "CONFLUENCE_CONNECTOR") { - editForm.setValue("CONFLUENCE_BASE_URL", newlySavedConfig.CONFLUENCE_BASE_URL || ""); - editForm.setValue("CONFLUENCE_EMAIL", newlySavedConfig.CONFLUENCE_EMAIL || ""); - editForm.setValue("CONFLUENCE_API_TOKEN", newlySavedConfig.CONFLUENCE_API_TOKEN || ""); - } else if (connector.connector_type === "BOOKSTACK_CONNECTOR") { - editForm.setValue("BOOKSTACK_BASE_URL", newlySavedConfig.BOOKSTACK_BASE_URL || ""); - editForm.setValue("BOOKSTACK_TOKEN_ID", newlySavedConfig.BOOKSTACK_TOKEN_ID || ""); - editForm.setValue( - "BOOKSTACK_TOKEN_SECRET", - newlySavedConfig.BOOKSTACK_TOKEN_SECRET || "" - ); - } else if (connector.connector_type === "JIRA_CONNECTOR") { - editForm.setValue("JIRA_BASE_URL", newlySavedConfig.JIRA_BASE_URL || ""); - editForm.setValue("JIRA_EMAIL", newlySavedConfig.JIRA_EMAIL || ""); - editForm.setValue("JIRA_API_TOKEN", newlySavedConfig.JIRA_API_TOKEN || ""); - } else if (connector.connector_type === "LUMA_CONNECTOR") { - editForm.setValue("LUMA_API_KEY", newlySavedConfig.LUMA_API_KEY || ""); - } else if (connector.connector_type === "ELASTICSEARCH_CONNECTOR") { - editForm.setValue( - "ELASTICSEARCH_API_KEY", - newlySavedConfig.ELASTICSEARCH_API_KEY || "" - ); - } else if (connector.connector_type === "WEBCRAWLER_CONNECTOR") { - editForm.setValue("FIRECRAWL_API_KEY", newlySavedConfig.FIRECRAWL_API_KEY || ""); - editForm.setValue("INITIAL_URLS", newlySavedConfig.INITIAL_URLS || ""); - } - } - if (connector.connector_type === "GITHUB_CONNECTOR") { - setEditMode("viewing"); - setFetchedRepos(null); - } - // Resetting simple form values is handled by useEffect if connector state 
updates - } catch (error) { - console.error("Error updating connector:", error); - toast.error(error instanceof Error ? error.message : "Failed to update connector."); - } finally { - setIsSaving(false); - } - }, - [ - connector, - originalConfig, - updateConnector, - connectorId, - patForm, - originalPat, - currentSelectedRepos, - newSelectedRepos, - editMode, - fetchedRepos, - editForm, - ] - ); // Added editForm to dependencies - - // Return values needed by the component - return { - connectorsLoading, - connector, - isSaving, - editForm, - patForm, - handleSaveChanges, - // GitHub specific props - editMode, - setEditMode, - originalPat, - currentSelectedRepos, - fetchedRepos, - setFetchedRepos, - newSelectedRepos, - setNewSelectedRepos, - isFetchingRepos, - handleFetchRepositories, - handleRepoSelectionChange, - }; -} diff --git a/surfsense_web/lib/auth-utils.ts b/surfsense_web/lib/auth-utils.ts index c1dc7194b..604843292 100644 --- a/surfsense_web/lib/auth-utils.ts +++ b/surfsense_web/lib/auth-utils.ts @@ -130,44 +130,3 @@ export async function authenticatedFetch( return response; } - -/** - * Type for the result of a fetch operation with built-in error handling - */ -export type FetchResult = - | { success: true; data: T; response: Response } - | { success: false; error: string; status?: number }; - -/** - * Authenticated fetch with JSON response handling - * Returns a result object instead of throwing on non-401 errors - */ -export async function authenticatedFetchJson( - url: string, - options?: RequestInit & { skipAuthRedirect?: boolean } -): Promise> { - try { - const response = await authenticatedFetch(url, options); - - if (!response.ok) { - const errorData = await response.json().catch(() => ({})); - return { - success: false, - error: errorData.detail || `Request failed: ${response.status}`, - status: response.status, - }; - } - - const data = await response.json(); - return { success: true, data, response }; - } catch (err: any) { - // Re-throw if 
it's the unauthorized redirect - if (err.message?.includes("Unauthorized")) { - throw err; - } - return { - success: false, - error: err.message || "Request failed", - }; - } -} diff --git a/surfsense_web/lib/connectors/utils.ts b/surfsense_web/lib/connectors/utils.ts index 75e81e2cc..a85b912ed 100644 --- a/surfsense_web/lib/connectors/utils.ts +++ b/surfsense_web/lib/connectors/utils.ts @@ -15,6 +15,7 @@ export const getConnectorTypeDisplay = (type: string): string => { CLICKUP_CONNECTOR: "ClickUp", GOOGLE_CALENDAR_CONNECTOR: "Google Calendar", GOOGLE_GMAIL_CONNECTOR: "Google Gmail", + GOOGLE_DRIVE_CONNECTOR: "Google Drive", AIRTABLE_CONNECTOR: "Airtable", LUMA_CONNECTOR: "Luma", ELASTICSEARCH_CONNECTOR: "Elasticsearch", diff --git a/surfsense_web/lib/posthog/events.ts b/surfsense_web/lib/posthog/events.ts index fae713f80..36a2b065a 100644 --- a/surfsense_web/lib/posthog/events.ts +++ b/surfsense_web/lib/posthog/events.ts @@ -271,6 +271,156 @@ export function trackSourcesTabViewed(searchSpaceId: number, tab: string) { }); } +// ============================================ +// SEARCH SPACE INVITE EVENTS +// ============================================ + +export function trackSearchSpaceInviteSent( + searchSpaceId: number, + options?: { + roleName?: string; + hasExpiry?: boolean; + hasMaxUses?: boolean; + } +) { + posthog.capture("search_space_invite_sent", { + search_space_id: searchSpaceId, + role_name: options?.roleName, + has_expiry: options?.hasExpiry ?? false, + has_max_uses: options?.hasMaxUses ?? 
false, + }); +} + +export function trackSearchSpaceInviteAccepted( + searchSpaceId: number, + searchSpaceName: string, + roleName?: string | null +) { + posthog.capture("search_space_invite_accepted", { + search_space_id: searchSpaceId, + search_space_name: searchSpaceName, + role_name: roleName, + }); +} + +export function trackSearchSpaceInviteDeclined(searchSpaceName?: string) { + posthog.capture("search_space_invite_declined", { + search_space_name: searchSpaceName, + }); +} + +export function trackSearchSpaceUserAdded( + searchSpaceId: number, + searchSpaceName: string, + roleName?: string | null +) { + posthog.capture("search_space_user_added", { + search_space_id: searchSpaceId, + search_space_name: searchSpaceName, + role_name: roleName, + }); +} + +export function trackSearchSpaceUsersViewed( + searchSpaceId: number, + userCount: number, + ownerCount: number +) { + posthog.capture("search_space_users_viewed", { + search_space_id: searchSpaceId, + user_count: userCount, + owner_count: ownerCount, + }); +} + +// ============================================ +// CONNECTOR CONNECTION EVENTS +// ============================================ + +export function trackConnectorConnected( + searchSpaceId: number, + connectorType: string, + connectorId?: number +) { + posthog.capture("connector_connected", { + search_space_id: searchSpaceId, + connector_type: connectorType, + connector_id: connectorId, + }); +} + +// ============================================ +// INDEXING EVENTS +// ============================================ + +export function trackIndexWithDateRangeOpened( + searchSpaceId: number, + connectorType: string, + connectorId: number +) { + posthog.capture("index_with_date_range_opened", { + search_space_id: searchSpaceId, + connector_type: connectorType, + connector_id: connectorId, + }); +} + +export function trackIndexWithDateRangeStarted( + searchSpaceId: number, + connectorType: string, + connectorId: number, + options?: { + hasStartDate?: boolean; 
+ hasEndDate?: boolean; + } +) { + posthog.capture("index_with_date_range_started", { + search_space_id: searchSpaceId, + connector_type: connectorType, + connector_id: connectorId, + has_start_date: options?.hasStartDate ?? false, + has_end_date: options?.hasEndDate ?? false, + }); +} + +export function trackQuickIndexClicked( + searchSpaceId: number, + connectorType: string, + connectorId: number +) { + posthog.capture("quick_index_clicked", { + search_space_id: searchSpaceId, + connector_type: connectorType, + connector_id: connectorId, + }); +} + +export function trackConfigurePeriodicIndexingOpened( + searchSpaceId: number, + connectorType: string, + connectorId: number +) { + posthog.capture("configure_periodic_indexing_opened", { + search_space_id: searchSpaceId, + connector_type: connectorType, + connector_id: connectorId, + }); +} + +export function trackPeriodicIndexingStarted( + searchSpaceId: number, + connectorType: string, + connectorId: number, + frequencyMinutes: number +) { + posthog.capture("periodic_indexing_started", { + search_space_id: searchSpaceId, + connector_type: connectorType, + connector_id: connectorId, + frequency_minutes: frequencyMinutes, + }); +} + // ============================================ // USER IDENTIFICATION // ============================================ diff --git a/surfsense_web/lib/utils.ts b/surfsense_web/lib/utils.ts index 1e29bb9a4..212ff1259 100644 --- a/surfsense_web/lib/utils.ts +++ b/surfsense_web/lib/utils.ts @@ -1,4 +1,3 @@ -import type { Message } from "@ai-sdk/react"; import { type ClassValue, clsx } from "clsx"; import { twMerge } from "tailwind-merge"; @@ -6,12 +5,6 @@ export function cn(...inputs: ClassValue[]) { return twMerge(clsx(inputs)); } -export function getChatTitleFromMessages(messages: Message[]) { - const userMessages = messages.filter((msg) => msg.role === "user"); - if (userMessages.length === 0) return "Untitled Chat"; - return userMessages[0].content; -} - export const formatDate = (date: 
Date): string => { return date.toLocaleDateString("en-US", { year: "numeric", diff --git a/surfsense_web/mdx-components.tsx b/surfsense_web/mdx-components.tsx index f6d86e543..9dedbd20f 100644 --- a/surfsense_web/mdx-components.tsx +++ b/surfsense_web/mdx-components.tsx @@ -1,5 +1,6 @@ import defaultMdxComponents from "fumadocs-ui/mdx"; import type { MDXComponents } from "mdx/types"; +import Image, { type ImageProps } from "next/image"; import { Accordion, AccordionContent, @@ -7,16 +8,15 @@ import { AccordionTrigger, } from "@/components/ui/accordion"; import { cn } from "@/lib/utils"; -import Image, { type ImageProps } from "next/image"; export function getMDXComponents(components?: MDXComponents): MDXComponents { return { ...defaultMdxComponents, img: ({ className, alt, ...props }: React.ComponentProps<"img">) => ( {alt ), Video: ({ className, ...props }: React.ComponentProps<"video">) => ( diff --git a/surfsense_web/messages/en.json b/surfsense_web/messages/en.json index fd655be6c..6c64e62ba 100644 --- a/surfsense_web/messages/en.json +++ b/surfsense_web/messages/en.json @@ -265,7 +265,7 @@ "no_documents": "No documents found", "type": "Type", "content_summary": "Content Summary", - "view_full": "View Full Content", + "view_full": "View Summary", "filter_placeholder": "Filter by title...", "rows_per_page": "Rows per page", "refresh": "Refresh", diff --git a/surfsense_web/package.json b/surfsense_web/package.json index ccb34b973..3c98c47e0 100644 --- a/surfsense_web/package.json +++ b/surfsense_web/package.json @@ -1,6 +1,6 @@ { "name": "surfsense_web", - "version": "0.0.10", + "version": "0.0.11", "private": true, "description": "SurfSense Frontend", "scripts": { diff --git a/surfsense_web/public/changelog/0.0.11/header.gif b/surfsense_web/public/changelog/0.0.11/header.gif new file mode 100644 index 000000000..1c22a9242 Binary files /dev/null and b/surfsense_web/public/changelog/0.0.11/header.gif differ diff --git 
a/surfsense_web/public/docs/connectors/airtable/airtable-oauth-integrations.png b/surfsense_web/public/docs/connectors/airtable/airtable-oauth-integrations.png new file mode 100644 index 000000000..bfe301d78 Binary files /dev/null and b/surfsense_web/public/docs/connectors/airtable/airtable-oauth-integrations.png differ diff --git a/surfsense_web/public/docs/connectors/airtable/airtable-register-integration.png b/surfsense_web/public/docs/connectors/airtable/airtable-register-integration.png new file mode 100644 index 000000000..85062341b Binary files /dev/null and b/surfsense_web/public/docs/connectors/airtable/airtable-register-integration.png differ diff --git a/surfsense_web/public/docs/connectors/airtable/airtable-scopes.png b/surfsense_web/public/docs/connectors/airtable/airtable-scopes.png new file mode 100644 index 000000000..f5c41dd24 Binary files /dev/null and b/surfsense_web/public/docs/connectors/airtable/airtable-scopes.png differ diff --git a/surfsense_web/public/docs/connectors/airtable/airtable-support-info.png b/surfsense_web/public/docs/connectors/airtable/airtable-support-info.png new file mode 100644 index 000000000..d556a6109 Binary files /dev/null and b/surfsense_web/public/docs/connectors/airtable/airtable-support-info.png differ diff --git a/surfsense_web/public/docs/connectors/clickup/clickup-api-settings.png b/surfsense_web/public/docs/connectors/clickup/clickup-api-settings.png new file mode 100644 index 000000000..893458c61 Binary files /dev/null and b/surfsense_web/public/docs/connectors/clickup/clickup-api-settings.png differ diff --git a/surfsense_web/public/docs/connectors/clickup/clickup-app-credentials.png b/surfsense_web/public/docs/connectors/clickup/clickup-app-credentials.png new file mode 100644 index 000000000..9735c36b2 Binary files /dev/null and b/surfsense_web/public/docs/connectors/clickup/clickup-app-credentials.png differ diff --git a/surfsense_web/public/docs/connectors/microsoft-teams/azure-api-permissions.png 
b/surfsense_web/public/docs/connectors/microsoft-teams/azure-api-permissions.png new file mode 100644 index 000000000..f362a3344 Binary files /dev/null and b/surfsense_web/public/docs/connectors/microsoft-teams/azure-api-permissions.png differ diff --git a/surfsense_web/public/docs/connectors/microsoft-teams/azure-app-overview.png b/surfsense_web/public/docs/connectors/microsoft-teams/azure-app-overview.png new file mode 100644 index 000000000..27a4290e7 Binary files /dev/null and b/surfsense_web/public/docs/connectors/microsoft-teams/azure-app-overview.png differ diff --git a/surfsense_web/public/docs/connectors/microsoft-teams/azure-app-registrations.png b/surfsense_web/public/docs/connectors/microsoft-teams/azure-app-registrations.png new file mode 100644 index 000000000..f7865fe5e Binary files /dev/null and b/surfsense_web/public/docs/connectors/microsoft-teams/azure-app-registrations.png differ diff --git a/surfsense_web/public/docs/connectors/microsoft-teams/azure-certificates-created.png b/surfsense_web/public/docs/connectors/microsoft-teams/azure-certificates-created.png new file mode 100644 index 000000000..abfc90dde Binary files /dev/null and b/surfsense_web/public/docs/connectors/microsoft-teams/azure-certificates-created.png differ diff --git a/surfsense_web/public/docs/connectors/microsoft-teams/azure-certificates-empty.png b/surfsense_web/public/docs/connectors/microsoft-teams/azure-certificates-empty.png new file mode 100644 index 000000000..603d79155 Binary files /dev/null and b/surfsense_web/public/docs/connectors/microsoft-teams/azure-certificates-empty.png differ diff --git a/surfsense_web/public/docs/connectors/microsoft-teams/azure-register-app.png b/surfsense_web/public/docs/connectors/microsoft-teams/azure-register-app.png new file mode 100644 index 000000000..d1a5d1b6e Binary files /dev/null and b/surfsense_web/public/docs/connectors/microsoft-teams/azure-register-app.png differ diff --git 
a/surfsense_web/public/docs/connectors/microsoft-teams/azure-search-app-reg.png b/surfsense_web/public/docs/connectors/microsoft-teams/azure-search-app-reg.png new file mode 100644 index 000000000..974b9d013 Binary files /dev/null and b/surfsense_web/public/docs/connectors/microsoft-teams/azure-search-app-reg.png differ diff --git a/surfsense_web/public/icon-128.png b/surfsense_web/public/icon-128.png deleted file mode 100644 index 5d1464a7a..000000000 Binary files a/surfsense_web/public/icon-128.png and /dev/null differ diff --git a/surfsense_web/public/icon-128.svg b/surfsense_web/public/icon-128.svg new file mode 100644 index 000000000..1d73fc752 --- /dev/null +++ b/surfsense_web/public/icon-128.svg @@ -0,0 +1,9 @@ + + + + + + + + +