Mirror of https://github.com/MODSetter/SurfSense.git
Synced 2026-05-01 11:56:25 +02:00

Commit a5d47cae31: Merge remote-tracking branch 'upstream/dev' into sur-70-feature-streamline-onboarding-auto-create-default-workspace

133 changed files with 5763 additions and 2603 deletions
@@ -26,6 +26,7 @@ _ALL_CONNECTORS: list[str] = [
     "EXTENSION",
     "FILE",
     "SLACK_CONNECTOR",
+    "TEAMS_CONNECTOR",
     "NOTION_CONNECTOR",
     "YOUTUBE_VIDEO",
     "GITHUB_CONNECTOR",
@@ -573,6 +574,7 @@ def create_search_knowledge_base_tool(
     - FILE: "User-uploaded documents (PDFs, Word, etc.)" (personal files)
     - NOTE: "SurfSense Notes" (notes created inside SurfSense)
     - SLACK_CONNECTOR: "Slack conversations and shared content" (personal workspace communications)
+    - TEAMS_CONNECTOR: "Microsoft Teams messages and conversations" (personal Teams communications)
     - NOTION_CONNECTOR: "Notion workspace pages and databases" (personal knowledge management)
     - YOUTUBE_VIDEO: "YouTube video transcripts and metadata" (personally saved videos)
     - GITHUB_CONNECTOR: "GitHub repository content and issues" (personal repositories and interactions)
@@ -117,6 +117,16 @@ class Config:
     DISCORD_REDIRECT_URI = os.getenv("DISCORD_REDIRECT_URI")
     DISCORD_BOT_TOKEN = os.getenv("DISCORD_BOT_TOKEN")
 
+    # Microsoft Teams OAuth
+    TEAMS_CLIENT_ID = os.getenv("TEAMS_CLIENT_ID")
+    TEAMS_CLIENT_SECRET = os.getenv("TEAMS_CLIENT_SECRET")
+    TEAMS_REDIRECT_URI = os.getenv("TEAMS_REDIRECT_URI")
+
+    # ClickUp OAuth
+    CLICKUP_CLIENT_ID = os.getenv("CLICKUP_CLIENT_ID")
+    CLICKUP_CLIENT_SECRET = os.getenv("CLICKUP_CLIENT_SECRET")
+    CLICKUP_REDIRECT_URI = os.getenv("CLICKUP_REDIRECT_URI")
+
     # LLM instances are now managed per-user through the LLMConfig system
     # Legacy environment variables removed in favor of user-specific configurations
@@ -294,6 +294,12 @@ class AirtableConnector:
             Tuple of (records, error_message)
         """
         try:
+            # Validate date strings before parsing
+            if not start_date or start_date.lower() in ("undefined", "null", "none"):
+                return [], "Invalid start_date: date string is required"
+            if not end_date or end_date.lower() in ("undefined", "null", "none"):
+                return [], "Invalid end_date: date string is required"
+
             # Parse and validate dates
             start_dt = isoparse(start_date)
             end_dt = isoparse(end_date)
@@ -382,3 +388,43 @@ class AirtableConnector:
             markdown_parts.append("")
 
         return "\n".join(markdown_parts)
+
+
+# --- OAuth User Info ---
+
+AIRTABLE_WHOAMI_URL = "https://api.airtable.com/v0/meta/whoami"
+
+
+async def fetch_airtable_user_email(access_token: str) -> str | None:
+    """
+    Fetch user email from Airtable whoami API.
+
+    Args:
+        access_token: The Airtable OAuth access token
+
+    Returns:
+        User's email address or None if fetch fails
+    """
+    try:
+        async with httpx.AsyncClient() as client:
+            response = await client.get(
+                AIRTABLE_WHOAMI_URL,
+                headers={"Authorization": f"Bearer {access_token}"},
+                timeout=10.0,
+            )
+
+            if response.status_code == 200:
+                data = response.json()
+                email = data.get("email")
+                if email:
+                    logger.debug(f"Fetched Airtable user email: {email}")
+                    return email
+
+            logger.warning(
+                f"Failed to fetch Airtable user info: {response.status_code}"
+            )
+            return None
+
+    except Exception as e:
+        logger.warning(f"Error fetching Airtable user email: {e!s}")
+        return None
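Reviewer note: a minimal sketch of exercising the new helper in isolation. The token value below is a placeholder, not a real credential, and the import path assumes this module's location in the diff.

import asyncio

from app.connectors.airtable_connector import fetch_airtable_user_email

async def main() -> None:
    # Placeholder token; the helper returns None on any failure.
    email = await fetch_airtable_user_email("pat_example_token")
    print(email or "lookup failed; see warning logs")

asyncio.run(main())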
surfsense_backend/app/connectors/airtable_history.py (new file, 175 lines)
@@ -0,0 +1,175 @@
"""
Airtable OAuth Connector.

Handles OAuth-based authentication and token refresh for Airtable API access.
"""

import logging

from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select

from app.config import config
from app.connectors.airtable_connector import AirtableConnector
from app.db import SearchSourceConnector
from app.routes.airtable_add_connector_route import refresh_airtable_token
from app.schemas.airtable_auth_credentials import AirtableAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption

logger = logging.getLogger(__name__)


class AirtableHistoryConnector:
    """
    Airtable connector with OAuth support and automatic token refresh.

    This connector uses OAuth 2.0 access tokens to authenticate with the
    Airtable API. It automatically refreshes expired tokens when needed.
    """

    def __init__(
        self,
        session: AsyncSession,
        connector_id: int,
        credentials: AirtableAuthCredentialsBase | None = None,
    ):
        """
        Initialize the AirtableHistoryConnector with auto-refresh capability.

        Args:
            session: Database session for updating connector
            connector_id: Connector ID for direct updates
            credentials: Airtable OAuth credentials (optional, will be loaded from DB if not provided)
        """
        self._session = session
        self._connector_id = connector_id
        self._credentials = credentials
        self._airtable_connector: AirtableConnector | None = None

    async def _get_valid_token(self) -> str:
        """
        Get valid Airtable access token, refreshing if needed.

        Returns:
            Valid access token

        Raises:
            ValueError: If credentials are missing or invalid
            Exception: If token refresh fails
        """
        # Load credentials from DB if not provided
        if self._credentials is None:
            result = await self._session.execute(
                select(SearchSourceConnector).filter(
                    SearchSourceConnector.id == self._connector_id
                )
            )
            connector = result.scalars().first()

            if not connector:
                raise ValueError(f"Connector {self._connector_id} not found")

            config_data = connector.config.copy()

            # Decrypt credentials if they are encrypted
            token_encrypted = config_data.get("_token_encrypted", False)
            if token_encrypted and config.SECRET_KEY:
                try:
                    token_encryption = TokenEncryption(config.SECRET_KEY)

                    # Decrypt sensitive fields
                    if config_data.get("access_token"):
                        config_data["access_token"] = token_encryption.decrypt_token(
                            config_data["access_token"]
                        )
                    if config_data.get("refresh_token"):
                        config_data["refresh_token"] = token_encryption.decrypt_token(
                            config_data["refresh_token"]
                        )

                    logger.info(
                        f"Decrypted Airtable credentials for connector {self._connector_id}"
                    )
                except Exception as e:
                    logger.error(
                        f"Failed to decrypt Airtable credentials for connector {self._connector_id}: {e!s}"
                    )
                    raise ValueError(
                        f"Failed to decrypt Airtable credentials: {e!s}"
                    ) from e

            try:
                self._credentials = AirtableAuthCredentialsBase.from_dict(config_data)
            except Exception as e:
                raise ValueError(f"Invalid Airtable credentials: {e!s}") from e

        # Check if token is expired and refreshable
        if self._credentials.is_expired and self._credentials.is_refreshable:
            try:
                logger.info(
                    f"Airtable token expired for connector {self._connector_id}, refreshing..."
                )

                # Get connector for refresh
                result = await self._session.execute(
                    select(SearchSourceConnector).filter(
                        SearchSourceConnector.id == self._connector_id
                    )
                )
                connector = result.scalars().first()

                if not connector:
                    raise RuntimeError(
                        f"Connector {self._connector_id} not found; cannot refresh token."
                    )

                # Refresh token
                connector = await refresh_airtable_token(self._session, connector)

                # Reload credentials after refresh
                config_data = connector.config.copy()
                token_encrypted = config_data.get("_token_encrypted", False)
                if token_encrypted and config.SECRET_KEY:
                    token_encryption = TokenEncryption(config.SECRET_KEY)
                    if config_data.get("access_token"):
                        config_data["access_token"] = token_encryption.decrypt_token(
                            config_data["access_token"]
                        )
                    if config_data.get("refresh_token"):
                        config_data["refresh_token"] = token_encryption.decrypt_token(
                            config_data["refresh_token"]
                        )

                self._credentials = AirtableAuthCredentialsBase.from_dict(config_data)

                # Invalidate cached connector so it's recreated with new token
                self._airtable_connector = None

                logger.info(
                    f"Successfully refreshed Airtable token for connector {self._connector_id}"
                )
            except Exception as e:
                logger.error(
                    f"Failed to refresh Airtable token for connector {self._connector_id}: {e!s}"
                )
                raise Exception(
                    f"Failed to refresh Airtable OAuth credentials: {e!s}"
                ) from e

        return self._credentials.access_token

    async def _get_connector(self) -> AirtableConnector:
        """
        Get or create AirtableConnector with valid token.

        Returns:
            AirtableConnector instance
        """
        if self._airtable_connector is None:
            # Ensure we have valid credentials (this will refresh if needed)
            await self._get_valid_token()
            # Use the credentials object which is now guaranteed to be valid
            if not self._credentials:
                raise ValueError("Credentials not loaded")
            self._airtable_connector = AirtableConnector(self._credentials)
        return self._airtable_connector
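Reviewer note: a minimal sketch of how this class is meant to be driven, assuming an AsyncSession and an existing connector row (connector_id 42 is illustrative). The entry points shown are the class's own private methods, so callers elsewhere in the codebase presumably wrap them.

# Sketch only: session and connector_id are placeholders, not values from this diff.
async def index_airtable(session, connector_id: int = 42) -> None:
    history = AirtableHistoryConnector(session=session, connector_id=connector_id)
    # _get_connector() loads (and, if needed, refreshes) credentials before
    # constructing the underlying AirtableConnector.
    airtable = await history._get_connector()
    # ... call AirtableConnector methods on `airtable` as usual ...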
surfsense_backend/app/connectors/clickup_history.py (new file, 349 lines)
@@ -0,0 +1,349 @@
"""
ClickUp History Module

A module for retrieving data from ClickUp with OAuth support and backward compatibility.
Allows fetching tasks from workspaces and lists with automatic token refresh.
"""

import logging
from typing import Any

from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select

from app.config import config
from app.connectors.clickup_connector import ClickUpConnector
from app.db import SearchSourceConnector
from app.routes.clickup_add_connector_route import refresh_clickup_token
from app.schemas.clickup_auth_credentials import ClickUpAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption

logger = logging.getLogger(__name__)


class ClickUpHistoryConnector:
    """
    Class for retrieving data from ClickUp with OAuth support and backward compatibility.
    """

    def __init__(
        self,
        session: AsyncSession,
        connector_id: int,
        credentials: ClickUpAuthCredentialsBase | None = None,
        api_token: str | None = None,  # For backward compatibility
    ):
        """
        Initialize the ClickUpHistoryConnector.

        Args:
            session: Database session for token refresh
            connector_id: Connector ID for direct updates
            credentials: ClickUp OAuth credentials (optional, will be loaded from DB if not provided)
            api_token: Legacy API token for backward compatibility (optional)
        """
        self._session = session
        self._connector_id = connector_id
        self._credentials = credentials
        self._api_token = api_token  # Legacy API token
        self._use_oauth = False
        self._use_legacy = api_token is not None
        self._clickup_client: ClickUpConnector | None = None

    async def _get_valid_token(self) -> str:
        """
        Get valid ClickUp access token, refreshing if needed.
        For legacy API tokens, returns the token directly.

        Returns:
            Valid access token or API token

        Raises:
            ValueError: If credentials are missing or invalid
            Exception: If token refresh fails
        """
        # If using legacy API token, return it directly
        if self._use_legacy and self._api_token:
            return self._api_token

        # Load credentials from DB if not provided
        if self._credentials is None:
            result = await self._session.execute(
                select(SearchSourceConnector).filter(
                    SearchSourceConnector.id == self._connector_id
                )
            )
            connector = result.scalars().first()

            if not connector:
                raise ValueError(f"Connector {self._connector_id} not found")

            config_data = connector.config.copy()

            # Check if using OAuth or legacy API token
            is_oauth = config_data.get("_token_encrypted", False) or config_data.get(
                "access_token"
            )
            has_legacy_token = config_data.get("CLICKUP_API_TOKEN") is not None

            if is_oauth:
                # OAuth 2.0 authentication
                self._use_oauth = True
                # Decrypt credentials if they are encrypted
                token_encrypted = config_data.get("_token_encrypted", False)
                if token_encrypted and config.SECRET_KEY:
                    try:
                        token_encryption = TokenEncryption(config.SECRET_KEY)

                        # Decrypt sensitive fields
                        if config_data.get("access_token"):
                            config_data["access_token"] = (
                                token_encryption.decrypt_token(
                                    config_data["access_token"]
                                )
                            )
                        if config_data.get("refresh_token"):
                            config_data["refresh_token"] = (
                                token_encryption.decrypt_token(
                                    config_data["refresh_token"]
                                )
                            )

                        logger.info(
                            f"Decrypted ClickUp OAuth credentials for connector {self._connector_id}"
                        )
                    except Exception as e:
                        logger.error(
                            f"Failed to decrypt ClickUp OAuth credentials for connector {self._connector_id}: {e!s}"
                        )
                        raise ValueError(
                            f"Failed to decrypt ClickUp OAuth credentials: {e!s}"
                        ) from e

                try:
                    self._credentials = ClickUpAuthCredentialsBase.from_dict(
                        config_data
                    )
                except Exception as e:
                    raise ValueError(f"Invalid ClickUp OAuth credentials: {e!s}") from e
            elif has_legacy_token:
                # Legacy API token authentication (backward compatibility)
                self._use_legacy = True
                self._api_token = config_data.get("CLICKUP_API_TOKEN")

                # Decrypt token if it's encrypted (legacy tokens might be encrypted)
                token_encrypted = config_data.get("_token_encrypted", False)
                if token_encrypted and config.SECRET_KEY and self._api_token:
                    try:
                        token_encryption = TokenEncryption(config.SECRET_KEY)
                        self._api_token = token_encryption.decrypt_token(
                            self._api_token
                        )
                        logger.info(
                            f"Decrypted legacy ClickUp API token for connector {self._connector_id}"
                        )
                    except Exception as e:
                        logger.warning(
                            f"Failed to decrypt legacy ClickUp API token for connector {self._connector_id}: {e!s}. "
                            "Trying to use token as-is (might be unencrypted)."
                        )
                        # Continue with token as-is - might be unencrypted legacy token

                if not self._api_token:
                    raise ValueError("ClickUp API token not found in connector config")

                # Return legacy token directly (no refresh needed)
                return self._api_token
            else:
                raise ValueError(
                    "ClickUp credentials not found in connector config (neither OAuth nor API token)"
                )

        # Check if token is expired and refreshable (only for OAuth)
        if (
            self._use_oauth
            and self._credentials.is_expired
            and self._credentials.is_refreshable
        ):
            try:
                logger.info(
                    f"ClickUp token expired for connector {self._connector_id}, refreshing..."
                )

                # Get connector for refresh
                result = await self._session.execute(
                    select(SearchSourceConnector).filter(
                        SearchSourceConnector.id == self._connector_id
                    )
                )
                connector = result.scalars().first()

                if not connector:
                    raise RuntimeError(
                        f"Connector {self._connector_id} not found; cannot refresh token."
                    )

                # Refresh token
                connector = await refresh_clickup_token(self._session, connector)

                # Reload credentials after refresh
                config_data = connector.config.copy()
                token_encrypted = config_data.get("_token_encrypted", False)
                if token_encrypted and config.SECRET_KEY:
                    token_encryption = TokenEncryption(config.SECRET_KEY)
                    if config_data.get("access_token"):
                        config_data["access_token"] = token_encryption.decrypt_token(
                            config_data["access_token"]
                        )
                    if config_data.get("refresh_token"):
                        config_data["refresh_token"] = token_encryption.decrypt_token(
                            config_data["refresh_token"]
                        )

                self._credentials = ClickUpAuthCredentialsBase.from_dict(config_data)

                # Invalidate cached client so it's recreated with new token
                self._clickup_client = None

                logger.info(
                    f"Successfully refreshed ClickUp token for connector {self._connector_id}"
                )
            except Exception as e:
                logger.error(
                    f"Failed to refresh ClickUp token for connector {self._connector_id}: {e!s}"
                )
                raise Exception(
                    f"Failed to refresh ClickUp OAuth credentials: {e!s}"
                ) from e

        if self._use_oauth:
            return self._credentials.access_token
        else:
            return self._api_token

    async def _get_client(self) -> ClickUpConnector:
        """
        Get or create ClickUpConnector with valid token.

        Returns:
            ClickUpConnector instance
        """
        if self._clickup_client is None:
            token = await self._get_valid_token()
            # ClickUp API uses Bearer token for OAuth, or direct token for legacy
            if self._use_oauth:
                # For OAuth, use Bearer token format (ClickUp OAuth expects "Bearer {token}")
                self._clickup_client = ClickUpConnector(api_token=f"Bearer {token}")
            else:
                # For legacy API token, use token directly (format: "pk_...")
                self._clickup_client = ClickUpConnector(api_token=token)
        return self._clickup_client

    async def close(self):
        """Close any open connections."""
        self._clickup_client = None

    async def __aenter__(self):
        """Async context manager entry."""
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Async context manager exit."""
        await self.close()

    async def get_authorized_workspaces(self) -> dict[str, Any]:
        """
        Fetch authorized workspaces (teams) from ClickUp.

        Returns:
            Dictionary containing teams data

        Raises:
            ValueError: If credentials have not been set
            Exception: If the API request fails
        """
        client = await self._get_client()
        return client.get_authorized_workspaces()

    async def get_workspace_tasks(
        self, workspace_id: str, include_closed: bool = False
    ) -> list[dict[str, Any]]:
        """
        Fetch all tasks from a ClickUp workspace.

        Args:
            workspace_id: ClickUp workspace (team) ID
            include_closed: Whether to include closed tasks (default: False)

        Returns:
            List of task objects

        Raises:
            ValueError: If credentials have not been set
            Exception: If the API request fails
        """
        client = await self._get_client()
        return client.get_workspace_tasks(
            workspace_id=workspace_id, include_closed=include_closed
        )

    async def get_tasks_in_date_range(
        self,
        workspace_id: str,
        start_date: str,
        end_date: str,
        include_closed: bool = False,
    ) -> tuple[list[dict[str, Any]], str | None]:
        """
        Fetch tasks from ClickUp within a specific date range.

        Args:
            workspace_id: ClickUp workspace (team) ID
            start_date: Start date in YYYY-MM-DD format
            end_date: End date in YYYY-MM-DD format
            include_closed: Whether to include closed tasks (default: False)

        Returns:
            Tuple containing (tasks list, error message or None)
        """
        client = await self._get_client()
        return client.get_tasks_in_date_range(
            workspace_id=workspace_id,
            start_date=start_date,
            end_date=end_date,
            include_closed=include_closed,
        )

    async def get_task_details(self, task_id: str) -> dict[str, Any]:
        """
        Fetch detailed information about a specific task.

        Args:
            task_id: ClickUp task ID

        Returns:
            Task details

        Raises:
            ValueError: If credentials have not been set
            Exception: If the API request fails
        """
        client = await self._get_client()
        return client.get_task_details(task_id)

    async def get_task_comments(self, task_id: str) -> dict[str, Any]:
        """
        Fetch comments for a specific task.

        Args:
            task_id: ClickUp task ID

        Returns:
            Task comments

        Raises:
            ValueError: If credentials have not been set
            Exception: If the API request fails
        """
        client = await self._get_client()
        return client.get_task_comments(task_id)
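Reviewer note: since the class implements __aenter__/__aexit__, the intended call pattern is an async context manager. A minimal sketch, assuming an AsyncSession, a connector row with id 7, and a workspace id "90121234567" (all illustrative placeholders):

# Sketch only; identifiers below are placeholders, not values from this diff.
async def pull_recent_tasks(session) -> None:
    async with ClickUpHistoryConnector(session=session, connector_id=7) as clickup:
        tasks, error = await clickup.get_tasks_in_date_range(
            workspace_id="90121234567",
            start_date="2025-01-01",
            end_date="2025-01-31",
            include_closed=True,
        )
        if error:
            print(f"ClickUp fetch failed: {error}")
        else:
            print(f"Fetched {len(tasks)} tasks")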
@@ -6,6 +6,7 @@ Allows fetching emails from Gmail mailbox using Google OAuth credentials.
 
 import base64
 import json
 import logging
 import re
 from typing import Any
 
@@ -21,6 +22,34 @@ from app.db import (
     SearchSourceConnectorType,
 )
 
+logger = logging.getLogger(__name__)
+
+
+def fetch_google_user_email(credentials: Credentials) -> str | None:
+    """
+    Fetch user email from Gmail API using Google credentials.
+
+    Uses the Gmail users.getProfile endpoint which returns the authenticated
+    user's email address.
+
+    Args:
+        credentials: Google OAuth Credentials object (not encrypted)
+
+    Returns:
+        User's email address or None if fetch fails
+    """
+    try:
+        service = build("gmail", "v1", credentials=credentials)
+        profile = service.users().getProfile(userId="me").execute()
+        email = profile.get("emailAddress")
+        if email:
+            logger.debug(f"Fetched Google user email: {email}")
+            return email
+        return None
+    except Exception as e:
+        logger.warning(f"Error fetching Google user email: {e!s}")
+        return None
+
+
 class GoogleGmailConnector:
     """Class for retrieving emails from Gmail using Google OAuth credentials."""
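Reviewer note: a minimal sketch of calling the new helper during an OAuth callback, assuming a google.oauth2.credentials.Credentials object already built from a token response. All field values below are placeholders.

# Sketch only; token values are placeholders, not real credentials.
from google.oauth2.credentials import Credentials

creds = Credentials(
    token="ya29.placeholder",
    refresh_token="1//placeholder",
    token_uri="https://oauth2.googleapis.com/token",
    client_id="client-id.apps.googleusercontent.com",
    client_secret="client-secret",
)
email = fetch_google_user_email(creds)  # returns None on any failure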
@@ -9,18 +9,65 @@ import logging
 from datetime import datetime
 from typing import Any
 
 import httpx
 import requests
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.future import select
 
 from app.config import config
 from app.db import SearchSourceConnector
-from app.routes.linear_add_connector_route import refresh_linear_token
 from app.schemas.linear_auth_credentials import LinearAuthCredentialsBase
 from app.utils.oauth_security import TokenEncryption
 
 logger = logging.getLogger(__name__)
 
+LINEAR_GRAPHQL_URL = "https://api.linear.app/graphql"
+
+ORGANIZATION_QUERY = """
+query {
+  organization {
+    name
+  }
+}
+"""
+
+
+async def fetch_linear_organization_name(access_token: str) -> str | None:
+    """
+    Fetch organization/workspace name from Linear GraphQL API.
+
+    Args:
+        access_token: The Linear OAuth access token
+
+    Returns:
+        Organization name or None if fetch fails
+    """
+    try:
+        async with httpx.AsyncClient() as client:
+            response = await client.post(
+                LINEAR_GRAPHQL_URL,
+                headers={
+                    "Authorization": access_token,
+                    "Content-Type": "application/json",
+                },
+                json={"query": ORGANIZATION_QUERY},
+                timeout=10.0,
+            )
+
+            if response.status_code == 200:
+                data = response.json()
+                org_name = data.get("data", {}).get("organization", {}).get("name")
+                if org_name:
+                    logger.debug(f"Fetched Linear organization name: {org_name}")
+                    return org_name
+
+            logger.warning(f"Failed to fetch Linear org info: {response.status_code}")
+            return None
+
+    except Exception as e:
+        logger.warning(f"Error fetching Linear organization name: {e!s}")
+        return None
+
+
 class LinearConnector:
     """Class for retrieving issues and comments from Linear."""
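Reviewer note: unlike the Airtable and Teams helpers, this one sends the token value as-is in the Authorization header, with no "Bearer " prefix. A minimal sketch of calling it (the token value is a placeholder):

# Sketch only; the token value is a placeholder.
import asyncio

org = asyncio.run(fetch_linear_organization_name("lin_oauth_placeholder"))
print(org or "could not resolve workspace name")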
@@ -121,6 +168,9 @@ class LinearConnector:
                         f"Connector {self._connector_id} not found; cannot refresh token."
                     )
 
+                # Lazy import to avoid circular dependency
+                from app.routes.linear_add_connector_route import refresh_linear_token
+
                 # Refresh token
                 connector = await refresh_linear_token(self._session, connector)
 
@@ -377,7 +377,7 @@ class SlackHistory:
                 else:
                     raise  # Re-raise to outer handler for not_in_channel or other SlackApiErrors
 
-            if not current_api_call_successful:
+            if not current_api_call_successful or result is None:
                 continue  # Retry the current page fetch due to handled rate limit
 
             # Process result if successful
surfsense_backend/app/connectors/teams_connector.py (new file, 338 lines)
@@ -0,0 +1,338 @@
"""
Microsoft Teams Connector

A module for interacting with Microsoft Teams Graph API to retrieve teams, channels, and message history.

Supports OAuth-based authentication with token refresh.
"""

import logging
from datetime import datetime, timezone
from typing import Any

import httpx
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select

from app.config import config
from app.db import SearchSourceConnector
from app.routes.teams_add_connector_route import refresh_teams_token
from app.schemas.teams_auth_credentials import TeamsAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption

logger = logging.getLogger(__name__)


class TeamsConnector:
    """Class for retrieving teams, channels, and message history from Microsoft Teams."""

    # Microsoft Graph API endpoints
    GRAPH_API_BASE = "https://graph.microsoft.com/v1.0"

    def __init__(
        self,
        access_token: str | None = None,
        session: AsyncSession | None = None,
        connector_id: int | None = None,
        credentials: TeamsAuthCredentialsBase | None = None,
    ):
        """
        Initialize the TeamsConnector with an access token or OAuth credentials.

        Args:
            access_token: Microsoft Graph API access token (optional, for backward compatibility)
            session: Database session for token refresh (optional)
            connector_id: Connector ID for token refresh (optional)
            credentials: Teams OAuth credentials (optional, will be loaded from DB if not provided)
        """
        self._session = session
        self._connector_id = connector_id
        self._credentials = credentials
        self._access_token = access_token

    async def _get_valid_token(self) -> str:
        """
        Get valid Microsoft Teams access token, refreshing if needed.

        Returns:
            Valid access token

        Raises:
            ValueError: If credentials are missing or invalid
            Exception: If token refresh fails
        """
        # If we have a direct token (backward compatibility), use it
        if (
            self._access_token
            and self._session is None
            and self._connector_id is None
            and self._credentials is None
        ):
            return self._access_token

        # Load credentials from DB if not provided
        if self._credentials is None:
            if not self._session or not self._connector_id:
                raise ValueError(
                    "Cannot load credentials: session and connector_id required"
                )

            result = await self._session.execute(
                select(SearchSourceConnector).filter(
                    SearchSourceConnector.id == self._connector_id
                )
            )
            connector = result.scalars().first()

            if not connector:
                raise ValueError(f"Connector {self._connector_id} not found")

            config_data = connector.config.copy()

            # Decrypt credentials if they are encrypted
            token_encrypted = config_data.get("_token_encrypted", False)
            if token_encrypted and config.SECRET_KEY:
                try:
                    token_encryption = TokenEncryption(config.SECRET_KEY)

                    # Decrypt sensitive fields
                    if config_data.get("access_token"):
                        config_data["access_token"] = token_encryption.decrypt_token(
                            config_data["access_token"]
                        )
                    if config_data.get("refresh_token"):
                        config_data["refresh_token"] = token_encryption.decrypt_token(
                            config_data["refresh_token"]
                        )

                    logger.info(
                        "Decrypted Teams credentials for connector %s",
                        self._connector_id,
                    )
                except Exception as e:
                    logger.error(
                        "Failed to decrypt Teams credentials for connector %s: %s",
                        self._connector_id,
                        str(e),
                    )
                    raise ValueError(
                        f"Failed to decrypt Teams credentials: {e!s}"
                    ) from e

            try:
                self._credentials = TeamsAuthCredentialsBase.from_dict(config_data)
            except Exception as e:
                raise ValueError(f"Invalid Teams credentials: {e!s}") from e

        # Check if token is expired and refreshable
        if self._credentials.is_expired and self._credentials.is_refreshable:
            try:
                logger.info(
                    "Teams token expired for connector %s, refreshing...",
                    self._connector_id,
                )

                # Get connector for refresh
                result = await self._session.execute(
                    select(SearchSourceConnector).filter(
                        SearchSourceConnector.id == self._connector_id
                    )
                )
                connector = result.scalars().first()

                if not connector:
                    raise RuntimeError(
                        f"Connector {self._connector_id} not found; cannot refresh token."
                    )

                # Refresh token
                connector = await refresh_teams_token(self._session, connector)

                # Reload credentials after refresh
                config_data = connector.config.copy()
                token_encrypted = config_data.get("_token_encrypted", False)
                if token_encrypted and config.SECRET_KEY:
                    token_encryption = TokenEncryption(config.SECRET_KEY)
                    if config_data.get("access_token"):
                        config_data["access_token"] = token_encryption.decrypt_token(
                            config_data["access_token"]
                        )
                    if config_data.get("refresh_token"):
                        config_data["refresh_token"] = token_encryption.decrypt_token(
                            config_data["refresh_token"]
                        )

                self._credentials = TeamsAuthCredentialsBase.from_dict(config_data)

                logger.info(
                    "Successfully refreshed Teams token for connector %s",
                    self._connector_id,
                )
            except Exception as e:
                logger.error(
                    "Failed to refresh Teams token for connector %s: %s",
                    self._connector_id,
                    str(e),
                )
                raise ValueError(
                    f"Failed to refresh Teams OAuth credentials: {e!s}"
                ) from e

        return self._credentials.access_token

    async def get_joined_teams(self) -> list[dict[str, Any]]:
        """
        Get list of all teams the user is a member of.

        Returns:
            List of team objects with id, display_name, etc.
        """
        access_token = await self._get_valid_token()

        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{self.GRAPH_API_BASE}/me/joinedTeams",
                headers={"Authorization": f"Bearer {access_token}"},
                timeout=30.0,
            )

            if response.status_code != 200:
                raise ValueError(
                    f"Failed to get joined teams: {response.status_code} - {response.text}"
                )

            data = response.json()
            return data.get("value", [])

    async def get_team_channels(self, team_id: str) -> list[dict[str, Any]]:
        """
        Get list of all channels in a team.

        Args:
            team_id: The team ID

        Returns:
            List of channel objects
        """
        access_token = await self._get_valid_token()

        async with httpx.AsyncClient() as client:
            response = await client.get(
                f"{self.GRAPH_API_BASE}/teams/{team_id}/channels",
                headers={"Authorization": f"Bearer {access_token}"},
                timeout=30.0,
            )

            if response.status_code != 200:
                raise ValueError(
                    f"Failed to get channels for team {team_id}: {response.status_code} - {response.text}"
                )

            data = response.json()
            return data.get("value", [])

    async def get_channel_messages(
        self,
        team_id: str,
        channel_id: str,
        start_date: datetime | None = None,
        end_date: datetime | None = None,
    ) -> list[dict[str, Any]]:
        """
        Get messages from a specific channel with optional date filtering.

        Args:
            team_id: The team ID
            channel_id: The channel ID
            start_date: Optional start date for filtering messages
            end_date: Optional end date for filtering messages

        Returns:
            List of message objects
        """
        access_token = await self._get_valid_token()

        async with httpx.AsyncClient() as client:
            url = f"{self.GRAPH_API_BASE}/teams/{team_id}/channels/{channel_id}/messages"

            # Note: The Graph API for channel messages doesn't support $filter parameter
            # We fetch all messages and filter them client-side
            response = await client.get(
                url,
                headers={"Authorization": f"Bearer {access_token}"},
                timeout=30.0,
            )

            if response.status_code != 200:
                raise ValueError(
                    f"Failed to get messages from channel {channel_id}: {response.status_code} - {response.text}"
                )

            data = response.json()
            messages = data.get("value", [])

            # Filter messages by date if needed (client-side filtering)
            if start_date or end_date:
                # Make sure comparison dates are timezone-aware (UTC)
                if start_date and start_date.tzinfo is None:
                    start_date = start_date.replace(tzinfo=timezone.utc)
                if end_date and end_date.tzinfo is None:
                    end_date = end_date.replace(tzinfo=timezone.utc)

                filtered_messages = []
                for message in messages:
                    created_at_str = message.get("createdDateTime")
                    if not created_at_str:
                        continue

                    # Parse the ISO 8601 datetime string (already timezone-aware)
                    created_at = datetime.fromisoformat(
                        created_at_str.replace("Z", "+00:00")
                    )

                    # Check if message is within date range
                    if start_date and created_at < start_date:
                        continue
                    if end_date and created_at > end_date:
                        continue

                    filtered_messages.append(message)

                return filtered_messages

            return messages

    async def get_message_replies(
        self, team_id: str, channel_id: str, message_id: str
    ) -> list[dict[str, Any]]:
        """
        Get replies to a specific message.

        Args:
            team_id: The team ID
            channel_id: The channel ID
            message_id: The message ID

        Returns:
            List of reply message objects
        """
        access_token = await self._get_valid_token()

        async with httpx.AsyncClient() as client:
            url = f"{self.GRAPH_API_BASE}/teams/{team_id}/channels/{channel_id}/messages/{message_id}/replies"

            response = await client.get(
                url,
                headers={"Authorization": f"Bearer {access_token}"},
                timeout=30.0,
            )

            if response.status_code != 200:
                logger.warning(
                    "Failed to get replies for message %s: %s - %s",
                    message_id,
                    response.status_code,
                    response.text,
                )
                return []

            data = response.json()
            return data.get("value", [])
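Reviewer note: because the Graph channel-message listing is filtered client-side here, callers can pass naive datetimes and the connector coerces them to UTC. A minimal sketch using the backward-compatibility constructor; the token and dates are placeholders.

# Sketch only; the access token is a placeholder.
from datetime import datetime

async def first_week_of_messages(team_id: str, channel_id: str):
    connector = TeamsConnector(access_token="eyJ...placeholder")
    return await connector.get_channel_messages(
        team_id,
        channel_id,
        start_date=datetime(2025, 1, 1),  # naive; coerced to UTC internally
        end_date=datetime(2025, 1, 8),
    )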
surfsense_backend/app/connectors/teams_history.py (new file, 254 lines)
@@ -0,0 +1,254 @@
"""
Microsoft Teams History Module

A module for retrieving conversation history from Microsoft Teams channels.
Allows fetching team lists, channel lists, and message history with date range filtering.
"""

import logging
from datetime import datetime
from typing import Any

from sqlalchemy.ext.asyncio import AsyncSession

from app.connectors.teams_connector import TeamsConnector
from app.schemas.teams_auth_credentials import TeamsAuthCredentialsBase

logger = logging.getLogger(__name__)


class TeamsHistory:
    """Class for retrieving conversation history from Microsoft Teams channels."""

    def __init__(
        self,
        access_token: str | None = None,
        session: AsyncSession | None = None,
        connector_id: int | None = None,
        credentials: TeamsAuthCredentialsBase | None = None,
    ):
        """
        Initialize the TeamsHistory class.

        Args:
            access_token: Microsoft Graph API access token (optional, for backward compatibility)
            session: Database session for token refresh (optional)
            connector_id: Connector ID for token refresh (optional)
            credentials: Teams OAuth credentials (optional, will be loaded from DB if not provided)
        """
        self.connector = TeamsConnector(
            access_token=access_token,
            session=session,
            connector_id=connector_id,
            credentials=credentials,
        )

    async def get_all_teams(self) -> list[dict[str, Any]]:
        """
        Get list of all teams the user has access to.

        Returns:
            List of team objects containing team metadata.
        """
        try:
            teams = await self.connector.get_joined_teams()
            logger.info("Retrieved %s teams", len(teams))
            return teams
        except Exception as e:
            logger.error("Error fetching teams: %s", str(e))
            raise

    async def get_channels_for_team(self, team_id: str) -> list[dict[str, Any]]:
        """
        Get list of all channels in a specific team.

        Args:
            team_id: The ID of the team

        Returns:
            List of channel objects containing channel metadata.
        """
        try:
            channels = await self.connector.get_team_channels(team_id)
            logger.info("Retrieved %s channels for team %s", len(channels), team_id)
            return channels
        except Exception as e:
            logger.error("Error fetching channels for team %s: %s", team_id, str(e))
            raise

    async def get_messages_from_channel(
        self,
        team_id: str,
        channel_id: str,
        start_date: datetime | None = None,
        end_date: datetime | None = None,
        include_replies: bool = True,
    ) -> list[dict[str, Any]]:
        """
        Get messages from a specific channel with optional date filtering.

        Args:
            team_id: The ID of the team
            channel_id: The ID of the channel
            start_date: Optional start date for filtering messages
            end_date: Optional end date for filtering messages
            include_replies: Whether to include reply messages (default: True)

        Returns:
            List of message objects with content and metadata.
        """
        try:
            messages = await self.connector.get_channel_messages(
                team_id, channel_id, start_date, end_date
            )

            logger.info(
                "Retrieved %s messages from channel %s in team %s",
                len(messages),
                channel_id,
                team_id,
            )

            # Fetch replies if requested
            if include_replies:
                all_messages = []
                for message in messages:
                    all_messages.append(message)
                    # Get replies for this message
                    try:
                        replies = await self.connector.get_message_replies(
                            team_id, channel_id, message.get("id")
                        )
                        all_messages.extend(replies)
                    except Exception:
                        logger.warning(
                            "Failed to get replies for message %s",
                            message.get("id"),
                            exc_info=True,
                        )
                        # Continue without replies for this message

                logger.info(
                    "Total messages including replies: %s for channel %s",
                    len(all_messages),
                    channel_id,
                )
                return all_messages

            return messages

        except Exception as e:
            logger.error(
                "Error fetching messages from channel %s in team %s: %s",
                channel_id,
                team_id,
                str(e),
            )
            raise

    async def get_all_messages_from_team(
        self,
        team_id: str,
        start_date: datetime | None = None,
        end_date: datetime | None = None,
        include_replies: bool = True,
    ) -> dict[str, list[dict[str, Any]]]:
        """
        Get all messages from all channels in a team.

        Args:
            team_id: The ID of the team
            start_date: Optional start date for filtering messages
            end_date: Optional end date for filtering messages
            include_replies: Whether to include reply messages (default: True)

        Returns:
            Dictionary mapping channel IDs to lists of messages.
        """
        try:
            channels = await self.get_channels_for_team(team_id)
            all_channel_messages = {}

            for channel in channels:
                channel_id = channel.get("id")
                channel_name = channel.get("displayName", "Unknown")

                try:
                    messages = await self.get_messages_from_channel(
                        team_id, channel_id, start_date, end_date, include_replies
                    )
                    all_channel_messages[channel_id] = messages
                    logger.info(
                        "Fetched %s messages from channel '%s' (%s)",
                        len(messages),
                        channel_name,
                        channel_id,
                    )
                except Exception:
                    logger.error(
                        "Failed to fetch messages from channel '%s' (%s)",
                        channel_name,
                        channel_id,
                        exc_info=True,
                    )
                    all_channel_messages[channel_id] = []

            return all_channel_messages

        except Exception as e:
            logger.error("Error fetching messages from team %s: %s", team_id, str(e))
            raise

    async def get_all_messages(
        self,
        start_date: datetime | None = None,
        end_date: datetime | None = None,
        include_replies: bool = True,
    ) -> dict[str, dict[str, list[dict[str, Any]]]]:
        """
        Get all messages from all teams and channels the user has access to.

        Args:
            start_date: Optional start date for filtering messages
            end_date: Optional end date for filtering messages
            include_replies: Whether to include reply messages (default: True)

        Returns:
            Nested dictionary: team_id -> channel_id -> list of messages.
        """
        try:
            teams = await self.get_all_teams()
            all_messages = {}

            for team in teams:
                team_id = team.get("id")
                team_name = team.get("displayName", "Unknown")

                try:
                    team_messages = await self.get_all_messages_from_team(
                        team_id, start_date, end_date, include_replies
                    )
                    all_messages[team_id] = team_messages
                    total_messages = sum(
                        len(messages) for messages in team_messages.values()
                    )
                    logger.info(
                        "Fetched %s total messages from team '%s' (%s)",
                        total_messages,
                        team_name,
                        team_id,
                    )
                except Exception:
                    logger.error(
                        "Failed to fetch messages from team '%s' (%s)",
                        team_name,
                        team_id,
                        exc_info=True,
                    )
                    all_messages[team_id] = {}

            return all_messages

        except Exception as e:
            logger.error("Error fetching all messages: %s", str(e))
            raise
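Reviewer note: the nested dict returned by get_all_messages() flattens easily for indexing. A minimal sketch; the session and connector id are placeholders.

# Sketch only; connector_id 3 is illustrative.
async def count_all_teams_messages(session) -> int:
    history = TeamsHistory(session=session, connector_id=3)
    per_team = await history.get_all_messages(include_replies=False)
    return sum(
        len(msgs)
        for channels in per_team.values()
        for msgs in channels.values()
    )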
@@ -36,6 +36,7 @@ class DocumentType(str, Enum):
     CRAWLED_URL = "CRAWLED_URL"
     FILE = "FILE"
     SLACK_CONNECTOR = "SLACK_CONNECTOR"
+    TEAMS_CONNECTOR = "TEAMS_CONNECTOR"
     NOTION_CONNECTOR = "NOTION_CONNECTOR"
     YOUTUBE_VIDEO = "YOUTUBE_VIDEO"
     GITHUB_CONNECTOR = "GITHUB_CONNECTOR"
@@ -62,6 +63,7 @@ class SearchSourceConnectorType(str, Enum):
     LINKUP_API = "LINKUP_API"
     BAIDU_SEARCH_API = "BAIDU_SEARCH_API"  # Baidu AI Search API for Chinese web search
     SLACK_CONNECTOR = "SLACK_CONNECTOR"
+    TEAMS_CONNECTOR = "TEAMS_CONNECTOR"
     NOTION_CONNECTOR = "NOTION_CONNECTOR"
     GITHUB_CONNECTOR = "GITHUB_CONNECTOR"
     LINEAR_CONNECTOR = "LINEAR_CONNECTOR"
@@ -4,6 +4,7 @@ from .airtable_add_connector_route import (
     router as airtable_add_connector_router,
 )
 from .circleback_webhook_route import router as circleback_webhook_router
+from .clickup_add_connector_route import router as clickup_add_connector_router
 from .confluence_add_connector_route import router as confluence_add_connector_router
 from .discord_add_connector_route import router as discord_add_connector_router
 from .documents_routes import router as documents_router
@@ -30,6 +31,7 @@ from .rbac_routes import router as rbac_router
 from .search_source_connectors_routes import router as search_source_connectors_router
 from .search_spaces_routes import router as search_spaces_router
 from .slack_add_connector_route import router as slack_add_connector_router
+from .teams_add_connector_route import router as teams_add_connector_router
 
 router = APIRouter()
 
@@ -49,9 +51,11 @@ router.include_router(linear_add_connector_router)
 router.include_router(luma_add_connector_router)
 router.include_router(notion_add_connector_router)
 router.include_router(slack_add_connector_router)
+router.include_router(teams_add_connector_router)
 router.include_router(discord_add_connector_router)
 router.include_router(jira_add_connector_router)
 router.include_router(confluence_add_connector_router)
+router.include_router(clickup_add_connector_router)
 router.include_router(new_llm_config_router)  # LLM configs with prompt configuration
 router.include_router(logs_router)
 router.include_router(circleback_webhook_router)  # Circleback meeting webhooks
@@ -11,9 +11,9 @@ from fastapi.responses import RedirectResponse
 from pydantic import ValidationError
 from sqlalchemy.exc import IntegrityError
 from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.future import select
 
 from app.config import config
+from app.connectors.airtable_connector import fetch_airtable_user_email
 from app.db import (
     SearchSourceConnector,
     SearchSourceConnectorType,
@@ -22,6 +22,10 @@ from app.db import (
 )
 from app.schemas.airtable_auth_credentials import AirtableAuthCredentialsBase
 from app.users import current_active_user
+from app.utils.connector_naming import (
+    check_duplicate_connector,
+    generate_unique_connector_name,
+)
 from app.utils.oauth_security import OAuthStateManager, TokenEncryption
 
 logger = logging.getLogger(__name__)
@@ -275,6 +279,8 @@ async def airtable_callback(
                 status_code=400, detail="No access token received from Airtable"
             )
 
+        user_email = await fetch_airtable_user_email(access_token)
+
         # Calculate expiration time (UTC, tz-aware)
         expires_at = None
         if token_json.get("expires_in"):
@@ -297,39 +303,43 @@ async def airtable_callback(
         credentials_dict = credentials.to_dict()
         credentials_dict["_token_encrypted"] = True
 
-        # Check if connector already exists for this search space and user
-        existing_connector_result = await session.execute(
-            select(SearchSourceConnector).filter(
-                SearchSourceConnector.search_space_id == space_id,
-                SearchSourceConnector.user_id == user_id,
-                SearchSourceConnector.connector_type
-                == SearchSourceConnectorType.AIRTABLE_CONNECTOR,
-            )
-        )
-        existing_connector = existing_connector_result.scalars().first()
+        # Check for duplicate connector (same account already connected)
+        is_duplicate = await check_duplicate_connector(
+            session,
+            SearchSourceConnectorType.AIRTABLE_CONNECTOR,
+            space_id,
+            user_id,
+            user_email,
+        )
+        if is_duplicate:
+            logger.warning(
+                f"Duplicate Airtable connector detected for user {user_id} with email {user_email}"
+            )
+            return RedirectResponse(
+                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=airtable-connector"
+            )
 
-        if existing_connector:
-            # Update existing connector
-            existing_connector.config = credentials_dict
-            existing_connector.name = "Airtable Connector"
-            existing_connector.is_indexable = True
-            logger.info(
-                f"Updated existing Airtable connector for user {user_id} in space {space_id}"
-            )
-        else:
-            # Create new connector
-            new_connector = SearchSourceConnector(
-                name="Airtable Connector",
-                connector_type=SearchSourceConnectorType.AIRTABLE_CONNECTOR,
-                is_indexable=True,
-                config=credentials_dict,
-                search_space_id=space_id,
-                user_id=user_id,
-            )
-            session.add(new_connector)
-            logger.info(
-                f"Created new Airtable connector for user {user_id} in space {space_id}"
-            )
+        # Generate a unique, user-friendly connector name
+        connector_name = await generate_unique_connector_name(
+            session,
+            SearchSourceConnectorType.AIRTABLE_CONNECTOR,
+            space_id,
+            user_id,
+            user_email,
+        )
+        # Create new connector
+        new_connector = SearchSourceConnector(
+            name=connector_name,
+            connector_type=SearchSourceConnectorType.AIRTABLE_CONNECTOR,
+            is_indexable=True,
+            config=credentials_dict,
+            search_space_id=space_id,
+            user_id=user_id,
+        )
+        session.add(new_connector)
+        logger.info(
+            f"Created new Airtable connector for user {user_id} in space {space_id}"
+        )
 
         try:
             await session.commit()
@@ -338,7 +348,7 @@ async def airtable_callback(
         # Redirect to the frontend with success params for indexing config
         # Using query params to auto-open the popup with config view on new-chat page
         return RedirectResponse(
-            url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=airtable-connector"
+            url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=airtable-connector&connectorId={new_connector.id}"
         )
 
     except ValidationError as e:
@@ -350,7 +360,7 @@ async def airtable_callback(
         await session.rollback()
         raise HTTPException(
             status_code=409,
-            detail=f"Integrity error: A connector with this type already exists. {e!s}",
+            detail=f"Database integrity error: {e!s}",
         ) from e
     except Exception as e:
         logger.error(f"Failed to create search source connector: {e!s}")
@@ -371,7 +381,7 @@ async def airtable_callback(
 
 async def refresh_airtable_token(
     session: AsyncSession, connector: SearchSourceConnector
-):
+) -> SearchSourceConnector:
     """
     Refresh the Airtable access token for a connector.
 
@@ -401,6 +411,12 @@ async def refresh_airtable_token(
                 status_code=500, detail="Failed to decrypt stored refresh token"
             ) from e
 
+    if not refresh_token:
+        raise HTTPException(
+            status_code=400,
+            detail="No refresh token available. Please re-authenticate.",
+        )
+
     auth_header = make_basic_auth_header(
         config.AIRTABLE_CLIENT_ID, config.AIRTABLE_CLIENT_SECRET
     )
@@ -425,8 +441,14 @@ async def refresh_airtable_token(
         )
 
         if token_response.status_code != 200:
+            error_detail = token_response.text
+            try:
+                error_json = token_response.json()
+                error_detail = error_json.get("error_description", error_detail)
+            except Exception:
+                pass
             raise HTTPException(
-                status_code=400, detail="Token refresh failed: {token_response.text}"
+                status_code=400, detail=f"Token refresh failed: {error_detail}"
             )
 
         token_json = token_response.json()
@@ -468,6 +490,8 @@ async def refresh_airtable_token(
         )
 
         return connector
+    except HTTPException:
+        raise
     except Exception as e:
         raise HTTPException(
             status_code=500, detail=f"Failed to refresh Airtable token: {e!s}"
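Reviewer note: the same decrypt-refresh-reencrypt lifecycle now appears in four connectors. A condensed sketch of the shared shape, with a hypothetical helper name (reload_decrypted_config) to show what could be factored out later; this helper is not part of the diff.

# Sketch only: reload_decrypted_config is a hypothetical extraction, not in this diff.
def reload_decrypted_config(connector, token_encryption) -> dict:
    config_data = connector.config.copy()
    if config_data.get("_token_encrypted", False):
        # Decrypt the sensitive fields in place, mirroring the per-connector code.
        for field in ("access_token", "refresh_token"):
            if config_data.get(field):
                config_data[field] = token_encryption.decrypt_token(config_data[field])
    return config_data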
surfsense_backend/app/routes/clickup_add_connector_route.py (new file, 481 lines)
@@ -0,0 +1,481 @@
"""
ClickUp Connector OAuth Routes.

Handles OAuth 2.0 authentication flow for ClickUp connector.
"""

import logging
from datetime import UTC, datetime, timedelta
from uuid import UUID

import httpx
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.responses import RedirectResponse
from pydantic import ValidationError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select

from app.config import config
from app.db import (
    SearchSourceConnector,
    SearchSourceConnectorType,
    User,
    get_async_session,
)
from app.schemas.clickup_auth_credentials import ClickUpAuthCredentialsBase
from app.users import current_active_user
from app.utils.oauth_security import OAuthStateManager, TokenEncryption

logger = logging.getLogger(__name__)

router = APIRouter()

# ClickUp OAuth endpoints
AUTHORIZATION_URL = "https://app.clickup.com/api"
TOKEN_URL = "https://api.clickup.com/api/v2/oauth/token"

# Initialize security utilities
_state_manager = None
_token_encryption = None


def get_state_manager() -> OAuthStateManager:
    """Get or create OAuth state manager instance."""
    global _state_manager
    if _state_manager is None:
        if not config.SECRET_KEY:
            raise ValueError("SECRET_KEY must be set for OAuth security")
        _state_manager = OAuthStateManager(config.SECRET_KEY)
    return _state_manager


def get_token_encryption() -> TokenEncryption:
    """Get or create token encryption instance."""
    global _token_encryption
    if _token_encryption is None:
        if not config.SECRET_KEY:
            raise ValueError("SECRET_KEY must be set for token encryption")
        _token_encryption = TokenEncryption(config.SECRET_KEY)
    return _token_encryption
@router.get("/auth/clickup/connector/add")
|
||||
async def connect_clickup(space_id: int, user: User = Depends(current_active_user)):
|
||||
"""
|
||||
Initiate ClickUp OAuth flow.
|
||||
|
||||
Args:
|
||||
space_id: The search space ID
|
||||
user: Current authenticated user
|
||||
|
||||
Returns:
|
||||
Authorization URL for redirect
|
||||
"""
|
||||
try:
|
||||
if not space_id:
|
||||
raise HTTPException(status_code=400, detail="space_id is required")
|
||||
|
||||
if not config.CLICKUP_CLIENT_ID:
|
||||
raise HTTPException(status_code=500, detail="ClickUp OAuth not configured.")
|
||||
|
||||
if not config.SECRET_KEY:
|
||||
raise HTTPException(
|
||||
status_code=500, detail="SECRET_KEY not configured for OAuth security."
|
||||
)
|
||||
|
||||
# Generate secure state parameter with HMAC signature
|
||||
state_manager = get_state_manager()
|
||||
state_encoded = state_manager.generate_secure_state(space_id, user.id)
|
||||
|
||||
# Build authorization URL
|
||||
from urllib.parse import urlencode
|
||||
|
||||
auth_params = {
|
||||
"client_id": config.CLICKUP_CLIENT_ID,
|
||||
"redirect_uri": config.CLICKUP_REDIRECT_URI,
|
||||
"state": state_encoded,
|
||||
}
|
||||
|
||||
auth_url = f"{AUTHORIZATION_URL}?{urlencode(auth_params)}"
|
||||
|
||||
logger.info(f"Generated ClickUp OAuth URL for user {user.id}, space {space_id}")
|
||||
return {"auth_url": auth_url}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to initiate ClickUp OAuth: {e!s}", exc_info=True)
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to initiate ClickUp OAuth: {e!s}"
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/auth/clickup/connector/callback")
|
||||
async def clickup_callback(
|
||||
request: Request,
|
||||
code: str | None = None,
|
||||
error: str | None = None,
|
||||
state: str | None = None,
|
||||
session: AsyncSession = Depends(get_async_session),
|
||||
):
|
||||
"""
|
||||
Handle ClickUp OAuth callback.
|
||||
|
||||
Args:
|
||||
request: FastAPI request object
|
||||
code: Authorization code from ClickUp (if user granted access)
|
||||
error: Error code from ClickUp (if user denied access or error occurred)
|
||||
state: State parameter containing user/space info
|
||||
session: Database session
|
||||
|
||||
Returns:
|
||||
Redirect response to frontend
|
||||
"""
|
||||
try:
|
||||
# Handle OAuth errors (e.g., user denied access)
|
||||
if error:
|
||||
logger.warning(f"ClickUp OAuth error: {error}")
|
||||
# Try to decode state to get space_id for redirect, but don't fail if it's invalid
|
||||
space_id = None
|
||||
if state:
|
||||
try:
|
||||
state_manager = get_state_manager()
|
||||
data = state_manager.validate_state(state)
|
||||
space_id = data.get("space_id")
|
||||
except Exception:
|
||||
# If state is invalid, we'll redirect without space_id
|
||||
logger.warning("Failed to validate state in error handler")
|
||||
|
||||
# Redirect to frontend with error parameter
|
||||
if space_id:
|
||||
return RedirectResponse(
|
||||
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=clickup_oauth_denied"
|
||||
)
|
||||
else:
|
||||
return RedirectResponse(
|
||||
url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=clickup_oauth_denied"
|
||||
)
|
||||
|
||||
# Validate required parameters for successful flow
|
||||
if not code:
|
||||
raise HTTPException(status_code=400, detail="Missing authorization code")
|
||||
if not state:
|
||||
raise HTTPException(status_code=400, detail="Missing state parameter")
|
||||
|
||||
# Validate and decode state with signature verification
|
||||
state_manager = get_state_manager()
|
||||
try:
|
||||
data = state_manager.validate_state(state)
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=400, detail=f"Invalid state parameter: {e!s}"
|
||||
) from e
|
||||
|
||||
user_id = UUID(data["user_id"])
|
||||
space_id = data["space_id"]
|
||||
|
||||
# Validate redirect URI (security: ensure it matches configured value)
|
||||
if not config.CLICKUP_REDIRECT_URI:
|
||||
raise HTTPException(
|
||||
status_code=500, detail="CLICKUP_REDIRECT_URI not configured"
|
||||
)
|
||||
|
||||
# Exchange authorization code for access token
|
||||
token_data = {
|
||||
"client_id": config.CLICKUP_CLIENT_ID,
|
||||
"client_secret": config.CLICKUP_CLIENT_SECRET,
|
||||
"code": code,
|
||||
}
|
||||
|
||||
async with httpx.AsyncClient() as client:
|
||||
token_response = await client.post(
|
||||
TOKEN_URL,
|
||||
json=token_data,
|
||||
headers={"Content-Type": "application/json"},
|
||||
timeout=30.0,
|
||||
)
|
||||
|
||||
if token_response.status_code != 200:
|
||||
error_detail = token_response.text
|
||||
try:
|
||||
error_json = token_response.json()
|
||||
error_detail = error_json.get("error", error_detail)
|
||||
except Exception:
|
||||
pass
|
||||
raise HTTPException(
|
||||
status_code=400, detail=f"Token exchange failed: {error_detail}"
|
||||
)
|
||||
|
||||
token_json = token_response.json()
|
||||
|
||||
# Extract access token
|
||||
access_token = token_json.get("access_token")
|
||||
if not access_token:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="No access token received from ClickUp"
|
||||
)
|
||||
|
||||
# Extract refresh token if available
|
||||
refresh_token = token_json.get("refresh_token")
|
||||
|
||||
# Encrypt sensitive tokens before storing
|
||||
token_encryption = get_token_encryption()
|
||||
|
||||
# Calculate expiration time (UTC, tz-aware)
|
||||
expires_at = None
|
||||
expires_in = token_json.get("expires_in")
|
||||
if expires_in:
|
||||
now_utc = datetime.now(UTC)
|
||||
expires_at = now_utc + timedelta(seconds=int(expires_in))
|
||||
|
||||
# Get user information and workspace information from ClickUp API
|
||||
user_info = {}
|
||||
workspace_info = {}
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
# Get user info
|
||||
user_response = await client.get(
|
||||
"https://api.clickup.com/api/v2/user",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
timeout=30.0,
|
||||
)
|
||||
if user_response.status_code == 200:
|
||||
user_data = user_response.json().get("user", {})
|
||||
user_info = {
|
||||
"user_id": str(user_data.get("id"))
|
||||
if user_data.get("id") is not None
|
||||
else None,
|
||||
"user_email": user_data.get("email"),
|
||||
"user_name": user_data.get("username"),
|
||||
}
|
||||
|
||||
# Get workspace (team) info - get the first workspace
|
||||
team_response = await client.get(
|
||||
"https://api.clickup.com/api/v2/team",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
timeout=30.0,
|
||||
)
|
||||
if team_response.status_code == 200:
|
||||
teams_data = team_response.json().get("teams", [])
|
||||
if teams_data and len(teams_data) > 0:
|
||||
first_team = teams_data[0]
|
||||
workspace_info = {
|
||||
"workspace_id": str(first_team.get("id"))
|
||||
if first_team.get("id") is not None
|
||||
else None,
|
||||
"workspace_name": first_team.get("name"),
|
||||
}
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to fetch user/workspace info from ClickUp: {e!s}")
|
||||
|
||||
# Store the encrypted tokens and user/workspace info in connector config
|
||||
connector_config = {
|
||||
"access_token": token_encryption.encrypt_token(access_token),
|
||||
"refresh_token": token_encryption.encrypt_token(refresh_token)
|
||||
if refresh_token
|
||||
else None,
|
||||
"expires_in": expires_in,
|
||||
"expires_at": expires_at.isoformat() if expires_at else None,
|
||||
"user_id": user_info.get("user_id"),
|
||||
"user_email": user_info.get("user_email"),
|
||||
"user_name": user_info.get("user_name"),
|
||||
"workspace_id": workspace_info.get("workspace_id"),
|
||||
"workspace_name": workspace_info.get("workspace_name"),
|
||||
# Mark that token is encrypted for backward compatibility
|
||||
"_token_encrypted": True,
|
||||
}
|
||||
|
||||
# Check if connector already exists for this search space and user
|
||||
existing_connector_result = await session.execute(
|
||||
select(SearchSourceConnector).filter(
|
||||
SearchSourceConnector.search_space_id == space_id,
|
||||
SearchSourceConnector.user_id == user_id,
|
||||
SearchSourceConnector.connector_type
|
||||
== SearchSourceConnectorType.CLICKUP_CONNECTOR,
|
||||
)
|
||||
)
|
||||
existing_connector = existing_connector_result.scalars().first()
|
||||
|
||||
if existing_connector:
|
||||
# Update existing connector
|
||||
existing_connector.config = connector_config
|
||||
existing_connector.name = "ClickUp Connector"
|
||||
existing_connector.is_indexable = True
|
||||
logger.info(
|
||||
f"Updated existing ClickUp connector for user {user_id} in space {space_id}"
|
||||
)
|
||||
else:
|
||||
# Create new connector
|
||||
new_connector = SearchSourceConnector(
|
||||
name="ClickUp Connector",
|
||||
connector_type=SearchSourceConnectorType.CLICKUP_CONNECTOR,
|
||||
is_indexable=True,
|
||||
config=connector_config,
|
||||
search_space_id=space_id,
|
||||
user_id=user_id,
|
||||
)
|
||||
session.add(new_connector)
|
||||
logger.info(
|
||||
f"Created new ClickUp connector for user {user_id} in space {space_id}"
|
||||
)
|
||||
|
||||
try:
|
||||
await session.commit()
|
||||
logger.info(f"Successfully saved ClickUp connector for user {user_id}")
|
||||
|
||||
# Redirect to the frontend with success params
|
||||
return RedirectResponse(
|
||||
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=clickup-connector"
|
||||
)
|
||||
|
||||
except ValidationError as e:
|
||||
await session.rollback()
|
||||
raise HTTPException(
|
||||
status_code=422, detail=f"Validation error: {e!s}"
|
||||
) from e
|
||||
except IntegrityError as e:
|
||||
await session.rollback()
|
||||
raise HTTPException(
|
||||
status_code=409,
|
||||
detail=f"Integrity error: A connector with this type already exists. {e!s}",
|
||||
) from e
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to create search source connector: {e!s}")
|
||||
await session.rollback()
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Failed to create search source connector: {e!s}",
|
||||
) from e
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to complete ClickUp OAuth: {e!s}", exc_info=True)
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to complete ClickUp OAuth: {e!s}"
|
||||
) from e
|
||||
|
||||
|
||||
async def refresh_clickup_token(
|
||||
session: AsyncSession, connector: SearchSourceConnector
|
||||
) -> SearchSourceConnector:
|
||||
"""
|
||||
Refresh the ClickUp access token for a connector.
|
||||
|
||||
Args:
|
||||
session: Database session
|
||||
connector: ClickUp connector to refresh
|
||||
|
||||
Returns:
|
||||
Updated connector object
|
||||
"""
|
||||
try:
|
||||
logger.info(f"Refreshing ClickUp token for connector {connector.id}")
|
||||
|
||||
credentials = ClickUpAuthCredentialsBase.from_dict(connector.config)
|
||||
|
||||
# Decrypt tokens if they are encrypted
|
||||
token_encryption = get_token_encryption()
|
||||
is_encrypted = connector.config.get("_token_encrypted", False)
|
||||
|
||||
refresh_token = credentials.refresh_token
|
||||
if is_encrypted and refresh_token:
|
||||
try:
|
||||
refresh_token = token_encryption.decrypt_token(refresh_token)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to decrypt refresh token: {e!s}")
|
||||
raise HTTPException(
|
||||
status_code=500, detail="Failed to decrypt stored refresh token"
|
||||
) from e
|
||||
|
||||
if not refresh_token:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="No refresh token available. Please re-authenticate.",
|
||||
)
|
||||
|
||||
# Prepare token refresh data
|
||||
refresh_data = {
|
||||
"client_id": config.CLICKUP_CLIENT_ID,
|
||||
"client_secret": config.CLICKUP_CLIENT_SECRET,
|
||||
"refresh_token": refresh_token,
|
||||
}
|
||||
|
||||
async with httpx.AsyncClient() as client:
|
||||
token_response = await client.post(
|
||||
TOKEN_URL,
|
||||
json=refresh_data,
|
||||
headers={"Content-Type": "application/json"},
|
||||
timeout=30.0,
|
||||
)
|
||||
|
||||
if token_response.status_code != 200:
|
||||
error_detail = token_response.text
|
||||
try:
|
||||
error_json = token_response.json()
|
||||
error_detail = error_json.get("error", error_detail)
|
||||
except Exception:
|
||||
pass
|
||||
raise HTTPException(
|
||||
status_code=400, detail=f"Token refresh failed: {error_detail}"
|
||||
)
|
||||
|
||||
token_json = token_response.json()
|
||||
|
||||
# Calculate expiration time (UTC, tz-aware)
|
||||
expires_at = None
|
||||
expires_in = token_json.get("expires_in")
|
||||
if expires_in:
|
||||
now_utc = datetime.now(UTC)
|
||||
expires_at = now_utc + timedelta(seconds=int(expires_in))
|
||||
|
||||
# Encrypt new tokens before storing
|
||||
access_token = token_json.get("access_token")
|
||||
new_refresh_token = token_json.get("refresh_token")
|
||||
|
||||
if not access_token:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="No access token received from ClickUp refresh"
|
||||
)
|
||||
|
||||
# Update credentials object with encrypted tokens
|
||||
credentials.access_token = token_encryption.encrypt_token(access_token)
|
||||
if new_refresh_token:
|
||||
credentials.refresh_token = token_encryption.encrypt_token(
|
||||
new_refresh_token
|
||||
)
|
||||
credentials.expires_in = expires_in
|
||||
credentials.expires_at = expires_at
|
||||
|
||||
# Preserve user and workspace info
|
||||
if not credentials.user_id:
|
||||
credentials.user_id = connector.config.get("user_id")
|
||||
if not credentials.user_email:
|
||||
credentials.user_email = connector.config.get("user_email")
|
||||
if not credentials.user_name:
|
||||
credentials.user_name = connector.config.get("user_name")
|
||||
if not credentials.workspace_id:
|
||||
credentials.workspace_id = connector.config.get("workspace_id")
|
||||
if not credentials.workspace_name:
|
||||
credentials.workspace_name = connector.config.get("workspace_name")
|
||||
|
||||
# Update connector config with encrypted tokens
|
||||
credentials_dict = credentials.to_dict()
|
||||
credentials_dict["_token_encrypted"] = True
|
||||
connector.config = credentials_dict
|
||||
await session.commit()
|
||||
await session.refresh(connector)
|
||||
|
||||
logger.info(
|
||||
f"Successfully refreshed ClickUp token for connector {connector.id}"
|
||||
)
|
||||
|
||||
return connector
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to refresh ClickUp token: {e!s}", exc_info=True)
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to refresh ClickUp token: {e!s}"
|
||||
) from e
|
||||
|
|
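The ClickUp routes above lean on OAuthStateManager for CSRF-safe state and TokenEncryption for at-rest token secrecy. The sketch below shows the state round-trip implied by the call sites (generate_secure_state(space_id, user.id) out, validate_state(state) back to a dict carrying "space_id" and "user_id"); it is an assumed shape for illustration, not the actual code in app.utils.oauth_security:

# Illustrative HMAC-signed state round-trip (assumed shape, not repo code).
import base64
import hashlib
import hmac
import json


class StateManagerSketch:
    def __init__(self, secret_key: str):
        self._key = secret_key.encode()

    def generate_secure_state(self, space_id: int, user_id) -> str:
        # Serialize the payload and append an HMAC-SHA256 signature.
        payload = json.dumps({"space_id": space_id, "user_id": str(user_id)}).encode()
        sig = hmac.new(self._key, payload, hashlib.sha256).hexdigest()
        return base64.urlsafe_b64encode(payload + b"." + sig.encode()).decode()

    def validate_state(self, state: str) -> dict:
        # Split payload from signature and verify in constant time.
        raw = base64.urlsafe_b64decode(state.encode())
        payload, _, sig = raw.rpartition(b".")
        expected = hmac.new(self._key, payload, hashlib.sha256).hexdigest().encode()
        if not hmac.compare_digest(sig, expected):
            raise ValueError("State signature mismatch")
        return json.loads(payload)

Because the state is signed with the server's SECRET_KEY, a tampered space_id or user_id in the callback fails validation before any database work happens.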
@@ -14,7 +14,6 @@ from fastapi.responses import RedirectResponse
 from pydantic import ValidationError
 from sqlalchemy.exc import IntegrityError
 from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.future import select

 from app.config import config
 from app.db import (

@@ -25,6 +24,11 @@ from app.db import (
 )
 from app.schemas.atlassian_auth_credentials import AtlassianAuthCredentialsBase
 from app.users import current_active_user
+from app.utils.connector_naming import (
+    check_duplicate_connector,
+    extract_identifier_from_credentials,
+    generate_unique_connector_name,
+)
 from app.utils.oauth_security import OAuthStateManager, TokenEncryption

 logger = logging.getLogger(__name__)

@@ -288,47 +292,56 @@ async def confluence_callback(
             "_token_encrypted": True,
         }

-        # Check if connector already exists for this search space and user
-        existing_connector_result = await session.execute(
-            select(SearchSourceConnector).filter(
-                SearchSourceConnector.search_space_id == space_id,
-                SearchSourceConnector.user_id == user_id,
-                SearchSourceConnector.connector_type
-                == SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
-            )
-        )
-        existing_connector = existing_connector_result.scalars().first()
-
-        if existing_connector:
-            # Update existing connector
-            existing_connector.config = connector_config
-            existing_connector.name = "Confluence Connector"
-            existing_connector.is_indexable = True
-            logger.info(
-                f"Updated existing Confluence connector for user {user_id} in space {space_id}"
-            )
-        else:
-            # Create new connector
-            new_connector = SearchSourceConnector(
-                name="Confluence Connector",
-                connector_type=SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
-                is_indexable=True,
-                config=connector_config,
-                search_space_id=space_id,
-                user_id=user_id,
-            )
-            session.add(new_connector)
-            logger.info(
-                f"Created new Confluence connector for user {user_id} in space {space_id}"
-            )
+        # Extract unique identifier from connector credentials
+        connector_identifier = extract_identifier_from_credentials(
+            SearchSourceConnectorType.CONFLUENCE_CONNECTOR, connector_config
+        )
+        # Check for duplicate connector (same Confluence instance already connected)
+        is_duplicate = await check_duplicate_connector(
+            session,
+            SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
+            space_id,
+            user_id,
+            connector_identifier,
+        )
+        if is_duplicate:
+            logger.warning(
+                f"Duplicate Confluence connector detected for user {user_id} with instance {connector_identifier}"
+            )
+            return RedirectResponse(
+                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=confluence-connector"
+            )
+
+        # Generate a unique, user-friendly connector name
+        connector_name = await generate_unique_connector_name(
+            session,
+            SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
+            space_id,
+            user_id,
+            connector_identifier,
+        )
+        # Create new connector
+        new_connector = SearchSourceConnector(
+            name=connector_name,
+            connector_type=SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
+            is_indexable=True,
+            config=connector_config,
+            search_space_id=space_id,
+            user_id=user_id,
+        )
+        session.add(new_connector)
+        logger.info(
+            f"Created new Confluence connector for user {user_id} in space {space_id}"
+        )

         try:
             await session.commit()
             logger.info(f"Successfully saved Confluence connector for user {user_id}")

             # Redirect to the frontend with success params
             return RedirectResponse(
-                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=confluence-connector"
+                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=confluence-connector&connectorId={new_connector.id}"
             )

         except ValidationError as e:

@@ -340,7 +353,7 @@ async def confluence_callback(
             await session.rollback()
             raise HTTPException(
                 status_code=409,
-                detail=f"Integrity error: A connector with this type already exists. {e!s}",
+                detail=f"Database integrity error: {e!s}",
             ) from e
         except Exception as e:
             logger.error(f"Failed to create search source connector: {e!s}")
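The Confluence callback (and the Discord, Jira, Notion, and Slack callbacks that follow) now delegate multi-account handling to app.utils.connector_naming. The contract implied by the call sites is sketched below under assumptions; the real helpers may key duplicates and format names differently:

# Assumed contract for the connector_naming helpers, inferred from call sites only.
from sqlalchemy.future import select


async def check_duplicate_connector(
    session, connector_type, space_id, user_id, identifier
) -> bool:
    """True if this account/instance is already connected in the search space."""
    result = await session.execute(
        select(SearchSourceConnector).filter(
            SearchSourceConnector.search_space_id == space_id,
            SearchSourceConnector.user_id == user_id,
            SearchSourceConnector.connector_type == connector_type,
        )
    )
    # Assumption: the identifier appears somewhere in each connector's stored config.
    return any(
        identifier and identifier in str(c.config) for c in result.scalars().all()
    )


async def generate_unique_connector_name(
    session, connector_type, space_id, user_id, identifier
) -> str:
    """Assumed format, e.g. "Confluence Connector (acme.atlassian.net)"."""
    return f"{connector_type.value.replace('_', ' ').title()} ({identifier})"

The design point is that the type-uniqueness constraint moves out of the OAuth callbacks: duplicates are defined per account/instance identifier, so a user can connect two Confluence sites to one space without tripping the old one-per-type check.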
@@ -14,7 +14,6 @@ from fastapi.responses import RedirectResponse
 from pydantic import ValidationError
 from sqlalchemy.exc import IntegrityError
 from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.future import select

 from app.config import config
 from app.db import (

@@ -25,6 +24,11 @@ from app.db import (
 )
 from app.schemas.discord_auth_credentials import DiscordAuthCredentialsBase
 from app.users import current_active_user
+from app.utils.connector_naming import (
+    check_duplicate_connector,
+    extract_identifier_from_credentials,
+    generate_unique_connector_name,
+)
 from app.utils.oauth_security import OAuthStateManager, TokenEncryption

 logger = logging.getLogger(__name__)

@@ -284,47 +288,56 @@ async def discord_callback(
             "_token_encrypted": True,
         }

-        # Check if connector already exists for this search space and user
-        existing_connector_result = await session.execute(
-            select(SearchSourceConnector).filter(
-                SearchSourceConnector.search_space_id == space_id,
-                SearchSourceConnector.user_id == user_id,
-                SearchSourceConnector.connector_type
-                == SearchSourceConnectorType.DISCORD_CONNECTOR,
-            )
-        )
-        existing_connector = existing_connector_result.scalars().first()
-
-        if existing_connector:
-            # Update existing connector
-            existing_connector.config = connector_config
-            existing_connector.name = "Discord Connector"
-            existing_connector.is_indexable = True
-            logger.info(
-                f"Updated existing Discord connector for user {user_id} in space {space_id}"
-            )
-        else:
-            # Create new connector
-            new_connector = SearchSourceConnector(
-                name="Discord Connector",
-                connector_type=SearchSourceConnectorType.DISCORD_CONNECTOR,
-                is_indexable=True,
-                config=connector_config,
-                search_space_id=space_id,
-                user_id=user_id,
-            )
-            session.add(new_connector)
-            logger.info(
-                f"Created new Discord connector for user {user_id} in space {space_id}"
-            )
+        # Extract unique identifier from connector credentials
+        connector_identifier = extract_identifier_from_credentials(
+            SearchSourceConnectorType.DISCORD_CONNECTOR, connector_config
+        )
+        # Check for duplicate connector (same server already connected)
+        is_duplicate = await check_duplicate_connector(
+            session,
+            SearchSourceConnectorType.DISCORD_CONNECTOR,
+            space_id,
+            user_id,
+            connector_identifier,
+        )
+        if is_duplicate:
+            logger.warning(
+                f"Duplicate Discord connector detected for user {user_id} with server {connector_identifier}"
+            )
+            return RedirectResponse(
+                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=discord-connector"
+            )
+
+        # Generate a unique, user-friendly connector name
+        connector_name = await generate_unique_connector_name(
+            session,
+            SearchSourceConnectorType.DISCORD_CONNECTOR,
+            space_id,
+            user_id,
+            connector_identifier,
+        )
+        # Create new connector
+        new_connector = SearchSourceConnector(
+            name=connector_name,
+            connector_type=SearchSourceConnectorType.DISCORD_CONNECTOR,
+            is_indexable=True,
+            config=connector_config,
+            search_space_id=space_id,
+            user_id=user_id,
+        )
+        session.add(new_connector)
+        logger.info(
+            f"Created new Discord connector for user {user_id} in space {space_id}"
+        )

         try:
             await session.commit()
             logger.info(f"Successfully saved Discord connector for user {user_id}")

             # Redirect to the frontend with success params
             return RedirectResponse(
-                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=discord-connector"
+                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=discord-connector&connectorId={new_connector.id}"
            )

         except ValidationError as e:

@@ -336,7 +349,7 @@ async def discord_callback(
             await session.rollback()
             raise HTTPException(
                 status_code=409,
-                detail=f"Integrity error: A connector with this type already exists. {e!s}",
+                detail=f"Database integrity error: {e!s}",
             ) from e
         except Exception as e:
             logger.error(f"Failed to create search source connector: {e!s}")
@@ -12,9 +12,9 @@ from google_auth_oauthlib.flow import Flow
 from pydantic import ValidationError
 from sqlalchemy.exc import IntegrityError
 from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.future import select

 from app.config import config
+from app.connectors.google_gmail_connector import fetch_google_user_email
 from app.db import (
     SearchSourceConnector,
     SearchSourceConnectorType,

@@ -22,6 +22,10 @@ from app.db import (
     get_async_session,
 )
 from app.users import current_active_user
+from app.utils.connector_naming import (
+    check_duplicate_connector,
+    generate_unique_connector_name,
+)
 from app.utils.oauth_security import OAuthStateManager, TokenEncryption

 logger = logging.getLogger(__name__)

@@ -172,6 +176,9 @@ async def calendar_callback(
         creds = flow.credentials
         creds_dict = json.loads(creds.to_json())

+        # Fetch user email
+        user_email = fetch_google_user_email(creds)
+
         # Encrypt sensitive credentials before storing
         token_encryption = get_token_encryption()

@@ -190,24 +197,33 @@ async def calendar_callback(
         # Mark that credentials are encrypted for backward compatibility
         creds_dict["_token_encrypted"] = True

-        try:
-            # Check if a connector with the same type already exists for this search space and user
-            result = await session.execute(
-                select(SearchSourceConnector).filter(
-                    SearchSourceConnector.search_space_id == space_id,
-                    SearchSourceConnector.user_id == user_id,
-                    SearchSourceConnector.connector_type
-                    == SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR,
-                )
-            )
-            existing_connector = result.scalars().first()
-            if existing_connector:
-                raise HTTPException(
-                    status_code=409,
-                    detail="A GOOGLE_CALENDAR_CONNECTOR connector already exists in this search space. Each search space can have only one connector of each type per user.",
-                )
+        # Check for duplicate connector (same account already connected)
+        is_duplicate = await check_duplicate_connector(
+            session,
+            SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR,
+            space_id,
+            user_id,
+            user_email,
+        )
+        if is_duplicate:
+            logger.warning(
+                f"Duplicate Google Calendar connector detected for user {user_id} with email {user_email}"
+            )
+            return RedirectResponse(
+                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=google-calendar-connector"
+            )
+
+        try:
+            # Generate a unique, user-friendly connector name
+            connector_name = await generate_unique_connector_name(
+                session,
+                SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR,
+                space_id,
+                user_id,
+                user_email,
+            )
             db_connector = SearchSourceConnector(
-                name="Google Calendar Connector",
+                name=connector_name,
                 connector_type=SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR,
                 config=creds_dict,
                 search_space_id=space_id,

@@ -220,7 +236,7 @@ async def calendar_callback(
         # Redirect to the frontend with success params for indexing config
         # Using query params to auto-open the popup with config view on new-chat page
         return RedirectResponse(
-            f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-calendar-connector"
+            f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-calendar-connector&connectorId={db_connector.id}"
         )
     except ValidationError as e:
         await session.rollback()

@@ -231,7 +247,7 @@ async def calendar_callback(
         await session.rollback()
         raise HTTPException(
             status_code=409,
-            detail=f"Integrity error: A connector with this type already exists. {e!s}",
+            detail=f"Database integrity error: {e!s}",
         ) from e
     except HTTPException:
         await session.rollback()
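fetch_google_user_email, imported here and in the Drive and Gmail routes below, supplies the account identity that the duplicate check keys on. A plausible implementation, shown only as an assumption (the repo's actual code in app.connectors.google_gmail_connector may differ), queries the OAuth2 userinfo endpoint via google-api-python-client:

# Plausible sketch of fetch_google_user_email (assumption, not the repo's code).
from googleapiclient.discovery import build


def fetch_google_user_email(creds) -> str | None:
    """Return the authorized Google account's email, or None on failure."""
    try:
        service = build("oauth2", "v2", credentials=creds)
        info = service.userinfo().get().execute()
        return info.get("email")
    except Exception:
        # Callers treat a missing email as "identity unknown" rather than failing OAuth.
        return None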
@@ -29,6 +29,7 @@ from app.connectors.google_drive import (
     get_start_page_token,
     list_folder_contents,
 )
+from app.connectors.google_gmail_connector import fetch_google_user_email
 from app.db import (
     SearchSourceConnector,
     SearchSourceConnectorType,

@@ -36,6 +37,10 @@ from app.db import (
     get_async_session,
 )
 from app.users import current_active_user
+from app.utils.connector_naming import (
+    check_duplicate_connector,
+    generate_unique_connector_name,
+)
 from app.utils.oauth_security import OAuthStateManager, TokenEncryption

 # Relax token scope validation for Google OAuth

@@ -227,6 +232,9 @@ async def drive_callback(
         creds = flow.credentials
         creds_dict = json.loads(creds.to_json())

+        # Fetch user email
+        user_email = fetch_google_user_email(creds)
+
         # Encrypt sensitive credentials before storing
         token_encryption = get_token_encryption()

@@ -245,26 +253,33 @@ async def drive_callback(
         # Mark that credentials are encrypted for backward compatibility
         creds_dict["_token_encrypted"] = True

-        # Check if connector already exists for this space/user
-        result = await session.execute(
-            select(SearchSourceConnector).filter(
-                SearchSourceConnector.search_space_id == space_id,
-                SearchSourceConnector.user_id == user_id,
-                SearchSourceConnector.connector_type
-                == SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR,
-            )
-        )
-        existing_connector = result.scalars().first()
-
-        if existing_connector:
-            raise HTTPException(
-                status_code=409,
-                detail="A GOOGLE_DRIVE_CONNECTOR already exists in this search space. Each search space can have only one connector of each type per user.",
-            )
+        # Check for duplicate connector (same account already connected)
+        is_duplicate = await check_duplicate_connector(
+            session,
+            SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR,
+            space_id,
+            user_id,
+            user_email,
+        )
+        if is_duplicate:
+            logger.warning(
+                f"Duplicate Google Drive connector detected for user {user_id} with email {user_email}"
+            )
+            return RedirectResponse(
+                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=google-drive-connector"
+            )

         # Create new connector (NO folder selection here - happens at index time)
+        # Generate a unique, user-friendly connector name
+        connector_name = await generate_unique_connector_name(
+            session,
+            SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR,
+            space_id,
+            user_id,
+            user_email,
+        )
+
         db_connector = SearchSourceConnector(
-            name="Google Drive Connector",
+            name=connector_name,
             connector_type=SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR,
             config={
                 **creds_dict,

@@ -301,7 +316,7 @@ async def drive_callback(
         )

         return RedirectResponse(
-            url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-drive-connector"
+            url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-drive-connector&connectorId={db_connector.id}"
         )

     except HTTPException:

@@ -318,7 +333,7 @@ async def drive_callback(
         logger.error(f"Database integrity error: {e!s}", exc_info=True)
         raise HTTPException(
             status_code=409,
-            detail="A connector with this configuration already exists.",
+            detail=f"Database integrity error: {e!s}",
         ) from e
     except Exception as e:
         await session.rollback()
@@ -12,9 +12,9 @@ from google_auth_oauthlib.flow import Flow
 from pydantic import ValidationError
 from sqlalchemy.exc import IntegrityError
 from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.future import select

 from app.config import config
+from app.connectors.google_gmail_connector import fetch_google_user_email
 from app.db import (
     SearchSourceConnector,
     SearchSourceConnectorType,

@@ -22,6 +22,10 @@ from app.db import (
     get_async_session,
 )
 from app.users import current_active_user
+from app.utils.connector_naming import (
+    check_duplicate_connector,
+    generate_unique_connector_name,
+)
 from app.utils.oauth_security import OAuthStateManager, TokenEncryption

 logger = logging.getLogger(__name__)

@@ -203,6 +207,9 @@ async def gmail_callback(
         creds = flow.credentials
         creds_dict = json.loads(creds.to_json())

+        # Fetch user email
+        user_email = fetch_google_user_email(creds)
+
         # Encrypt sensitive credentials before storing
         token_encryption = get_token_encryption()

@@ -221,24 +228,33 @@ async def gmail_callback(
         # Mark that credentials are encrypted for backward compatibility
         creds_dict["_token_encrypted"] = True

-        try:
-            # Check if a connector with the same type already exists for this search space and user
-            result = await session.execute(
-                select(SearchSourceConnector).filter(
-                    SearchSourceConnector.search_space_id == space_id,
-                    SearchSourceConnector.user_id == user_id,
-                    SearchSourceConnector.connector_type
-                    == SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
-                )
-            )
-            existing_connector = result.scalars().first()
-            if existing_connector:
-                raise HTTPException(
-                    status_code=409,
-                    detail="A GOOGLE_GMAIL_CONNECTOR connector already exists in this search space. Each search space can have only one connector of each type per user.",
-                )
+        # Check for duplicate connector (same account already connected)
+        is_duplicate = await check_duplicate_connector(
+            session,
+            SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
+            space_id,
+            user_id,
+            user_email,
+        )
+        if is_duplicate:
+            logger.warning(
+                f"Duplicate Gmail connector detected for user {user_id} with email {user_email}"
+            )
+            return RedirectResponse(
+                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=google-gmail-connector"
+            )
+
+        try:
+            # Generate a unique, user-friendly connector name
+            connector_name = await generate_unique_connector_name(
+                session,
+                SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
+                space_id,
+                user_id,
+                user_email,
+            )
             db_connector = SearchSourceConnector(
-                name="Google Gmail Connector",
+                name=connector_name,
                 connector_type=SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
                 config=creds_dict,
                 search_space_id=space_id,

@@ -256,7 +272,7 @@ async def gmail_callback(
         # Redirect to the frontend with success params for indexing config
         # Using query params to auto-open the popup with config view on new-chat page
         return RedirectResponse(
-            url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-gmail-connector"
+            url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-gmail-connector&connectorId={db_connector.id}"
         )

     except IntegrityError as e:

@@ -264,7 +280,7 @@ async def gmail_callback(
         logger.error(f"Database integrity error: {e!s}")
         raise HTTPException(
             status_code=409,
-            detail="A connector with this configuration already exists.",
+            detail=f"Database integrity error: {e!s}",
         ) from e
     except ValidationError as e:
         await session.rollback()
@@ -15,7 +15,6 @@ from fastapi.responses import RedirectResponse
 from pydantic import ValidationError
 from sqlalchemy.exc import IntegrityError
 from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.future import select

 from app.config import config
 from app.db import (

@@ -26,6 +25,11 @@ from app.db import (
 )
 from app.schemas.atlassian_auth_credentials import AtlassianAuthCredentialsBase
 from app.users import current_active_user
+from app.utils.connector_naming import (
+    check_duplicate_connector,
+    extract_identifier_from_credentials,
+    generate_unique_connector_name,
+)
 from app.utils.oauth_security import OAuthStateManager, TokenEncryption

 logger = logging.getLogger(__name__)

@@ -306,47 +310,56 @@ async def jira_callback(
             "_token_encrypted": True,
         }

-        # Check if connector already exists for this search space and user
-        existing_connector_result = await session.execute(
-            select(SearchSourceConnector).filter(
-                SearchSourceConnector.search_space_id == space_id,
-                SearchSourceConnector.user_id == user_id,
-                SearchSourceConnector.connector_type
-                == SearchSourceConnectorType.JIRA_CONNECTOR,
-            )
-        )
-        existing_connector = existing_connector_result.scalars().first()
-
-        if existing_connector:
-            # Update existing connector
-            existing_connector.config = connector_config
-            existing_connector.name = "Jira Connector"
-            existing_connector.is_indexable = True
-            logger.info(
-                f"Updated existing Jira connector for user {user_id} in space {space_id}"
-            )
-        else:
-            # Create new connector
-            new_connector = SearchSourceConnector(
-                name="Jira Connector",
-                connector_type=SearchSourceConnectorType.JIRA_CONNECTOR,
-                is_indexable=True,
-                config=connector_config,
-                search_space_id=space_id,
-                user_id=user_id,
-            )
-            session.add(new_connector)
-            logger.info(
-                f"Created new Jira connector for user {user_id} in space {space_id}"
-            )
+        # Extract unique identifier from connector credentials
+        connector_identifier = extract_identifier_from_credentials(
+            SearchSourceConnectorType.JIRA_CONNECTOR, connector_config
+        )
+        # Check for duplicate connector (same Jira instance already connected)
+        is_duplicate = await check_duplicate_connector(
+            session,
+            SearchSourceConnectorType.JIRA_CONNECTOR,
+            space_id,
+            user_id,
+            connector_identifier,
+        )
+        if is_duplicate:
+            logger.warning(
+                f"Duplicate Jira connector detected for user {user_id} with instance {connector_identifier}"
+            )
+            return RedirectResponse(
+                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=jira-connector"
+            )
+
+        # Generate a unique, user-friendly connector name
+        connector_name = await generate_unique_connector_name(
+            session,
+            SearchSourceConnectorType.JIRA_CONNECTOR,
+            space_id,
+            user_id,
+            connector_identifier,
+        )
+        # Create new connector
+        new_connector = SearchSourceConnector(
+            name=connector_name,
+            connector_type=SearchSourceConnectorType.JIRA_CONNECTOR,
+            is_indexable=True,
+            config=connector_config,
+            search_space_id=space_id,
+            user_id=user_id,
+        )
+        session.add(new_connector)
+        logger.info(
+            f"Created new Jira connector for user {user_id} in space {space_id}"
+        )

         try:
             await session.commit()
             logger.info(f"Successfully saved Jira connector for user {user_id}")

             # Redirect to the frontend with success params
             return RedirectResponse(
-                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=jira-connector"
+                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=jira-connector&connectorId={new_connector.id}"
             )

         except ValidationError as e:

@@ -358,7 +371,7 @@ async def jira_callback(
             await session.rollback()
             raise HTTPException(
                 status_code=409,
-                detail=f"Integrity error: A connector with this type already exists. {e!s}",
+                detail=f"Database integrity error: {e!s}",
             ) from e
         except Exception as e:
             logger.error(f"Failed to create search source connector: {e!s}")
@@ -14,9 +14,9 @@ from fastapi.responses import RedirectResponse
 from pydantic import ValidationError
 from sqlalchemy.exc import IntegrityError
 from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.future import select

 from app.config import config
+from app.connectors.linear_connector import fetch_linear_organization_name
 from app.db import (
     SearchSourceConnector,
     SearchSourceConnectorType,

@@ -25,6 +25,10 @@ from app.db import (
 )
 from app.schemas.linear_auth_credentials import LinearAuthCredentialsBase
 from app.users import current_active_user
+from app.utils.connector_naming import (
+    check_duplicate_connector,
+    generate_unique_connector_name,
+)
 from app.utils.oauth_security import OAuthStateManager, TokenEncryption

 logger = logging.getLogger(__name__)

@@ -240,6 +244,9 @@ async def linear_callback(
                 status_code=400, detail="No access token received from Linear"
             )

+        # Fetch organization name
+        org_name = await fetch_linear_organization_name(access_token)
+
         # Calculate expiration time (UTC, tz-aware)
         expires_at = None
         if token_json.get("expires_in"):

@@ -260,39 +267,43 @@ async def linear_callback(
             "_token_encrypted": True,
         }

-        # Check if connector already exists for this search space and user
-        existing_connector_result = await session.execute(
-            select(SearchSourceConnector).filter(
-                SearchSourceConnector.search_space_id == space_id,
-                SearchSourceConnector.user_id == user_id,
-                SearchSourceConnector.connector_type
-                == SearchSourceConnectorType.LINEAR_CONNECTOR,
-            )
-        )
-        existing_connector = existing_connector_result.scalars().first()
-
-        if existing_connector:
-            # Update existing connector
-            existing_connector.config = connector_config
-            existing_connector.name = "Linear Connector"
-            existing_connector.is_indexable = True
-            logger.info(
-                f"Updated existing Linear connector for user {user_id} in space {space_id}"
-            )
-        else:
-            # Create new connector
-            new_connector = SearchSourceConnector(
-                name="Linear Connector",
-                connector_type=SearchSourceConnectorType.LINEAR_CONNECTOR,
-                is_indexable=True,
-                config=connector_config,
-                search_space_id=space_id,
-                user_id=user_id,
-            )
-            session.add(new_connector)
-            logger.info(
-                f"Created new Linear connector for user {user_id} in space {space_id}"
-            )
+        # Check for duplicate connector (same organization already connected)
+        is_duplicate = await check_duplicate_connector(
+            session,
+            SearchSourceConnectorType.LINEAR_CONNECTOR,
+            space_id,
+            user_id,
+            org_name,
+        )
+        if is_duplicate:
+            logger.warning(
+                f"Duplicate Linear connector detected for user {user_id} with org {org_name}"
+            )
+            return RedirectResponse(
+                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=linear-connector"
+            )
+
+        # Generate a unique, user-friendly connector name
+        connector_name = await generate_unique_connector_name(
+            session,
+            SearchSourceConnectorType.LINEAR_CONNECTOR,
+            space_id,
+            user_id,
+            org_name,
+        )
+        # Create new connector
+        new_connector = SearchSourceConnector(
+            name=connector_name,
+            connector_type=SearchSourceConnectorType.LINEAR_CONNECTOR,
+            is_indexable=True,
+            config=connector_config,
+            search_space_id=space_id,
+            user_id=user_id,
+        )
+        session.add(new_connector)
+        logger.info(
+            f"Created new Linear connector for user {user_id} in space {space_id}"
+        )

         try:
             await session.commit()

@@ -300,7 +311,7 @@ async def linear_callback(

         # Redirect to the frontend with success params
         return RedirectResponse(
-            url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=linear-connector"
+            url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=linear-connector&connectorId={new_connector.id}"
         )

     except ValidationError as e:

@@ -312,7 +323,7 @@ async def linear_callback(
         await session.rollback()
         raise HTTPException(
             status_code=409,
-            detail=f"Integrity error: A connector with this type already exists. {e!s}",
+            detail=f"Database integrity error: {e!s}",
         ) from e
     except Exception as e:
         logger.error(f"Failed to create search source connector: {e!s}")
@@ -14,7 +14,6 @@ from fastapi.responses import RedirectResponse
 from pydantic import ValidationError
 from sqlalchemy.exc import IntegrityError
 from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.future import select

 from app.config import config
 from app.db import (

@@ -25,6 +24,11 @@ from app.db import (
 )
 from app.schemas.notion_auth_credentials import NotionAuthCredentialsBase
 from app.users import current_active_user
+from app.utils.connector_naming import (
+    check_duplicate_connector,
+    extract_identifier_from_credentials,
+    generate_unique_connector_name,
+)
 from app.utils.oauth_security import OAuthStateManager, TokenEncryption

 logger = logging.getLogger(__name__)

@@ -262,47 +266,56 @@ async def notion_callback(
             "_token_encrypted": True,
         }

-        # Check if connector already exists for this search space and user
-        existing_connector_result = await session.execute(
-            select(SearchSourceConnector).filter(
-                SearchSourceConnector.search_space_id == space_id,
-                SearchSourceConnector.user_id == user_id,
-                SearchSourceConnector.connector_type
-                == SearchSourceConnectorType.NOTION_CONNECTOR,
-            )
-        )
-        existing_connector = existing_connector_result.scalars().first()
-
-        if existing_connector:
-            # Update existing connector
-            existing_connector.config = connector_config
-            existing_connector.name = "Notion Connector"
-            existing_connector.is_indexable = True
-            logger.info(
-                f"Updated existing Notion connector for user {user_id} in space {space_id}"
-            )
-        else:
-            # Create new connector
-            new_connector = SearchSourceConnector(
-                name="Notion Connector",
-                connector_type=SearchSourceConnectorType.NOTION_CONNECTOR,
-                is_indexable=True,
-                config=connector_config,
-                search_space_id=space_id,
-                user_id=user_id,
-            )
-            session.add(new_connector)
-            logger.info(
-                f"Created new Notion connector for user {user_id} in space {space_id}"
-            )
+        # Extract unique identifier from connector credentials
+        connector_identifier = extract_identifier_from_credentials(
+            SearchSourceConnectorType.NOTION_CONNECTOR, connector_config
+        )
+        # Check for duplicate connector (same workspace already connected)
+        is_duplicate = await check_duplicate_connector(
+            session,
+            SearchSourceConnectorType.NOTION_CONNECTOR,
+            space_id,
+            user_id,
+            connector_identifier,
+        )
+        if is_duplicate:
+            logger.warning(
+                f"Duplicate Notion connector detected for user {user_id} with workspace {connector_identifier}"
+            )
+            return RedirectResponse(
+                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=notion-connector"
+            )
+
+        # Generate a unique, user-friendly connector name
+        connector_name = await generate_unique_connector_name(
+            session,
+            SearchSourceConnectorType.NOTION_CONNECTOR,
+            space_id,
+            user_id,
+            connector_identifier,
+        )
+        # Create new connector
+        new_connector = SearchSourceConnector(
+            name=connector_name,
+            connector_type=SearchSourceConnectorType.NOTION_CONNECTOR,
+            is_indexable=True,
+            config=connector_config,
+            search_space_id=space_id,
+            user_id=user_id,
+        )
+        session.add(new_connector)
+        logger.info(
+            f"Created new Notion connector for user {user_id} in space {space_id}"
+        )

         try:
             await session.commit()
             logger.info(f"Successfully saved Notion connector for user {user_id}")

             # Redirect to the frontend with success params
             return RedirectResponse(
-                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=notion-connector"
+                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=notion-connector&connectorId={new_connector.id}"
             )

         except ValidationError as e:

@@ -314,7 +327,7 @@ async def notion_callback(
             await session.rollback()
             raise HTTPException(
                 status_code=409,
-                detail=f"Integrity error: A connector with this type already exists. {e!s}",
+                detail=f"Database integrity error: {e!s}",
             ) from e
         except Exception as e:
             logger.error(f"Failed to create search source connector: {e!s}")
@@ -7,7 +7,8 @@ PUT /search-source-connectors/{connector_id} - Update a specific connector
 DELETE /search-source-connectors/{connector_id} - Delete a specific connector
 POST /search-source-connectors/{connector_id}/index - Index content from a connector to a search space

-Note: Each search space can have only one connector of each type per user (based on search_space_id, user_id, and connector_type).
+Note: OAuth connectors (Gmail, Drive, Slack, etc.) support multiple accounts per search space.
+Non-OAuth connectors (BookStack, GitHub, etc.) are limited to one per search space.
 """

 import logging

@@ -125,6 +126,7 @@ async def create_search_source_connector(
     )

     # Check if a connector with the same type already exists for this search space
+    # (for non-OAuth connectors that don't support multiple accounts)
     result = await session.execute(
         select(SearchSourceConnector).filter(
             SearchSourceConnector.search_space_id == search_space_id,

@@ -556,6 +558,7 @@ async def index_connector_content(

     Currently supports:
     - SLACK_CONNECTOR: Indexes messages from all accessible Slack channels
+    - TEAMS_CONNECTOR: Indexes messages from all accessible Microsoft Teams channels
     - NOTION_CONNECTOR: Indexes pages from all accessible Notion pages
     - GITHUB_CONNECTOR: Indexes code and documentation from GitHub repositories
     - LINEAR_CONNECTOR: Indexes issues and comments from Linear

@@ -629,6 +632,19 @@ async def index_connector_content(
         )
         response_message = "Slack indexing started in the background."

+    elif connector.connector_type == SearchSourceConnectorType.TEAMS_CONNECTOR:
+        from app.tasks.celery_tasks.connector_tasks import (
+            index_teams_messages_task,
+        )
+
+        logger.info(
+            f"Triggering Teams indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}"
+        )
+        index_teams_messages_task.delay(
+            connector_id, search_space_id, str(user.id), indexing_from, indexing_to
+        )
+        response_message = "Teams indexing started in the background."
+
     elif connector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR:
         from app.tasks.celery_tasks.connector_tasks import index_notion_pages_task

@@ -1186,6 +1202,64 @@ async def run_discord_indexing(
         logger.error(f"Error in background Discord indexing task: {e!s}")


+async def run_teams_indexing_with_new_session(
+    connector_id: int,
+    search_space_id: int,
+    user_id: str,
+    start_date: str,
+    end_date: str,
+):
+    """
+    Create a new session and run the Microsoft Teams indexing task.
+    This prevents session leaks by creating a dedicated session for the background task.
+    """
+    async with async_session_maker() as session:
+        await run_teams_indexing(
+            session, connector_id, search_space_id, user_id, start_date, end_date
+        )
+
+
+async def run_teams_indexing(
+    session: AsyncSession,
+    connector_id: int,
+    search_space_id: int,
+    user_id: str,
+    start_date: str,
+    end_date: str,
+):
+    """
+    Background task to run Microsoft Teams indexing.
+
+    Args:
+        session: Database session
+        connector_id: ID of the Teams connector
+        search_space_id: ID of the search space
+        user_id: ID of the user
+        start_date: Start date for indexing
+        end_date: End date for indexing
+    """
+    try:
+        from app.tasks.connector_indexers.teams_indexer import index_teams_messages
+
+        # Index Teams messages without updating last_indexed_at (we'll do it separately)
+        documents_processed, error_or_warning = await index_teams_messages(
+            session=session,
+            connector_id=connector_id,
+            search_space_id=search_space_id,
+            user_id=user_id,
+            start_date=start_date,
+            end_date=end_date,
+            update_last_indexed=False,  # Don't update timestamp in the indexing function
+        )
+
+        # Update last_indexed_at after successful indexing (even if 0 new docs - they were checked)
+        await update_connector_last_indexed(session, connector_id)
+        logger.info(
+            f"Teams indexing completed successfully: {documents_processed} documents processed. {error_or_warning or ''}"
+        )
+    except Exception as e:
+        logger.error(f"Error in background Teams indexing task: {e!s}")
+
+
 # Add new helper functions for Jira indexing
 async def run_jira_indexing_with_new_session(
     connector_id: int,
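The route hands Teams indexing to Celery via index_teams_messages_task.delay(...), while the async helpers above own the database session. A minimal sketch of how such a task could bridge Celery's synchronous workers to the async helper; the task body and the celery_app import path are assumptions, not this diff's code:

# Sketch of the Celery bridge (assumed shape; import path is hypothetical).
import asyncio

from app.celery_app import celery_app  # hypothetical import path


@celery_app.task(name="index_teams_messages")
def index_teams_messages_task(
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str,
    end_date: str,
):
    # Run the async session-scoped helper to completion inside the sync worker.
    asyncio.run(
        run_teams_indexing_with_new_session(
            connector_id, search_space_id, user_id, start_date, end_date
        )
    )

Keeping session creation inside run_teams_indexing_with_new_session means the worker never shares the request's session, which is what the "prevents session leaks" docstring above is pointing at.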
@@ -14,7 +14,6 @@ from fastapi.responses import RedirectResponse
from pydantic import ValidationError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select

from app.config import config
from app.db import (

@@ -25,6 +24,11 @@ from app.db import (
)
from app.schemas.slack_auth_credentials import SlackAuthCredentialsBase
from app.users import current_active_user
from app.utils.connector_naming import (
    check_duplicate_connector,
    extract_identifier_from_credentials,
    generate_unique_connector_name,
)
from app.utils.oauth_security import OAuthStateManager, TokenEncryption

logger = logging.getLogger(__name__)

@@ -272,47 +276,57 @@ async def slack_callback(
            "_token_encrypted": True,
        }

        # Check if connector already exists for this search space and user
        existing_connector_result = await session.execute(
            select(SearchSourceConnector).filter(
                SearchSourceConnector.search_space_id == space_id,
                SearchSourceConnector.user_id == user_id,
                SearchSourceConnector.connector_type
                == SearchSourceConnectorType.SLACK_CONNECTOR,
            )
        # Extract unique identifier from connector credentials
        connector_identifier = extract_identifier_from_credentials(
            SearchSourceConnectorType.SLACK_CONNECTOR, connector_config
        )
        existing_connector = existing_connector_result.scalars().first()

        if existing_connector:
            # Update existing connector
            existing_connector.config = connector_config
            existing_connector.name = "Slack Connector"
            existing_connector.is_indexable = True
            logger.info(
                f"Updated existing Slack connector for user {user_id} in space {space_id}"
        # Check for duplicate connector (same workspace already connected)
        is_duplicate = await check_duplicate_connector(
            session,
            SearchSourceConnectorType.SLACK_CONNECTOR,
            space_id,
            user_id,
            connector_identifier,
        )
        if is_duplicate:
            logger.warning(
                f"Duplicate Slack connector detected for user {user_id} with workspace {connector_identifier}"
            )
        else:
            # Create new connector
            new_connector = SearchSourceConnector(
                name="Slack Connector",
                connector_type=SearchSourceConnectorType.SLACK_CONNECTOR,
                is_indexable=True,
                config=connector_config,
                search_space_id=space_id,
                user_id=user_id,
            )
            session.add(new_connector)
            logger.info(
                f"Created new Slack connector for user {user_id} in space {space_id}"
            return RedirectResponse(
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=slack-connector"
            )

        # Generate a unique, user-friendly connector name
        connector_name = await generate_unique_connector_name(
            session,
            SearchSourceConnectorType.SLACK_CONNECTOR,
            space_id,
            user_id,
            connector_identifier,
        )

        # Create new connector
        new_connector = SearchSourceConnector(
            name=connector_name,
            connector_type=SearchSourceConnectorType.SLACK_CONNECTOR,
            is_indexable=True,
            config=connector_config,
            search_space_id=space_id,
            user_id=user_id,
        )
        session.add(new_connector)
        logger.info(
            f"Created new Slack connector for user {user_id} in space {space_id}"
        )

        try:
            await session.commit()
            logger.info(f"Successfully saved Slack connector for user {user_id}")

            # Redirect to the frontend with success params
            return RedirectResponse(
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=slack-connector"
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=slack-connector&connectorId={new_connector.id}"
            )

        except ValidationError as e:

@@ -324,7 +338,7 @@ async def slack_callback(
            await session.rollback()
            raise HTTPException(
                status_code=409,
                detail=f"Integrity error: A connector with this type already exists. {e!s}",
                detail=f"Database integrity error: {e!s}",
            ) from e
        except Exception as e:
            logger.error(f"Failed to create search source connector: {e!s}")

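The duplicate-detection flow above is the same sequence the other OAuth callbacks in this commit follow; a condensed sketch, with a hypothetical wrapper name and the helper signatures as they appear at the call sites:

```python
# Condensed sketch of the shared OAuth-connector creation flow. The wrapper
# name is hypothetical; the connector_naming helpers are called exactly as in
# the callback above.
async def upsert_oauth_connector(session, space_id, user_id, connector_config):
    identifier = extract_identifier_from_credentials(
        SearchSourceConnectorType.SLACK_CONNECTOR, connector_config
    )
    if await check_duplicate_connector(
        session, SearchSourceConnectorType.SLACK_CONNECTOR, space_id, user_id, identifier
    ):
        return None  # caller redirects with error=duplicate_account
    name = await generate_unique_connector_name(
        session, SearchSourceConnectorType.SLACK_CONNECTOR, space_id, user_id, identifier
    )
    connector = SearchSourceConnector(
        name=name,
        connector_type=SearchSourceConnectorType.SLACK_CONNECTOR,
        is_indexable=True,
        config=connector_config,
        search_space_id=space_id,
        user_id=user_id,
    )
    session.add(connector)
    await session.commit()
    return connector
```
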
surfsense_backend/app/routes/teams_add_connector_route.py (new file, 474 lines)

@@ -0,0 +1,474 @@
"""
Microsoft Teams Connector OAuth Routes.

Handles OAuth 2.0 authentication flow for Microsoft Teams connector using Microsoft Graph API.
"""

import logging
from datetime import UTC, datetime, timedelta
from uuid import UUID

import httpx
from fastapi import APIRouter, Depends, HTTPException
from fastapi.responses import RedirectResponse
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession

from app.config import config
from app.db import (
    SearchSourceConnector,
    SearchSourceConnectorType,
    User,
    get_async_session,
)
from app.schemas.teams_auth_credentials import TeamsAuthCredentialsBase
from app.users import current_active_user
from app.utils.connector_naming import (
    check_duplicate_connector,
    extract_identifier_from_credentials,
    generate_unique_connector_name,
)
from app.utils.oauth_security import OAuthStateManager, TokenEncryption

logger = logging.getLogger(__name__)

router = APIRouter()

# Microsoft identity platform endpoints
AUTHORIZATION_URL = "https://login.microsoftonline.com/common/oauth2/v2.0/authorize"
TOKEN_URL = "https://login.microsoftonline.com/common/oauth2/v2.0/token"

# OAuth scopes for Microsoft Teams (Graph API)
SCOPES = [
    "offline_access",  # Required for refresh tokens
    "User.Read",  # Read user profile
    "Team.ReadBasic.All",  # Read basic team information
    "Channel.ReadBasic.All",  # Read basic channel information
    "ChannelMessage.Read.All",  # Read messages in channels
]

# Initialize security utilities
_state_manager = None
_token_encryption = None


def get_state_manager() -> OAuthStateManager:
    """Get or create OAuth state manager instance."""
    global _state_manager
    if _state_manager is None:
        if not config.SECRET_KEY:
            raise ValueError("SECRET_KEY must be set for OAuth security")
        _state_manager = OAuthStateManager(config.SECRET_KEY)
    return _state_manager


def get_token_encryption() -> TokenEncryption:
    """Get or create token encryption instance."""
    global _token_encryption
    if _token_encryption is None:
        if not config.SECRET_KEY:
            raise ValueError("SECRET_KEY must be set for token encryption")
        _token_encryption = TokenEncryption(config.SECRET_KEY)
    return _token_encryption


@router.get("/auth/teams/connector/add")
async def connect_teams(space_id: int, user: User = Depends(current_active_user)):
    """
    Initiate Microsoft Teams OAuth flow.

    Args:
        space_id: The search space ID
        user: Current authenticated user

    Returns:
        Authorization URL for redirect
    """
    try:
        if not space_id:
            raise HTTPException(status_code=400, detail="space_id is required")

        if not config.TEAMS_CLIENT_ID:
            raise HTTPException(
                status_code=500, detail="Microsoft Teams OAuth not configured."
            )

        if not config.SECRET_KEY:
            raise HTTPException(
                status_code=500, detail="SECRET_KEY not configured for OAuth security."
            )

        # Generate secure state parameter with HMAC signature
        state_manager = get_state_manager()
        state_encoded = state_manager.generate_secure_state(space_id, user.id)

        # Build authorization URL
        from urllib.parse import urlencode

        auth_params = {
            "client_id": config.TEAMS_CLIENT_ID,
            "response_type": "code",
            "redirect_uri": config.TEAMS_REDIRECT_URI,
            "response_mode": "query",
            "scope": " ".join(SCOPES),
            "state": state_encoded,
        }

        auth_url = f"{AUTHORIZATION_URL}?{urlencode(auth_params)}"

        logger.info(
            "Generated Microsoft Teams OAuth URL for user %s, space %s",
            user.id,
            space_id,
        )
        return {"auth_url": auth_url}

    except Exception as e:
        logger.error(
            "Failed to initiate Microsoft Teams OAuth: %s", str(e), exc_info=True
        )
        raise HTTPException(
            status_code=500,
            detail=f"Failed to initiate Microsoft Teams OAuth: {e!s}",
        ) from e


@router.get("/auth/teams/connector/callback")
async def teams_callback(
    code: str | None = None,
    error: str | None = None,
    error_description: str | None = None,
    state: str | None = None,
    session: AsyncSession = Depends(get_async_session),
):
    """
    Handle Microsoft Teams OAuth callback.

    Args:
        code: Authorization code from Microsoft (if user granted access)
        error: Error code from Microsoft (if user denied access or error occurred)
        error_description: Human-readable error description
        state: State parameter containing user/space info
        session: Database session

    Returns:
        Redirect response to frontend
    """
    try:
        # Handle OAuth errors (e.g., user denied access)
        if error:
            error_msg = error_description or error
            logger.warning("Microsoft Teams OAuth error: %s", error_msg)
            redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=teams_auth_failed&message={error_msg}"
            return RedirectResponse(url=redirect_url)

        # Validate required parameters
        if not code or not state:
            raise HTTPException(
                status_code=400, detail="Missing required OAuth parameters"
            )

        # Verify and decode state parameter
        state_manager = get_state_manager()
        try:
            data = state_manager.validate_state(state)
            space_id = data["space_id"]
            user_id = UUID(data["user_id"])
        except (HTTPException, ValueError, KeyError) as e:
            logger.error("Invalid OAuth state: %s", str(e))
            redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=invalid_state"
            return RedirectResponse(url=redirect_url)

        # Exchange authorization code for access token
        token_data = {
            "client_id": config.TEAMS_CLIENT_ID,
            "client_secret": config.TEAMS_CLIENT_SECRET,
            "code": code,
            "redirect_uri": config.TEAMS_REDIRECT_URI,
            "grant_type": "authorization_code",
        }

        async with httpx.AsyncClient() as client:
            token_response = await client.post(
                TOKEN_URL,
                data=token_data,
                headers={"Content-Type": "application/x-www-form-urlencoded"},
                timeout=30.0,
            )

        if token_response.status_code != 200:
            error_detail = token_response.text
            try:
                error_json = token_response.json()
                error_detail = error_json.get("error_description", error_detail)
            except Exception:
                pass
            raise HTTPException(
                status_code=400, detail=f"Token exchange failed: {error_detail}"
            )

        token_json = token_response.json()

        # Extract tokens from response
        access_token = token_json.get("access_token")
        refresh_token = token_json.get("refresh_token")

        if not access_token:
            raise HTTPException(
                status_code=400, detail="No access token received from Microsoft"
            )

        # Encrypt sensitive tokens before storing
        token_encryption = get_token_encryption()

        # Calculate expiration time (UTC, tz-aware)
        expires_at = None
        if token_json.get("expires_in"):
            now_utc = datetime.now(UTC)
            expires_at = now_utc + timedelta(seconds=int(token_json["expires_in"]))

        # Fetch user info from Microsoft Graph API
        user_info = {}
        tenant_info = {}
        try:
            async with httpx.AsyncClient() as client:
                # Get user profile
                user_response = await client.get(
                    "https://graph.microsoft.com/v1.0/me",
                    headers={"Authorization": f"Bearer {access_token}"},
                    timeout=30.0,
                )
                if user_response.status_code == 200:
                    user_data = user_response.json()
                    user_info = {
                        "user_id": user_data.get("id"),
                        "user_name": user_data.get("displayName"),
                        "user_email": user_data.get("mail")
                        or user_data.get("userPrincipalName"),
                    }

                # Get organization/tenant info
                org_response = await client.get(
                    "https://graph.microsoft.com/v1.0/organization",
                    headers={"Authorization": f"Bearer {access_token}"},
                    timeout=30.0,
                )
                if org_response.status_code == 200:
                    org_data = org_response.json()
                    if org_data.get("value") and len(org_data["value"]) > 0:
                        org = org_data["value"][0]
                        tenant_info = {
                            "tenant_id": org.get("id"),
                            "tenant_name": org.get("displayName"),
                        }
        except Exception as e:
            logger.warning(
                "Failed to fetch user/tenant info from Microsoft Graph: %s", str(e)
            )

        # Store the encrypted tokens and user/tenant info in connector config
        connector_config = {
            "access_token": token_encryption.encrypt_token(access_token),
            "refresh_token": token_encryption.encrypt_token(refresh_token)
            if refresh_token
            else None,
            "token_type": token_json.get("token_type", "Bearer"),
            "expires_in": token_json.get("expires_in"),
            "expires_at": expires_at.isoformat() if expires_at else None,
            "scope": token_json.get("scope"),
            "tenant_id": tenant_info.get("tenant_id"),
            "tenant_name": tenant_info.get("tenant_name"),
            "user_id": user_info.get("user_id"),
            # Mark that token is encrypted for backward compatibility
            "_token_encrypted": True,
        }

        # Extract unique identifier from connector credentials
        connector_identifier = extract_identifier_from_credentials(
            SearchSourceConnectorType.TEAMS_CONNECTOR, connector_config
        )

        # Check for duplicate connector (same tenant already connected)
        is_duplicate = await check_duplicate_connector(
            session,
            SearchSourceConnectorType.TEAMS_CONNECTOR,
            space_id,
            user_id,
            connector_identifier,
        )

        if is_duplicate:
            logger.warning(
                "Duplicate Microsoft Teams connector for user %s, space %s, tenant %s",
                user_id,
                space_id,
                tenant_info.get("tenant_name"),
            )
            redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=duplicate_connector&message=This Microsoft Teams tenant is already connected to this space"
            return RedirectResponse(url=redirect_url)

        # Generate unique connector name
        connector_name = await generate_unique_connector_name(
            session,
            SearchSourceConnectorType.TEAMS_CONNECTOR,
            space_id,
            user_id,
            connector_identifier,
        )

        # Create new connector
        new_connector = SearchSourceConnector(
            name=connector_name,
            connector_type=SearchSourceConnectorType.TEAMS_CONNECTOR,
            is_indexable=True,
            config=connector_config,
            search_space_id=space_id,
            user_id=user_id,
        )

        try:
            session.add(new_connector)
            await session.commit()
            await session.refresh(new_connector)

            logger.info(
                "Successfully created Microsoft Teams connector %s for user %s",
                new_connector.id,
                user_id,
            )

            # Redirect to frontend with success
            redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?success=teams_connected&connector_id={new_connector.id}"
            return RedirectResponse(url=redirect_url)

        except IntegrityError as e:
            await session.rollback()
            logger.error("Database integrity error creating Teams connector: %s", str(e))
            redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=connector_creation_failed"
            return RedirectResponse(url=redirect_url)

    except HTTPException:
        raise
    except (IntegrityError, ValueError) as e:
        logger.error("Teams OAuth callback error: %s", str(e), exc_info=True)
        redirect_url = f"{config.NEXT_FRONTEND_URL}/dashboard?error=teams_auth_error"
        return RedirectResponse(url=redirect_url)


async def refresh_teams_token(
    session: AsyncSession, connector: SearchSourceConnector
) -> SearchSourceConnector:
    """
    Refresh Microsoft Teams OAuth tokens.

    Args:
        session: Database session
        connector: The connector to refresh

    Returns:
        Updated connector with refreshed tokens

    Raises:
        HTTPException: If token refresh fails
    """
    logger.info(
        "Refreshing Microsoft Teams OAuth tokens for connector %s", connector.id
    )

    credentials = TeamsAuthCredentialsBase.from_dict(connector.config)

    # Decrypt tokens if they are encrypted
    token_encryption = get_token_encryption()
    is_encrypted = connector.config.get("_token_encrypted", False)
    refresh_token = credentials.refresh_token

    if is_encrypted and refresh_token:
        try:
            refresh_token = token_encryption.decrypt_token(refresh_token)
        except Exception as e:
            logger.error("Failed to decrypt refresh token: %s", str(e))
            raise HTTPException(
                status_code=500, detail="Failed to decrypt stored refresh token"
            ) from e

    if not refresh_token:
        raise HTTPException(
            status_code=400,
            detail=f"No refresh token available for connector {connector.id}",
        )

    # Microsoft uses oauth2/v2.0/token for token refresh
    refresh_data = {
        "client_id": config.TEAMS_CLIENT_ID,
        "client_secret": config.TEAMS_CLIENT_SECRET,
        "grant_type": "refresh_token",
        "refresh_token": refresh_token,
        "scope": " ".join(SCOPES),
    }

    async with httpx.AsyncClient() as client:
        token_response = await client.post(
            TOKEN_URL,
            data=refresh_data,
            headers={"Content-Type": "application/x-www-form-urlencoded"},
            timeout=30.0,
        )

    if token_response.status_code != 200:
        error_detail = token_response.text
        try:
            error_json = token_response.json()
            error_detail = error_json.get("error_description", error_detail)
        except Exception:
            pass
        raise HTTPException(
            status_code=400, detail=f"Token refresh failed: {error_detail}"
        )

    token_json = token_response.json()

    # Extract new tokens
    access_token = token_json.get("access_token")
    new_refresh_token = token_json.get("refresh_token")

    if not access_token:
        raise HTTPException(
            status_code=400, detail="No access token received from Microsoft refresh"
        )

    # Calculate expiration time (UTC, tz-aware)
    expires_at = None
    expires_in = token_json.get("expires_in")
    if expires_in:
        now_utc = datetime.now(UTC)
        expires_at = now_utc + timedelta(seconds=int(expires_in))

    # Update credentials object with encrypted tokens
    credentials.access_token = token_encryption.encrypt_token(access_token)
    if new_refresh_token:
        credentials.refresh_token = token_encryption.encrypt_token(new_refresh_token)
    credentials.expires_in = expires_in
    credentials.expires_at = expires_at
    credentials.scope = token_json.get("scope")

    # Preserve tenant/user info
    if not credentials.tenant_id:
        credentials.tenant_id = connector.config.get("tenant_id")
    if not credentials.tenant_name:
        credentials.tenant_name = connector.config.get("tenant_name")
    if not credentials.user_id:
        credentials.user_id = connector.config.get("user_id")

    # Update connector config with encrypted tokens
    credentials_dict = credentials.to_dict()
    credentials_dict["_token_encrypted"] = True
    connector.config = credentials_dict

    await session.commit()
    await session.refresh(connector)

    logger.info(
        "Successfully refreshed Microsoft Teams tokens for connector %s", connector.id
    )

    return connector

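`OAuthStateManager`'s internals are not shown in this diff; purely as an illustration of the HMAC-signed state technique the flow relies on (not the project's actual implementation, which may also carry a nonce or expiry):

```python
# Illustrative sketch of HMAC-signed OAuth state. OAuthStateManager's real
# implementation lives in app.utils.oauth_security and may differ; treat this
# as an explanation of the technique, not the project's code.
import base64
import hashlib
import hmac
import json


def sign_state(secret_key: str, space_id: int, user_id: str) -> str:
    payload = json.dumps({"space_id": space_id, "user_id": user_id}).encode()
    sig = hmac.new(secret_key.encode(), payload, hashlib.sha256).hexdigest()
    return base64.urlsafe_b64encode(payload).decode() + "." + sig


def validate_state(secret_key: str, state: str) -> dict:
    encoded, sig = state.rsplit(".", 1)
    payload = base64.urlsafe_b64decode(encoded.encode())
    expected = hmac.new(secret_key.encode(), payload, hashlib.sha256).hexdigest()
    # Constant-time comparison prevents signature-guessing via timing.
    if not hmac.compare_digest(sig, expected):
        raise ValueError("State signature mismatch")
    return json.loads(payload)
```
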
surfsense_backend/app/schemas/clickup_auth_credentials.py (new file, 85 lines)

@@ -0,0 +1,85 @@
from datetime import UTC, datetime

from pydantic import BaseModel, field_validator


class ClickUpAuthCredentialsBase(BaseModel):
    access_token: str
    refresh_token: str | None = None
    expires_in: int | None = None
    expires_at: datetime | None = None
    user_id: str | None = None
    user_email: str | None = None
    user_name: str | None = None
    workspace_id: str | None = None
    workspace_name: str | None = None

    @property
    def is_expired(self) -> bool:
        """Check if the credentials have expired."""
        if self.expires_at is None:
            return False  # Long-lived token, treat as not expired
        return self.expires_at <= datetime.now(UTC)

    @property
    def is_refreshable(self) -> bool:
        """Check if the credentials can be refreshed."""
        return self.refresh_token is not None

    def to_dict(self) -> dict:
        """Convert credentials to dictionary for storage."""
        return {
            "access_token": self.access_token,
            "refresh_token": self.refresh_token,
            "expires_in": self.expires_in,
            "expires_at": self.expires_at.isoformat() if self.expires_at else None,
            "user_id": self.user_id,
            "user_email": self.user_email,
            "user_name": self.user_name,
            "workspace_id": self.workspace_id,
            "workspace_name": self.workspace_name,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "ClickUpAuthCredentialsBase":
        """Create credentials from dictionary."""
        expires_at = None
        if data.get("expires_at"):
            expires_at = datetime.fromisoformat(data["expires_at"])

        # Convert user_id to string if it's an integer (for backward compatibility)
        user_id = data.get("user_id")
        if user_id is not None and not isinstance(user_id, str):
            user_id = str(user_id)

        # Convert workspace_id to string if it's an integer (for backward compatibility)
        workspace_id = data.get("workspace_id")
        if workspace_id is not None and not isinstance(workspace_id, str):
            workspace_id = str(workspace_id)

        return cls(
            access_token=data.get("access_token", ""),
            refresh_token=data.get("refresh_token"),
            expires_in=data.get("expires_in"),
            expires_at=expires_at,
            user_id=user_id,
            user_email=data.get("user_email"),
            user_name=data.get("user_name"),
            workspace_id=workspace_id,
            workspace_name=data.get("workspace_name"),
        )

    @field_validator("expires_at", mode="before")
    @classmethod
    def ensure_aware_utc(cls, v):
        # Strings like "2025-08-26T14:46:57.367184"
        if isinstance(v, str):
            # add +00:00 if missing tz info
            if v.endswith("Z"):
                return datetime.fromisoformat(v.replace("Z", "+00:00"))
            dt = datetime.fromisoformat(v)
            return dt if dt.tzinfo else dt.replace(tzinfo=UTC)
        # datetime objects
        if isinstance(v, datetime):
            return v if v.tzinfo else v.replace(tzinfo=UTC)
        return v

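A quick round-trip through this schema shows why the `ensure_aware_utc` validator matters: a naive ISO timestamp written by an older version still compares safely against `datetime.now(UTC)` (values below are illustrative):

```python
# Round-trip demo for ClickUpAuthCredentialsBase. The naive "expires_at"
# string (no timezone suffix) is coerced to UTC-aware by the validator, so
# is_expired can compare it against datetime.now(UTC) without a TypeError.
creds = ClickUpAuthCredentialsBase.from_dict(
    {
        "access_token": "tok_123",  # illustrative values only
        "refresh_token": "ref_456",
        "expires_at": "2025-08-26T14:46:57.367184",  # naive timestamp
        "workspace_id": 9001,  # int is normalized to "9001"
    }
)
assert creds.expires_at.tzinfo is not None
assert creds.workspace_id == "9001"
print(creds.is_expired, creds.is_refreshable)
stored = creds.to_dict()  # expires_at serialized back to an ISO string
```
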
surfsense_backend/app/schemas/teams_auth_credentials.py (new file, 79 lines)

@@ -0,0 +1,79 @@
"""
Microsoft Teams OAuth credentials schema.
"""

from datetime import UTC, datetime

from pydantic import BaseModel, field_validator


class TeamsAuthCredentialsBase(BaseModel):
    """Microsoft Teams OAuth credentials."""

    access_token: str
    refresh_token: str | None = None
    token_type: str = "Bearer"
    expires_in: int | None = None
    expires_at: datetime | None = None
    scope: str | None = None
    tenant_id: str | None = None
    tenant_name: str | None = None
    user_id: str | None = None

    @property
    def is_expired(self) -> bool:
        """Check if the credentials have expired."""
        if self.expires_at is None:
            return False
        return self.expires_at <= datetime.now(UTC)

    @property
    def is_refreshable(self) -> bool:
        """Check if the credentials can be refreshed."""
        return self.refresh_token is not None

    def to_dict(self) -> dict:
        """Convert credentials to dictionary for storage."""
        return {
            "access_token": self.access_token,
            "refresh_token": self.refresh_token,
            "token_type": self.token_type,
            "expires_in": self.expires_in,
            "expires_at": self.expires_at.isoformat() if self.expires_at else None,
            "scope": self.scope,
            "tenant_id": self.tenant_id,
            "tenant_name": self.tenant_name,
            "user_id": self.user_id,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "TeamsAuthCredentialsBase":
        """Create credentials from dictionary."""
        expires_at = None
        if data.get("expires_at"):
            expires_at = datetime.fromisoformat(data["expires_at"])

        return cls(
            access_token=data.get("access_token", ""),
            refresh_token=data.get("refresh_token"),
            token_type=data.get("token_type", "Bearer"),
            expires_in=data.get("expires_in"),
            expires_at=expires_at,
            scope=data.get("scope"),
            tenant_id=data.get("tenant_id"),
            tenant_name=data.get("tenant_name"),
            user_id=data.get("user_id"),
        )

    @field_validator("expires_at", mode="before")
    @classmethod
    def ensure_aware_utc(cls, v):
        """Ensure datetime is timezone-aware (UTC)."""
        if isinstance(v, str):
            if v.endswith("Z"):
                return datetime.fromisoformat(v.replace("Z", "+00:00"))
            dt = datetime.fromisoformat(v)
            return dt if dt.tzinfo else dt.replace(tzinfo=UTC)
        if isinstance(v, datetime):
            return v if v.tzinfo else v.replace(tzinfo=UTC)
        return v

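In practice this schema pairs with `refresh_teams_token` from the route module above; a minimal sketch of the expiry gate, assuming a loaded connector row:

```python
# Minimal sketch of the expiry gate before calling Microsoft Graph; assumes a
# loaded SearchSourceConnector and the refresh_teams_token helper shown above.
async def get_valid_teams_credentials(session, connector):
    creds = TeamsAuthCredentialsBase.from_dict(connector.config)
    if creds.is_expired:
        if not creds.is_refreshable:
            raise RuntimeError("Teams token expired and no refresh token stored")
        connector = await refresh_teams_token(session, connector)
        creds = TeamsAuthCredentialsBase.from_dict(connector.config)
    # Note: access_token is stored encrypted; decrypt via the route module's
    # get_token_encryption().decrypt_token() before using it against Graph.
    return creds
```
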
@@ -2269,6 +2269,80 @@ class ConnectorService:

        return result_object, discord_docs

    async def search_teams(
        self,
        user_query: str,
        search_space_id: int,
        top_k: int = 20,
        start_date: datetime | None = None,
        end_date: datetime | None = None,
    ) -> tuple:
        """
        Search for Microsoft Teams messages and return both the source information and langchain documents.

        Uses combined chunk-level and document-level hybrid search with RRF fusion.

        Args:
            user_query: The user's query
            search_space_id: The search space ID to search in
            top_k: Maximum number of results to return
            start_date: Optional start date for filtering documents by updated_at
            end_date: Optional end date for filtering documents by updated_at

        Returns:
            tuple: (sources_info, langchain_documents)
        """
        teams_docs = await self._combined_rrf_search(
            query_text=user_query,
            search_space_id=search_space_id,
            document_type="TEAMS_CONNECTOR",
            top_k=top_k,
            start_date=start_date,
            end_date=end_date,
        )

        # Early return if no results
        if not teams_docs:
            return {
                "id": 53,
                "name": "Microsoft Teams",
                "type": "TEAMS_CONNECTOR",
                "sources": [],
            }, []

        def _title_fn(_doc_info: dict[str, Any], metadata: dict[str, Any]) -> str:
            team_name = metadata.get("team_name", "Unknown Team")
            channel_name = metadata.get("channel_name", "Unknown Channel")
            message_date = metadata.get("start_date", "")
            title = f"Teams: {team_name} - {channel_name}"
            if message_date:
                title += f" ({message_date})"
            return title

        def _url_fn(_doc_info: dict[str, Any], metadata: dict[str, Any]) -> str:
            team_id = metadata.get("team_id", "")
            channel_id = metadata.get("channel_id", "")
            if team_id and channel_id:
                return f"https://teams.microsoft.com/l/channel/{channel_id}/General?groupId={team_id}"
            return ""

        sources_list = self._build_chunk_sources_from_documents(
            teams_docs,
            title_fn=_title_fn,
            url_fn=_url_fn,
            description_fn=lambda chunk, _doc_info, _metadata: chunk.get("content", ""),
        )

        # Create result object
        result_object = {
            "id": 53,
            "name": "Microsoft Teams",
            "type": "TEAMS_CONNECTOR",
            "sources": sources_list,
        }

        return result_object, teams_docs

    async def search_luma(
        self,
        user_query: str,

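`_combined_rrf_search` itself is defined elsewhere in this class; for reference, reciprocal rank fusion merges ranked lists by summing `1 / (k + rank)` per document. A generic sketch of the scoring step, not the class's exact implementation:

```python
# Generic reciprocal rank fusion (RRF) sketch. The actual
# ConnectorService._combined_rrf_search also handles chunk- vs
# document-level results and date filtering; this shows only the fusion math.
def rrf_merge(rankings: list[list[str]], k: int = 60) -> list[str]:
    scores: dict[str, float] = {}
    for ranking in rankings:
        for rank, doc_id in enumerate(ranking, start=1):
            scores[doc_id] = scores.get(doc_id, 0.0) + 1.0 / (k + rank)
    return sorted(scores, key=scores.get, reverse=True)


# e.g. fuse a semantic ranking with a keyword (BM25-style) ranking
fused = rrf_merge([["d1", "d2", "d3"], ["d3", "d1", "d4"]])
```
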
@@ -128,42 +128,6 @@ class DoclingService:
            logger.error(f"❌ Docling initialization failed: {e}")
            raise RuntimeError(f"Docling initialization failed: {e}") from e

    def _configure_easyocr_local_models(self):
        """Configure EasyOCR to use pre-downloaded local models."""
        try:
            import os

            import easyocr

            # Set SSL environment for EasyOCR downloads
            os.environ["CURL_CA_BUNDLE"] = ""
            os.environ["REQUESTS_CA_BUNDLE"] = ""

            # Try to use local models first, fallback to download if needed
            try:
                reader = easyocr.Reader(
                    ["en"],
                    download_enabled=False,
                    model_storage_directory="/root/.EasyOCR/model",
                )
                logger.info("✅ EasyOCR configured for local models")
                return reader
            except Exception:
                # If local models fail, allow download with SSL bypass
                logger.info(
                    "🔄 Local models failed, attempting download with SSL bypass..."
                )
                reader = easyocr.Reader(
                    ["en"],
                    download_enabled=True,
                    model_storage_directory="/root/.EasyOCR/model",
                )
                logger.info("✅ EasyOCR configured with downloaded models")
                return reader
        except Exception as e:
            logger.warning(f"⚠️ EasyOCR configuration failed: {e}")
            return None

    async def process_document(
        self, file_path: str, filename: str | None = None
    ) -> dict[str, Any]:

@@ -342,40 +342,7 @@ async def get_document_summary_llm(
    )


# Backward-compatible aliases (deprecated - will be removed in future versions)
async def get_user_llm_instance(
    session: AsyncSession, user_id: str, search_space_id: int, role: str
) -> ChatLiteLLM | None:
    """
    Deprecated: Use get_search_space_llm_instance instead.
    LLM preferences are now stored at the search space level, not per-user.
    """
    return await get_search_space_llm_instance(session, search_space_id, role)


# Legacy aliases for backward compatibility
async def get_long_context_llm(
    session: AsyncSession, search_space_id: int
) -> ChatLiteLLM | None:
    """Deprecated: Use get_document_summary_llm instead."""
    return await get_document_summary_llm(session, search_space_id)


async def get_fast_llm(
    session: AsyncSession, search_space_id: int
) -> ChatLiteLLM | None:
    """Deprecated: Use get_agent_llm instead."""
    return await get_agent_llm(session, search_space_id)


async def get_strategic_llm(
    session: AsyncSession, search_space_id: int
) -> ChatLiteLLM | None:
    """Deprecated: Use get_document_summary_llm instead."""
    return await get_document_summary_llm(session, search_space_id)


# User-based legacy aliases (LLM preferences are now per-search-space, not per-user)
# Backward-compatible alias (LLM preferences are now per-search-space, not per-user)
async def get_user_long_context_llm(
    session: AsyncSession, user_id: str, search_space_id: int
) -> ChatLiteLLM | None:

@@ -384,23 +351,3 @@ async def get_user_long_context_llm(
    The user_id parameter is ignored as LLM preferences are now per-search-space.
    """
    return await get_document_summary_llm(session, search_space_id)


async def get_user_fast_llm(
    session: AsyncSession, user_id: str, search_space_id: int
) -> ChatLiteLLM | None:
    """
    Deprecated: Use get_agent_llm instead.
    The user_id parameter is ignored as LLM preferences are now per-search-space.
    """
    return await get_agent_llm(session, search_space_id)


async def get_user_strategic_llm(
    session: AsyncSession, user_id: str, search_space_id: int
) -> ChatLiteLLM | None:
    """
    Deprecated: Use get_document_summary_llm instead.
    The user_id parameter is ignored as LLM preferences are now per-search-space.
    """
    return await get_document_summary_llm(session, search_space_id)

@@ -1,114 +0,0 @@
import datetime
from typing import Any

from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from sqlalchemy.ext.asyncio import AsyncSession

from app.services.llm_service import get_document_summary_llm


class QueryService:
    """
    Service for query-related operations, including reformulation and processing.
    """

    @staticmethod
    async def reformulate_query_with_chat_history(
        user_query: str,
        session: AsyncSession,
        search_space_id: int,
        chat_history_str: str | None = None,
    ) -> str:
        """
        Reformulate the user query using the search space's document summary LLM to make it more
        effective for information retrieval and research purposes.

        Args:
            user_query: The original user query
            session: Database session for accessing LLM configs
            search_space_id: Search Space ID to get LLM preferences
            chat_history_str: Optional chat history string

        Returns:
            str: The reformulated query
        """
        if not user_query or not user_query.strip():
            return user_query

        try:
            # Get the search space's document summary LLM instance
            llm = await get_document_summary_llm(session, search_space_id)
            if not llm:
                print(
                    f"Warning: No document summary LLM configured for search space {search_space_id}. Using original query."
                )
                return user_query

            # Create system message with instructions
            system_message = SystemMessage(
                content=f"""
Today's date: {datetime.datetime.now().strftime("%Y-%m-%d")}
You are a highly skilled AI assistant specializing in query optimization for advanced research.
Your primary objective is to transform a user's initial query into a highly effective search query.
This reformulated query will be used to retrieve information from diverse data sources.

**Chat History Context:**
{chat_history_str if chat_history_str else "No prior conversation history is available."}
If chat history is provided, analyze it to understand the user's evolving information needs and the broader context of their request. Use this understanding to refine the current query, ensuring it builds upon or clarifies previous interactions.

**Query Reformulation Guidelines:**
Your reformulated query should:
1. **Enhance Specificity and Detail:** Add precision to narrow the search focus effectively, making the query less ambiguous and more targeted.
2. **Resolve Ambiguities:** Identify and clarify vague terms or phrases. If a term has multiple meanings, orient the query towards the most likely one given the context.
3. **Expand Key Concepts:** Incorporate relevant synonyms, related terms, and alternative phrasings for core concepts. This helps capture a wider range of relevant documents.
4. **Deconstruct Complex Questions:** If the original query is multifaceted, break it down into its core searchable components or rephrase it to address each aspect clearly. The final output must still be a single, coherent query string.
5. **Optimize for Comprehensiveness:** Ensure the query is structured to uncover all essential facets of the original request, aiming for thorough information retrieval suitable for research.
6. **Maintain User Intent:** The reformulated query must stay true to the original intent of the user's query. Do not introduce new topics or shift the focus significantly.

**Crucial Constraints:**
* **Conciseness and Effectiveness:** While aiming for comprehensiveness, the reformulated query MUST be as concise as possible. Eliminate all unnecessary verbosity. Focus on essential keywords, entities, and concepts that directly contribute to effective retrieval.
* **Single, Direct Output:** Return ONLY the reformulated query itself. Do NOT include any explanations, introductory phrases (e.g., "Reformulated query:", "Here is the optimized query:"), or any other surrounding text or markdown formatting.

Your output should be a single, optimized query string, ready for immediate use in a search system.
"""
            )

            # Create human message with the user query
            human_message = HumanMessage(
                content=f"Reformulate this query for better research results: {user_query}"
            )

            # Get the response from the LLM
            response = await llm.agenerate(messages=[[system_message, human_message]])

            # Extract the reformulated query from the response
            reformulated_query = response.generations[0][0].text.strip()

            # Return the original query if the reformulation is empty
            if not reformulated_query:
                return user_query

            return reformulated_query

        except Exception as e:
            # Log the error and return the original query
            print(f"Error reformulating query: {e}")
            return user_query

    @staticmethod
    async def langchain_chat_history_to_str(chat_history: list[Any]) -> str:
        """
        Convert a list of chat history messages to a string.
        """
        chat_history_str = "<chat_history>\n"

        for chat_message in chat_history:
            if isinstance(chat_message, HumanMessage):
                chat_history_str += f"<user>{chat_message.content}</user>\n"
            elif isinstance(chat_message, AIMessage):
                chat_history_str += f"<assistant>{chat_message.content}</assistant>\n"
            elif isinstance(chat_message, SystemMessage):
                chat_history_str += f"<system>{chat_message.content}</system>\n"

        chat_history_str += "</chat_history>"
        return chat_history_str

@@ -564,6 +564,49 @@ async def _index_discord_messages(
    )


@celery_app.task(name="index_teams_messages", bind=True)
def index_teams_messages_task(
    self,
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str,
    end_date: str,
):
    """Celery task to index Microsoft Teams messages."""
    import asyncio

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    try:
        loop.run_until_complete(
            _index_teams_messages(
                connector_id, search_space_id, user_id, start_date, end_date
            )
        )
    finally:
        loop.close()


async def _index_teams_messages(
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str,
    end_date: str,
):
    """Index Microsoft Teams messages with new session."""
    from app.routes.search_source_connectors_routes import (
        run_teams_indexing,
    )

    async with get_celery_session_maker()() as session:
        await run_teams_indexing(
            session, connector_id, search_space_id, user_id, start_date, end_date
        )


@celery_app.task(name="index_luma_events", bind=True)
def index_luma_events_task(
    self,

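The Celery entry point mirrors the `BackgroundTasks` helper earlier in this commit; a caller would enqueue it roughly like this (IDs and dates are illustrative; arguments must be serializable):

```python
# Enqueue Teams indexing on a worker instead of running it in-process.
# Dates travel as "YYYY-MM-DD" strings, matching the task signature above.
index_teams_messages_task.delay(
    connector_id=42,  # illustrative IDs
    search_space_id=7,
    user_id="3f0c0d2e-0000-0000-0000-000000000000",
    start_date="2025-01-01",
    end_date="2025-01-31",
)
# Or dispatch by registered name without importing the function:
# celery_app.send_task("index_teams_messages", kwargs={...})
```
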
@@ -6,10 +6,8 @@ from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession

from app.config import config
from app.connectors.airtable_connector import AirtableConnector
from app.connectors.airtable_history import AirtableHistoryConnector
from app.db import Document, DocumentType, SearchSourceConnectorType
from app.routes.airtable_add_connector_route import refresh_airtable_token
from app.schemas.airtable_auth_credentials import AirtableAuthCredentialsBase
from app.services.llm_service import get_user_long_context_llm
from app.services.task_logging_service import TaskLoggingService
from app.utils.document_converters import (

@@ -18,7 +16,6 @@ from app.utils.document_converters import (
    generate_document_summary,
    generate_unique_identifier_hash,
)
from app.utils.oauth_security import TokenEncryption

from .base import (
    calculate_date_range,

@@ -85,76 +82,11 @@ async def index_airtable_records(
        )
        return 0, f"Connector with ID {connector_id} not found"

    # Create credentials from connector config
    config_data = (
        connector.config.copy()
    )  # Work with a copy to avoid modifying original

    # Decrypt tokens if they are encrypted (only when explicitly marked)
    token_encrypted = config_data.get("_token_encrypted", False)
    if token_encrypted:
        # Tokens are explicitly marked as encrypted, attempt decryption
        if not config.SECRET_KEY:
            await task_logger.log_task_failure(
                log_entry,
                f"SECRET_KEY not configured but tokens are marked as encrypted for connector {connector_id}",
                "Missing SECRET_KEY for token decryption",
                {"error_type": "MissingSecretKey"},
            )
            return 0, "SECRET_KEY not configured but tokens are marked as encrypted"
        try:
            token_encryption = TokenEncryption(config.SECRET_KEY)

            # Decrypt access_token
            if config_data.get("access_token"):
                config_data["access_token"] = token_encryption.decrypt_token(
                    config_data["access_token"]
                )
                logger.info(
                    f"Decrypted Airtable access token for connector {connector_id}"
                )

            # Decrypt refresh_token if present
            if config_data.get("refresh_token"):
                config_data["refresh_token"] = token_encryption.decrypt_token(
                    config_data["refresh_token"]
                )
                logger.info(
                    f"Decrypted Airtable refresh token for connector {connector_id}"
                )
        except Exception as e:
            await task_logger.log_task_failure(
                log_entry,
                f"Failed to decrypt Airtable tokens for connector {connector_id}: {e!s}",
                "Token decryption failed",
                {"error_type": "TokenDecryptionError"},
            )
            return 0, f"Failed to decrypt Airtable tokens: {e!s}"
    # If _token_encrypted is False or not set, treat tokens as plaintext

    try:
        credentials = AirtableAuthCredentialsBase.from_dict(config_data)
    except Exception as e:
        await task_logger.log_task_failure(
            log_entry,
            f"Invalid Airtable credentials in connector {connector_id}",
            str(e),
            {"error_type": "InvalidCredentials"},
        )
        return 0, f"Invalid Airtable credentials: {e!s}"

    # Check if credentials are expired
    if credentials.is_expired:
        await task_logger.log_task_failure(
            log_entry,
            f"Airtable credentials expired for connector {connector_id}",
            "Credentials expired",
            {"error_type": "ExpiredCredentials"},
        )

        connector = await refresh_airtable_token(session, connector)

        # return 0, "Airtable credentials have expired. Please re-authenticate."
    # Normalize "undefined" strings to None (from frontend)
    if start_date == "undefined" or start_date == "":
        start_date = None
    if end_date == "undefined" or end_date == "":
        end_date = None

    # Calculate date range for indexing
    start_date_str, end_date_str = calculate_date_range(

@@ -166,8 +98,9 @@ async def index_airtable_records(
        f"from {start_date_str} to {end_date_str}"
    )

    # Initialize Airtable connector
    airtable_connector = AirtableConnector(credentials)
    # Initialize Airtable history connector with auto-refresh capability
    airtable_history = AirtableHistoryConnector(session, connector_id)
    airtable_connector = await airtable_history._get_connector()
    total_processed = 0

    try:

@@ -459,47 +392,56 @@ async def index_airtable_records(
                    documents_skipped += 1
                    continue  # Skip this message and continue with others

                # Update the last_indexed_at timestamp for the connector only if requested
                total_processed = documents_indexed
                if total_processed > 0:
                    await update_connector_last_indexed(
                        session, connector, update_last_indexed
                    )
                # Accumulate total processed across all tables
                total_processed += documents_indexed

                # Final commit for any remaining documents not yet committed in batches
                logger.info(
                    f"Final commit: Total {documents_indexed} Airtable records processed"
                )
                await session.commit()
                logger.info(
                    "Successfully committed all Airtable document changes to database"
                )
                if documents_indexed > 0:
                    logger.info(
                        f"Final commit for table {table_name}: {documents_indexed} Airtable records processed"
                    )
                    await session.commit()
                    logger.info(
                        f"Successfully committed all Airtable document changes for table {table_name}"
                    )

            # Log success
            await task_logger.log_task_success(
                log_entry,
                f"Successfully completed Airtable indexing for connector {connector_id}",
                {
                    "events_processed": total_processed,
                    "documents_indexed": documents_indexed,
                    "documents_skipped": documents_skipped,
                    "skipped_messages_count": len(skipped_messages),
                },
            )
        # Update the last_indexed_at timestamp for the connector only if requested
        # (after all tables in all bases are processed)
        if total_processed > 0:
            await update_connector_last_indexed(
                session, connector, update_last_indexed
            )

        logger.info(
            f"Airtable indexing completed: {documents_indexed} new records, {documents_skipped} skipped"
        )
        return (
            total_processed,
            None,
        )  # Return None as the error message to indicate success
        # Log success after processing all bases and tables
        await task_logger.log_task_success(
            log_entry,
            f"Successfully completed Airtable indexing for connector {connector_id}",
            {
                "events_processed": total_processed,
                "documents_indexed": total_processed,
            },
        )

        logger.info(
            f"Airtable indexing completed: {total_processed} total records processed"
        )
        return (
            total_processed,
            None,
        )  # Return None as the error message to indicate success

    except Exception as e:
        logger.error(
            f"Fetching Airtable bases for connector {connector_id} failed: {e!s}",
            exc_info=True,
        )
        await task_logger.log_task_failure(
            log_entry,
            f"Failed to fetch Airtable bases for connector {connector_id}",
            str(e),
            {"error_type": type(e).__name__},
        )
        return 0, f"Failed to fetch Airtable bases: {e!s}"

    except SQLAlchemyError as db_error:
        await session.rollback()

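`AirtableHistoryConnector` is not shown in this diff; judging from its use here (`_get_connector()` returning a ready client), it presumably bundles credential loading, decryption, and refresh. A rough sketch of that pattern with hypothetical helpers (`load_connector` and `decrypt_config` do not appear under those names anywhere in this diff):

```python
# Hypothetical sketch of an auto-refreshing history connector; the real
# AirtableHistoryConnector lives in app.connectors.airtable_history and its
# actual internals are not part of this commit.
class AutoRefreshingAirtableConnector:
    def __init__(self, session, connector_id: int):
        self.session = session
        self.connector_id = connector_id

    async def _get_connector(self):
        # load_connector and decrypt_config are placeholders for whatever the
        # real implementation uses to fetch the row and decrypt its tokens.
        connector = await load_connector(self.session, self.connector_id)
        creds = AirtableAuthCredentialsBase.from_dict(decrypt_config(connector.config))
        if creds.is_expired and creds.is_refreshable:
            connector = await refresh_airtable_token(self.session, connector)
            creds = AirtableAuthCredentialsBase.from_dict(decrypt_config(connector.config))
        return AirtableConnector(creds)
```
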
@@ -2,13 +2,14 @@
ClickUp connector indexer.
"""

import contextlib
from datetime import datetime

from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession

from app.config import config
from app.connectors.clickup_connector import ClickUpConnector
from app.connectors.clickup_history import ClickUpHistoryConnector
from app.db import Document, DocumentType, SearchSourceConnectorType
from app.services.llm_service import get_user_long_context_llm
from app.services.task_logging_service import TaskLoggingService

@@ -82,26 +83,30 @@ async def index_clickup_tasks(
        )
        return 0, error_msg

    # Extract ClickUp configuration
    clickup_api_token = connector.config.get("CLICKUP_API_TOKEN")
    # Check if using OAuth (has access_token in config) or legacy (has CLICKUP_API_TOKEN)
    has_oauth = connector.config.get("access_token") is not None
    has_legacy = connector.config.get("CLICKUP_API_TOKEN") is not None

    if not clickup_api_token:
        error_msg = "ClickUp API token not found in connector configuration"
    if not has_oauth and not has_legacy:
        error_msg = "ClickUp credentials not found in connector configuration (neither OAuth nor API token)"
        await task_logger.log_task_failure(
            log_entry,
            f"ClickUp API token not found in connector config for connector {connector_id}",
            "Missing ClickUp token",
            {"error_type": "MissingToken"},
            f"ClickUp credentials not found in connector config for connector {connector_id}",
            "Missing ClickUp credentials",
            {"error_type": "MissingCredentials"},
        )
        return 0, error_msg

    await task_logger.log_task_progress(
        log_entry,
        f"Initializing ClickUp client for connector {connector_id}",
        f"Initializing ClickUp client for connector {connector_id} ({'OAuth' if has_oauth else 'API Token'})",
        {"stage": "client_initialization"},
    )

    clickup_client = ClickUpConnector(api_token=clickup_api_token)
    # Use history connector which supports both OAuth and legacy API tokens
    clickup_client = ClickUpHistoryConnector(
        session=session, connector_id=connector_id
    )

    # Get authorized workspaces
    await task_logger.log_task_progress(

@@ -110,7 +115,7 @@ async def index_clickup_tasks(
        {"stage": "workspace_fetching"},
    )

    workspaces_response = clickup_client.get_authorized_workspaces()
    workspaces_response = await clickup_client.get_authorized_workspaces()
    workspaces = workspaces_response.get("teams", [])

    if not workspaces:

@@ -141,7 +146,7 @@ async def index_clickup_tasks(

            # Fetch tasks for date range if provided
            if start_date and end_date:
                tasks, error = clickup_client.get_tasks_in_date_range(
                tasks, error = await clickup_client.get_tasks_in_date_range(
                    workspace_id=workspace_id,
                    start_date=start_date,
                    end_date=end_date,

@@ -153,7 +158,7 @@ async def index_clickup_tasks(
                    )
                    continue
            else:
                tasks = clickup_client.get_workspace_tasks(
                tasks = await clickup_client.get_workspace_tasks(
                    workspace_id=workspace_id, include_closed=True
                )

@@ -393,10 +398,21 @@ async def index_clickup_tasks(
        logger.info(
            f"clickup indexing completed: {documents_indexed} new tasks, {documents_skipped} skipped"
        )

        # Close client connection
        try:
            await clickup_client.close()
        except Exception as e:
            logger.warning(f"Error closing ClickUp client: {e!s}")

        return total_processed, None

    except SQLAlchemyError as db_error:
        await session.rollback()
        # Clean up the connector in case of error
        if "clickup_client" in locals():
            with contextlib.suppress(Exception):
                await clickup_client.close()
        await task_logger.log_task_failure(
            log_entry,
            f"Database error during ClickUp indexing for connector {connector_id}",

@@ -407,6 +423,10 @@ async def index_clickup_tasks(
        return 0, f"Database error: {db_error!s}"
    except Exception as e:
        await session.rollback()
        # Clean up the connector in case of error
        if "clickup_client" in locals():
            with contextlib.suppress(Exception):
                await clickup_client.close()
        await task_logger.log_task_failure(
            log_entry,
            f"Failed to index ClickUp tasks for connector {connector_id}",

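The recurring cleanup change in these indexers swaps `try/except/pass` for `contextlib.suppress`, which behaves identically for best-effort cleanup; a self-contained demonstration:

```python
import asyncio
import contextlib


class FlakyClient:
    async def close(self) -> None:
        raise RuntimeError("connection already gone")


async def main() -> None:
    client = FlakyClient()
    # Equivalent to: try: await client.close() / except Exception: pass,
    # but stated as a single suppression block.
    with contextlib.suppress(Exception):
        await client.close()
    print("cleanup error swallowed, flow continues")


asyncio.run(main())
```
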
@@ -2,6 +2,7 @@
Confluence connector indexer.
"""

import contextlib
from datetime import datetime

from sqlalchemy.exc import SQLAlchemyError

@@ -142,10 +143,8 @@ async def index_confluence_pages(
                )
                # Close client before returning
                if confluence_client:
                    try:
                    with contextlib.suppress(Exception):
                        await confluence_client.close()
                    except Exception:
                        pass
                return 0, None
            else:
                await task_logger.log_task_failure(

@@ -156,10 +155,8 @@ async def index_confluence_pages(
                )
                # Close client on error
                if confluence_client:
                    try:
                    with contextlib.suppress(Exception):
                        await confluence_client.close()
                    except Exception:
                        pass
                return 0, f"Failed to get Confluence pages: {error}"

        logger.info(f"Retrieved {len(pages)} pages from Confluence API")

@@ -168,10 +165,8 @@ async def index_confluence_pages(
        logger.error(f"Error fetching Confluence pages: {e!s}", exc_info=True)
        # Close client on error
        if confluence_client:
            try:
            with contextlib.suppress(Exception):
                await confluence_client.close()
            except Exception:
                pass
        return 0, f"Error fetching Confluence pages: {e!s}"

    # Process and index each page

@@ -437,10 +432,8 @@ async def index_confluence_pages(
        await session.rollback()
        # Close client if it exists
        if confluence_client:
            try:
            with contextlib.suppress(Exception):
                await confluence_client.close()
            except Exception:
                pass
        await task_logger.log_task_failure(
            log_entry,
            f"Database error during Confluence indexing for connector {connector_id}",

@@ -453,10 +446,8 @@ async def index_confluence_pages(
        await session.rollback()
        # Close client if it exists
        if confluence_client:
            try:
            with contextlib.suppress(Exception):
                await confluence_client.close()
            except Exception:
                pass
        await task_logger.log_task_failure(
            log_entry,
            f"Failed to index Confluence pages for connector {connector_id}",

@@ -2,6 +2,7 @@
Jira connector indexer.
"""

import contextlib
from datetime import datetime

from sqlalchemy.exc import SQLAlchemyError

@@ -413,10 +414,8 @@ async def index_jira_issues(
        logger.error(f"Database error: {db_error!s}", exc_info=True)
        # Clean up the connector in case of error
        if "jira_client" in locals():
            try:
            with contextlib.suppress(Exception):
                await jira_client.close()
            except Exception:
                pass
        return 0, f"Database error: {db_error!s}"
    except Exception as e:
        await session.rollback()

@@ -429,8 +428,6 @@ async def index_jira_issues(
        logger.error(f"Failed to index JIRA issues: {e!s}", exc_info=True)
        # Clean up the connector in case of error
        if "jira_client" in locals():
            try:
            with contextlib.suppress(Exception):
                await jira_client.close()
            except Exception:
                pass
        return 0, f"Failed to index JIRA issues: {e!s}"

surfsense_backend/app/tasks/connector_indexers/teams_indexer.py (new file, 473 lines)

@@ -0,0 +1,473 @@
"""
Microsoft Teams connector indexer.
"""

from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession

from app.config import config
from app.connectors.teams_history import TeamsHistory
from app.db import Document, DocumentType, SearchSourceConnectorType
from app.services.task_logging_service import TaskLoggingService
from app.utils.document_converters import (
    create_document_chunks,
    generate_content_hash,
    generate_unique_identifier_hash,
)

from .base import (
    build_document_metadata_markdown,
    calculate_date_range,
    check_document_by_unique_identifier,
    get_connector_by_id,
    get_current_timestamp,
    logger,
    update_connector_last_indexed,
)


async def index_teams_messages(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str | None = None,
    end_date: str | None = None,
    update_last_indexed: bool = True,
) -> tuple[int, str | None]:
    """
    Index Microsoft Teams messages from all accessible teams and channels.

    Args:
        session: Database session
        connector_id: ID of the Teams connector
        search_space_id: ID of the search space to store documents in
        user_id: ID of the user
        start_date: Start date for indexing (YYYY-MM-DD format)
        end_date: End date for indexing (YYYY-MM-DD format)
        update_last_indexed: Whether to update the last_indexed_at timestamp (default: True)

    Returns:
        Tuple containing (number of documents indexed, error message or None)
    """
    task_logger = TaskLoggingService(session, search_space_id)

    # Log task start
    log_entry = await task_logger.log_task_start(
        task_name="teams_messages_indexing",
        source="connector_indexing_task",
        message=f"Starting Microsoft Teams messages indexing for connector {connector_id}",
        metadata={
            "connector_id": connector_id,
            "user_id": str(user_id),
            "start_date": start_date,
            "end_date": end_date,
        },
    )

    try:
        # Get the connector
        await task_logger.log_task_progress(
            log_entry,
            f"Retrieving Teams connector {connector_id} from database",
            {"stage": "connector_retrieval"},
        )

        connector = await get_connector_by_id(
            session, connector_id, SearchSourceConnectorType.TEAMS_CONNECTOR
        )

        if not connector:
            await task_logger.log_task_failure(
                log_entry,
                f"Connector with ID {connector_id} not found or is not a Teams connector",
                "Connector not found",
                {"error_type": "ConnectorNotFound"},
            )
            return (
                0,
                f"Connector with ID {connector_id} not found or is not a Teams connector",
            )

        # Initialize Teams client with auto-refresh support
        await task_logger.log_task_progress(
            log_entry,
            f"Initializing Teams client for connector {connector_id}",
            {"stage": "client_initialization"},
        )

        teams_client = TeamsHistory(session=session, connector_id=connector_id)

        # Handle 'undefined' string from frontend (treat as None)
        if start_date == "undefined" or start_date == "":
            start_date = None
        if end_date == "undefined" or end_date == "":
            end_date = None

        # Calculate date range
        await task_logger.log_task_progress(
            log_entry,
            "Calculating date range for Teams indexing",
            {
                "stage": "date_calculation",
                "provided_start_date": start_date,
                "provided_end_date": end_date,
            },
        )

        start_date_str, end_date_str = calculate_date_range(
            connector, start_date, end_date, default_days_back=365
        )

        logger.info(
            "Indexing Teams messages from %s to %s", start_date_str, end_date_str
        )

        await task_logger.log_task_progress(
            log_entry,
            f"Fetching Teams from {start_date_str} to {end_date_str}",
            {
                "stage": "fetch_teams",
                "start_date": start_date_str,
                "end_date": end_date_str,
            },
        )

        # Get all teams
        try:
            teams = await teams_client.get_all_teams()
        except Exception as e:
            await task_logger.log_task_failure(
                log_entry,
                f"Failed to get Teams for connector {connector_id}",
                str(e),
                {"error_type": "TeamsFetchError"},
            )
            return 0, f"Failed to get Teams: {e!s}"

        if not teams:
            await task_logger.log_task_success(
                log_entry,
                f"No Teams found for connector {connector_id}",
                {"teams_found": 0},
            )
            return 0, "No Teams found"

        # Track the number of documents indexed
        documents_indexed = 0
        documents_skipped = 0
        skipped_channels = []

        await task_logger.log_task_progress(
            log_entry,
            f"Starting to process {len(teams)} Teams",
            {"stage": "process_teams", "total_teams": len(teams)},
        )

        # Convert date strings to datetime objects for filtering
        from datetime import datetime, timezone

        start_datetime = None
        end_datetime = None
        if start_date_str:
            # Parse as naive datetime and make it timezone-aware (UTC)
            start_datetime = datetime.strptime(start_date_str, "%Y-%m-%d").replace(
                tzinfo=timezone.utc
            )
        if end_date_str:
            # Parse as naive datetime, set to end of day, and make it timezone-aware (UTC)
            end_datetime = datetime.strptime(end_date_str, "%Y-%m-%d").replace(
                hour=23, minute=59, second=59, tzinfo=timezone.utc
            )

        # Process each team
        for team in teams:
            team_id = team.get("id")
            team_name = team.get("displayName", "Unknown Team")

            try:
                # Get channels for this team
                channels = await teams_client.get_channels_for_team(team_id)

                if not channels:
                    logger.info("No channels found in team %s", team_name)
                    continue

                # Process each channel in the team
                for channel in channels:
                    channel_id = channel.get("id")
                    channel_name = channel.get("displayName", "Unknown Channel")

                    try:
                        # Get messages for this channel
                        messages = await teams_client.get_messages_from_channel(
                            team_id,
                            channel_id,
                            start_datetime,
                            end_datetime,
                            include_replies=True,
                        )

                        if not messages:
                            logger.info(
                                "No messages found in channel %s of team %s for the specified date range.",
                                channel_name,
                                team_name,
                            )
                            documents_skipped += 1
                            continue

                        # Process each message
                        for msg in messages:
                            # Skip deleted messages or empty content
                            if msg.get("deletedDateTime"):
                                continue

                            # Extract message details
                            message_id = msg.get("id", "")
                            created_datetime = msg.get("createdDateTime", "")
                            from_user = msg.get("from", {})
                            user_name = from_user.get("user", {}).get(
                                "displayName", "Unknown User"
                            )
                            user_email = from_user.get("user", {}).get(
                                "userPrincipalName", "Unknown Email"
                            )

                            # Extract message content
                            body = msg.get("body", {})
                            content_type = body.get("contentType", "text")
                            msg_text = body.get("content", "")

                            # Skip empty messages
                            if not msg_text or msg_text.strip() == "":
                                continue

                            # Format document metadata
                            metadata_sections = [
                                (
                                    "METADATA",
                                    [
                                        f"TEAM_NAME: {team_name}",
                                        f"TEAM_ID: {team_id}",
                                        f"CHANNEL_NAME: {channel_name}",
                                        f"CHANNEL_ID: {channel_id}",
                                        f"MESSAGE_TIMESTAMP: {created_datetime}",
                                        f"MESSAGE_USER_NAME: {user_name}",
                                        f"MESSAGE_USER_EMAIL: {user_email}",
                                        f"CONTENT_TYPE: {content_type}",
                                    ],
                                ),
                                (
                                    "CONTENT",
                                    [
                                        f"FORMAT: {content_type}",
                                        "TEXT_START",
                                        msg_text,
                                        "TEXT_END",
                                    ],
                                ),
                            ]

                            # Build the document string
                            combined_document_string = build_document_metadata_markdown(
                                metadata_sections
                            )

                            # Generate unique identifier hash for this Teams message
                            unique_identifier = f"{team_id}_{channel_id}_{message_id}"
                            unique_identifier_hash = generate_unique_identifier_hash(
                                DocumentType.TEAMS_CONNECTOR,
                                unique_identifier,
                                search_space_id,
                            )

                            # Generate content hash
                            content_hash = generate_content_hash(
                                combined_document_string, search_space_id
                            )

                            # Check if document with this unique identifier already exists
                            existing_document = (
                                await check_document_by_unique_identifier(
                                    session, unique_identifier_hash
                                )
                            )

                            if existing_document:
                                # Document exists - check if content has changed
                                if existing_document.content_hash == content_hash:
                                    logger.info(
                                        "Document for Teams message %s in channel %s unchanged. Skipping.",
                                        message_id,
                                        channel_name,
                                    )
                                    documents_skipped += 1
                                    continue
                                else:
                                    # Content has changed - update the existing document
                                    logger.info(
                                        "Content changed for Teams message %s in channel %s. Updating document.",
                                        message_id,
                                        channel_name,
                                    )

                                    # Update chunks and embedding
                                    chunks = await create_document_chunks(
                                        combined_document_string
                                    )
                                    doc_embedding = config.embedding_model_instance.embed(
                                        combined_document_string
                                    )

                                    # Update existing document
                                    existing_document.content = combined_document_string
                                    existing_document.content_hash = content_hash
                                    existing_document.embedding = doc_embedding
                                    existing_document.document_metadata = {
                                        "team_name": team_name,
                                        "team_id": team_id,
                                        "channel_name": channel_name,
                                        "channel_id": channel_id,
                                        "start_date": start_date_str,
                                        "end_date": end_date_str,
                                        "message_count": len(messages),
                                        "indexed_at": datetime.now().strftime(
                                            "%Y-%m-%d %H:%M:%S"
                                        ),
                                    }

                                    # Delete old chunks and add new ones
                                    existing_document.chunks = chunks
                                    existing_document.updated_at = get_current_timestamp()

                                    documents_indexed += 1
                                    logger.info(
                                        "Successfully updated Teams message %s", message_id
                                    )
                                    continue

                            # Document doesn't exist - create new one
                            # Process chunks
                            chunks = await create_document_chunks(
                                combined_document_string
                            )
                            doc_embedding = config.embedding_model_instance.embed(
                                combined_document_string
                            )

                            # Create and store new document
                            document = Document(
                                search_space_id=search_space_id,
                                title=f"Teams - {team_name} - {channel_name}",
                                document_type=DocumentType.TEAMS_CONNECTOR,
                                document_metadata={
                                    "team_name": team_name,
                                    "team_id": team_id,
                                    "channel_name": channel_name,
                                    "channel_id": channel_id,
                                    "start_date": start_date_str,
                                    "end_date": end_date_str,
                                    "message_count": len(messages),
                                    "indexed_at": datetime.now().strftime(
                                        "%Y-%m-%d %H:%M:%S"
                                    ),
                                },
                                content=combined_document_string,
                                embedding=doc_embedding,
                                chunks=chunks,
                                content_hash=content_hash,
                                unique_identifier_hash=unique_identifier_hash,
                                updated_at=get_current_timestamp(),
                            )

                            session.add(document)
                            documents_indexed += 1

                            # Batch commit every 10 documents
                            if documents_indexed % 10 == 0:
                                logger.info(
                                    "Committing batch: %s Teams messages processed so far",
                                    documents_indexed,
                                )
                                await session.commit()

                        logger.info(
                            "Successfully indexed channel %s in team %s with %s messages",
                            channel_name,
                            team_name,
                            len(messages),
                        )

                    except Exception as e:
                        logger.error(
                            "Error processing channel %s in team %s: %s",
                            channel_name,
                            team_name,
                            str(e),
                        )
                        skipped_channels.append(
                            f"{team_name}/{channel_name} (processing error)"
                        )
                        documents_skipped += 1
                        continue

            except Exception as e:
                logger.error("Error processing team %s: %s", team_name, str(e))
                continue

        # Update the last_indexed_at timestamp for the connector only if requested
        # and if we successfully indexed at least one document
        total_processed = documents_indexed
        if total_processed > 0:
            await update_connector_last_indexed(session, connector, update_last_indexed)

        # Final commit for any remaining documents not yet committed in batches
        logger.info(
            "Final commit: Total %s Teams messages processed", documents_indexed
        )
        await session.commit()

        # Prepare result message
        result_message = None
        if skipped_channels:
            result_message = f"Processed {total_processed} messages. Skipped {len(skipped_channels)} channels: {', '.join(skipped_channels)}"
        else:
            result_message = f"Processed {total_processed} messages."

        # Log success
        await task_logger.log_task_success(
            log_entry,
            f"Successfully completed Teams indexing for connector {connector_id}",
            {
                "messages_processed": total_processed,
                "documents_indexed": documents_indexed,
                "documents_skipped": documents_skipped,
                "skipped_channels_count": len(skipped_channels),
                "result_message": result_message,
            },
        )

        logger.info(
            "Teams indexing completed: %s new messages, %s skipped",
            documents_indexed,
            documents_skipped,
        )
        return total_processed, result_message

    except SQLAlchemyError as db_error:
        await session.rollback()
        await task_logger.log_task_failure(
            log_entry,
            f"Database error during Teams indexing for connector {connector_id}",
            str(db_error),
            {"error_type": "SQLAlchemyError"},
        )
        logger.error("Database error: %s", str(db_error))
        return 0, f"Database error: {db_error!s}"
    except Exception as e:
        await session.rollback()
        await task_logger.log_task_failure(
            log_entry,
            f"Failed to index Teams messages for connector {connector_id}",
            str(e),
            {"error_type": type(e).__name__},
        )
        logger.error("Failed to index Teams messages: %s", str(e))
        return 0, f"Failed to index Teams messages: {e!s}"
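As a usage sketch, the new indexer could be driven from an async entry point roughly as follows; the session factory name and the IDs are illustrative assumptions, not part of this diff:

import asyncio

from app.db import async_session_maker  # assumed session factory name
from app.tasks.connector_indexers.teams_indexer import index_teams_messages


async def run_teams_indexing() -> None:
    # Illustrative IDs only
    async with async_session_maker() as session:
        indexed, error = await index_teams_messages(
            session=session,
            connector_id=1,
            search_space_id=1,
            user_id="00000000-0000-0000-0000-000000000000",
            start_date="2024-01-01",
            end_date="2024-12-31",
        )
        print(f"Indexed {indexed} Teams messages; error: {error}")


asyncio.run(run_teams_indexing())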
surfsense_backend/app/utils/connector_naming.py (new file, 193 lines)
@@ -0,0 +1,193 @@
"""
Connector Naming Utilities.

Provides functions for generating unique, user-friendly connector names.
"""

from typing import Any
from urllib.parse import urlparse
from uuid import UUID

from sqlalchemy import func
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select

from app.db import SearchSourceConnector, SearchSourceConnectorType

# Friendly display names for connector types
BASE_NAME_FOR_TYPE = {
    SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR: "Gmail",
    SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR: "Google Drive",
    SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR: "Google Calendar",
    SearchSourceConnectorType.SLACK_CONNECTOR: "Slack",
    SearchSourceConnectorType.TEAMS_CONNECTOR: "Microsoft Teams",
    SearchSourceConnectorType.NOTION_CONNECTOR: "Notion",
    SearchSourceConnectorType.LINEAR_CONNECTOR: "Linear",
    SearchSourceConnectorType.JIRA_CONNECTOR: "Jira",
    SearchSourceConnectorType.DISCORD_CONNECTOR: "Discord",
    SearchSourceConnectorType.CONFLUENCE_CONNECTOR: "Confluence",
    SearchSourceConnectorType.AIRTABLE_CONNECTOR: "Airtable",
}


def get_base_name_for_type(connector_type: SearchSourceConnectorType) -> str:
    """Get a friendly display name for a connector type."""
    return BASE_NAME_FOR_TYPE.get(
        connector_type, connector_type.replace("_", " ").title()
    )


def extract_identifier_from_credentials(
    connector_type: SearchSourceConnectorType,
    credentials: dict[str, Any],
) -> str | None:
    """
    Extract a unique identifier from connector credentials.

    Args:
        connector_type: The type of connector
        credentials: The connector credentials dict

    Returns:
        Identifier string (workspace name, email, etc.) or None
    """
    if connector_type == SearchSourceConnectorType.SLACK_CONNECTOR:
        return credentials.get("team_name")

    if connector_type == SearchSourceConnectorType.TEAMS_CONNECTOR:
        return credentials.get("tenant_name")

    if connector_type == SearchSourceConnectorType.NOTION_CONNECTOR:
        return credentials.get("workspace_name")

    if connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR:
        return credentials.get("guild_name")

    if connector_type in (
        SearchSourceConnectorType.JIRA_CONNECTOR,
        SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
    ):
        base_url = credentials.get("base_url", "")
        if base_url:
            try:
                parsed = urlparse(base_url)
                hostname = parsed.netloc or parsed.path
                if ".atlassian.net" in hostname:
                    return hostname.replace(".atlassian.net", "")
                return hostname
            except Exception:
                pass
        return None

    # Google, Linear, Airtable require API calls - return None
    return None


def generate_connector_name_with_identifier(
    connector_type: SearchSourceConnectorType,
    identifier: str | None,
) -> str:
    """
    Generate a connector name with an identifier.

    Args:
        connector_type: The type of connector
        identifier: User identifier (email, workspace name, etc.)

    Returns:
        Name like "Gmail - john@example.com" or just "Gmail" if no identifier
    """
    base = get_base_name_for_type(connector_type)
    if identifier:
        return f"{base} - {identifier}"
    return base


async def count_connectors_of_type(
    session: AsyncSession,
    connector_type: SearchSourceConnectorType,
    search_space_id: int,
    user_id: UUID,
) -> int:
    """Count existing connectors of a type for a user in a search space."""
    result = await session.execute(
        select(func.count(SearchSourceConnector.id)).where(
            SearchSourceConnector.connector_type == connector_type,
            SearchSourceConnector.search_space_id == search_space_id,
            SearchSourceConnector.user_id == user_id,
        )
    )
    return result.scalar() or 0


async def check_duplicate_connector(
    session: AsyncSession,
    connector_type: SearchSourceConnectorType,
    search_space_id: int,
    user_id: UUID,
    identifier: str | None,
) -> bool:
    """
    Check if a connector with the same identifier already exists.

    Args:
        session: Database session
        connector_type: The type of connector
        search_space_id: The search space ID
        user_id: The user ID
        identifier: User identifier (email, workspace name, etc.)

    Returns:
        True if a duplicate exists, False otherwise
    """
    if not identifier:
        return False

    expected_name = f"{get_base_name_for_type(connector_type)} - {identifier}"
    result = await session.execute(
        select(func.count(SearchSourceConnector.id)).where(
            SearchSourceConnector.connector_type == connector_type,
            SearchSourceConnector.search_space_id == search_space_id,
            SearchSourceConnector.user_id == user_id,
            SearchSourceConnector.name == expected_name,
        )
    )
    return (result.scalar() or 0) > 0


async def generate_unique_connector_name(
    session: AsyncSession,
    connector_type: SearchSourceConnectorType,
    search_space_id: int,
    user_id: UUID,
    identifier: str | None = None,
) -> str:
    """
    Generate a unique connector name.

    If an identifier is provided (email, workspace name, etc.), uses it with base name.
    Otherwise, falls back to counting existing connectors for uniqueness.

    Args:
        session: Database session
        connector_type: The type of connector
        search_space_id: The search space ID
        user_id: The user ID
        identifier: Optional user identifier (email, workspace name, etc.)

    Returns:
        Unique name like "Gmail - john@example.com" or "Gmail (2)"
    """
    base = get_base_name_for_type(connector_type)

    if identifier:
        return f"{base} - {identifier}"

    # Fallback: use counter for uniqueness
    count = await count_connectors_of_type(
        session, connector_type, search_space_id, user_id
    )

    if count == 0:
        return base
    return f"{base} ({count + 1})"
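As a usage sketch, a connector-creation handler might combine these helpers as below; the function name, session object, and credential payload shape are illustrative assumptions:

from app.db import SearchSourceConnectorType
from app.utils.connector_naming import (
    check_duplicate_connector,
    extract_identifier_from_credentials,
    generate_unique_connector_name,
)


async def name_new_slack_connector(session, search_space_id, user_id, credentials) -> str:
    # Derive a stable identifier from the stored credentials (assumed shape: {"team_name": ...})
    identifier = extract_identifier_from_credentials(
        SearchSourceConnectorType.SLACK_CONNECTOR, credentials
    )

    # Reject reconnecting the same workspace twice
    if await check_duplicate_connector(
        session, SearchSourceConnectorType.SLACK_CONNECTOR, search_space_id, user_id, identifier
    ):
        raise ValueError("This Slack workspace is already connected")

    # Produces "Slack - <workspace>" with an identifier, else "Slack" / "Slack (2)"
    return await generate_unique_connector_name(
        session, SearchSourceConnectorType.SLACK_CONNECTOR, search_space_id, user_id, identifier
    )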
@@ -222,88 +222,6 @@ async def convert_document_to_markdown(elements):
     return "".join(markdown_parts)


-def convert_chunks_to_langchain_documents(chunks):
-    """
-    Convert chunks from hybrid search results to LangChain Document objects.
-
-    Args:
-        chunks: List of chunk dictionaries from hybrid search results
-
-    Returns:
-        List of LangChain Document objects
-    """
-    try:
-        from langchain_core.documents import Document as LangChainDocument
-    except ImportError:
-        raise ImportError(
-            "LangChain is not installed. Please install it with `pip install langchain langchain-core`"
-        ) from None
-
-    langchain_docs = []
-
-    for chunk in chunks:
-        # Extract content from the chunk
-        content = chunk.get("content", "")
-
-        # Create metadata dictionary
-        metadata = {
-            "chunk_id": chunk.get("chunk_id"),
-            "score": chunk.get("score"),
-            "rank": chunk.get("rank") if "rank" in chunk else None,
-        }
-
-        # Add document information to metadata
-        if "document" in chunk:
-            doc = chunk["document"]
-            metadata.update(
-                {
-                    "document_id": doc.get("id"),
-                    "document_title": doc.get("title"),
-                    "document_type": doc.get("document_type"),
-                }
-            )
-
-            # Add document metadata if available
-            if "metadata" in doc:
-                # Prefix document metadata keys to avoid conflicts
-                doc_metadata = {
-                    f"doc_meta_{k}": v for k, v in doc.get("metadata", {}).items()
-                }
-                metadata.update(doc_metadata)
-
-            # Add source URL if available in metadata
-            if "url" in doc.get("metadata", {}):
-                metadata["source"] = doc["metadata"]["url"]
-            elif "sourceURL" in doc.get("metadata", {}):
-                metadata["source"] = doc["metadata"]["sourceURL"]
-
-        # Ensure source_id is set for citation purposes
-        # Use document_id as the source_id if available
-        if "document_id" in metadata:
-            metadata["source_id"] = metadata["document_id"]
-
-        # Update content for citation mode - format as XML with explicit source_id
-        new_content = f"""
-<document>
-    <metadata>
-        <source_id>{metadata.get("source_id", metadata.get("document_id", "unknown"))}</source_id>
-    </metadata>
-    <content>
-        <text>
-{content}
-        </text>
-    </content>
-</document>
-"""
-
-        # Create LangChain Document
-        langchain_doc = LangChainDocument(page_content=new_content, metadata=metadata)
-
-        langchain_docs.append(langchain_doc)
-
-    return langchain_docs
-
-
 def generate_content_hash(content: str, search_space_id: int) -> str:
     """Generate SHA-256 hash for the given content combined with search space ID."""
     combined_data = f"{search_space_id}:{content}"
@@ -19,6 +19,7 @@ logger = logging.getLogger(__name__)
 # Mapping of connector types to their corresponding Celery task names
 CONNECTOR_TASK_MAP = {
     SearchSourceConnectorType.SLACK_CONNECTOR: "index_slack_messages",
+    SearchSourceConnectorType.TEAMS_CONNECTOR: "index_teams_messages",
     SearchSourceConnectorType.NOTION_CONNECTOR: "index_notion_pages",
     SearchSourceConnectorType.GITHUB_CONNECTOR: "index_github_repos",
     SearchSourceConnectorType.LINEAR_CONNECTOR: "index_linear_issues",
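A dispatch through this map might look like the following; the celery_app handle and the kwargs are assumptions for illustration, not code from this diff:

# Hypothetical dispatch based on CONNECTOR_TASK_MAP
task_name = CONNECTOR_TASK_MAP.get(connector.connector_type)
if task_name is None:
    logger.warning("No indexing task registered for %s", connector.connector_type)
else:
    celery_app.send_task(
        task_name,
        kwargs={
            "connector_id": connector.id,
            "search_space_id": search_space_id,
            "user_id": str(user_id),
        },
    )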
@@ -551,7 +551,7 @@ def validate_connector_config(
     #     ],
     #     "validators": {},
     # },
-    "CLICKUP_CONNECTOR": {"required": ["CLICKUP_API_TOKEN"], "validators": {}},
+    # "CLICKUP_CONNECTOR": {"required": ["CLICKUP_API_TOKEN"], "validators": {}},
     # "GOOGLE_CALENDAR_CONNECTOR": {
     #     "required": ["token", "refresh_token", "token_uri", "client_id", "expiry", "scopes", "client_secret"],
     #     "validators": {},