Merge pull request #673 from MODSetter/dev

feat: multi oauth connection management & shifted connectors to their oauth alternatives
This commit is contained in:
Rohan Verma 2026-01-07 21:25:25 -08:00 committed by GitHub
commit 2fd38615e8
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
189 changed files with 11566 additions and 6089 deletions

94
.vscode/launch.json vendored
View file

@ -5,7 +5,7 @@
"version": "0.2.0",
"configurations": [
{
"name": "Python Debugger: UV Run with Reload",
"name": "Backend: FastAPI",
"type": "debugpy",
"request": "launch",
"module": "uvicorn",
@ -25,7 +25,7 @@
"python": "${command:python.interpreterPath}"
},
{
"name": "Python Debugger: main.py (direct)",
"name": "Backend: FastAPI (main.py)",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/surfsense_backend/main.py",
@ -34,17 +34,95 @@
"cwd": "${workspaceFolder}/surfsense_backend"
},
{
"name": "Python Debugger: Chat DeepAgent",
"name": "Frontend: Next.js",
"type": "node",
"request": "launch",
"cwd": "${workspaceFolder}/surfsense_web",
"runtimeExecutable": "npm",
"runtimeArgs": ["run", "dev"],
"console": "integratedTerminal",
"serverReadyAction": {
"pattern": "- Local:.+(https?://.+)",
"uriFormat": "%s",
"action": "debugWithChrome"
}
},
{
"name": "Frontend: Next.js (Server-Side Debug)",
"type": "node",
"request": "launch",
"cwd": "${workspaceFolder}/surfsense_web",
"runtimeExecutable": "npm",
"runtimeArgs": ["run", "debug:server"],
"console": "integratedTerminal",
"serverReadyAction": {
"pattern": "- Local:.+(https?://.+)",
"uriFormat": "%s",
"action": "debugWithChrome"
}
},
{
"name": "Celery: Worker",
"type": "debugpy",
"request": "launch",
"module": "app.agents.new_chat.chat_deepagent",
"module": "celery",
"args": [
"-A",
"app.celery_app:celery_app",
"worker",
"--loglevel=info",
"--pool=solo"
],
"console": "integratedTerminal",
"justMyCode": false,
"cwd": "${workspaceFolder}/surfsense_backend",
"python": "${command:python.interpreterPath}",
"env": {
"PYTHONPATH": "${workspaceFolder}/surfsense_backend"
"python": "${command:python.interpreterPath}"
},
{
"name": "Celery: Beat Scheduler",
"type": "debugpy",
"request": "launch",
"module": "celery",
"args": [
"-A",
"app.celery_app:celery_app",
"beat",
"--loglevel=info"
],
"console": "integratedTerminal",
"justMyCode": false,
"cwd": "${workspaceFolder}/surfsense_backend",
"python": "${command:python.interpreterPath}"
}
],
"compounds": [
{
"name": "Full Stack: Backend + Frontend + Celery",
"configurations": [
"Backend: FastAPI",
"Frontend: Next.js",
"Celery: Worker",
"Celery: Beat Scheduler"
],
"stopAll": true,
"presentation": {
"hidden": false,
"group": "Full Stack",
"order": 1
}
},
{
"name": "Full Stack: Backend + Frontend",
"configurations": [
"Backend: FastAPI",
"Frontend: Next.js"
],
"stopAll": true,
"presentation": {
"hidden": false,
"group": "Full Stack",
"order": 2
}
}
]
}
}

View file

@ -37,14 +37,11 @@ COPY surfsense_web/ ./
# Run fumadocs-mdx postinstall now that source files are available
RUN pnpm fumadocs-mdx
# Build args for frontend
ARG NEXT_PUBLIC_FASTAPI_BACKEND_URL=http://localhost:8000
ARG NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE=LOCAL
ARG NEXT_PUBLIC_ETL_SERVICE=DOCLING
ENV NEXT_PUBLIC_FASTAPI_BACKEND_URL=$NEXT_PUBLIC_FASTAPI_BACKEND_URL
ENV NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE=$NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE
ENV NEXT_PUBLIC_ETL_SERVICE=$NEXT_PUBLIC_ETL_SERVICE
# Build with placeholder values that will be replaced at runtime
# These unique strings allow runtime substitution via entrypoint script
ENV NEXT_PUBLIC_FASTAPI_BACKEND_URL=__NEXT_PUBLIC_FASTAPI_BACKEND_URL__
ENV NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE=__NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE__
ENV NEXT_PUBLIC_ETL_SERVICE=__NEXT_PUBLIC_ETL_SERVICE__
# Build
RUN pnpm run build
@ -233,6 +230,12 @@ ENV AUTH_TYPE=LOCAL
ENV ETL_SERVICE=DOCLING
ENV EMBEDDING_MODEL=sentence-transformers/all-MiniLM-L6-v2
# Frontend configuration (can be overridden at runtime)
# These are injected into the Next.js build at container startup
ENV NEXT_PUBLIC_FASTAPI_BACKEND_URL=http://localhost:8000
ENV NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE=LOCAL
ENV NEXT_PUBLIC_ETL_SERVICE=DOCLING
# Data volume
VOLUME ["/data"]

View file

@ -174,44 +174,27 @@ docker run -d -p 3000:3000 -p 8000:8000 `
ghcr.io/modsetter/surfsense:latest
```
**With Custom Configuration (e.g., OpenAI Embeddings):**
**With Custom Configuration:**
You can pass any environment variable using `-e` flags:
```bash
docker run -d -p 3000:3000 -p 8000:8000 \
-v surfsense-data:/data \
-e EMBEDDING_MODEL=openai://text-embedding-ada-002 \
-e OPENAI_API_KEY=your_openai_api_key \
--name surfsense \
--restart unless-stopped \
ghcr.io/modsetter/surfsense:latest
```
**With OAuth-based Connectors (Google Calendar, Gmail, Drive, Airtable):**
To use OAuth-based connectors, you need to configure the respective client credentials:
```bash
docker run -d -p 3000:3000 -p 8000:8000 \
-v surfsense-data:/data \
# Google Connectors (Calendar, Gmail, Drive)
-e AUTH_TYPE=GOOGLE \
-e GOOGLE_OAUTH_CLIENT_ID=your_google_client_id \
-e GOOGLE_OAUTH_CLIENT_SECRET=your_google_client_secret \
-e GOOGLE_CALENDAR_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/calendar/connector/callback \
-e GOOGLE_GMAIL_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/gmail/connector/callback \
-e GOOGLE_DRIVE_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/drive/connector/callback \
# Airtable Connector
-e AIRTABLE_CLIENT_ID=your_airtable_client_id \
-e AIRTABLE_CLIENT_SECRET=your_airtable_client_secret \
-e AIRTABLE_REDIRECT_URI=http://localhost:8000/api/v1/auth/airtable/connector/callback \
-e ETL_SERVICE=LLAMACLOUD \
-e LLAMA_CLOUD_API_KEY=your_llama_cloud_key \
--name surfsense \
--restart unless-stopped \
ghcr.io/modsetter/surfsense:latest
```
> [!NOTE]
> - For Google connectors, create OAuth 2.0 credentials in the [Google Cloud Console](https://console.cloud.google.com/apis/credentials)
> - For Airtable connector, create an OAuth integration in the [Airtable Developer Hub](https://airtable.com/create/oauth)
> - If deploying behind a reverse proxy with HTTPS, add `-e BACKEND_URL=https://api.yourdomain.com` and update the redirect URIs accordingly
> - If deploying behind a reverse proxy with HTTPS, add `-e BACKEND_URL=https://api.yourdomain.com`
After starting, access SurfSense at:
- **Frontend**: [http://localhost:3000](http://localhost:3000)

View file

@ -181,44 +181,27 @@ docker run -d -p 3000:3000 -p 8000:8000 `
ghcr.io/modsetter/surfsense:latest
```
**使用自定义配置(例如 OpenAI 嵌入):**
**使用自定义配置:**
您可以使用 `-e` 标志传递任何环境变量:
```bash
docker run -d -p 3000:3000 -p 8000:8000 \
-v surfsense-data:/data \
-e EMBEDDING_MODEL=openai://text-embedding-ada-002 \
-e OPENAI_API_KEY=your_openai_api_key \
--name surfsense \
--restart unless-stopped \
ghcr.io/modsetter/surfsense:latest
```
**使用 OAuth 连接器(Google 日历、Gmail、云端硬盘、Airtable)**
要使用基于 OAuth 的连接器,您需要配置相应的客户端凭据:
```bash
docker run -d -p 3000:3000 -p 8000:8000 \
-v surfsense-data:/data \
# Google 连接器(日历、Gmail、云端硬盘)
-e AUTH_TYPE=GOOGLE \
-e GOOGLE_OAUTH_CLIENT_ID=your_google_client_id \
-e GOOGLE_OAUTH_CLIENT_SECRET=your_google_client_secret \
-e GOOGLE_CALENDAR_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/calendar/connector/callback \
-e GOOGLE_GMAIL_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/gmail/connector/callback \
-e GOOGLE_DRIVE_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/drive/connector/callback \
# Airtable 连接器
-e AIRTABLE_CLIENT_ID=your_airtable_client_id \
-e AIRTABLE_CLIENT_SECRET=your_airtable_client_secret \
-e AIRTABLE_REDIRECT_URI=http://localhost:8000/api/v1/auth/airtable/connector/callback \
-e ETL_SERVICE=LLAMACLOUD \
-e LLAMA_CLOUD_API_KEY=your_llama_cloud_key \
--name surfsense \
--restart unless-stopped \
ghcr.io/modsetter/surfsense:latest
```
> [!NOTE]
> - 对于 Google 连接器,请在 [Google Cloud Console](https://console.cloud.google.com/apis/credentials) 中创建 OAuth 2.0 凭据
> - 对于 Airtable 连接器,请在 [Airtable 开发者中心](https://airtable.com/create/oauth) 中创建 OAuth 集成
> - 如果部署在带有 HTTPS 的反向代理后面,请添加 `-e BACKEND_URL=https://api.yourdomain.com` 并相应地更新重定向 URI
> - 如果部署在带有 HTTPS 的反向代理后面,请添加 `-e BACKEND_URL=https://api.yourdomain.com`
启动后,访问 SurfSense
- **前端**: [http://localhost:3000](http://localhost:3000)

View file

@ -96,6 +96,30 @@ if [ -d /app/frontend/.next/standalone ]; then
cp -r /app/frontend/.next/static /app/frontend/.next/static 2>/dev/null || true
fi
# ================================================
# Runtime Environment Variable Replacement
# ================================================
# Next.js NEXT_PUBLIC_* vars are baked in at build time.
# This replaces placeholder values with actual runtime env vars.
echo "🔧 Applying runtime environment configuration..."
# Set defaults if not provided
NEXT_PUBLIC_FASTAPI_BACKEND_URL="${NEXT_PUBLIC_FASTAPI_BACKEND_URL:-http://localhost:8000}"
NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE="${NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE:-LOCAL}"
NEXT_PUBLIC_ETL_SERVICE="${NEXT_PUBLIC_ETL_SERVICE:-DOCLING}"
# Replace placeholders in all JS files
find /app/frontend -type f \( -name "*.js" -o -name "*.json" \) -exec sed -i \
-e "s|__NEXT_PUBLIC_FASTAPI_BACKEND_URL__|${NEXT_PUBLIC_FASTAPI_BACKEND_URL}|g" \
-e "s|__NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE__|${NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE}|g" \
-e "s|__NEXT_PUBLIC_ETL_SERVICE__|${NEXT_PUBLIC_ETL_SERVICE}|g" \
{} +
echo "✅ Environment configuration applied"
echo " Backend URL: ${NEXT_PUBLIC_FASTAPI_BACKEND_URL}"
echo " Auth Type: ${NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE}"
echo " ETL Service: ${NEXT_PUBLIC_ETL_SERVICE}"
# ================================================
# Run database migrations
# ================================================
@ -135,10 +159,10 @@ echo "==========================================="
echo " 📋 Configuration"
echo "==========================================="
echo " Frontend URL: http://localhost:3000"
echo " Backend API: http://localhost:8000"
echo " API Docs: http://localhost:8000/docs"
echo " Auth Type: ${AUTH_TYPE:-LOCAL}"
echo " ETL Service: ${ETL_SERVICE:-DOCLING}"
echo " Backend API: ${NEXT_PUBLIC_FASTAPI_BACKEND_URL}"
echo " API Docs: ${NEXT_PUBLIC_FASTAPI_BACKEND_URL}/docs"
echo " Auth Type: ${NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE}"
echo " ETL Service: ${NEXT_PUBLIC_ETL_SERVICE}"
echo " TTS Service: ${TTS_SERVICE}"
echo " STT Service: ${STT_SERVICE}"
echo "==========================================="

View file

@ -34,17 +34,48 @@ REGISTRATION_ENABLED=TRUE or FALSE
GOOGLE_OAUTH_CLIENT_ID=924507538m
GOOGLE_OAUTH_CLIENT_SECRET=GOCSV
# Connector Specific Configs
# Google Connector Specific Configurations
GOOGLE_CALENDAR_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/calendar/connector/callback
GOOGLE_GMAIL_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/gmail/connector/callback
GOOGLE_DRIVE_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/drive/connector/callback
GOOGLE_DRIVE_REDIRECT_URI=http://localhost:8000/api/v1/auth/google/drive/connector/callback
# Airtable OAuth for Airtable Connector
AIRTABLE_CLIENT_ID=your_airtable_client_id
AIRTABLE_CLIENT_SECRET=your_airtable_client_secret
# Airtable OAuth Configuration
AIRTABLE_CLIENT_ID=your_airtable_client_id_here
AIRTABLE_CLIENT_SECRET=your_airtable_client_secret_here
AIRTABLE_REDIRECT_URI=http://localhost:8000/api/v1/auth/airtable/connector/callback
# ClickUp OAuth Configuration
CLICKUP_CLIENT_ID=your_clickup_client_id_here
CLICKUP_CLIENT_SECRET=your_clickup_client_secret_here
CLICKUP_REDIRECT_URI=http://localhost:8000/api/v1/auth/clickup/connector/callback
# Discord OAuth Configuration
DISCORD_CLIENT_ID=your_discord_client_id_here
DISCORD_CLIENT_SECRET=your_discord_client_secret_here
DISCORD_REDIRECT_URI=http://localhost:8000/api/v1/auth/discord/connector/callback
DISCORD_BOT_TOKEN=your_bot_token_from_developer_portal
# Atlassian OAuth Configuration
ATLASSIAN_CLIENT_ID=your_atlassian_client_id_here
ATLASSIAN_CLIENT_SECRET=your_atlassian_client_secret_here
JIRA_REDIRECT_URI=http://localhost:8000/api/v1/auth/jira/connector/callback
CONFLUENCE_REDIRECT_URI=http://localhost:8000/api/v1/auth/confluence/connector/callback
# Linear OAuth Configuration
LINEAR_CLIENT_ID=your_linear_client_id_here
LINEAR_CLIENT_SECRET=your_linear_client_secret_here
LINEAR_REDIRECT_URI=http://localhost:8000/api/v1/auth/linear/connector/callback
# Notion OAuth Configuration
NOTION_CLIENT_ID=your_notion_client_id_here
NOTION_CLIENT_SECRET=your_notion_client_secret_here
NOTION_REDIRECT_URI=http://localhost:8000/api/v1/auth/notion/connector/callback
# Slack OAuth Configuration
SLACK_CLIENT_ID=your_slack_client_id_here
SLACK_CLIENT_SECRET=your_slack_client_secret_here
SLACK_REDIRECT_URI=http://localhost:8000/api/v1/auth/slack/connector/callback
# Embedding Model
# Examples:
# # Get sentence transformers embeddings

View file

@ -0,0 +1,55 @@
"""Allow multiple connectors of same type per search space
Revision ID: 57
Revises: 56
Create Date: 2026-01-06 12:00:00.000000
"""
from collections.abc import Sequence
from sqlalchemy import text
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "57"
down_revision: str | None = "56"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
connection = op.get_bind()
constraint_exists = connection.execute(
text("""
SELECT 1 FROM information_schema.table_constraints
WHERE table_name='search_source_connectors'
AND constraint_type='UNIQUE'
AND constraint_name='uq_searchspace_user_connector_type'
""")
).scalar()
if constraint_exists:
op.drop_constraint(
"uq_searchspace_user_connector_type",
"search_source_connectors",
type_="unique",
)
def downgrade() -> None:
connection = op.get_bind()
constraint_exists = connection.execute(
text("""
SELECT 1 FROM information_schema.table_constraints
WHERE table_name='search_source_connectors'
AND constraint_type='UNIQUE'
AND constraint_name='uq_searchspace_user_connector_type'
""")
).scalar()
if not constraint_exists:
op.create_unique_constraint(
"uq_searchspace_user_connector_type",
"search_source_connectors",
["search_space_id", "user_id", "connector_type"],
)

View file

@ -0,0 +1,55 @@
"""
Add unique constraint for (search_space_id, user_id, name) on search_source_connectors.
Revision ID: 58
Revises: 57
Create Date: 2026-01-06 14:00:00.000000
"""
from collections.abc import Sequence
from sqlalchemy import text
from alembic import op
revision: str = "58"
down_revision: str | None = "57"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
connection = op.get_bind()
constraint_exists = connection.execute(
text("""
SELECT 1 FROM information_schema.table_constraints
WHERE table_name='search_source_connectors'
AND constraint_type='UNIQUE'
AND constraint_name='uq_searchspace_user_connector_name'
""")
).scalar()
if not constraint_exists:
op.create_unique_constraint(
"uq_searchspace_user_connector_name",
"search_source_connectors",
["search_space_id", "user_id", "name"],
)
def downgrade() -> None:
connection = op.get_bind()
constraint_exists = connection.execute(
text("""
SELECT 1 FROM information_schema.table_constraints
WHERE table_name='search_source_connectors'
AND constraint_type='UNIQUE'
AND constraint_name='uq_searchspace_user_connector_name'
""")
).scalar()
if constraint_exists:
op.drop_constraint(
"uq_searchspace_user_connector_name",
"search_source_connectors",
type_="unique",
)

View file

@ -90,6 +90,38 @@ class Config:
AIRTABLE_CLIENT_SECRET = os.getenv("AIRTABLE_CLIENT_SECRET")
AIRTABLE_REDIRECT_URI = os.getenv("AIRTABLE_REDIRECT_URI")
# Notion OAuth
NOTION_CLIENT_ID = os.getenv("NOTION_CLIENT_ID")
NOTION_CLIENT_SECRET = os.getenv("NOTION_CLIENT_SECRET")
NOTION_REDIRECT_URI = os.getenv("NOTION_REDIRECT_URI")
# Atlassian OAuth (shared for Jira and Confluence)
ATLASSIAN_CLIENT_ID = os.getenv("ATLASSIAN_CLIENT_ID")
ATLASSIAN_CLIENT_SECRET = os.getenv("ATLASSIAN_CLIENT_SECRET")
JIRA_REDIRECT_URI = os.getenv("JIRA_REDIRECT_URI")
CONFLUENCE_REDIRECT_URI = os.getenv("CONFLUENCE_REDIRECT_URI")
# Linear OAuth
LINEAR_CLIENT_ID = os.getenv("LINEAR_CLIENT_ID")
LINEAR_CLIENT_SECRET = os.getenv("LINEAR_CLIENT_SECRET")
LINEAR_REDIRECT_URI = os.getenv("LINEAR_REDIRECT_URI")
# Slack OAuth
SLACK_CLIENT_ID = os.getenv("SLACK_CLIENT_ID")
SLACK_CLIENT_SECRET = os.getenv("SLACK_CLIENT_SECRET")
SLACK_REDIRECT_URI = os.getenv("SLACK_REDIRECT_URI")
# Discord OAuth
DISCORD_CLIENT_ID = os.getenv("DISCORD_CLIENT_ID")
DISCORD_CLIENT_SECRET = os.getenv("DISCORD_CLIENT_SECRET")
DISCORD_REDIRECT_URI = os.getenv("DISCORD_REDIRECT_URI")
DISCORD_BOT_TOKEN = os.getenv("DISCORD_BOT_TOKEN")
# ClickUp OAuth
CLICKUP_CLIENT_ID = os.getenv("CLICKUP_CLIENT_ID")
CLICKUP_CLIENT_SECRET = os.getenv("CLICKUP_CLIENT_SECRET")
CLICKUP_REDIRECT_URI = os.getenv("CLICKUP_REDIRECT_URI")
# LLM instances are now managed per-user through the LLMConfig system
# Legacy environment variables removed in favor of user-specific configurations

View file

@ -294,6 +294,12 @@ class AirtableConnector:
Tuple of (records, error_message)
"""
try:
# Validate date strings before parsing
if not start_date or start_date.lower() in ("undefined", "null", "none"):
return [], "Invalid start_date: date string is required"
if not end_date or end_date.lower() in ("undefined", "null", "none"):
return [], "Invalid end_date: date string is required"
# Parse and validate dates
start_dt = isoparse(start_date)
end_dt = isoparse(end_date)
@ -382,3 +388,43 @@ class AirtableConnector:
markdown_parts.append("")
return "\n".join(markdown_parts)
# --- OAuth User Info ---
AIRTABLE_WHOAMI_URL = "https://api.airtable.com/v0/meta/whoami"


async def fetch_airtable_user_email(access_token: str) -> str | None:
    """
    Fetch user email from Airtable whoami API.

    Args:
        access_token: The Airtable OAuth access token

    Returns:
        User's email address or None if fetch fails
    """
    try:
        async with httpx.AsyncClient() as client:
            response = await client.get(
                AIRTABLE_WHOAMI_URL,
                headers={"Authorization": f"Bearer {access_token}"},
                timeout=10.0,
            )
        if response.status_code == 200:
            # A 200 without an "email" field still falls through to the warning.
            if email := response.json().get("email"):
                logger.debug(f"Fetched Airtable user email: {email}")
                return email
        logger.warning(
            f"Failed to fetch Airtable user info: {response.status_code}"
        )
        return None
    except Exception as e:
        # Best-effort lookup: any network/parse failure is logged, not raised.
        logger.warning(f"Error fetching Airtable user email: {e!s}")
        return None

View file

@ -0,0 +1,175 @@
"""
Airtable OAuth Connector.
Handles OAuth-based authentication and token refresh for Airtable API access.
"""
import logging
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.config import config
from app.connectors.airtable_connector import AirtableConnector
from app.db import SearchSourceConnector
from app.routes.airtable_add_connector_route import refresh_airtable_token
from app.schemas.airtable_auth_credentials import AirtableAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption
logger = logging.getLogger(__name__)
class AirtableHistoryConnector:
    """
    Airtable connector with OAuth support and automatic token refresh.

    This connector uses OAuth 2.0 access tokens to authenticate with the
    Airtable API. It automatically refreshes expired tokens when needed.
    """

    def __init__(
        self,
        session: AsyncSession,
        connector_id: int,
        credentials: AirtableAuthCredentialsBase | None = None,
    ):
        """
        Initialize the AirtableHistoryConnector with auto-refresh capability.

        Args:
            session: Database session for updating connector
            connector_id: Connector ID for direct updates
            credentials: Airtable OAuth credentials (optional, will be loaded from DB if not provided)
        """
        self._session = session
        self._connector_id = connector_id
        self._credentials = credentials
        # Lazily-built API client; invalidated whenever the token changes.
        self._airtable_connector: AirtableConnector | None = None

    def _decrypt_config(self, config_data: dict) -> dict:
        """
        Decrypt access/refresh tokens in a connector config dict, in place.

        Decryption only runs when the config is flagged `_token_encrypted`
        and a SECRET_KEY is configured; otherwise the dict is returned as-is.
        Shared by the initial-load and post-refresh paths (previously this
        logic was duplicated in both).

        Raises:
            ValueError: If decryption fails.
        """
        if config_data.get("_token_encrypted", False) and config.SECRET_KEY:
            try:
                token_encryption = TokenEncryption(config.SECRET_KEY)
                # Decrypt sensitive fields
                for field in ("access_token", "refresh_token"):
                    if config_data.get(field):
                        config_data[field] = token_encryption.decrypt_token(
                            config_data[field]
                        )
                logger.info(
                    f"Decrypted Airtable credentials for connector {self._connector_id}"
                )
            except Exception as e:
                logger.error(
                    f"Failed to decrypt Airtable credentials for connector {self._connector_id}: {e!s}"
                )
                raise ValueError(
                    f"Failed to decrypt Airtable credentials: {e!s}"
                ) from e
        return config_data

    async def _get_valid_token(self) -> str:
        """
        Get valid Airtable access token, refreshing if needed.

        Returns:
            Valid access token

        Raises:
            ValueError: If credentials are missing or invalid
            Exception: If token refresh fails
        """
        # Load credentials from DB if not provided
        if self._credentials is None:
            result = await self._session.execute(
                select(SearchSourceConnector).filter(
                    SearchSourceConnector.id == self._connector_id
                )
            )
            connector = result.scalars().first()
            if not connector:
                raise ValueError(f"Connector {self._connector_id} not found")
            config_data = self._decrypt_config(connector.config.copy())
            try:
                self._credentials = AirtableAuthCredentialsBase.from_dict(config_data)
            except Exception as e:
                raise ValueError(f"Invalid Airtable credentials: {e!s}") from e
        # Check if token is expired and refreshable
        if self._credentials.is_expired and self._credentials.is_refreshable:
            try:
                logger.info(
                    f"Airtable token expired for connector {self._connector_id}, refreshing..."
                )
                # Re-fetch the row: the refresh helper persists the new token on it.
                result = await self._session.execute(
                    select(SearchSourceConnector).filter(
                        SearchSourceConnector.id == self._connector_id
                    )
                )
                connector = result.scalars().first()
                if not connector:
                    raise RuntimeError(
                        f"Connector {self._connector_id} not found; cannot refresh token."
                    )
                # Refresh token
                connector = await refresh_airtable_token(self._session, connector)
                # Reload (and decrypt) credentials after refresh
                config_data = self._decrypt_config(connector.config.copy())
                self._credentials = AirtableAuthCredentialsBase.from_dict(config_data)
                # Invalidate cached connector so it's recreated with new token
                self._airtable_connector = None
                logger.info(
                    f"Successfully refreshed Airtable token for connector {self._connector_id}"
                )
            except Exception as e:
                logger.error(
                    f"Failed to refresh Airtable token for connector {self._connector_id}: {e!s}"
                )
                raise Exception(
                    f"Failed to refresh Airtable OAuth credentials: {e!s}"
                ) from e
        return self._credentials.access_token

    async def _get_connector(self) -> AirtableConnector:
        """
        Get or create AirtableConnector with valid token.

        Returns:
            AirtableConnector instance
        """
        if self._airtable_connector is None:
            # Ensure we have valid credentials (this will refresh if needed)
            await self._get_valid_token()
            # Use the credentials object which is now guaranteed to be valid
            if not self._credentials:
                raise ValueError("Credentials not loaded")
            self._airtable_connector = AirtableConnector(self._credentials)
        return self._airtable_connector

View file

@ -0,0 +1,349 @@
"""
ClickUp History Module
A module for retrieving data from ClickUp with OAuth support and backward compatibility.
Allows fetching tasks from workspaces and lists with automatic token refresh.
"""
import logging
from typing import Any
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.config import config
from app.connectors.clickup_connector import ClickUpConnector
from app.db import SearchSourceConnector
from app.routes.clickup_add_connector_route import refresh_clickup_token
from app.schemas.clickup_auth_credentials import ClickUpAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption
logger = logging.getLogger(__name__)
class ClickUpHistoryConnector:
"""
Class for retrieving data from ClickUp with OAuth support and backward compatibility.
"""
def __init__(
self,
session: AsyncSession,
connector_id: int,
credentials: ClickUpAuthCredentialsBase | None = None,
api_token: str | None = None, # For backward compatibility
):
"""
Initialize the ClickUpHistoryConnector.
Args:
session: Database session for token refresh
connector_id: Connector ID for direct updates
credentials: ClickUp OAuth credentials (optional, will be loaded from DB if not provided)
api_token: Legacy API token for backward compatibility (optional)
"""
self._session = session
self._connector_id = connector_id
self._credentials = credentials
self._api_token = api_token # Legacy API token
self._use_oauth = False
self._use_legacy = api_token is not None
self._clickup_client: ClickUpConnector | None = None
async def _get_valid_token(self) -> str:
"""
Get valid ClickUp access token, refreshing if needed.
For legacy API tokens, returns the token directly.
Returns:
Valid access token or API token
Raises:
ValueError: If credentials are missing or invalid
Exception: If token refresh fails
"""
# If using legacy API token, return it directly
if self._use_legacy and self._api_token:
return self._api_token
# Load credentials from DB if not provided
if self._credentials is None:
result = await self._session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == self._connector_id
)
)
connector = result.scalars().first()
if not connector:
raise ValueError(f"Connector {self._connector_id} not found")
config_data = connector.config.copy()
# Check if using OAuth or legacy API token
is_oauth = config_data.get("_token_encrypted", False) or config_data.get(
"access_token"
)
has_legacy_token = config_data.get("CLICKUP_API_TOKEN") is not None
if is_oauth:
# OAuth 2.0 authentication
self._use_oauth = True
# Decrypt credentials if they are encrypted
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
try:
token_encryption = TokenEncryption(config.SECRET_KEY)
# Decrypt sensitive fields
if config_data.get("access_token"):
config_data["access_token"] = (
token_encryption.decrypt_token(
config_data["access_token"]
)
)
if config_data.get("refresh_token"):
config_data["refresh_token"] = (
token_encryption.decrypt_token(
config_data["refresh_token"]
)
)
logger.info(
f"Decrypted ClickUp OAuth credentials for connector {self._connector_id}"
)
except Exception as e:
logger.error(
f"Failed to decrypt ClickUp OAuth credentials for connector {self._connector_id}: {e!s}"
)
raise ValueError(
f"Failed to decrypt ClickUp OAuth credentials: {e!s}"
) from e
try:
self._credentials = ClickUpAuthCredentialsBase.from_dict(
config_data
)
except Exception as e:
raise ValueError(f"Invalid ClickUp OAuth credentials: {e!s}") from e
elif has_legacy_token:
# Legacy API token authentication (backward compatibility)
self._use_legacy = True
self._api_token = config_data.get("CLICKUP_API_TOKEN")
# Decrypt token if it's encrypted (legacy tokens might be encrypted)
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY and self._api_token:
try:
token_encryption = TokenEncryption(config.SECRET_KEY)
self._api_token = token_encryption.decrypt_token(
self._api_token
)
logger.info(
f"Decrypted legacy ClickUp API token for connector {self._connector_id}"
)
except Exception as e:
logger.warning(
f"Failed to decrypt legacy ClickUp API token for connector {self._connector_id}: {e!s}. "
"Trying to use token as-is (might be unencrypted)."
)
# Continue with token as-is - might be unencrypted legacy token
if not self._api_token:
raise ValueError("ClickUp API token not found in connector config")
# Return legacy token directly (no refresh needed)
return self._api_token
else:
raise ValueError(
"ClickUp credentials not found in connector config (neither OAuth nor API token)"
)
# Check if token is expired and refreshable (only for OAuth)
if (
self._use_oauth
and self._credentials.is_expired
and self._credentials.is_refreshable
):
try:
logger.info(
f"ClickUp token expired for connector {self._connector_id}, refreshing..."
)
# Get connector for refresh
result = await self._session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == self._connector_id
)
)
connector = result.scalars().first()
if not connector:
raise RuntimeError(
f"Connector {self._connector_id} not found; cannot refresh token."
)
# Refresh token
connector = await refresh_clickup_token(self._session, connector)
# Reload credentials after refresh
config_data = connector.config.copy()
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
token_encryption = TokenEncryption(config.SECRET_KEY)
if config_data.get("access_token"):
config_data["access_token"] = token_encryption.decrypt_token(
config_data["access_token"]
)
if config_data.get("refresh_token"):
config_data["refresh_token"] = token_encryption.decrypt_token(
config_data["refresh_token"]
)
self._credentials = ClickUpAuthCredentialsBase.from_dict(config_data)
# Invalidate cached client so it's recreated with new token
self._clickup_client = None
logger.info(
f"Successfully refreshed ClickUp token for connector {self._connector_id}"
)
except Exception as e:
logger.error(
f"Failed to refresh ClickUp token for connector {self._connector_id}: {e!s}"
)
raise Exception(
f"Failed to refresh ClickUp OAuth credentials: {e!s}"
) from e
if self._use_oauth:
return self._credentials.access_token
else:
return self._api_token
async def _get_client(self) -> ClickUpConnector:
"""
Get or create ClickUpConnector with valid token.
Returns:
ClickUpConnector instance
"""
if self._clickup_client is None:
token = await self._get_valid_token()
# ClickUp API uses Bearer token for OAuth, or direct token for legacy
if self._use_oauth:
# For OAuth, use Bearer token format (ClickUp OAuth expects "Bearer {token}")
self._clickup_client = ClickUpConnector(api_token=f"Bearer {token}")
else:
# For legacy API token, use token directly (format: "pk_...")
self._clickup_client = ClickUpConnector(api_token=token)
return self._clickup_client
async def close(self):
"""Close any open connections."""
self._clickup_client = None
async def __aenter__(self):
"""Async context manager entry."""
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
"""Async context manager exit."""
await self.close()
async def get_authorized_workspaces(self) -> dict[str, Any]:
"""
Fetch authorized workspaces (teams) from ClickUp.
Returns:
Dictionary containing teams data
Raises:
ValueError: If credentials have not been set
Exception: If the API request fails
"""
client = await self._get_client()
return client.get_authorized_workspaces()
async def get_workspace_tasks(
self, workspace_id: str, include_closed: bool = False
) -> list[dict[str, Any]]:
"""
Fetch all tasks from a ClickUp workspace.
Args:
workspace_id: ClickUp workspace (team) ID
include_closed: Whether to include closed tasks (default: False)
Returns:
List of task objects
Raises:
ValueError: If credentials have not been set
Exception: If the API request fails
"""
client = await self._get_client()
return client.get_workspace_tasks(
workspace_id=workspace_id, include_closed=include_closed
)
async def get_tasks_in_date_range(
self,
workspace_id: str,
start_date: str,
end_date: str,
include_closed: bool = False,
) -> tuple[list[dict[str, Any]], str | None]:
"""
Fetch tasks from ClickUp within a specific date range.
Args:
workspace_id: ClickUp workspace (team) ID
start_date: Start date in YYYY-MM-DD format
end_date: End date in YYYY-MM-DD format
include_closed: Whether to include closed tasks (default: False)
Returns:
Tuple containing (tasks list, error message or None)
"""
client = await self._get_client()
return client.get_tasks_in_date_range(
workspace_id=workspace_id,
start_date=start_date,
end_date=end_date,
include_closed=include_closed,
)
async def get_task_details(self, task_id: str) -> dict[str, Any]:
    """
    Retrieve the full details of a single task.

    Args:
        task_id: ClickUp task ID

    Returns:
        Task details

    Raises:
        ValueError: If credentials have not been set
        Exception: If the API request fails
    """
    clickup = await self._get_client()
    return clickup.get_task_details(task_id)
async def get_task_comments(self, task_id: str) -> dict[str, Any]:
    """
    Retrieve the comment thread of a single task.

    Args:
        task_id: ClickUp task ID

    Returns:
        Task comments

    Raises:
        ValueError: If credentials have not been set
        Exception: If the API request fails
    """
    clickup = await self._get_client()
    return clickup.get_task_comments(task_id)

View file

@ -0,0 +1,592 @@
"""
Confluence OAuth Connector.
Handles OAuth-based authentication and token refresh for Confluence API access.
"""
import logging
from typing import Any
import httpx
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.config import config
from app.connectors.confluence_connector import ConfluenceConnector
from app.db import SearchSourceConnector
from app.routes.confluence_add_connector_route import refresh_confluence_token
from app.schemas.atlassian_auth_credentials import AtlassianAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption
logger = logging.getLogger(__name__)
class ConfluenceHistoryConnector:
    """
    Confluence connector with OAuth support and automatic token refresh.

    This connector uses OAuth 2.0 access tokens to authenticate with the
    Confluence API. It automatically refreshes expired tokens when needed.
    Also supports legacy API token authentication for backward compatibility.
    """

    def __init__(
        self,
        session: AsyncSession,
        connector_id: int,
        credentials: AtlassianAuthCredentialsBase | None = None,
    ):
        """
        Initialize the ConfluenceHistoryConnector with auto-refresh capability.

        Args:
            session: Database session for updating connector
            connector_id: Connector ID for direct updates
            credentials: Confluence OAuth credentials (optional, will be loaded from DB if not provided)
        """
        self._session = session
        self._connector_id = connector_id
        self._credentials = credentials
        # OAuth routing state; populated lazily by _get_valid_token().
        self._cloud_id: str | None = None
        self._base_url: str | None = None
        self._http_client: httpx.AsyncClient | None = None
        self._use_oauth = True
        # Legacy (email + API token) authentication state.
        self._legacy_email: str | None = None
        self._legacy_api_token: str | None = None
        self._legacy_confluence_client: ConfluenceConnector | None = None

    async def _get_valid_token(self) -> str:
        """
        Get valid Confluence access token, refreshing if needed.

        Also lazily loads credentials from the connector's DB config on first
        call, distinguishing OAuth from legacy API-token configurations.

        Returns:
            Valid access token (empty string when using legacy API-token auth)

        Raises:
            ValueError: If credentials are missing or invalid
            Exception: If token refresh fails
        """
        # Load credentials from DB if not provided
        if self._credentials is None:
            result = await self._session.execute(
                select(SearchSourceConnector).filter(
                    SearchSourceConnector.id == self._connector_id
                )
            )
            connector = result.scalars().first()
            if not connector:
                raise ValueError(f"Connector {self._connector_id} not found")
            config_data = connector.config.copy()
            # Check if using OAuth or legacy API token
            is_oauth = config_data.get("_token_encrypted", False) or config_data.get(
                "access_token"
            )
            if is_oauth:
                # OAuth 2.0 authentication
                # Decrypt credentials if they are encrypted
                token_encrypted = config_data.get("_token_encrypted", False)
                if token_encrypted and config.SECRET_KEY:
                    try:
                        token_encryption = TokenEncryption(config.SECRET_KEY)
                        # Decrypt sensitive fields
                        if config_data.get("access_token"):
                            config_data["access_token"] = (
                                token_encryption.decrypt_token(
                                    config_data["access_token"]
                                )
                            )
                        if config_data.get("refresh_token"):
                            config_data["refresh_token"] = (
                                token_encryption.decrypt_token(
                                    config_data["refresh_token"]
                                )
                            )
                        logger.info(
                            f"Decrypted Confluence credentials for connector {self._connector_id}"
                        )
                    except Exception as e:
                        logger.error(
                            f"Failed to decrypt Confluence credentials for connector {self._connector_id}: {e!s}"
                        )
                        raise ValueError(
                            f"Failed to decrypt Confluence credentials: {e!s}"
                        ) from e
                try:
                    self._credentials = AtlassianAuthCredentialsBase.from_dict(
                        config_data
                    )
                    # Store cloud_id and base_url for API calls (with backward compatibility for site_url)
                    self._cloud_id = config_data.get("cloud_id")
                    self._base_url = config_data.get("base_url") or config_data.get(
                        "site_url"
                    )
                    self._use_oauth = True
                except Exception as e:
                    raise ValueError(
                        f"Invalid Confluence OAuth credentials: {e!s}"
                    ) from e
            else:
                # Legacy API token authentication
                self._legacy_email = config_data.get("CONFLUENCE_EMAIL")
                self._legacy_api_token = config_data.get("CONFLUENCE_API_TOKEN")
                self._base_url = config_data.get("CONFLUENCE_BASE_URL")
                self._use_oauth = False
                if (
                    not self._legacy_email
                    or not self._legacy_api_token
                    or not self._base_url
                ):
                    raise ValueError(
                        "Confluence credentials not found in connector config"
                    )
        # Check if token is expired and refreshable (only for OAuth)
        if (
            self._use_oauth
            and self._credentials.is_expired
            and self._credentials.is_refreshable
        ):
            try:
                logger.info(
                    f"Confluence token expired for connector {self._connector_id}, refreshing..."
                )
                # Get connector for refresh
                result = await self._session.execute(
                    select(SearchSourceConnector).filter(
                        SearchSourceConnector.id == self._connector_id
                    )
                )
                connector = result.scalars().first()
                if not connector:
                    raise RuntimeError(
                        f"Connector {self._connector_id} not found; cannot refresh token."
                    )
                # Refresh token
                connector = await refresh_confluence_token(self._session, connector)
                # Reload credentials after refresh
                config_data = connector.config.copy()
                token_encrypted = config_data.get("_token_encrypted", False)
                if token_encrypted and config.SECRET_KEY:
                    token_encryption = TokenEncryption(config.SECRET_KEY)
                    if config_data.get("access_token"):
                        config_data["access_token"] = token_encryption.decrypt_token(
                            config_data["access_token"]
                        )
                    if config_data.get("refresh_token"):
                        config_data["refresh_token"] = token_encryption.decrypt_token(
                            config_data["refresh_token"]
                        )
                self._credentials = AtlassianAuthCredentialsBase.from_dict(config_data)
                self._cloud_id = config_data.get("cloud_id")
                # Handle backward compatibility: check both base_url and site_url
                self._base_url = config_data.get("base_url") or config_data.get(
                    "site_url"
                )
                # Invalidate cached client so it's recreated with new token
                if self._http_client:
                    await self._http_client.aclose()
                    self._http_client = None
                logger.info(
                    f"Successfully refreshed Confluence token for connector {self._connector_id}"
                )
            except Exception as e:
                logger.error(
                    f"Failed to refresh Confluence token for connector {self._connector_id}: {e!s}"
                )
                raise Exception(
                    f"Failed to refresh Confluence OAuth credentials: {e!s}"
                ) from e
        if self._use_oauth:
            return self._credentials.access_token
        else:
            # For legacy auth, return empty string (not used for token-based auth)
            return ""

    async def _get_client(self) -> httpx.AsyncClient:
        """
        Get or create HTTP client with valid token.

        The client is shared across requests and invalidated on token refresh.

        Returns:
            httpx.AsyncClient instance
        """
        if self._http_client is None:
            self._http_client = httpx.AsyncClient(timeout=30.0)
        return self._http_client

    async def _get_legacy_client(self) -> ConfluenceConnector:
        """
        Get or create ConfluenceConnector with legacy credentials.

        Returns:
            ConfluenceConnector instance
        """
        # Lazily build the sync legacy client from fields loaded by _get_valid_token().
        if self._legacy_confluence_client is None:
            self._legacy_confluence_client = ConfluenceConnector(
                base_url=self._base_url,
                email=self._legacy_email,
                api_token=self._legacy_api_token,
            )
        return self._legacy_confluence_client

    async def _get_base_url(self) -> str:
        """
        Get the base URL for Confluence API calls.

        Returns:
            Base URL string

        Raises:
            ValueError: If OAuth is used but no cloud_id is available
        """
        if not self._use_oauth:
            # For legacy auth, use the base_url directly
            return self._base_url or ""
        if not self._cloud_id:
            raise ValueError("Cloud ID not available. Cannot construct API URL.")
        # Use the Atlassian API format: https://api.atlassian.com/ex/confluence/{cloudid}
        return f"https://api.atlassian.com/ex/confluence/{self._cloud_id}"

    async def _make_api_request(
        self, endpoint: str, params: dict[str, Any] | None = None
    ) -> dict[str, Any]:
        """
        Make a request to the Confluence API.

        Args:
            endpoint: API endpoint (without base URL)
            params: Query parameters for the request (optional)

        Returns:
            Response data from the API

        Raises:
            ValueError: If credentials have not been set
            Exception: If the API request fails
        """
        if not self._use_oauth:
            # Use legacy ConfluenceConnector for API requests
            client = await self._get_legacy_client()
            # ConfluenceConnector uses synchronous requests, so we need to handle this differently
            # For now, we'll use the legacy client's make_api_request method
            # But since it's sync, we'll need to wrap it
            import asyncio

            # NOTE(review): asyncio.get_event_loop() inside a coroutine is
            # deprecated; asyncio.get_running_loop() is the modern equivalent.
            loop = asyncio.get_event_loop()
            return await loop.run_in_executor(
                None, client.make_api_request, endpoint, params
            )
        # OAuth flow
        token = await self._get_valid_token()
        base_url = await self._get_base_url()
        http_client = await self._get_client()
        url = f"{base_url}/wiki/api/v2/{endpoint}"
        headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {token}",
            "Accept": "application/json",
        }
        try:
            response = await http_client.get(url, headers=headers, params=params)
            response.raise_for_status()
            return response.json()
        except httpx.HTTPStatusError as e:
            # Enhanced error logging to see the actual error
            error_detail = {
                "status_code": e.response.status_code,
                "url": str(e.request.url),
                "response_text": e.response.text,
                "headers": dict(e.response.headers),
            }
            logger.error(f"Confluence API HTTP error: {error_detail}")
            raise Exception(
                f"Confluence API request failed (HTTP {e.response.status_code}): {e.response.text}"
            ) from e
        except httpx.RequestError as e:
            logger.error(f"Confluence API request error: {e!s}", exc_info=True)
            raise Exception(f"Confluence API request failed: {e!s}") from e

    async def get_all_spaces(self) -> list[dict[str, Any]]:
        """
        Fetch all spaces from Confluence.

        Follows cursor-based pagination until the API stops returning a
        "next" link.

        Returns:
            List of space objects

        Raises:
            ValueError: If credentials have not been set
            Exception: If the API request fails
        """
        params = {
            "limit": 100,
        }
        all_spaces = []
        cursor = None
        while True:
            if cursor:
                params["cursor"] = cursor
            result = await self._make_api_request("spaces", params)
            if not isinstance(result, dict) or "results" not in result:
                raise Exception("Invalid response from Confluence API")
            spaces = result["results"]
            all_spaces.extend(spaces)
            # Check if there are more spaces to fetch
            links = result.get("_links", {})
            if "next" not in links:
                break
            # Extract cursor from next link if available
            next_link = links["next"]
            if "cursor=" in next_link:
                cursor = next_link.split("cursor=")[1].split("&")[0]
            else:
                break
        return all_spaces

    async def get_pages_in_space(
        self, space_id: str, include_body: bool = True
    ) -> list[dict[str, Any]]:
        """
        Fetch all pages in a specific space.

        Args:
            space_id: The ID of the space to fetch pages from
            include_body: Whether to include page body content

        Returns:
            List of page objects

        Raises:
            ValueError: If credentials have not been set
            Exception: If the API request fails
        """
        params = {
            "limit": 100,
        }
        if include_body:
            # "storage" returns the raw Confluence storage-format body.
            params["body-format"] = "storage"
        all_pages = []
        cursor = None
        while True:
            if cursor:
                params["cursor"] = cursor
            result = await self._make_api_request(f"spaces/{space_id}/pages", params)
            if not isinstance(result, dict) or "results" not in result:
                raise Exception("Invalid response from Confluence API")
            pages = result["results"]
            all_pages.extend(pages)
            # Check if there are more pages to fetch
            links = result.get("_links", {})
            if "next" not in links:
                break
            # Extract cursor from next link if available
            next_link = links["next"]
            if "cursor=" in next_link:
                cursor = next_link.split("cursor=")[1].split("&")[0]
            else:
                break
        return all_pages

    async def get_page_comments(self, page_id: str) -> list[dict[str, Any]]:
        """
        Fetch all comments for a specific page (both footer and inline comments).

        Args:
            page_id: The ID of the page to fetch comments from

        Returns:
            List of comment objects

        Raises:
            ValueError: If credentials have not been set
            Exception: If the API request fails
        """
        all_comments = []
        # Get footer comments
        footer_comments = await self._get_comments_for_page(page_id, "footer-comments")
        all_comments.extend(footer_comments)
        # Get inline comments
        inline_comments = await self._get_comments_for_page(page_id, "inline-comments")
        all_comments.extend(inline_comments)
        return all_comments

    async def _get_comments_for_page(
        self, page_id: str, comment_type: str
    ) -> list[dict[str, Any]]:
        """
        Helper method to fetch comments of a specific type for a page.

        Args:
            page_id: The ID of the page
            comment_type: Type of comments ('footer-comments' or 'inline-comments')

        Returns:
            List of comment objects (empty on an invalid/empty response)
        """
        params = {
            "limit": 100,
            "body-format": "storage",
        }
        all_comments = []
        cursor = None
        while True:
            if cursor:
                params["cursor"] = cursor
            result = await self._make_api_request(
                f"pages/{page_id}/{comment_type}", params
            )
            if not isinstance(result, dict) or "results" not in result:
                break  # No comments or invalid response
            comments = result["results"]
            all_comments.extend(comments)
            # Check if there are more comments to fetch
            links = result.get("_links", {})
            if "next" not in links:
                break
            # Extract cursor from next link if available
            next_link = links["next"]
            if "cursor=" in next_link:
                cursor = next_link.split("cursor=")[1].split("&")[0]
            else:
                break
        return all_comments

    async def get_pages_by_date_range(
        self,
        start_date: str,
        end_date: str,
        space_ids: list[str] | None = None,
        include_comments: bool = True,
    ) -> tuple[list[dict[str, Any]], str | None]:
        """
        Fetch pages within a date range, optionally filtered by spaces.

        Args:
            start_date: Start date in YYYY-MM-DD format
            end_date: End date in YYYY-MM-DD format (inclusive)
            space_ids: Optional list of space IDs to filter pages
            include_comments: Whether to include comments for each page

        Returns:
            Tuple containing (pages list with comments, error message or None)
        """
        try:
            if not self._use_oauth:
                # Use legacy ConfluenceConnector for API requests
                client = await self._get_legacy_client()
                # Ensure credentials are loaded
                await self._get_valid_token()
                # ConfluenceConnector.get_pages_by_date_range is synchronous
                import asyncio

                loop = asyncio.get_event_loop()
                return await loop.run_in_executor(
                    None,
                    client.get_pages_by_date_range,
                    start_date,
                    end_date,
                    space_ids,
                    include_comments,
                )
            # OAuth flow
            # NOTE(review): this OAuth path returns every fetched page without
            # applying the start_date/end_date filter, and include_comments is
            # not acted upon here — confirm filtering/comment-loading happens
            # in the caller.
            all_pages = []
            if space_ids:
                # Fetch pages from specific spaces
                for space_id in space_ids:
                    pages = await self.get_pages_in_space(space_id, include_body=True)
                    all_pages.extend(pages)
            else:
                # Fetch all pages (this might be expensive for large instances)
                params = {
                    "limit": 100,
                    "body-format": "storage",
                }
                cursor = None
                while True:
                    if cursor:
                        params["cursor"] = cursor
                    result = await self._make_api_request("pages", params)
                    if not isinstance(result, dict) or "results" not in result:
                        break
                    pages = result["results"]
                    all_pages.extend(pages)
                    links = result.get("_links", {})
                    if "next" not in links:
                        break
                    next_link = links["next"]
                    if "cursor=" in next_link:
                        cursor = next_link.split("cursor=")[1].split("&")[0]
                    else:
                        break
            return all_pages, None
        except Exception as e:
            return [], f"Error fetching pages: {e!s}"

    async def close(self):
        """Close the HTTP client connection."""
        if self._http_client:
            await self._http_client.aclose()
            self._http_client = None
        # Legacy client doesn't need explicit closing
        self._legacy_confluence_client = None

    async def __aenter__(self):
        """Async context manager entry."""
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Async context manager exit."""
        await self.close()

View file

@ -3,7 +3,7 @@ Discord Connector
A module for interacting with Discord's HTTP API to retrieve guilds, channels, and message history.
Requires a Discord bot token.
Supports both direct bot token and OAuth-based authentication with token refresh.
"""
import asyncio
@ -12,6 +12,14 @@ import logging
import discord
from discord.ext import commands
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.config import config
from app.db import SearchSourceConnector
from app.routes.discord_add_connector_route import refresh_discord_token
from app.schemas.discord_auth_credentials import DiscordAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption
logger = logging.getLogger(__name__)
@ -19,12 +27,21 @@ logger = logging.getLogger(__name__)
class DiscordConnector(commands.Bot):
"""Class for retrieving guild, channel, and message history from Discord."""
def __init__(self, token: str | None = None):
def __init__(
self,
token: str | None = None,
session: AsyncSession | None = None,
connector_id: int | None = None,
credentials: DiscordAuthCredentialsBase | None = None,
):
"""
Initialize the DiscordConnector with a bot token.
Initialize the DiscordConnector with a bot token or OAuth credentials.
Args:
token (str): The Discord bot token.
token: Discord bot token (optional, for backward compatibility)
session: Database session for token refresh (optional)
connector_id: Connector ID for token refresh (optional)
credentials: Discord OAuth credentials (optional, will be loaded from DB if not provided)
"""
intents = discord.Intents.default()
intents.guilds = True # Required to fetch guilds and channels
@ -34,7 +51,14 @@ class DiscordConnector(commands.Bot):
super().__init__(
command_prefix="!", intents=intents
) # command_prefix is required but not strictly used here
self.token = token
self._session = session
self._connector_id = connector_id
self._credentials = credentials
# For backward compatibility, if token is provided directly, use it
if token:
self.token = token
else:
self.token = None
self._bot_task = None # Holds the async bot task
self._is_running = False # Flag to track if the bot is running
@ -57,12 +81,143 @@ class DiscordConnector(commands.Bot):
async def on_resumed():
logger.debug("Bot resumed connection to Discord gateway.")
async def _get_valid_token(self) -> str:
    """
    Get valid Discord bot token, refreshing if needed.

    Supports two modes: a directly supplied token (backward compatibility)
    and OAuth credentials loaded from the connector's DB config, decrypted
    and auto-refreshed when expired.

    Returns:
        Valid bot token

    Raises:
        ValueError: If credentials are missing or invalid
        Exception: If token refresh fails
    """
    # If we have a direct token (backward compatibility), use it
    if (
        self.token
        and self._session is None
        and self._connector_id is None
        and self._credentials is None
    ):
        # This means it was initialized with a direct token, use it
        return self.token
    # Load credentials from DB if not provided
    if self._credentials is None:
        if not self._session or not self._connector_id:
            raise ValueError(
                "Cannot load credentials: session and connector_id required"
            )
        result = await self._session.execute(
            select(SearchSourceConnector).filter(
                SearchSourceConnector.id == self._connector_id
            )
        )
        connector = result.scalars().first()
        if not connector:
            raise ValueError(f"Connector {self._connector_id} not found")
        config_data = connector.config.copy()
        # Decrypt credentials if they are encrypted
        token_encrypted = config_data.get("_token_encrypted", False)
        if token_encrypted and config.SECRET_KEY:
            try:
                token_encryption = TokenEncryption(config.SECRET_KEY)
                # Decrypt sensitive fields
                if config_data.get("bot_token"):
                    config_data["bot_token"] = token_encryption.decrypt_token(
                        config_data["bot_token"]
                    )
                if config_data.get("refresh_token"):
                    config_data["refresh_token"] = token_encryption.decrypt_token(
                        config_data["refresh_token"]
                    )
                logger.info(
                    f"Decrypted Discord credentials for connector {self._connector_id}"
                )
            except Exception as e:
                logger.error(
                    f"Failed to decrypt Discord credentials for connector {self._connector_id}: {e!s}"
                )
                raise ValueError(
                    f"Failed to decrypt Discord credentials: {e!s}"
                ) from e
        try:
            self._credentials = DiscordAuthCredentialsBase.from_dict(config_data)
        except Exception as e:
            raise ValueError(f"Invalid Discord credentials: {e!s}") from e
    # Check if token is expired and refreshable
    if self._credentials.is_expired and self._credentials.is_refreshable:
        try:
            logger.info(
                f"Discord token expired for connector {self._connector_id}, refreshing..."
            )
            # Get connector for refresh
            # NOTE(review): this path assumes self._session is set; when
            # credentials were passed in directly but the token is expired,
            # _session may be None — confirm callers always supply a session
            # for refreshable credentials.
            result = await self._session.execute(
                select(SearchSourceConnector).filter(
                    SearchSourceConnector.id == self._connector_id
                )
            )
            connector = result.scalars().first()
            if not connector:
                raise RuntimeError(
                    f"Connector {self._connector_id} not found; cannot refresh token."
                )
            # Refresh token
            connector = await refresh_discord_token(self._session, connector)
            # Reload credentials after refresh
            config_data = connector.config.copy()
            token_encrypted = config_data.get("_token_encrypted", False)
            if token_encrypted and config.SECRET_KEY:
                token_encryption = TokenEncryption(config.SECRET_KEY)
                if config_data.get("bot_token"):
                    config_data["bot_token"] = token_encryption.decrypt_token(
                        config_data["bot_token"]
                    )
                if config_data.get("refresh_token"):
                    config_data["refresh_token"] = token_encryption.decrypt_token(
                        config_data["refresh_token"]
                    )
            self._credentials = DiscordAuthCredentialsBase.from_dict(config_data)
            logger.info(
                f"Successfully refreshed Discord token for connector {self._connector_id}"
            )
        except Exception as e:
            logger.error(
                f"Failed to refresh Discord token for connector {self._connector_id}: {e!s}"
            )
            raise Exception(
                f"Failed to refresh Discord OAuth credentials: {e!s}"
            ) from e
    return self._credentials.bot_token
async def start_bot(self):
"""Starts the bot to connect to Discord."""
logger.info("Starting Discord bot...")
# Get valid token (with auto-refresh if using OAuth)
if not self.token:
raise ValueError("Discord bot token not set. Call set_token(token) first.")
# Try to get token from credentials
try:
self.token = await self._get_valid_token()
except ValueError as e:
raise ValueError(
f"Discord bot token not set. {e!s} Please authenticate via OAuth or provide a token."
) from e
try:
if self._is_running:
@ -107,7 +262,7 @@ class DiscordConnector(commands.Bot):
def set_token(self, token: str) -> None:
"""
Set the discord bot token.
Set the discord bot token (for backward compatibility).
Args:
token (str): The Discord bot token.

View file

@ -109,7 +109,36 @@ class GoogleCalendarConnector:
raise RuntimeError(
"GOOGLE_CALENDAR_CONNECTOR connector not found; cannot persist refreshed token."
)
connector.config = json.loads(self._credentials.to_json())
# Encrypt sensitive credentials before storing
from app.config import config
from app.utils.oauth_security import TokenEncryption
creds_dict = json.loads(self._credentials.to_json())
token_encrypted = connector.config.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
token_encryption = TokenEncryption(config.SECRET_KEY)
# Encrypt sensitive fields
if creds_dict.get("token"):
creds_dict["token"] = token_encryption.encrypt_token(
creds_dict["token"]
)
if creds_dict.get("refresh_token"):
creds_dict["refresh_token"] = (
token_encryption.encrypt_token(
creds_dict["refresh_token"]
)
)
if creds_dict.get("client_secret"):
creds_dict["client_secret"] = (
token_encryption.encrypt_token(
creds_dict["client_secret"]
)
)
creds_dict["_token_encrypted"] = True
connector.config = creds_dict
flag_modified(connector, "config")
await self._session.commit()
except Exception as e:
@ -182,6 +211,18 @@ class GoogleCalendarConnector:
Tuple containing (events list, error message or None)
"""
try:
# Validate date strings
if not start_date or start_date.lower() in ("undefined", "null", "none"):
return (
[],
"Invalid start_date: must be a valid date string in YYYY-MM-DD format",
)
if not end_date or end_date.lower() in ("undefined", "null", "none"):
return (
[],
"Invalid end_date: must be a valid date string in YYYY-MM-DD format",
)
service = await self._get_service()
# Parse both dates

View file

@ -1,6 +1,7 @@
"""Google Drive OAuth credential management."""
import json
import logging
from datetime import datetime
from google.auth.transport.requests import Request
@ -9,7 +10,11 @@ from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from sqlalchemy.orm.attributes import flag_modified
from app.config import config
from app.db import SearchSourceConnector
from app.utils.oauth_security import TokenEncryption
logger = logging.getLogger(__name__)
async def get_valid_credentials(
@ -38,7 +43,41 @@ async def get_valid_credentials(
if not connector:
raise ValueError(f"Connector {connector_id} not found")
config_data = connector.config
config_data = (
connector.config.copy()
) # Work with a copy to avoid modifying original
# Decrypt credentials if they are encrypted
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
try:
token_encryption = TokenEncryption(config.SECRET_KEY)
# Decrypt sensitive fields
if config_data.get("token"):
config_data["token"] = token_encryption.decrypt_token(
config_data["token"]
)
if config_data.get("refresh_token"):
config_data["refresh_token"] = token_encryption.decrypt_token(
config_data["refresh_token"]
)
if config_data.get("client_secret"):
config_data["client_secret"] = token_encryption.decrypt_token(
config_data["client_secret"]
)
logger.info(
f"Decrypted Google Drive credentials for connector {connector_id}"
)
except Exception as e:
logger.error(
f"Failed to decrypt Google Drive credentials for connector {connector_id}: {e!s}"
)
raise ValueError(
f"Failed to decrypt Google Drive credentials: {e!s}"
) from e
exp = config_data.get("expiry", "").replace("Z", "")
if not all(
@ -66,7 +105,29 @@ async def get_valid_credentials(
try:
credentials.refresh(Request())
connector.config = json.loads(credentials.to_json())
creds_dict = json.loads(credentials.to_json())
# Encrypt sensitive credentials before storing
token_encrypted = connector.config.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
token_encryption = TokenEncryption(config.SECRET_KEY)
# Encrypt sensitive fields
if creds_dict.get("token"):
creds_dict["token"] = token_encryption.encrypt_token(
creds_dict["token"]
)
if creds_dict.get("refresh_token"):
creds_dict["refresh_token"] = token_encryption.encrypt_token(
creds_dict["refresh_token"]
)
if creds_dict.get("client_secret"):
creds_dict["client_secret"] = token_encryption.encrypt_token(
creds_dict["client_secret"]
)
creds_dict["_token_encrypted"] = True
connector.config = creds_dict
flag_modified(connector, "config")
await session.commit()

View file

@ -6,6 +6,7 @@ Allows fetching emails from Gmail mailbox using Google OAuth credentials.
import base64
import json
import logging
import re
from typing import Any
@ -21,6 +22,34 @@ from app.db import (
SearchSourceConnectorType,
)
logger = logging.getLogger(__name__)
def fetch_google_user_email(credentials: Credentials) -> str | None:
    """
    Look up the authenticated user's email address via the Gmail API.

    Calls the Gmail users.getProfile endpoint, which reports the email
    address of the account the credentials belong to.

    Args:
        credentials: Google OAuth Credentials object (not encrypted)

    Returns:
        User's email address, or None when the lookup fails
    """
    try:
        gmail = build("gmail", "v1", credentials=credentials)
        profile = gmail.users().getProfile(userId="me").execute()
        address = profile.get("emailAddress")
        if not address:
            return None
        logger.debug(f"Fetched Google user email: {address}")
        return address
    except Exception as e:
        # Best-effort lookup: log and fall back to None on any API failure.
        logger.warning(f"Error fetching Google user email: {e!s}")
        return None
class GoogleGmailConnector:
"""Class for retrieving emails from Gmail using Google OAuth credentials."""

View file

@ -3,6 +3,7 @@ Jira Connector Module
A module for retrieving data from Jira.
Allows fetching issue lists and their comments, projects and more.
Supports both OAuth 2.0 (preferred) and legacy API token authentication.
"""
import base64
@ -18,6 +19,8 @@ class JiraConnector:
def __init__(
self,
base_url: str | None = None,
access_token: str | None = None,
cloud_id: str | None = None,
email: str | None = None,
api_token: str | None = None,
):
@ -25,18 +28,39 @@ class JiraConnector:
Initialize the JiraConnector class.
Args:
base_url: Jira instance base URL (e.g., 'https://yourcompany.atlassian.net') (optional)
email: Jira account email address (optional)
api_token: Jira API token (optional)
base_url: Jira instance base URL (e.g., 'https://yourcompany.atlassian.net')
access_token: OAuth 2.0 access token (preferred method)
cloud_id: Atlassian cloud ID (used with OAuth for API URL construction)
email: Jira account email address (legacy method, used with api_token)
api_token: Jira API token (legacy method, used with email)
"""
self.base_url = base_url.rstrip("/") if base_url else None
self.access_token = access_token
self.cloud_id = cloud_id
self.email = email
self.api_token = api_token
self.api_version = "3" # Jira Cloud API version
self._use_oauth = access_token is not None
def set_oauth_credentials(
self, base_url: str, access_token: str, cloud_id: str | None = None
) -> None:
"""
Set OAuth 2.0 credentials (preferred method).
Args:
base_url: Jira instance base URL
access_token: OAuth 2.0 access token
cloud_id: Atlassian cloud ID (optional, used for API URL construction)
"""
self.base_url = base_url.rstrip("/")
self.access_token = access_token
self.cloud_id = cloud_id
self._use_oauth = True
def set_credentials(self, base_url: str, email: str, api_token: str) -> None:
"""
Set the Jira credentials.
Set the Jira credentials (legacy method using API token).
Args:
base_url: Jira instance base URL
@ -46,50 +70,69 @@ class JiraConnector:
self.base_url = base_url.rstrip("/")
self.email = email
self.api_token = api_token
self._use_oauth = False
def set_email(self, email: str) -> None:
"""
Set the Jira account email.
Set the Jira account email (legacy method).
Args:
email: Jira account email address
"""
self.email = email
self._use_oauth = False
def set_api_token(self, api_token: str) -> None:
"""
Set the Jira API token.
Set the Jira API token (legacy method).
Args:
api_token: Jira API token
"""
self.api_token = api_token
self._use_oauth = False
def get_headers(self) -> dict[str, str]:
"""
Get headers for Jira API requests using Basic Authentication.
Get headers for Jira API requests.
Uses OAuth Bearer token if available, otherwise falls back to Basic Auth.
Returns:
Dictionary of headers
Raises:
ValueError: If email, api_token, or base_url have not been set
ValueError: If credentials have not been set
"""
if not all([self.base_url, self.email, self.api_token]):
raise ValueError(
"Jira credentials not initialized. Call set_credentials() first."
)
if self._use_oauth:
# OAuth 2.0 authentication
if not self.base_url or not self.access_token:
raise ValueError(
"Jira OAuth credentials not initialized. Call set_oauth_credentials() first."
)
# Create Basic Auth header using email:api_token
auth_str = f"{self.email}:{self.api_token}"
auth_bytes = auth_str.encode("utf-8")
auth_header = "Basic " + base64.b64encode(auth_bytes).decode("ascii")
return {
"Content-Type": "application/json",
"Authorization": f"Bearer {self.access_token}",
"Accept": "application/json",
}
else:
# Legacy Basic Auth
if not all([self.base_url, self.email, self.api_token]):
raise ValueError(
"Jira credentials not initialized. Call set_credentials() first."
)
return {
"Content-Type": "application/json",
"Authorization": auth_header,
"Accept": "application/json",
}
# Create Basic Auth header using email:api_token
auth_str = f"{self.email}:{self.api_token}"
auth_bytes = auth_str.encode("utf-8")
auth_header = "Basic " + base64.b64encode(auth_bytes).decode("ascii")
return {
"Content-Type": "application/json",
"Authorization": auth_header,
"Accept": "application/json",
}
def make_api_request(
self,
@ -104,22 +147,26 @@ class JiraConnector:
Args:
endpoint: API endpoint (without base URL)
params: Query parameters for the request (optional)
method: HTTP method (GET or POST)
json_payload: JSON payload for POST requests (optional)
Returns:
Response data from the API
Raises:
ValueError: If email, api_token, or base_url have not been set
ValueError: If credentials have not been set
Exception: If the API request fails
"""
if not all([self.base_url, self.email, self.api_token]):
raise ValueError(
"Jira credentials not initialized. Call set_credentials() first."
)
url = f"{self.base_url}/rest/api/{self.api_version}/{endpoint}"
headers = self.get_headers()
# Construct API URL based on authentication method
if self._use_oauth and self.cloud_id:
# Use Atlassian API gateway with cloud_id for OAuth
url = f"https://api.atlassian.com/ex/jira/{self.cloud_id}/rest/api/{self.api_version}/{endpoint}"
else:
# Use direct base URL (works for both OAuth and legacy)
url = f"{self.base_url}/rest/api/{self.api_version}/{endpoint}"
if method.upper() == "POST":
response = requests.post(
url, headers=headers, json=json_payload, timeout=500
@ -234,16 +281,23 @@ class JiraConnector:
try:
# Build JQL query for date range
# Query issues that were either created OR updated within the date range
date_filter = (
f"(createdDate >= '{start_date}' AND createdDate <= '{end_date}')"
)
# TODO : This JQL needs some improvement to work as expected
# Use end_date + 1 day with < operator to include the full end date
from datetime import datetime, timedelta
jql = f"{date_filter}"
# Parse end_date and add 1 day for inclusive end date
end_date_obj = datetime.strptime(end_date, "%Y-%m-%d")
end_date_next = (end_date_obj + timedelta(days=1)).strftime("%Y-%m-%d")
# Check both created and updated dates to catch all relevant issues
# Use 'created' and 'updated' (standard JQL field names)
date_filter = (
f"(created >= '{start_date}' AND created < '{end_date_next}') "
f"OR (updated >= '{start_date}' AND updated < '{end_date_next}')"
)
jql = f"{date_filter} ORDER BY created DESC"
if project_key:
jql = (
f'project = "{project_key}" AND {date_filter} ORDER BY created DESC'
)
jql = f'project = "{project_key}" AND ({date_filter}) ORDER BY created DESC'
# Define fields to retrieve
fields = [

View file

@ -0,0 +1,331 @@
"""
Jira OAuth Connector.
Handles OAuth-based authentication and token refresh for Jira API access.
Supports both OAuth 2.0 (preferred) and legacy API token authentication.
"""
import logging
from typing import Any
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.config import config
from app.connectors.jira_connector import JiraConnector
from app.db import SearchSourceConnector
from app.routes.jira_add_connector_route import refresh_jira_token
from app.schemas.atlassian_auth_credentials import AtlassianAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption
logger = logging.getLogger(__name__)
class JiraHistoryConnector:
    """
    Jira connector with OAuth support and automatic token refresh.

    This connector uses OAuth 2.0 access tokens to authenticate with the
    Jira API. It automatically refreshes expired tokens when needed.
    Also supports legacy API token authentication (JIRA_EMAIL / JIRA_API_TOKEN /
    JIRA_BASE_URL keys in the connector config) for backward compatibility.
    """

    def __init__(
        self,
        session: AsyncSession,
        connector_id: int,
        credentials: AtlassianAuthCredentialsBase | None = None,
    ):
        """
        Initialize the JiraHistoryConnector with auto-refresh capability.

        Args:
            session: Database session for updating connector
            connector_id: Connector ID for direct updates
            credentials: Jira OAuth credentials (optional, will be loaded from DB if not provided)
        """
        self._session = session
        self._connector_id = connector_id
        self._credentials = credentials
        # cloud_id / base_url are filled in when the config is loaded from the DB.
        # NOTE(review): if `credentials` is passed in directly they remain None
        # until _get_valid_token() runs — confirm callers go through the DB path.
        self._cloud_id: str | None = None
        self._base_url: str | None = None
        self._jira_client: JiraConnector | None = None
        self._use_oauth = True  # flipped to False when legacy config is detected
        self._legacy_email: str | None = None
        self._legacy_api_token: str | None = None

    async def _get_valid_token(self) -> str:
        """
        Get valid Jira access token, refreshing if needed.

        Returns:
            Valid access token (empty string in legacy Basic-Auth mode, where
            no bearer token exists)

        Raises:
            ValueError: If credentials are missing or invalid
            Exception: If token refresh fails
        """
        # Load credentials from DB if not provided
        if self._credentials is None:
            result = await self._session.execute(
                select(SearchSourceConnector).filter(
                    SearchSourceConnector.id == self._connector_id
                )
            )
            connector = result.scalars().first()
            if not connector:
                raise ValueError(f"Connector {self._connector_id} not found")
            # Copy so decryption below does not mutate the ORM object's config.
            config_data = connector.config.copy()
            # Heuristic: presence of access_token (or the encrypted marker)
            # means OAuth; otherwise fall back to legacy email/API-token keys.
            is_oauth = config_data.get("_token_encrypted", False) or config_data.get(
                "access_token"
            )
            if is_oauth:
                # OAuth 2.0 authentication
                if not config.SECRET_KEY:
                    raise ValueError(
                        "SECRET_KEY not configured but tokens are marked as encrypted"
                    )
                try:
                    token_encryption = TokenEncryption(config.SECRET_KEY)
                    # Decrypt access_token
                    if config_data.get("access_token"):
                        config_data["access_token"] = token_encryption.decrypt_token(
                            config_data["access_token"]
                        )
                        logger.info(
                            f"Decrypted Jira access token for connector {self._connector_id}"
                        )
                    # Decrypt refresh_token if present
                    if config_data.get("refresh_token"):
                        config_data["refresh_token"] = token_encryption.decrypt_token(
                            config_data["refresh_token"]
                        )
                        logger.info(
                            f"Decrypted Jira refresh token for connector {self._connector_id}"
                        )
                except Exception as e:
                    logger.error(
                        f"Failed to decrypt Jira credentials for connector {self._connector_id}: {e!s}"
                    )
                    raise ValueError(
                        f"Failed to decrypt Jira credentials: {e!s}"
                    ) from e
                try:
                    self._credentials = AtlassianAuthCredentialsBase.from_dict(
                        config_data
                    )
                    self._cloud_id = config_data.get("cloud_id")
                    self._base_url = config_data.get("base_url")
                    self._use_oauth = True
                except Exception as e:
                    raise ValueError(f"Invalid Jira OAuth credentials: {e!s}") from e
            else:
                # Legacy API token authentication
                self._legacy_email = config_data.get("JIRA_EMAIL")
                self._legacy_api_token = config_data.get("JIRA_API_TOKEN")
                self._base_url = config_data.get("JIRA_BASE_URL")
                self._use_oauth = False
                if (
                    not self._legacy_email
                    or not self._legacy_api_token
                    or not self._base_url
                ):
                    raise ValueError("Jira credentials not found in connector config")

        # Check if token is expired and refreshable (only for OAuth)
        if (
            self._use_oauth
            and self._credentials.is_expired
            and self._credentials.is_refreshable
        ):
            try:
                logger.info(
                    f"Jira token expired for connector {self._connector_id}, refreshing..."
                )
                # Get connector for refresh
                result = await self._session.execute(
                    select(SearchSourceConnector).filter(
                        SearchSourceConnector.id == self._connector_id
                    )
                )
                connector = result.scalars().first()
                if not connector:
                    raise RuntimeError(
                        f"Connector {self._connector_id} not found; cannot refresh token."
                    )
                # Refresh token
                connector = await refresh_jira_token(self._session, connector)
                # Reload credentials after refresh
                config_data = connector.config.copy()
                token_encrypted = config_data.get("_token_encrypted", False)
                if token_encrypted and config.SECRET_KEY:
                    token_encryption = TokenEncryption(config.SECRET_KEY)
                    if config_data.get("access_token"):
                        config_data["access_token"] = token_encryption.decrypt_token(
                            config_data["access_token"]
                        )
                    if config_data.get("refresh_token"):
                        config_data["refresh_token"] = token_encryption.decrypt_token(
                            config_data["refresh_token"]
                        )
                self._credentials = AtlassianAuthCredentialsBase.from_dict(config_data)
                self._cloud_id = config_data.get("cloud_id")
                self._base_url = config_data.get("base_url")
                # Invalidate cached client so it's recreated with new token
                self._jira_client = None
                logger.info(
                    f"Successfully refreshed Jira token for connector {self._connector_id}"
                )
            except Exception as e:
                logger.error(
                    f"Failed to refresh Jira token for connector {self._connector_id}: {e!s}"
                )
                raise Exception(
                    f"Failed to refresh Jira OAuth credentials: {e!s}"
                ) from e

        if self._use_oauth:
            return self._credentials.access_token
        else:
            # For legacy auth, return empty string (not used for token-based auth)
            return ""

    async def _get_jira_client(self) -> JiraConnector:
        """
        Get or create JiraConnector with valid credentials.

        Returns:
            JiraConnector instance
        """
        if self._jira_client is None:
            if self._use_oauth:
                # Ensure we have valid token (will refresh if needed)
                await self._get_valid_token()
                self._jira_client = JiraConnector(
                    base_url=self._base_url,
                    access_token=self._credentials.access_token,
                    cloud_id=self._cloud_id,
                )
            else:
                # Legacy API token authentication
                self._jira_client = JiraConnector(
                    base_url=self._base_url,
                    email=self._legacy_email,
                    api_token=self._legacy_api_token,
                )
        else:
            # If OAuth, refresh token if expired before returning client
            if self._use_oauth:
                await self._get_valid_token()
                # BUGFIX: a successful refresh inside _get_valid_token() sets
                # self._jira_client to None; calling set_oauth_credentials() on
                # it would raise AttributeError. Rebuild the client in that
                # case, otherwise update the cached client in place.
                if self._jira_client is None:
                    self._jira_client = JiraConnector(
                        base_url=self._base_url,
                        access_token=self._credentials.access_token,
                        cloud_id=self._cloud_id,
                    )
                elif self._credentials:
                    self._jira_client.set_oauth_credentials(
                        base_url=self._base_url or "",
                        access_token=self._credentials.access_token,
                        cloud_id=self._cloud_id,
                    )
        return self._jira_client

    async def get_issues_by_date_range(
        self,
        start_date: str,
        end_date: str,
        include_comments: bool = True,
        project_key: str | None = None,
    ) -> tuple[list[dict[str, Any]], str | None]:
        """
        Fetch issues within a date range.

        This method wraps JiraConnector.get_issues_by_date_range() with automatic token refresh.

        Args:
            start_date: Start date in YYYY-MM-DD format
            end_date: End date in YYYY-MM-DD format (inclusive)
            include_comments: Whether to include comments in the response
            project_key: Optional project key to filter issues

        Returns:
            Tuple containing (issues list, error message or None)
        """
        # Ensure token is valid (will refresh if needed)
        if self._use_oauth:
            await self._get_valid_token()
        # Get client with valid credentials
        client = await self._get_jira_client()
        # JiraConnector methods are synchronous, so we call them directly
        # Token refresh has already been handled above
        return client.get_issues_by_date_range(
            start_date=start_date,
            end_date=end_date,
            include_comments=include_comments,
            project_key=project_key,
        )

    def format_issue(self, issue: dict[str, Any]) -> dict[str, Any]:
        """
        Format an issue for easier consumption.

        Wraps JiraConnector.format_issue().

        Args:
            issue: The issue object from Jira API

        Returns:
            Formatted issue dictionary
        """
        # This is a synchronous method that doesn't need token refresh
        # since it just formats data that's already been fetched
        if self._jira_client is None:
            # Create a minimal client just for formatting (doesn't need credentials)
            self._jira_client = JiraConnector()
        return self._jira_client.format_issue(issue)

    def format_issue_to_markdown(self, issue: dict[str, Any]) -> str:
        """
        Convert an issue to markdown format.

        Wraps JiraConnector.format_issue_to_markdown().

        Args:
            issue: The issue object (either raw or formatted)

        Returns:
            Markdown string representation of the issue
        """
        # This is a synchronous method that doesn't need token refresh
        # since it just formats data that's already been fetched
        if self._jira_client is None:
            # Create a minimal client just for formatting (doesn't need credentials)
            self._jira_client = JiraConnector()
        return self._jira_client.format_issue_to_markdown(issue)

    async def close(self):
        """Close any resources (currently no-op for JiraConnector)."""
        # JiraConnector doesn't maintain persistent connections, so nothing to close
        self._jira_client = None

    async def __aenter__(self):
        """Async context manager entry."""
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Async context manager exit."""
        await self.close()

View file

@ -5,33 +5,203 @@ A module for retrieving issues and comments from Linear.
Allows fetching issue lists and their comments with date range filtering.
"""
import logging
from datetime import datetime
from typing import Any
import httpx
import requests
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.config import config
from app.db import SearchSourceConnector
from app.schemas.linear_auth_credentials import LinearAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption
logger = logging.getLogger(__name__)
LINEAR_GRAPHQL_URL = "https://api.linear.app/graphql"
ORGANIZATION_QUERY = """
query {
organization {
name
}
}
"""
async def fetch_linear_organization_name(access_token: str) -> str | None:
    """
    Fetch organization/workspace name from Linear GraphQL API.

    Args:
        access_token: The Linear OAuth access token

    Returns:
        Organization name or None if fetch fails (this helper never raises)
    """
    try:
        async with httpx.AsyncClient() as client:
            response = await client.post(
                LINEAR_GRAPHQL_URL,
                headers={
                    # BUGFIX: Linear OAuth access tokens must be sent with the
                    # Bearer scheme, consistent with LinearConnector's
                    # execute_graphql_query(); a raw token is only valid for
                    # legacy personal API keys.
                    "Authorization": f"Bearer {access_token}",
                    "Content-Type": "application/json",
                },
                json={"query": ORGANIZATION_QUERY},
                timeout=10.0,
            )
            if response.status_code == 200:
                data = response.json()
                org_name = data.get("data", {}).get("organization", {}).get("name")
                if org_name:
                    logger.debug(f"Fetched Linear organization name: {org_name}")
                    return org_name
            logger.warning(f"Failed to fetch Linear org info: {response.status_code}")
            return None
    except Exception as e:
        logger.warning(f"Error fetching Linear organization name: {e!s}")
        return None
class LinearConnector:
"""Class for retrieving issues and comments from Linear."""
def __init__(self, token: str | None = None):
def __init__(
self,
session: AsyncSession,
connector_id: int,
credentials: LinearAuthCredentialsBase | None = None,
):
"""
Initialize the LinearConnector class.
Initialize the LinearConnector class with auto-refresh capability.
Args:
token: Linear API token (optional, can be set later with set_token)
session: Database session for updating connector
connector_id: Connector ID for direct updates
credentials: Linear OAuth credentials (optional, will be loaded from DB if not provided)
"""
self.token = token
self._session = session
self._connector_id = connector_id
self._credentials = credentials
self.api_url = "https://api.linear.app/graphql"
def set_token(self, token: str) -> None:
async def _get_valid_token(self) -> str:
"""
Set the Linear API token.
Get valid Linear access token, refreshing if needed.
Args:
token: Linear API token
Returns:
Valid access token
Raises:
ValueError: If credentials are missing or invalid
Exception: If token refresh fails
"""
self.token = token
# Load credentials from DB if not provided
if self._credentials is None:
result = await self._session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == self._connector_id
)
)
connector = result.scalars().first()
if not connector:
raise ValueError(f"Connector {self._connector_id} not found")
config_data = connector.config.copy()
# Decrypt credentials if they are encrypted
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
try:
token_encryption = TokenEncryption(config.SECRET_KEY)
# Decrypt sensitive fields
if config_data.get("access_token"):
config_data["access_token"] = token_encryption.decrypt_token(
config_data["access_token"]
)
if config_data.get("refresh_token"):
config_data["refresh_token"] = token_encryption.decrypt_token(
config_data["refresh_token"]
)
logger.info(
f"Decrypted Linear credentials for connector {self._connector_id}"
)
except Exception as e:
logger.error(
f"Failed to decrypt Linear credentials for connector {self._connector_id}: {e!s}"
)
raise ValueError(
f"Failed to decrypt Linear credentials: {e!s}"
) from e
try:
self._credentials = LinearAuthCredentialsBase.from_dict(config_data)
except Exception as e:
raise ValueError(f"Invalid Linear credentials: {e!s}") from e
# Check if token is expired and refreshable
if self._credentials.is_expired and self._credentials.is_refreshable:
try:
logger.info(
f"Linear token expired for connector {self._connector_id}, refreshing..."
)
# Get connector for refresh
result = await self._session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.id == self._connector_id
)
)
connector = result.scalars().first()
if not connector:
raise RuntimeError(
f"Connector {self._connector_id} not found; cannot refresh token."
)
# Lazy import to avoid circular dependency
from app.routes.linear_add_connector_route import refresh_linear_token
# Refresh token
connector = await refresh_linear_token(self._session, connector)
# Reload credentials after refresh
config_data = connector.config.copy()
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
token_encryption = TokenEncryption(config.SECRET_KEY)
if config_data.get("access_token"):
config_data["access_token"] = token_encryption.decrypt_token(
config_data["access_token"]
)
if config_data.get("refresh_token"):
config_data["refresh_token"] = token_encryption.decrypt_token(
config_data["refresh_token"]
)
self._credentials = LinearAuthCredentialsBase.from_dict(config_data)
logger.info(
f"Successfully refreshed Linear token for connector {self._connector_id}"
)
except Exception as e:
logger.error(
f"Failed to refresh Linear token for connector {self._connector_id}: {e!s}"
)
raise Exception(
f"Failed to refresh Linear OAuth credentials: {e!s}"
) from e
return self._credentials.access_token
def get_headers(self) -> dict[str, str]:
"""
@ -41,18 +211,26 @@ class LinearConnector:
Dictionary of headers
Raises:
ValueError: If no Linear token has been set
ValueError: If no Linear access token has been set
"""
if not self.token:
raise ValueError("Linear token not initialized. Call set_token() first.")
# This is a synchronous method, but we need async token refresh
# For now, we'll raise an error if called directly
# All API calls should go through execute_graphql_query which handles async refresh
if not self._credentials or not self._credentials.access_token:
raise ValueError(
"Linear access token not initialized. Use execute_graphql_query() method."
)
return {"Content-Type": "application/json", "Authorization": self.token}
return {
"Content-Type": "application/json",
"Authorization": f"Bearer {self._credentials.access_token}",
}
def execute_graphql_query(
async def execute_graphql_query(
self, query: str, variables: dict[str, Any] | None = None
) -> dict[str, Any]:
"""
Execute a GraphQL query against the Linear API.
Execute a GraphQL query against the Linear API with automatic token refresh.
Args:
query: GraphQL query string
@ -62,13 +240,17 @@ class LinearConnector:
Response data from the API
Raises:
ValueError: If no Linear token has been set
ValueError: If no Linear access token has been set
Exception: If the API request fails
"""
if not self.token:
raise ValueError("Linear token not initialized. Call set_token() first.")
# Get valid token (refreshes if needed)
access_token = await self._get_valid_token()
headers = {
"Content-Type": "application/json",
"Authorization": f"Bearer {access_token}",
}
headers = self.get_headers()
payload = {"query": query}
if variables:
@ -83,7 +265,9 @@ class LinearConnector:
f"Query failed with status code {response.status_code}: {response.text}"
)
def get_all_issues(self, include_comments: bool = True) -> list[dict[str, Any]]:
async def get_all_issues(
self, include_comments: bool = True
) -> list[dict[str, Any]]:
"""
Fetch all issues from Linear.
@ -94,7 +278,7 @@ class LinearConnector:
List of issue objects
Raises:
ValueError: If no Linear token has been set
ValueError: If no Linear access token has been set
Exception: If the API request fails
"""
comments_query = ""
@ -146,7 +330,7 @@ class LinearConnector:
}}
"""
result = self.execute_graphql_query(query)
result = await self.execute_graphql_query(query)
# Extract issues from the response
if (
@ -158,7 +342,7 @@ class LinearConnector:
return []
def get_issues_by_date_range(
async def get_issues_by_date_range(
self, start_date: str, end_date: str, include_comments: bool = True
) -> tuple[list[dict[str, Any]], str | None]:
"""
@ -172,6 +356,18 @@ class LinearConnector:
Returns:
Tuple containing (issues list, error message or None)
"""
# Validate date strings
if not start_date or start_date.lower() in ("undefined", "null", "none"):
return (
[],
"Invalid start_date: must be a valid date string in YYYY-MM-DD format",
)
if not end_date or end_date.lower() in ("undefined", "null", "none"):
return (
[],
"Invalid end_date: must be a valid date string in YYYY-MM-DD format",
)
# Convert date strings to ISO format
try:
# For Linear API: we need to use a more specific format for the filter
@ -258,7 +454,7 @@ class LinearConnector:
# Handle pagination to get all issues
while has_next_page:
variables = {"after": cursor} if cursor else {}
result = self.execute_graphql_query(query, variables)
result = await self.execute_graphql_query(query, variables)
# Check for errors
if "errors" in result:
@ -446,37 +642,3 @@ class LinearConnector:
return dt.strftime("%Y-%m-%d %H:%M:%S")
except ValueError:
return iso_date
# Example usage (uncomment to use):
"""
if __name__ == "__main__":
# Set your token here
token = "YOUR_LINEAR_API_KEY"
linear = LinearConnector(token)
try:
# Get all issues with comments
issues = linear.get_all_issues()
print(f"Retrieved {len(issues)} issues")
# Format and print the first issue as markdown
if issues:
issue_md = linear.format_issue_to_markdown(issues[0])
print("\nSample Issue in Markdown:\n")
print(issue_md)
# Get issues by date range
start_date = "2023-01-01"
end_date = "2023-01-31"
date_issues, error = linear.get_issues_by_date_range(start_date, end_date)
if error:
print(f"Error: {error}")
else:
print(f"\nRetrieved {len(date_issues)} issues from {start_date} to {end_date}")
except Exception as e:
print(f"Error: {e}")
"""

View file

@ -1,19 +1,167 @@
import logging
from notion_client import AsyncClient
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.config import config
from app.db import SearchSourceConnector
from app.routes.notion_add_connector_route import refresh_notion_token
from app.schemas.notion_auth_credentials import NotionAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption
logger = logging.getLogger(__name__)
class NotionHistoryConnector:
def __init__(self, token):
def __init__(
self,
session: AsyncSession,
connector_id: int,
credentials: NotionAuthCredentialsBase | None = None,
):
"""
Initialize the NotionPageFetcher with a token.
Initialize the NotionHistoryConnector with auto-refresh capability.
Args:
token (str): Notion integration token
session: Database session for updating connector
connector_id: Connector ID for direct updates
credentials: Notion OAuth credentials (optional, will be loaded from DB if not provided)
"""
self.notion = AsyncClient(auth=token)
self._session = session
self._connector_id = connector_id
self._credentials = credentials
self._notion_client: AsyncClient | None = None
    async def _get_valid_token(self) -> str:
        """
        Get valid Notion access token, refreshing if needed.

        On first call this lazily loads the connector config from the DB,
        decrypting the stored tokens when the ``_token_encrypted`` flag is set
        and a SECRET_KEY is configured. If the credentials report themselves
        expired and refreshable, the token is refreshed via
        refresh_notion_token() and re-read from the updated connector row.

        Returns:
            Valid access token

        Raises:
            ValueError: If credentials are missing or invalid
            Exception: If token refresh fails
        """
        # Load credentials from DB if not provided
        if self._credentials is None:
            result = await self._session.execute(
                select(SearchSourceConnector).filter(
                    SearchSourceConnector.id == self._connector_id
                )
            )
            connector = result.scalars().first()
            if not connector:
                raise ValueError(f"Connector {self._connector_id} not found")

            # Copy so decryption below does not mutate the ORM object's config.
            config_data = connector.config.copy()

            # Decrypt credentials if they are encrypted
            token_encrypted = config_data.get("_token_encrypted", False)
            if token_encrypted and config.SECRET_KEY:
                try:
                    token_encryption = TokenEncryption(config.SECRET_KEY)
                    # Decrypt sensitive fields
                    if config_data.get("access_token"):
                        config_data["access_token"] = token_encryption.decrypt_token(
                            config_data["access_token"]
                        )
                    if config_data.get("refresh_token"):
                        config_data["refresh_token"] = token_encryption.decrypt_token(
                            config_data["refresh_token"]
                        )
                    logger.info(
                        f"Decrypted Notion credentials for connector {self._connector_id}"
                    )
                except Exception as e:
                    logger.error(
                        f"Failed to decrypt Notion credentials for connector {self._connector_id}: {e!s}"
                    )
                    raise ValueError(
                        f"Failed to decrypt Notion credentials: {e!s}"
                    ) from e

            try:
                self._credentials = NotionAuthCredentialsBase.from_dict(config_data)
            except Exception as e:
                raise ValueError(f"Invalid Notion credentials: {e!s}") from e

        # Check if token is expired and refreshable
        if self._credentials.is_expired and self._credentials.is_refreshable:
            try:
                logger.info(
                    f"Notion token expired for connector {self._connector_id}, refreshing..."
                )
                # Get connector for refresh
                result = await self._session.execute(
                    select(SearchSourceConnector).filter(
                        SearchSourceConnector.id == self._connector_id
                    )
                )
                connector = result.scalars().first()
                if not connector:
                    raise RuntimeError(
                        f"Connector {self._connector_id} not found; cannot refresh token."
                    )
                # Refresh token
                connector = await refresh_notion_token(self._session, connector)
                # Reload credentials after refresh
                config_data = connector.config.copy()
                token_encrypted = config_data.get("_token_encrypted", False)
                if token_encrypted and config.SECRET_KEY:
                    token_encryption = TokenEncryption(config.SECRET_KEY)
                    if config_data.get("access_token"):
                        config_data["access_token"] = token_encryption.decrypt_token(
                            config_data["access_token"]
                        )
                    if config_data.get("refresh_token"):
                        config_data["refresh_token"] = token_encryption.decrypt_token(
                            config_data["refresh_token"]
                        )
                self._credentials = NotionAuthCredentialsBase.from_dict(config_data)
                # Invalidate cached client so it's recreated with new token
                self._notion_client = None
                logger.info(
                    f"Successfully refreshed Notion token for connector {self._connector_id}"
                )
            except Exception as e:
                logger.error(
                    f"Failed to refresh Notion token for connector {self._connector_id}: {e!s}"
                )
                raise Exception(
                    f"Failed to refresh Notion OAuth credentials: {e!s}"
                ) from e

        return self._credentials.access_token
async def _get_client(self) -> AsyncClient:
"""
Get or create Notion AsyncClient with valid token.
Returns:
Notion AsyncClient instance
"""
if self._notion_client is None:
token = await self._get_valid_token()
self._notion_client = AsyncClient(auth=token)
return self._notion_client
async def close(self):
"""Close the async client connection."""
await self.notion.aclose()
if self._notion_client:
await self._notion_client.aclose()
self._notion_client = None
async def __aenter__(self):
"""Async context manager entry."""
@ -34,6 +182,8 @@ class NotionHistoryConnector:
Returns:
list: List of dictionaries containing page data
"""
notion = await self._get_client()
# Build the filter for the search
# Note: Notion API requires specific filter structure
search_params = {}
@ -67,7 +217,7 @@ class NotionHistoryConnector:
if cursor:
search_params["start_cursor"] = cursor
search_results = await self.notion.search(**search_params)
search_results = await notion.search(**search_params)
pages.extend(search_results["results"])
has_more = search_results.get("has_more", False)
@ -125,6 +275,8 @@ class NotionHistoryConnector:
Returns:
list: List of processed blocks from the page
"""
notion = await self._get_client()
blocks = []
has_more = True
cursor = None
@ -132,11 +284,11 @@ class NotionHistoryConnector:
# Paginate through all blocks
while has_more:
if cursor:
response = await self.notion.blocks.children.list(
response = await notion.blocks.children.list(
block_id=page_id, start_cursor=cursor
)
else:
response = await self.notion.blocks.children.list(block_id=page_id)
response = await notion.blocks.children.list(block_id=page_id)
blocks.extend(response["results"])
has_more = response["has_more"]
@ -162,6 +314,8 @@ class NotionHistoryConnector:
Returns:
dict: Processed block with content and children
"""
notion = await self._get_client()
block_id = block["id"]
block_type = block["type"]
@ -174,9 +328,7 @@ class NotionHistoryConnector:
if has_children:
# Fetch and process child blocks
children_response = await self.notion.blocks.children.list(
block_id=block_id
)
children_response = await notion.blocks.children.list(block_id=block_id)
for child_block in children_response["results"]:
child_blocks.append(await self.process_block(child_block))

View file

@ -12,6 +12,14 @@ from typing import Any
from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.config import config
from app.db import SearchSourceConnector
from app.routes.slack_add_connector_route import refresh_slack_token
from app.schemas.slack_auth_credentials import SlackAuthCredentialsBase
from app.utils.oauth_security import TokenEncryption
logger = logging.getLogger(__name__) # Added logger
@ -19,25 +27,199 @@ logger = logging.getLogger(__name__) # Added logger
class SlackHistory:
"""Class for retrieving conversation history from Slack channels."""
def __init__(self, token: str | None = None):
def __init__(
self,
token: str | None = None,
session: AsyncSession | None = None,
connector_id: int | None = None,
credentials: SlackAuthCredentialsBase | None = None,
):
"""
Initialize the SlackHistory class.
Args:
token: Slack API token (optional, can be set later with set_token)
token: Slack API token (optional, for backward compatibility)
session: Database session for token refresh (optional)
connector_id: Connector ID for token refresh (optional)
credentials: Slack OAuth credentials (optional, will be loaded from DB if not provided)
"""
self.client = WebClient(token=token) if token else None
self._session = session
self._connector_id = connector_id
self._credentials = credentials
# For backward compatibility, if token is provided directly, use it
if token:
self.client = WebClient(token=token)
else:
self.client = None
    async def _get_valid_token(self) -> str:
        """
        Get valid Slack bot token, refreshing if needed.

        Lazily loads the connector config from the DB on first call
        (decrypting ``bot_token``/``refresh_token`` when the
        ``_token_encrypted`` flag is set and SECRET_KEY is configured), and
        refreshes the token via refresh_slack_token() when the credentials
        report themselves expired and refreshable.

        Returns:
            Valid bot token

        Raises:
            ValueError: If credentials are missing or invalid, or if this
                instance was built with a raw token (no session/connector_id)
                and therefore cannot participate in refresh
            Exception: If token refresh fails
        """
        # If we have a direct token (backward compatibility), use it
        # Check if client was initialized with a token directly (not via credentials)
        if (
            self.client
            and self._session is None
            and self._connector_id is None
            and self._credentials is None
        ):
            # This means it was initialized with a direct token, extract it
            # WebClient stores token internally, we need to get it from the client
            # For backward compatibility, we'll use the client directly
            # But we can't easily extract the token, so we'll just use the client
            # In this case, we'll skip refresh logic
            # This is the old pattern - just use the client as-is
            # We can't extract token easily, so we'll raise an error
            # asking to use the new pattern
            raise ValueError(
                "Cannot refresh token: Please use session and connector_id for auto-refresh support"
            )

        # Load credentials from DB if not provided
        if self._credentials is None:
            if not self._session or not self._connector_id:
                raise ValueError(
                    "Cannot load credentials: session and connector_id required"
                )
            result = await self._session.execute(
                select(SearchSourceConnector).filter(
                    SearchSourceConnector.id == self._connector_id
                )
            )
            connector = result.scalars().first()
            if not connector:
                raise ValueError(f"Connector {self._connector_id} not found")

            # Copy so decryption below does not mutate the ORM object's config.
            config_data = connector.config.copy()

            # Decrypt credentials if they are encrypted
            token_encrypted = config_data.get("_token_encrypted", False)
            if token_encrypted and config.SECRET_KEY:
                try:
                    token_encryption = TokenEncryption(config.SECRET_KEY)
                    # Decrypt sensitive fields
                    if config_data.get("bot_token"):
                        config_data["bot_token"] = token_encryption.decrypt_token(
                            config_data["bot_token"]
                        )
                    if config_data.get("refresh_token"):
                        config_data["refresh_token"] = token_encryption.decrypt_token(
                            config_data["refresh_token"]
                        )
                    logger.info(
                        f"Decrypted Slack credentials for connector {self._connector_id}"
                    )
                except Exception as e:
                    logger.error(
                        f"Failed to decrypt Slack credentials for connector {self._connector_id}: {e!s}"
                    )
                    raise ValueError(
                        f"Failed to decrypt Slack credentials: {e!s}"
                    ) from e

            try:
                self._credentials = SlackAuthCredentialsBase.from_dict(config_data)
            except Exception as e:
                raise ValueError(f"Invalid Slack credentials: {e!s}") from e

        # Check if token is expired and refreshable
        if self._credentials.is_expired and self._credentials.is_refreshable:
            try:
                logger.info(
                    f"Slack token expired for connector {self._connector_id}, refreshing..."
                )
                # Get connector for refresh
                result = await self._session.execute(
                    select(SearchSourceConnector).filter(
                        SearchSourceConnector.id == self._connector_id
                    )
                )
                connector = result.scalars().first()
                if not connector:
                    raise RuntimeError(
                        f"Connector {self._connector_id} not found; cannot refresh token."
                    )
                # Refresh token
                connector = await refresh_slack_token(self._session, connector)
                # Reload credentials after refresh
                config_data = connector.config.copy()
                token_encrypted = config_data.get("_token_encrypted", False)
                if token_encrypted and config.SECRET_KEY:
                    token_encryption = TokenEncryption(config.SECRET_KEY)
                    if config_data.get("bot_token"):
                        config_data["bot_token"] = token_encryption.decrypt_token(
                            config_data["bot_token"]
                        )
                    if config_data.get("refresh_token"):
                        config_data["refresh_token"] = token_encryption.decrypt_token(
                            config_data["refresh_token"]
                        )
                self._credentials = SlackAuthCredentialsBase.from_dict(config_data)
                # Invalidate cached client so it's recreated with new token
                self.client = None
                logger.info(
                    f"Successfully refreshed Slack token for connector {self._connector_id}"
                )
            except Exception as e:
                logger.error(
                    f"Failed to refresh Slack token for connector {self._connector_id}: {e!s}"
                )
                raise Exception(
                    f"Failed to refresh Slack OAuth credentials: {e!s}"
                ) from e

        return self._credentials.bot_token
async def _ensure_client(self) -> WebClient:
    """
    Ensure Slack client is initialized with valid token.

    Lazily builds the WebClient from stored OAuth credentials via
    _get_valid_token(), which handles decryption and auto-refresh of
    expired tokens.

    Returns:
        WebClient instance
    """
    # If client was initialized with direct token (backward compatibility), use it
    if self.client and (self._session is None or self._connector_id is None):
        return self.client
    # Otherwise, initialize with token from credentials (with auto-refresh)
    if self.client is None:
        token = await self._get_valid_token()
        # Skip if it's the placeholder for direct token initialization.
        # NOTE(review): if the placeholder is returned here, self.client stays
        # None and this method returns None despite the WebClient annotation —
        # confirm callers always call set_token() first in that path.
        if token != "direct_token_initialized":
            self.client = WebClient(token=token)
    return self.client
def set_token(self, token: str) -> None:
"""
Set the Slack API token.
Set the Slack API token (for backward compatibility).
Args:
token: Slack API token
"""
self.client = WebClient(token=token)
def get_all_channels(self, include_private: bool = True) -> list[dict[str, Any]]:
async def get_all_channels(
self, include_private: bool = True
) -> list[dict[str, Any]]:
"""
Fetch all channels that the bot has access to, with rate limit handling.
@ -52,8 +234,7 @@ class SlackHistory:
SlackApiError: If there's an unrecoverable error calling the Slack API
RuntimeError: For unexpected errors during channel fetching.
"""
if not self.client:
raise ValueError("Slack client not initialized. Call set_token() first.")
client = await self._ensure_client()
channels_list = [] # Changed from dict to list
types = "public_channel"
@ -72,7 +253,7 @@ class SlackHistory:
time.sleep(3)
current_limit = 1000 # Max limit
api_result = self.client.conversations_list(
api_result = client.conversations_list(
types=types, cursor=next_cursor, limit=current_limit
)
@ -129,7 +310,7 @@ class SlackHistory:
return channels_list
def get_conversation_history(
async def get_conversation_history(
self,
channel_id: str,
limit: int = 1000,
@ -152,8 +333,7 @@ class SlackHistory:
ValueError: If no Slack client has been initialized
SlackApiError: If there's an error calling the Slack API
"""
if not self.client:
raise ValueError("Slack client not initialized. Call set_token() first.")
client = await self._ensure_client()
messages = []
next_cursor = None
@ -177,7 +357,7 @@ class SlackHistory:
current_api_call_successful = False
result = None # Ensure result is defined
try:
result = self.client.conversations_history(**kwargs)
result = client.conversations_history(**kwargs)
current_api_call_successful = True
except SlackApiError as e_history:
if (
@ -197,7 +377,7 @@ class SlackHistory:
else:
raise # Re-raise to outer handler for not_in_channel or other SlackApiErrors
if not current_api_call_successful:
if not current_api_call_successful or result is None:
continue # Retry the current page fetch due to handled rate limit
# Process result if successful
@ -252,7 +432,7 @@ class SlackHistory:
except ValueError:
return None
def get_history_by_date_range(
async def get_history_by_date_range(
self, channel_id: str, start_date: str, end_date: str, limit: int = 1000
) -> tuple[list[dict[str, Any]], str | None]:
"""
@ -282,7 +462,7 @@ class SlackHistory:
latest += 86400 # seconds in a day
try:
messages = self.get_conversation_history(
messages = await self.get_conversation_history(
channel_id=channel_id, limit=limit, oldest=oldest, latest=latest
)
return messages, None
@ -291,7 +471,7 @@ class SlackHistory:
except ValueError as e:
return [], str(e)
def get_user_info(self, user_id: str) -> dict[str, Any]:
async def get_user_info(self, user_id: str) -> dict[str, Any]:
"""
Get information about a user.
@ -305,8 +485,7 @@ class SlackHistory:
ValueError: If no Slack client has been initialized
SlackApiError: If there's an error calling the Slack API
"""
if not self.client:
raise ValueError("Slack client not initialized. Call set_token() first.")
client = await self._ensure_client()
while True:
try:
@ -314,7 +493,7 @@ class SlackHistory:
# For now, we are only adding Retry-After as per plan.
# time.sleep(0.6) # Optional: ~100 req/min if ever needed.
result = self.client.users_info(user=user_id)
result = client.users_info(user=user_id)
return result["user"] # Success, return and exit loop implicitly
except SlackApiError as e_user_info:
@ -343,7 +522,7 @@ class SlackHistory:
)
raise general_error from general_error # Re-raise unexpected errors
def format_message(
async def format_message(
self, msg: dict[str, Any], include_user_info: bool = False
) -> dict[str, Any]:
"""
@ -369,9 +548,9 @@ class SlackHistory:
"is_thread": "thread_ts" in msg,
}
if include_user_info and "user" in msg and self.client:
if include_user_info and "user" in msg:
try:
user_info = self.get_user_info(msg["user"])
user_info = await self.get_user_info(msg["user"])
formatted["user_name"] = user_info.get("real_name", "Unknown")
formatted["user_email"] = user_info.get("profile", {}).get("email", "")
except Exception:

View file

@ -4,6 +4,9 @@ from .airtable_add_connector_route import (
router as airtable_add_connector_router,
)
from .circleback_webhook_route import router as circleback_webhook_router
from .clickup_add_connector_route import router as clickup_add_connector_router
from .confluence_add_connector_route import router as confluence_add_connector_router
from .discord_add_connector_route import router as discord_add_connector_router
from .documents_routes import router as documents_router
from .editor_routes import router as editor_router
from .google_calendar_add_connector_route import (
@ -15,15 +18,19 @@ from .google_drive_add_connector_route import (
from .google_gmail_add_connector_route import (
router as google_gmail_add_connector_router,
)
from .jira_add_connector_route import router as jira_add_connector_router
from .linear_add_connector_route import router as linear_add_connector_router
from .logs_routes import router as logs_router
from .luma_add_connector_route import router as luma_add_connector_router
from .new_chat_routes import router as new_chat_router
from .new_llm_config_routes import router as new_llm_config_router
from .notes_routes import router as notes_router
from .notion_add_connector_route import router as notion_add_connector_router
from .podcasts_routes import router as podcasts_router
from .rbac_routes import router as rbac_router
from .search_source_connectors_routes import router as search_source_connectors_router
from .search_spaces_routes import router as search_spaces_router
from .slack_add_connector_route import router as slack_add_connector_router
router = APIRouter()
@ -39,7 +46,14 @@ router.include_router(google_calendar_add_connector_router)
router.include_router(google_gmail_add_connector_router)
router.include_router(google_drive_add_connector_router)
router.include_router(airtable_add_connector_router)
router.include_router(linear_add_connector_router)
router.include_router(luma_add_connector_router)
router.include_router(notion_add_connector_router)
router.include_router(slack_add_connector_router)
router.include_router(discord_add_connector_router)
router.include_router(jira_add_connector_router)
router.include_router(confluence_add_connector_router)
router.include_router(clickup_add_connector_router)
router.include_router(new_llm_config_router) # LLM configs with prompt configuration
router.include_router(logs_router)
router.include_router(circleback_webhook_router) # Circleback meeting webhooks

View file

@ -1,6 +1,5 @@
import base64
import hashlib
import json
import logging
import secrets
from datetime import UTC, datetime, timedelta
@ -12,9 +11,9 @@ from fastapi.responses import RedirectResponse
from pydantic import ValidationError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.config import config
from app.connectors.airtable_connector import fetch_airtable_user_email
from app.db import (
SearchSourceConnector,
SearchSourceConnectorType,
@ -23,6 +22,11 @@ from app.db import (
)
from app.schemas.airtable_auth_credentials import AirtableAuthCredentialsBase
from app.users import current_active_user
from app.utils.connector_naming import (
check_duplicate_connector,
generate_unique_connector_name,
)
from app.utils.oauth_security import OAuthStateManager, TokenEncryption
logger = logging.getLogger(__name__)
@ -40,6 +44,30 @@ SCOPES = [
"user.email:read",
]
# Initialize security utilities
_state_manager = None
_token_encryption = None
def get_state_manager() -> OAuthStateManager:
    """Return the module-level OAuthStateManager, creating it on first use.

    Raises:
        ValueError: If ``config.SECRET_KEY`` is not configured.
    """
    global _state_manager
    # Fast path: singleton already built.
    if _state_manager is not None:
        return _state_manager
    if not config.SECRET_KEY:
        raise ValueError("SECRET_KEY must be set for OAuth security")
    _state_manager = OAuthStateManager(config.SECRET_KEY)
    return _state_manager
def get_token_encryption() -> TokenEncryption:
    """Return the module-level TokenEncryption helper, creating it on first use.

    Raises:
        ValueError: If ``config.SECRET_KEY`` is not configured.
    """
    global _token_encryption
    # Fast path: singleton already built.
    if _token_encryption is not None:
        return _token_encryption
    if not config.SECRET_KEY:
        raise ValueError("SECRET_KEY must be set for token encryption")
    _token_encryption = TokenEncryption(config.SECRET_KEY)
    return _token_encryption
def make_basic_auth_header(client_id: str, client_secret: str) -> str:
credentials = f"{client_id}:{client_secret}".encode()
@ -90,18 +118,19 @@ async def connect_airtable(space_id: int, user: User = Depends(current_active_us
status_code=500, detail="Airtable OAuth not configured."
)
if not config.SECRET_KEY:
raise HTTPException(
status_code=500, detail="SECRET_KEY not configured for OAuth security."
)
# Generate PKCE parameters
code_verifier, code_challenge = generate_pkce_pair()
# Generate state parameter
state_payload = json.dumps(
{
"space_id": space_id,
"user_id": str(user.id),
"code_verifier": code_verifier,
}
# Generate secure state parameter with HMAC signature (including code_verifier for PKCE)
state_manager = get_state_manager()
state_encoded = state_manager.generate_secure_state(
space_id, user.id, code_verifier=code_verifier
)
state_encoded = base64.urlsafe_b64encode(state_payload.encode()).decode()
# Build authorization URL
auth_params = {
@ -134,8 +163,9 @@ async def connect_airtable(space_id: int, user: User = Depends(current_active_us
@router.get("/auth/airtable/connector/callback")
async def airtable_callback(
request: Request,
code: str,
state: str,
code: str | None = None,
error: str | None = None,
state: str | None = None,
session: AsyncSession = Depends(get_async_session),
):
"""
@ -143,7 +173,8 @@ async def airtable_callback(
Args:
request: FastAPI request object
code: Authorization code from Airtable
code: Authorization code from Airtable (if user granted access)
error: Error code from Airtable (if user denied access or error occurred)
state: State parameter containing user/space info
session: Database session
@ -151,10 +182,42 @@ async def airtable_callback(
Redirect response to frontend
"""
try:
# Decode and parse the state
# Handle OAuth errors (e.g., user denied access)
if error:
logger.warning(f"Airtable OAuth error: {error}")
# Try to decode state to get space_id for redirect, but don't fail if it's invalid
space_id = None
if state:
try:
state_manager = get_state_manager()
data = state_manager.validate_state(state)
space_id = data.get("space_id")
except Exception:
# If state is invalid, we'll redirect without space_id
logger.warning("Failed to validate state in error handler")
# Redirect to frontend with error parameter
if space_id:
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=airtable_oauth_denied"
)
else:
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=airtable_oauth_denied"
)
# Validate required parameters for successful flow
if not code:
raise HTTPException(status_code=400, detail="Missing authorization code")
if not state:
raise HTTPException(status_code=400, detail="Missing state parameter")
# Validate and decode state with signature verification
state_manager = get_state_manager()
try:
decoded_state = base64.urlsafe_b64decode(state.encode()).decode()
data = json.loads(decoded_state)
data = state_manager.validate_state(state)
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=400, detail=f"Invalid state parameter: {e!s}"
@ -162,7 +225,12 @@ async def airtable_callback(
user_id = UUID(data["user_id"])
space_id = data["space_id"]
code_verifier = data["code_verifier"]
code_verifier = data.get("code_verifier")
if not code_verifier:
raise HTTPException(
status_code=400, detail="Missing code_verifier in state parameter"
)
auth_header = make_basic_auth_header(
config.AIRTABLE_CLIENT_ID, config.AIRTABLE_CLIENT_SECRET
)
@ -201,56 +269,78 @@ async def airtable_callback(
token_json = token_response.json()
# Encrypt sensitive tokens before storing
token_encryption = get_token_encryption()
access_token = token_json.get("access_token")
refresh_token = token_json.get("refresh_token")
if not access_token:
raise HTTPException(
status_code=400, detail="No access token received from Airtable"
)
user_email = await fetch_airtable_user_email(access_token)
# Calculate expiration time (UTC, tz-aware)
expires_at = None
if token_json.get("expires_in"):
now_utc = datetime.now(UTC)
expires_at = now_utc + timedelta(seconds=int(token_json["expires_in"]))
# Create credentials object
# Create credentials object with encrypted tokens
credentials = AirtableAuthCredentialsBase(
access_token=token_json["access_token"],
refresh_token=token_json.get("refresh_token"),
access_token=token_encryption.encrypt_token(access_token),
refresh_token=token_encryption.encrypt_token(refresh_token)
if refresh_token
else None,
token_type=token_json.get("token_type", "Bearer"),
expires_in=token_json.get("expires_in"),
expires_at=expires_at,
scope=token_json.get("scope"),
)
# Check if connector already exists for this search space and user
existing_connector_result = await session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.AIRTABLE_CONNECTOR,
)
)
existing_connector = existing_connector_result.scalars().first()
# Mark that tokens are encrypted for backward compatibility
credentials_dict = credentials.to_dict()
credentials_dict["_token_encrypted"] = True
if existing_connector:
# Update existing connector
existing_connector.config = credentials.to_dict()
existing_connector.name = "Airtable Connector"
existing_connector.is_indexable = True
logger.info(
f"Updated existing Airtable connector for user {user_id} in space {space_id}"
# Check for duplicate connector (same account already connected)
is_duplicate = await check_duplicate_connector(
session,
SearchSourceConnectorType.AIRTABLE_CONNECTOR,
space_id,
user_id,
user_email,
)
if is_duplicate:
logger.warning(
f"Duplicate Airtable connector detected for user {user_id} with email {user_email}"
)
else:
# Create new connector
new_connector = SearchSourceConnector(
name="Airtable Connector",
connector_type=SearchSourceConnectorType.AIRTABLE_CONNECTOR,
is_indexable=True,
config=credentials.to_dict(),
search_space_id=space_id,
user_id=user_id,
)
session.add(new_connector)
logger.info(
f"Created new Airtable connector for user {user_id} in space {space_id}"
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=airtable-connector"
)
# Generate a unique, user-friendly connector name
connector_name = await generate_unique_connector_name(
session,
SearchSourceConnectorType.AIRTABLE_CONNECTOR,
space_id,
user_id,
user_email,
)
# Create new connector
new_connector = SearchSourceConnector(
name=connector_name,
connector_type=SearchSourceConnectorType.AIRTABLE_CONNECTOR,
is_indexable=True,
config=credentials_dict,
search_space_id=space_id,
user_id=user_id,
)
session.add(new_connector)
logger.info(
f"Created new Airtable connector for user {user_id} in space {space_id}"
)
try:
await session.commit()
logger.info(f"Successfully saved Airtable connector for user {user_id}")
@ -258,7 +348,7 @@ async def airtable_callback(
# Redirect to the frontend with success params for indexing config
# Using query params to auto-open the popup with config view on new-chat page
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=airtable-connector"
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=airtable-connector&connectorId={new_connector.id}"
)
except ValidationError as e:
@ -270,7 +360,7 @@ async def airtable_callback(
await session.rollback()
raise HTTPException(
status_code=409,
detail=f"Integrity error: A connector with this type already exists. {e!s}",
detail=f"Database integrity error: {e!s}",
) from e
except Exception as e:
logger.error(f"Failed to create search source connector: {e!s}")
@ -291,7 +381,7 @@ async def airtable_callback(
async def refresh_airtable_token(
session: AsyncSession, connector: SearchSourceConnector
):
) -> SearchSourceConnector:
"""
Refresh the Airtable access token for a connector.
@ -306,6 +396,27 @@ async def refresh_airtable_token(
logger.info(f"Refreshing Airtable token for connector {connector.id}")
credentials = AirtableAuthCredentialsBase.from_dict(connector.config)
# Decrypt tokens if they are encrypted
token_encryption = get_token_encryption()
is_encrypted = connector.config.get("_token_encrypted", False)
refresh_token = credentials.refresh_token
if is_encrypted and refresh_token:
try:
refresh_token = token_encryption.decrypt_token(refresh_token)
except Exception as e:
logger.error(f"Failed to decrypt refresh token: {e!s}")
raise HTTPException(
status_code=500, detail="Failed to decrypt stored refresh token"
) from e
if not refresh_token:
raise HTTPException(
status_code=400,
detail="No refresh token available. Please re-authenticate.",
)
auth_header = make_basic_auth_header(
config.AIRTABLE_CLIENT_ID, config.AIRTABLE_CLIENT_SECRET
)
@ -313,7 +424,7 @@ async def refresh_airtable_token(
# Prepare token refresh data
refresh_data = {
"grant_type": "refresh_token",
"refresh_token": credentials.refresh_token,
"refresh_token": refresh_token,
"client_id": config.AIRTABLE_CLIENT_ID,
"client_secret": config.AIRTABLE_CLIENT_SECRET,
}
@ -330,8 +441,14 @@ async def refresh_airtable_token(
)
if token_response.status_code != 200:
error_detail = token_response.text
try:
error_json = token_response.json()
error_detail = error_json.get("error_description", error_detail)
except Exception:
pass
raise HTTPException(
status_code=400, detail="Token refresh failed: {token_response.text}"
status_code=400, detail=f"Token refresh failed: {error_detail}"
)
token_json = token_response.json()
@ -342,14 +459,29 @@ async def refresh_airtable_token(
now_utc = datetime.now(UTC)
expires_at = now_utc + timedelta(seconds=int(token_json["expires_in"]))
# Update credentials object
credentials.access_token = token_json["access_token"]
# Encrypt new tokens before storing
access_token = token_json.get("access_token")
new_refresh_token = token_json.get("refresh_token")
if not access_token:
raise HTTPException(
status_code=400, detail="No access token received from Airtable refresh"
)
# Update credentials object with encrypted tokens
credentials.access_token = token_encryption.encrypt_token(access_token)
if new_refresh_token:
credentials.refresh_token = token_encryption.encrypt_token(
new_refresh_token
)
credentials.expires_in = token_json.get("expires_in")
credentials.expires_at = expires_at
credentials.scope = token_json.get("scope")
# Update connector config
connector.config = credentials.to_dict()
# Update connector config with encrypted tokens
credentials_dict = credentials.to_dict()
credentials_dict["_token_encrypted"] = True
connector.config = credentials_dict
await session.commit()
await session.refresh(connector)
@ -358,6 +490,8 @@ async def refresh_airtable_token(
)
return connector
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=500, detail=f"Failed to refresh Airtable token: {e!s}"

View file

@ -0,0 +1,481 @@
"""
ClickUp Connector OAuth Routes.
Handles OAuth 2.0 authentication flow for ClickUp connector.
"""
import logging
from datetime import UTC, datetime, timedelta
from uuid import UUID
import httpx
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.responses import RedirectResponse
from pydantic import ValidationError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.config import config
from app.db import (
SearchSourceConnector,
SearchSourceConnectorType,
User,
get_async_session,
)
from app.schemas.clickup_auth_credentials import ClickUpAuthCredentialsBase
from app.users import current_active_user
from app.utils.oauth_security import OAuthStateManager, TokenEncryption
logger = logging.getLogger(__name__)
router = APIRouter()
# ClickUp OAuth endpoints
AUTHORIZATION_URL = "https://app.clickup.com/api"
TOKEN_URL = "https://api.clickup.com/api/v2/oauth/token"
# Initialize security utilities
_state_manager = None
_token_encryption = None
def get_state_manager() -> OAuthStateManager:
    """Lazily construct and cache the OAuth state manager singleton.

    Raises:
        ValueError: If ``config.SECRET_KEY`` is not configured.
    """
    global _state_manager
    if _state_manager is not None:
        return _state_manager
    # First call: the signing key must be available before we can build one.
    if not config.SECRET_KEY:
        raise ValueError("SECRET_KEY must be set for OAuth security")
    _state_manager = OAuthStateManager(config.SECRET_KEY)
    return _state_manager
def get_token_encryption() -> TokenEncryption:
    """Lazily construct and cache the token-encryption helper singleton.

    Raises:
        ValueError: If ``config.SECRET_KEY`` is not configured.
    """
    global _token_encryption
    if _token_encryption is not None:
        return _token_encryption
    # First call: the encryption key must be available before we can build one.
    if not config.SECRET_KEY:
        raise ValueError("SECRET_KEY must be set for token encryption")
    _token_encryption = TokenEncryption(config.SECRET_KEY)
    return _token_encryption
@router.get("/auth/clickup/connector/add")
async def connect_clickup(space_id: int, user: User = Depends(current_active_user)):
    """
    Initiate ClickUp OAuth flow.

    Builds the ClickUp authorization URL carrying the client_id, the
    configured redirect URI, and an HMAC-signed state value (CSRF
    protection) that the callback endpoint later validates.

    Args:
        space_id: The search space ID the connector will be attached to
        user: Current authenticated user

    Returns:
        dict with "auth_url": the URL to redirect the user to.

    Raises:
        HTTPException: 400 if space_id is missing; 500 if ClickUp OAuth,
            the redirect URI, or SECRET_KEY is not configured, or on any
            unexpected failure.
    """
    try:
        if not space_id:
            raise HTTPException(status_code=400, detail="space_id is required")

        if not config.CLICKUP_CLIENT_ID:
            raise HTTPException(status_code=500, detail="ClickUp OAuth not configured.")

        # Without a redirect URI the auth URL would contain the literal
        # string "None" and the OAuth flow could never complete; the
        # callback checks this too, but failing early is clearer.
        if not config.CLICKUP_REDIRECT_URI:
            raise HTTPException(
                status_code=500, detail="CLICKUP_REDIRECT_URI not configured"
            )

        if not config.SECRET_KEY:
            raise HTTPException(
                status_code=500, detail="SECRET_KEY not configured for OAuth security."
            )

        # Generate secure state parameter with HMAC signature
        state_manager = get_state_manager()
        state_encoded = state_manager.generate_secure_state(space_id, user.id)

        # Build authorization URL
        from urllib.parse import urlencode

        auth_params = {
            "client_id": config.CLICKUP_CLIENT_ID,
            "redirect_uri": config.CLICKUP_REDIRECT_URI,
            "state": state_encoded,
        }
        auth_url = f"{AUTHORIZATION_URL}?{urlencode(auth_params)}"

        logger.info(f"Generated ClickUp OAuth URL for user {user.id}, space {space_id}")
        return {"auth_url": auth_url}

    except HTTPException:
        # Preserve the deliberate 400/500 responses raised above instead of
        # re-wrapping them into a generic 500 below (matches the callback
        # endpoint's error-handling pattern).
        raise
    except Exception as e:
        logger.error(f"Failed to initiate ClickUp OAuth: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to initiate ClickUp OAuth: {e!s}"
        ) from e
@router.get("/auth/clickup/connector/callback")
async def clickup_callback(
    request: Request,
    code: str | None = None,
    error: str | None = None,
    state: str | None = None,
    session: AsyncSession = Depends(get_async_session),
):
    """
    Handle ClickUp OAuth callback.

    Flow: validate the signed state -> exchange the authorization code for
    an access token -> best-effort fetch of user/workspace metadata ->
    encrypt tokens and upsert the connector record -> redirect back to the
    frontend with success or error query parameters.

    Args:
        request: FastAPI request object (unused here, kept for route signature)
        code: Authorization code from ClickUp (if user granted access)
        error: Error code from ClickUp (if user denied access or error occurred)
        state: State parameter containing user/space info
        session: Database session

    Returns:
        Redirect response to frontend

    Raises:
        HTTPException: 400 for missing/invalid code or state or a failed
            token exchange; 409 on integrity errors; 422 on validation
            errors; 500 for configuration or unexpected failures.
    """
    try:
        # Handle OAuth errors (e.g., user denied access)
        if error:
            logger.warning(f"ClickUp OAuth error: {error}")
            # Try to decode state to get space_id for redirect, but don't fail if it's invalid
            space_id = None
            if state:
                try:
                    state_manager = get_state_manager()
                    data = state_manager.validate_state(state)
                    space_id = data.get("space_id")
                except Exception:
                    # If state is invalid, we'll redirect without space_id
                    logger.warning("Failed to validate state in error handler")
            # Redirect to frontend with error parameter
            if space_id:
                return RedirectResponse(
                    url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=clickup_oauth_denied"
                )
            else:
                return RedirectResponse(
                    url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=clickup_oauth_denied"
                )
        # Validate required parameters for successful flow
        if not code:
            raise HTTPException(status_code=400, detail="Missing authorization code")
        if not state:
            raise HTTPException(status_code=400, detail="Missing state parameter")
        # Validate and decode state with signature verification
        state_manager = get_state_manager()
        try:
            data = state_manager.validate_state(state)
        except HTTPException:
            raise
        except Exception as e:
            raise HTTPException(
                status_code=400, detail=f"Invalid state parameter: {e!s}"
            ) from e
        user_id = UUID(data["user_id"])
        space_id = data["space_id"]
        # Validate redirect URI (security: ensure it matches configured value)
        if not config.CLICKUP_REDIRECT_URI:
            raise HTTPException(
                status_code=500, detail="CLICKUP_REDIRECT_URI not configured"
            )
        # Exchange authorization code for access token
        token_data = {
            "client_id": config.CLICKUP_CLIENT_ID,
            "client_secret": config.CLICKUP_CLIENT_SECRET,
            "code": code,
        }
        async with httpx.AsyncClient() as client:
            token_response = await client.post(
                TOKEN_URL,
                json=token_data,
                headers={"Content-Type": "application/json"},
                timeout=30.0,
            )
            if token_response.status_code != 200:
                # Prefer ClickUp's structured "error" field; fall back to raw body.
                error_detail = token_response.text
                try:
                    error_json = token_response.json()
                    error_detail = error_json.get("error", error_detail)
                except Exception:
                    pass
                raise HTTPException(
                    status_code=400, detail=f"Token exchange failed: {error_detail}"
                )
            token_json = token_response.json()
        # Extract access token
        access_token = token_json.get("access_token")
        if not access_token:
            raise HTTPException(
                status_code=400, detail="No access token received from ClickUp"
            )
        # Extract refresh token if available
        refresh_token = token_json.get("refresh_token")
        # Encrypt sensitive tokens before storing
        token_encryption = get_token_encryption()
        # Calculate expiration time (UTC, tz-aware)
        expires_at = None
        expires_in = token_json.get("expires_in")
        if expires_in:
            now_utc = datetime.now(UTC)
            expires_at = now_utc + timedelta(seconds=int(expires_in))
        # Get user information and workspace information from ClickUp API.
        # This is best-effort: any failure is logged and the connector is
        # still created with None metadata fields.
        user_info = {}
        workspace_info = {}
        try:
            async with httpx.AsyncClient() as client:
                # Get user info
                user_response = await client.get(
                    "https://api.clickup.com/api/v2/user",
                    headers={"Authorization": f"Bearer {access_token}"},
                    timeout=30.0,
                )
                if user_response.status_code == 200:
                    user_data = user_response.json().get("user", {})
                    user_info = {
                        "user_id": str(user_data.get("id"))
                        if user_data.get("id") is not None
                        else None,
                        "user_email": user_data.get("email"),
                        "user_name": user_data.get("username"),
                    }
                # Get workspace (team) info - only the FIRST workspace is recorded
                team_response = await client.get(
                    "https://api.clickup.com/api/v2/team",
                    headers={"Authorization": f"Bearer {access_token}"},
                    timeout=30.0,
                )
                if team_response.status_code == 200:
                    teams_data = team_response.json().get("teams", [])
                    if teams_data and len(teams_data) > 0:
                        first_team = teams_data[0]
                        workspace_info = {
                            "workspace_id": str(first_team.get("id"))
                            if first_team.get("id") is not None
                            else None,
                            "workspace_name": first_team.get("name"),
                        }
        except Exception as e:
            logger.warning(f"Failed to fetch user/workspace info from ClickUp: {e!s}")
        # Store the encrypted tokens and user/workspace info in connector config
        connector_config = {
            "access_token": token_encryption.encrypt_token(access_token),
            "refresh_token": token_encryption.encrypt_token(refresh_token)
            if refresh_token
            else None,
            "expires_in": expires_in,
            "expires_at": expires_at.isoformat() if expires_at else None,
            "user_id": user_info.get("user_id"),
            "user_email": user_info.get("user_email"),
            "user_name": user_info.get("user_name"),
            "workspace_id": workspace_info.get("workspace_id"),
            "workspace_name": workspace_info.get("workspace_name"),
            # Mark that token is encrypted for backward compatibility
            "_token_encrypted": True,
        }
        # Check if connector already exists for this search space and user.
        # At most one ClickUp connector per (space, user): re-authenticating
        # overwrites the stored credentials rather than creating a duplicate.
        existing_connector_result = await session.execute(
            select(SearchSourceConnector).filter(
                SearchSourceConnector.search_space_id == space_id,
                SearchSourceConnector.user_id == user_id,
                SearchSourceConnector.connector_type
                == SearchSourceConnectorType.CLICKUP_CONNECTOR,
            )
        )
        existing_connector = existing_connector_result.scalars().first()
        if existing_connector:
            # Update existing connector
            existing_connector.config = connector_config
            existing_connector.name = "ClickUp Connector"
            existing_connector.is_indexable = True
            logger.info(
                f"Updated existing ClickUp connector for user {user_id} in space {space_id}"
            )
        else:
            # Create new connector
            new_connector = SearchSourceConnector(
                name="ClickUp Connector",
                connector_type=SearchSourceConnectorType.CLICKUP_CONNECTOR,
                is_indexable=True,
                config=connector_config,
                search_space_id=space_id,
                user_id=user_id,
            )
            session.add(new_connector)
            logger.info(
                f"Created new ClickUp connector for user {user_id} in space {space_id}"
            )
        try:
            await session.commit()
            logger.info(f"Successfully saved ClickUp connector for user {user_id}")
            # Redirect to the frontend with success params
            return RedirectResponse(
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=clickup-connector"
            )
        except ValidationError as e:
            await session.rollback()
            raise HTTPException(
                status_code=422, detail=f"Validation error: {e!s}"
            ) from e
        except IntegrityError as e:
            await session.rollback()
            raise HTTPException(
                status_code=409,
                detail=f"Integrity error: A connector with this type already exists. {e!s}",
            ) from e
        except Exception as e:
            logger.error(f"Failed to create search source connector: {e!s}")
            await session.rollback()
            raise HTTPException(
                status_code=500,
                detail=f"Failed to create search source connector: {e!s}",
            ) from e
    except HTTPException:
        # Let deliberate HTTP errors propagate with their original status codes.
        raise
    except Exception as e:
        logger.error(f"Failed to complete ClickUp OAuth: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to complete ClickUp OAuth: {e!s}"
        ) from e
async def refresh_clickup_token(
    session: AsyncSession, connector: SearchSourceConnector
) -> SearchSourceConnector:
    """
    Refresh the ClickUp access token for a connector.

    Decrypts the stored refresh token (when the config is marked encrypted),
    exchanges it at the ClickUp token endpoint, re-encrypts the returned
    tokens, and persists the updated config on the connector.

    Args:
        session: Database session
        connector: ClickUp connector to refresh

    Returns:
        Updated connector object

    Raises:
        HTTPException: 400 when no refresh token is stored or the token
            endpoint rejects the request; 500 on decryption failure or any
            unexpected error.
    """
    try:
        logger.info(f"Refreshing ClickUp token for connector {connector.id}")
        credentials = ClickUpAuthCredentialsBase.from_dict(connector.config)
        # Decrypt tokens if they are encrypted (flagged via _token_encrypted)
        token_encryption = get_token_encryption()
        is_encrypted = connector.config.get("_token_encrypted", False)
        refresh_token = credentials.refresh_token
        if is_encrypted and refresh_token:
            try:
                refresh_token = token_encryption.decrypt_token(refresh_token)
            except Exception as e:
                logger.error(f"Failed to decrypt refresh token: {e!s}")
                raise HTTPException(
                    status_code=500, detail="Failed to decrypt stored refresh token"
                ) from e
        if not refresh_token:
            raise HTTPException(
                status_code=400,
                detail="No refresh token available. Please re-authenticate.",
            )
        # Prepare token refresh data.
        # BUG FIX: OAuth 2.0 (RFC 6749 section 6) requires
        # grant_type=refresh_token on refresh requests; it was previously
        # omitted here (the Confluence/Discord refreshers in this codebase
        # already send it).
        refresh_data = {
            "grant_type": "refresh_token",
            "client_id": config.CLICKUP_CLIENT_ID,
            "client_secret": config.CLICKUP_CLIENT_SECRET,
            "refresh_token": refresh_token,
        }
        async with httpx.AsyncClient() as client:
            token_response = await client.post(
                TOKEN_URL,
                json=refresh_data,
                headers={"Content-Type": "application/json"},
                timeout=30.0,
            )
            if token_response.status_code != 200:
                error_detail = token_response.text
                try:
                    error_json = token_response.json()
                    error_detail = error_json.get("error", error_detail)
                except Exception:
                    pass
                raise HTTPException(
                    status_code=400, detail=f"Token refresh failed: {error_detail}"
                )
            token_json = token_response.json()
        # Calculate expiration time (UTC, tz-aware)
        expires_at = None
        expires_in = token_json.get("expires_in")
        if expires_in:
            now_utc = datetime.now(UTC)
            expires_at = now_utc + timedelta(seconds=int(expires_in))
        # Encrypt new tokens before storing
        access_token = token_json.get("access_token")
        new_refresh_token = token_json.get("refresh_token")
        if not access_token:
            raise HTTPException(
                status_code=400, detail="No access token received from ClickUp refresh"
            )
        # Update credentials object with encrypted tokens
        credentials.access_token = token_encryption.encrypt_token(access_token)
        if new_refresh_token:
            credentials.refresh_token = token_encryption.encrypt_token(
                new_refresh_token
            )
        credentials.expires_in = expires_in
        credentials.expires_at = expires_at
        # Preserve user and workspace info already stored on the connector
        # (the refresh response does not include these fields)
        if not credentials.user_id:
            credentials.user_id = connector.config.get("user_id")
        if not credentials.user_email:
            credentials.user_email = connector.config.get("user_email")
        if not credentials.user_name:
            credentials.user_name = connector.config.get("user_name")
        if not credentials.workspace_id:
            credentials.workspace_id = connector.config.get("workspace_id")
        if not credentials.workspace_name:
            credentials.workspace_name = connector.config.get("workspace_name")
        # Update connector config with encrypted tokens and re-flag encryption
        credentials_dict = credentials.to_dict()
        credentials_dict["_token_encrypted"] = True
        connector.config = credentials_dict
        await session.commit()
        await session.refresh(connector)
        logger.info(
            f"Successfully refreshed ClickUp token for connector {connector.id}"
        )
        return connector
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to refresh ClickUp token: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to refresh ClickUp token: {e!s}"
        ) from e

View file

@ -0,0 +1,498 @@
"""
Confluence Connector OAuth Routes.
Handles OAuth 2.0 authentication flow for Confluence connector.
"""
import logging
from datetime import UTC, datetime, timedelta
from uuid import UUID
import httpx
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.responses import RedirectResponse
from pydantic import ValidationError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
from app.db import (
SearchSourceConnector,
SearchSourceConnectorType,
User,
get_async_session,
)
from app.schemas.atlassian_auth_credentials import AtlassianAuthCredentialsBase
from app.users import current_active_user
from app.utils.connector_naming import (
check_duplicate_connector,
extract_identifier_from_credentials,
generate_unique_connector_name,
)
from app.utils.oauth_security import OAuthStateManager, TokenEncryption
logger = logging.getLogger(__name__)
router = APIRouter()
# Atlassian OAuth endpoints
AUTHORIZATION_URL = "https://auth.atlassian.com/authorize"
TOKEN_URL = "https://auth.atlassian.com/oauth/token"
RESOURCES_URL = "https://api.atlassian.com/oauth/token/accessible-resources"
# OAuth scopes for Confluence
SCOPES = [
"read:confluence-user",
"read:space:confluence",
"read:page:confluence",
"read:comment:confluence",
"offline_access", # Required for refresh tokens
]
# Initialize security utilities
_state_manager = None
_token_encryption = None
def get_state_manager() -> OAuthStateManager:
    """Lazily construct and cache the module-level OAuth state manager."""
    global _state_manager
    if _state_manager is not None:
        return _state_manager
    if not config.SECRET_KEY:
        raise ValueError("SECRET_KEY must be set for OAuth security")
    _state_manager = OAuthStateManager(config.SECRET_KEY)
    return _state_manager
def get_token_encryption() -> TokenEncryption:
    """Lazily construct and cache the module-level token encryption helper."""
    global _token_encryption
    if _token_encryption is not None:
        return _token_encryption
    if not config.SECRET_KEY:
        raise ValueError("SECRET_KEY must be set for token encryption")
    _token_encryption = TokenEncryption(config.SECRET_KEY)
    return _token_encryption
@router.get("/auth/confluence/connector/add")
async def connect_confluence(space_id: int, user: User = Depends(current_active_user)):
    """
    Initiate Confluence OAuth flow.

    Builds an HMAC-signed state parameter carrying the space and user ids,
    then returns the Atlassian authorization URL the frontend should
    redirect the user to.

    Args:
        space_id: The search space ID
        user: Current authenticated user

    Returns:
        dict with "auth_url": the Atlassian authorization URL for redirect

    Raises:
        HTTPException: 400 when space_id is missing; 500 when OAuth is not
            configured or URL generation fails.
    """
    try:
        if not space_id:
            raise HTTPException(status_code=400, detail="space_id is required")
        if not config.ATLASSIAN_CLIENT_ID:
            raise HTTPException(
                status_code=500, detail="Atlassian OAuth not configured."
            )
        if not config.SECRET_KEY:
            raise HTTPException(
                status_code=500, detail="SECRET_KEY not configured for OAuth security."
            )
        # Generate secure state parameter with HMAC signature
        state_manager = get_state_manager()
        state_encoded = state_manager.generate_secure_state(space_id, user.id)
        # Build authorization URL
        from urllib.parse import urlencode

        auth_params = {
            "audience": "api.atlassian.com",
            "client_id": config.ATLASSIAN_CLIENT_ID,
            "scope": " ".join(SCOPES),
            "redirect_uri": config.CONFLUENCE_REDIRECT_URI,
            "state": state_encoded,
            "response_type": "code",
            "prompt": "consent",
        }
        auth_url = f"{AUTHORIZATION_URL}?{urlencode(auth_params)}"
        logger.info(
            f"Generated Confluence OAuth URL for user {user.id}, space {space_id}"
        )
        return {"auth_url": auth_url}
    except HTTPException:
        # BUG FIX: without this re-raise, the generic handler below rewrapped
        # the deliberate 400/500 HTTPExceptions raised above into opaque 500
        # responses (the callback handlers in this module already follow this
        # re-raise pattern).
        raise
    except Exception as e:
        logger.error(f"Failed to initiate Confluence OAuth: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to initiate Confluence OAuth: {e!s}"
        ) from e
@router.get("/auth/confluence/connector/callback")
async def confluence_callback(
    request: Request,
    code: str | None = None,
    error: str | None = None,
    state: str | None = None,
    session: AsyncSession = Depends(get_async_session),
):
    """
    Handle Confluence OAuth callback.

    Validates the signed state, exchanges the authorization code for tokens,
    looks up the Atlassian cloud id / site URL, encrypts the tokens, and
    creates a new CONFLUENCE_CONNECTOR row (unless an identical instance is
    already connected). Always responds with a redirect to the frontend,
    carrying success/error query parameters.

    Args:
        request: FastAPI request object
        code: Authorization code from Atlassian (if user granted access)
        error: Error code from Atlassian (if user denied access or error occurred)
        state: State parameter containing user/space info
        session: Database session

    Returns:
        Redirect response to frontend
    """
    try:
        # Handle OAuth errors (e.g., user denied access)
        if error:
            logger.warning(f"Confluence OAuth error: {error}")
            # Try to decode state to get space_id for redirect, but don't fail if it's invalid
            space_id = None
            if state:
                try:
                    state_manager = get_state_manager()
                    data = state_manager.validate_state(state)
                    space_id = data.get("space_id")
                except Exception:
                    # If state is invalid, we'll redirect without space_id
                    logger.warning("Failed to validate state in error handler")
            # Redirect to frontend with error parameter
            if space_id:
                return RedirectResponse(
                    url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=confluence_oauth_denied"
                )
            else:
                return RedirectResponse(
                    url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=confluence_oauth_denied"
                )
        # Validate required parameters for successful flow
        if not code:
            raise HTTPException(status_code=400, detail="Missing authorization code")
        if not state:
            raise HTTPException(status_code=400, detail="Missing state parameter")
        # Validate and decode state with signature verification
        state_manager = get_state_manager()
        try:
            data = state_manager.validate_state(state)
        except HTTPException:
            raise
        except Exception as e:
            raise HTTPException(
                status_code=400, detail=f"Invalid state parameter: {e!s}"
            ) from e
        # State was generated server-side with these keys; a missing key here
        # means tampering and will surface via the outer exception handler.
        user_id = UUID(data["user_id"])
        space_id = data["space_id"]
        # Validate redirect URI (security: ensure it matches configured value)
        if not config.CONFLUENCE_REDIRECT_URI:
            raise HTTPException(
                status_code=500, detail="CONFLUENCE_REDIRECT_URI not configured"
            )
        # Exchange authorization code for access token.
        # Atlassian's token endpoint accepts a JSON body (unlike Discord's,
        # which requires form encoding).
        token_data = {
            "grant_type": "authorization_code",
            "client_id": config.ATLASSIAN_CLIENT_ID,
            "client_secret": config.ATLASSIAN_CLIENT_SECRET,
            "code": code,
            "redirect_uri": config.CONFLUENCE_REDIRECT_URI,
        }
        async with httpx.AsyncClient() as client:
            token_response = await client.post(
                TOKEN_URL,
                json=token_data,
                headers={"Content-Type": "application/json"},
                timeout=30.0,
            )
            if token_response.status_code != 200:
                # Prefer Atlassian's structured error description when present,
                # fall back to the raw response body.
                error_detail = token_response.text
                try:
                    error_json = token_response.json()
                    error_detail = error_json.get(
                        "error_description", error_json.get("error", error_detail)
                    )
                except Exception:
                    pass
                raise HTTPException(
                    status_code=400, detail=f"Token exchange failed: {error_detail}"
                )
            token_json = token_response.json()
            access_token = token_json.get("access_token")
            refresh_token = token_json.get("refresh_token")
            if not access_token:
                raise HTTPException(
                    status_code=400, detail="No access token received from Atlassian"
                )
        # Get accessible resources to find Confluence cloud ID and site URL
        async with httpx.AsyncClient() as client:
            resources_response = await client.get(
                RESOURCES_URL,
                headers={"Authorization": f"Bearer {access_token}"},
                timeout=30.0,
            )
            cloud_id = None
            site_url = None
            if resources_response.status_code == 200:
                resources = resources_response.json()
                # Find Confluence resource
                # NOTE(review): this takes the FIRST resource that has an id
                # and a name; it does not verify the resource is actually a
                # Confluence site (accessible-resources returns all Atlassian
                # sites). Confirm whether filtering by the resource's scopes
                # is needed for users with Jira-only sites.
                for resource in resources:
                    if resource.get("id") and resource.get("name"):
                        cloud_id = resource.get("id")
                        site_url = resource.get("url")
                        break
            if not cloud_id:
                # Non-fatal: the connector is still created; indexing may fail
                # later without a cloud id.
                logger.warning(
                    "Could not determine Confluence cloud ID from accessible resources"
                )
        # Calculate expiration time (UTC, tz-aware)
        expires_at = None
        expires_in = token_json.get("expires_in")
        if expires_in:
            now_utc = datetime.now(UTC)
            expires_at = now_utc + timedelta(seconds=int(expires_in))
        # Encrypt sensitive tokens before storing
        token_encryption = get_token_encryption()
        # Store the encrypted tokens and metadata in connector config.
        # refresh_confluence_token() reads the _token_encrypted flag to decide
        # whether decryption is needed.
        connector_config = {
            "access_token": token_encryption.encrypt_token(access_token),
            "refresh_token": token_encryption.encrypt_token(refresh_token)
            if refresh_token
            else None,
            "token_type": token_json.get("token_type", "Bearer"),
            "expires_in": expires_in,
            "expires_at": expires_at.isoformat() if expires_at else None,
            "scope": token_json.get("scope"),
            "cloud_id": cloud_id,
            "base_url": site_url,  # Store as base_url to match shared schema
            # Mark that tokens are encrypted for backward compatibility
            "_token_encrypted": True,
        }
        # Extract unique identifier from connector credentials
        connector_identifier = extract_identifier_from_credentials(
            SearchSourceConnectorType.CONFLUENCE_CONNECTOR, connector_config
        )
        # Check for duplicate connector (same Confluence instance already connected)
        is_duplicate = await check_duplicate_connector(
            session,
            SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
            space_id,
            user_id,
            connector_identifier,
        )
        if is_duplicate:
            logger.warning(
                f"Duplicate Confluence connector detected for user {user_id} with instance {connector_identifier}"
            )
            return RedirectResponse(
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=confluence-connector"
            )
        # Generate a unique, user-friendly connector name
        connector_name = await generate_unique_connector_name(
            session,
            SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
            space_id,
            user_id,
            connector_identifier,
        )
        # Create new connector
        new_connector = SearchSourceConnector(
            name=connector_name,
            connector_type=SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
            is_indexable=True,
            config=connector_config,
            search_space_id=space_id,
            user_id=user_id,
        )
        session.add(new_connector)
        logger.info(
            f"Created new Confluence connector for user {user_id} in space {space_id}"
        )
        try:
            await session.commit()
            logger.info(f"Successfully saved Confluence connector for user {user_id}")
            # Redirect to the frontend with success params
            return RedirectResponse(
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=confluence-connector&connectorId={new_connector.id}"
            )
        # NOTE(review): ValidationError is not raised by session.commit();
        # this handler appears to exist for parity with sibling modules.
        except ValidationError as e:
            await session.rollback()
            raise HTTPException(
                status_code=422, detail=f"Validation error: {e!s}"
            ) from e
        except IntegrityError as e:
            await session.rollback()
            raise HTTPException(
                status_code=409,
                detail=f"Database integrity error: {e!s}",
            ) from e
        except Exception as e:
            logger.error(f"Failed to create search source connector: {e!s}")
            await session.rollback()
            raise HTTPException(
                status_code=500,
                detail=f"Failed to create search source connector: {e!s}",
            ) from e
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to complete Confluence OAuth: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to complete Confluence OAuth: {e!s}"
        ) from e
async def refresh_confluence_token(
    session: AsyncSession, connector: SearchSourceConnector
) -> SearchSourceConnector:
    """
    Refresh the Confluence access token for a connector.

    Decrypts the stored refresh token (when the config is marked encrypted),
    exchanges it at the Atlassian token endpoint, re-encrypts the returned
    tokens, and persists the updated config on the connector.

    Args:
        session: Database session
        connector: Confluence connector to refresh

    Returns:
        Updated connector object

    Raises:
        HTTPException: 400 when no refresh token is stored or the token
            endpoint rejects the request; 500 on decryption failure or any
            unexpected error.
    """
    try:
        logger.info(f"Refreshing Confluence token for connector {connector.id}")
        credentials = AtlassianAuthCredentialsBase.from_dict(connector.config)
        # Decrypt tokens if they are encrypted (flagged via _token_encrypted)
        token_encryption = get_token_encryption()
        is_encrypted = connector.config.get("_token_encrypted", False)
        refresh_token = credentials.refresh_token
        if is_encrypted and refresh_token:
            try:
                refresh_token = token_encryption.decrypt_token(refresh_token)
            except Exception as e:
                logger.error(f"Failed to decrypt refresh token: {e!s}")
                raise HTTPException(
                    status_code=500, detail="Failed to decrypt stored refresh token"
                ) from e
        if not refresh_token:
            raise HTTPException(
                status_code=400,
                detail="No refresh token available. Please re-authenticate.",
            )
        # Prepare token refresh data (standard OAuth refresh_token grant)
        refresh_data = {
            "grant_type": "refresh_token",
            "client_id": config.ATLASSIAN_CLIENT_ID,
            "client_secret": config.ATLASSIAN_CLIENT_SECRET,
            "refresh_token": refresh_token,
        }
        async with httpx.AsyncClient() as client:
            token_response = await client.post(
                TOKEN_URL,
                json=refresh_data,
                headers={"Content-Type": "application/json"},
                timeout=30.0,
            )
            if token_response.status_code != 200:
                # Prefer the structured error description, fall back to raw body
                error_detail = token_response.text
                try:
                    error_json = token_response.json()
                    error_detail = error_json.get(
                        "error_description", error_json.get("error", error_detail)
                    )
                except Exception:
                    pass
                raise HTTPException(
                    status_code=400, detail=f"Token refresh failed: {error_detail}"
                )
            token_json = token_response.json()
        # Calculate expiration time (UTC, tz-aware)
        expires_at = None
        expires_in = token_json.get("expires_in")
        if expires_in:
            now_utc = datetime.now(UTC)
            expires_at = now_utc + timedelta(seconds=int(expires_in))
        # Encrypt new tokens before storing
        access_token = token_json.get("access_token")
        new_refresh_token = token_json.get("refresh_token")
        if not access_token:
            raise HTTPException(
                status_code=400,
                detail="No access token received from Confluence refresh",
            )
        # Update credentials object with encrypted tokens.
        # The old refresh token is kept unless the endpoint rotated it.
        credentials.access_token = token_encryption.encrypt_token(access_token)
        if new_refresh_token:
            credentials.refresh_token = token_encryption.encrypt_token(
                new_refresh_token
            )
        credentials.expires_in = expires_in
        credentials.expires_at = expires_at
        credentials.scope = token_json.get("scope")
        # Preserve cloud_id and base_url (with backward compatibility for site_url)
        if not credentials.cloud_id:
            credentials.cloud_id = connector.config.get("cloud_id")
        if not credentials.base_url:
            # Check both base_url and site_url for backward compatibility
            credentials.base_url = connector.config.get(
                "base_url"
            ) or connector.config.get("site_url")
        # Update connector config with encrypted tokens and re-flag encryption
        credentials_dict = credentials.to_dict()
        credentials_dict["_token_encrypted"] = True
        connector.config = credentials_dict
        await session.commit()
        await session.refresh(connector)
        logger.info(
            f"Successfully refreshed Confluence token for connector {connector.id}"
        )
        return connector
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to refresh Confluence token: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to refresh Confluence token: {e!s}"
        ) from e

View file

@ -0,0 +1,533 @@
"""
Discord Connector OAuth Routes.
Handles OAuth 2.0 authentication flow for Discord connector.
"""
import logging
from datetime import UTC, datetime, timedelta
from uuid import UUID
import httpx
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.responses import RedirectResponse
from pydantic import ValidationError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
from app.db import (
SearchSourceConnector,
SearchSourceConnectorType,
User,
get_async_session,
)
from app.schemas.discord_auth_credentials import DiscordAuthCredentialsBase
from app.users import current_active_user
from app.utils.connector_naming import (
check_duplicate_connector,
extract_identifier_from_credentials,
generate_unique_connector_name,
)
from app.utils.oauth_security import OAuthStateManager, TokenEncryption
logger = logging.getLogger(__name__)
router = APIRouter()
# Discord OAuth endpoints
AUTHORIZATION_URL = "https://discord.com/api/oauth2/authorize"
TOKEN_URL = "https://discord.com/api/oauth2/token"
# OAuth scopes for Discord (Bot Token)
SCOPES = [
"bot", # Basic bot scope
"guilds", # Access to guild information
"guilds.members.read", # Read member information
]
# Initialize security utilities
_state_manager = None
_token_encryption = None
def get_state_manager() -> OAuthStateManager:
    """Return the cached OAuth state manager, building it on first call."""
    global _state_manager
    if _state_manager is None:
        secret = config.SECRET_KEY
        if not secret:
            raise ValueError("SECRET_KEY must be set for OAuth security")
        _state_manager = OAuthStateManager(secret)
    return _state_manager
def get_token_encryption() -> TokenEncryption:
    """Return the cached token encryption helper, building it on first call."""
    global _token_encryption
    if _token_encryption is None:
        secret = config.SECRET_KEY
        if not secret:
            raise ValueError("SECRET_KEY must be set for token encryption")
        _token_encryption = TokenEncryption(secret)
    return _token_encryption
@router.get("/auth/discord/connector/add")
async def connect_discord(space_id: int, user: User = Depends(current_active_user)):
    """
    Initiate Discord OAuth flow.

    Builds an HMAC-signed state parameter carrying the space and user ids,
    then returns the Discord authorization URL the frontend should redirect
    the user to. Requires both the OAuth client and a bot token to be
    configured, since indexing relies on the bot token.

    Args:
        space_id: The search space ID
        user: Current authenticated user

    Returns:
        dict with "auth_url": the Discord authorization URL for redirect

    Raises:
        HTTPException: 400 when space_id is missing; 500 when OAuth or the
            bot token is not configured or URL generation fails.
    """
    try:
        if not space_id:
            raise HTTPException(status_code=400, detail="space_id is required")
        if not config.DISCORD_CLIENT_ID:
            raise HTTPException(status_code=500, detail="Discord OAuth not configured.")
        if not config.DISCORD_BOT_TOKEN:
            raise HTTPException(
                status_code=500,
                detail="Discord bot token not configured. Please set DISCORD_BOT_TOKEN in backend configuration.",
            )
        if not config.SECRET_KEY:
            raise HTTPException(
                status_code=500, detail="SECRET_KEY not configured for OAuth security."
            )
        # Generate secure state parameter with HMAC signature
        state_manager = get_state_manager()
        state_encoded = state_manager.generate_secure_state(space_id, user.id)
        # Build authorization URL
        from urllib.parse import urlencode

        auth_params = {
            "client_id": config.DISCORD_CLIENT_ID,
            "scope": " ".join(SCOPES),  # Discord uses space-separated scopes
            "redirect_uri": config.DISCORD_REDIRECT_URI,
            "response_type": "code",
            "state": state_encoded,
        }
        auth_url = f"{AUTHORIZATION_URL}?{urlencode(auth_params)}"
        logger.info(f"Generated Discord OAuth URL for user {user.id}, space {space_id}")
        return {"auth_url": auth_url}
    except HTTPException:
        # BUG FIX: without this re-raise, the generic handler below rewrapped
        # the deliberate 400/500 HTTPExceptions raised above into opaque 500
        # responses (the callback handlers in this module already follow this
        # re-raise pattern).
        raise
    except Exception as e:
        logger.error(f"Failed to initiate Discord OAuth: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to initiate Discord OAuth: {e!s}"
        ) from e
@router.get("/auth/discord/connector/callback")
async def discord_callback(
    request: Request,
    code: str | None = None,
    error: str | None = None,
    state: str | None = None,
    session: AsyncSession = Depends(get_async_session),
):
    """
    Handle Discord OAuth callback.

    Validates the signed state, exchanges the authorization code, and creates
    a new DISCORD_CONNECTOR row. Note that the stored working credential is
    the bot token from backend config (DISCORD_BOT_TOKEN); the OAuth access
    token is kept only for reference. Always responds with a redirect to the
    frontend carrying success/error query parameters.

    Args:
        request: FastAPI request object
        code: Authorization code from Discord (if user granted access)
        error: Error code from Discord (if user denied access or error occurred)
        state: State parameter containing user/space info
        session: Database session

    Returns:
        Redirect response to frontend
    """
    try:
        # Handle OAuth errors (e.g., user denied access)
        if error:
            logger.warning(f"Discord OAuth error: {error}")
            # Try to decode state to get space_id for redirect, but don't fail if it's invalid
            space_id = None
            if state:
                try:
                    state_manager = get_state_manager()
                    data = state_manager.validate_state(state)
                    space_id = data.get("space_id")
                except Exception:
                    # If state is invalid, we'll redirect without space_id
                    logger.warning("Failed to validate state in error handler")
            # Redirect to frontend with error parameter
            if space_id:
                return RedirectResponse(
                    url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=discord_oauth_denied"
                )
            else:
                return RedirectResponse(
                    url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=discord_oauth_denied"
                )
        # Validate required parameters for successful flow
        if not code:
            raise HTTPException(status_code=400, detail="Missing authorization code")
        if not state:
            raise HTTPException(status_code=400, detail="Missing state parameter")
        # Validate and decode state with signature verification
        state_manager = get_state_manager()
        try:
            data = state_manager.validate_state(state)
        except HTTPException:
            raise
        except Exception as e:
            raise HTTPException(
                status_code=400, detail=f"Invalid state parameter: {e!s}"
            ) from e
        # State was generated server-side with these keys; a missing key here
        # means tampering and will surface via the outer exception handler.
        user_id = UUID(data["user_id"])
        space_id = data["space_id"]
        # Validate redirect URI (security: ensure it matches configured value)
        if not config.DISCORD_REDIRECT_URI:
            raise HTTPException(
                status_code=500, detail="DISCORD_REDIRECT_URI not configured"
            )
        # Exchange authorization code for access token.
        # Discord's token endpoint requires form encoding, not JSON.
        token_data = {
            "client_id": config.DISCORD_CLIENT_ID,
            "client_secret": config.DISCORD_CLIENT_SECRET,
            "grant_type": "authorization_code",
            "code": code,
            "redirect_uri": config.DISCORD_REDIRECT_URI,
        }
        async with httpx.AsyncClient() as client:
            token_response = await client.post(
                TOKEN_URL,
                data=token_data,
                headers={"Content-Type": "application/x-www-form-urlencoded"},
                timeout=30.0,
            )
            if token_response.status_code != 200:
                # Prefer Discord's structured error description when present,
                # fall back to the raw response body.
                error_detail = token_response.text
                try:
                    error_json = token_response.json()
                    error_detail = error_json.get(
                        "error_description", error_json.get("error", error_detail)
                    )
                except Exception:
                    pass
                raise HTTPException(
                    status_code=400, detail=f"Token exchange failed: {error_detail}"
                )
            token_json = token_response.json()
        # Log OAuth response for debugging (without sensitive data)
        logger.info(f"Discord OAuth response received. Keys: {list(token_json.keys())}")
        # Discord OAuth with 'bot' scope returns access_token (user token), not bot token
        # The bot token must come from backend config (DISCORD_BOT_TOKEN)
        # OAuth is used to authorize bot installation to servers, not to get bot token
        if not config.DISCORD_BOT_TOKEN:
            raise HTTPException(
                status_code=500,
                detail="Discord bot token not configured. Please set DISCORD_BOT_TOKEN in backend configuration.",
            )
        # Use the bot token from backend config (not the OAuth access_token)
        bot_token = config.DISCORD_BOT_TOKEN
        # Extract OAuth access_token and refresh_token (for reference, not used for bot operations)
        oauth_access_token = token_json.get("access_token")
        refresh_token = token_json.get("refresh_token")
        # Encrypt sensitive tokens before storing
        token_encryption = get_token_encryption()
        # Calculate expiration time (UTC, tz-aware)
        expires_at = None
        if token_json.get("expires_in"):
            now_utc = datetime.now(UTC)
            expires_at = now_utc + timedelta(seconds=int(token_json["expires_in"]))
        # Extract guild info from OAuth response if available.
        # NOTE(review): presumably only present when the user picked a guild
        # during the bot-install flow — confirm against Discord's docs.
        guild_id = None
        guild_name = None
        if token_json.get("guild"):
            guild_id = token_json["guild"].get("id")
            guild_name = token_json["guild"].get("name")
        # Store the bot token from config and OAuth metadata.
        # refresh_discord_token() reads the _token_encrypted flag to decide
        # whether decryption is needed.
        connector_config = {
            "bot_token": token_encryption.encrypt_token(
                bot_token
            ),  # Use bot token from config
            "oauth_access_token": token_encryption.encrypt_token(oauth_access_token)
            if oauth_access_token
            else None,  # Store OAuth token for reference
            "refresh_token": token_encryption.encrypt_token(refresh_token)
            if refresh_token
            else None,
            "token_type": token_json.get("token_type", "Bearer"),
            "expires_in": token_json.get("expires_in"),
            "expires_at": expires_at.isoformat() if expires_at else None,
            "scope": token_json.get("scope"),
            "guild_id": guild_id,
            "guild_name": guild_name,
            # Mark that tokens are encrypted for backward compatibility
            "_token_encrypted": True,
        }
        # Extract unique identifier from connector credentials
        connector_identifier = extract_identifier_from_credentials(
            SearchSourceConnectorType.DISCORD_CONNECTOR, connector_config
        )
        # Check for duplicate connector (same server already connected)
        is_duplicate = await check_duplicate_connector(
            session,
            SearchSourceConnectorType.DISCORD_CONNECTOR,
            space_id,
            user_id,
            connector_identifier,
        )
        if is_duplicate:
            logger.warning(
                f"Duplicate Discord connector detected for user {user_id} with server {connector_identifier}"
            )
            return RedirectResponse(
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=discord-connector"
            )
        # Generate a unique, user-friendly connector name
        connector_name = await generate_unique_connector_name(
            session,
            SearchSourceConnectorType.DISCORD_CONNECTOR,
            space_id,
            user_id,
            connector_identifier,
        )
        # Create new connector
        new_connector = SearchSourceConnector(
            name=connector_name,
            connector_type=SearchSourceConnectorType.DISCORD_CONNECTOR,
            is_indexable=True,
            config=connector_config,
            search_space_id=space_id,
            user_id=user_id,
        )
        session.add(new_connector)
        logger.info(
            f"Created new Discord connector for user {user_id} in space {space_id}"
        )
        try:
            await session.commit()
            logger.info(f"Successfully saved Discord connector for user {user_id}")
            # Redirect to the frontend with success params
            return RedirectResponse(
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=discord-connector&connectorId={new_connector.id}"
            )
        # NOTE(review): ValidationError is not raised by session.commit();
        # this handler appears to exist for parity with sibling modules.
        except ValidationError as e:
            await session.rollback()
            raise HTTPException(
                status_code=422, detail=f"Validation error: {e!s}"
            ) from e
        except IntegrityError as e:
            await session.rollback()
            raise HTTPException(
                status_code=409,
                detail=f"Database integrity error: {e!s}",
            ) from e
        except Exception as e:
            logger.error(f"Failed to create search source connector: {e!s}")
            await session.rollback()
            raise HTTPException(
                status_code=500,
                detail=f"Failed to create search source connector: {e!s}",
            ) from e
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to complete Discord OAuth: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to complete Discord OAuth: {e!s}"
        ) from e
async def refresh_discord_token(
    session: AsyncSession, connector: SearchSourceConnector
) -> SearchSourceConnector:
    """
    Refresh the Discord OAuth tokens for a connector.

    Note: Bot tokens from config don't expire, but OAuth access tokens might.
    This function refreshes OAuth tokens if needed, but always uses the bot
    token from config as the working credential. OAuth refresh failures are
    non-fatal: the bot token is re-stored and the stale refresh token cleared.

    Args:
        session: Database session
        connector: Discord connector to refresh

    Returns:
        Updated connector object

    Raises:
        HTTPException: 500 when the bot token is not configured, decryption
            of the stored refresh token fails, or an unexpected error occurs.
    """
    try:
        logger.info(f"Refreshing Discord OAuth tokens for connector {connector.id}")
        # Bot token always comes from config, not from OAuth
        if not config.DISCORD_BOT_TOKEN:
            raise HTTPException(
                status_code=500,
                detail="Discord bot token not configured. Please set DISCORD_BOT_TOKEN in backend configuration.",
            )
        credentials = DiscordAuthCredentialsBase.from_dict(connector.config)
        # Decrypt tokens if they are encrypted (flagged via _token_encrypted)
        token_encryption = get_token_encryption()
        is_encrypted = connector.config.get("_token_encrypted", False)
        refresh_token = credentials.refresh_token
        if is_encrypted and refresh_token:
            try:
                refresh_token = token_encryption.decrypt_token(refresh_token)
            except Exception as e:
                logger.error(f"Failed to decrypt refresh token: {e!s}")
                raise HTTPException(
                    status_code=500, detail="Failed to decrypt stored refresh token"
                ) from e
        # If no refresh token, bot token from config is still valid (bot tokens don't expire)
        # Just update the bot token from config in case it was changed
        if not refresh_token:
            logger.info(
                f"No refresh token available for connector {connector.id}. Using bot token from config."
            )
            # Update bot token from config (in case it was changed)
            credentials.bot_token = token_encryption.encrypt_token(
                config.DISCORD_BOT_TOKEN
            )
            credentials_dict = credentials.to_dict()
            credentials_dict["_token_encrypted"] = True
            connector.config = credentials_dict
            await session.commit()
            await session.refresh(connector)
            return connector
        # Discord uses oauth2/token for token refresh with grant_type=refresh_token
        refresh_data = {
            "client_id": config.DISCORD_CLIENT_ID,
            "client_secret": config.DISCORD_CLIENT_SECRET,
            "grant_type": "refresh_token",
            "refresh_token": refresh_token,
        }
        async with httpx.AsyncClient() as client:
            token_response = await client.post(
                TOKEN_URL,
                data=refresh_data,
                headers={"Content-Type": "application/x-www-form-urlencoded"},
                timeout=30.0,
            )
            if token_response.status_code != 200:
                error_detail = token_response.text
                try:
                    error_json = token_response.json()
                    error_detail = error_json.get(
                        "error_description", error_json.get("error", error_detail)
                    )
                except Exception:
                    pass
                # If refresh fails, bot token from config is still valid
                logger.warning(
                    f"OAuth token refresh failed for connector {connector.id}: {error_detail}. "
                    "Using bot token from config."
                )
                # Update bot token from config
                credentials.bot_token = token_encryption.encrypt_token(
                    config.DISCORD_BOT_TOKEN
                )
                credentials.refresh_token = None  # Clear invalid refresh token
                credentials_dict = credentials.to_dict()
                credentials_dict["_token_encrypted"] = True
                connector.config = credentials_dict
                await session.commit()
                await session.refresh(connector)
                return connector
            token_json = token_response.json()
        # Extract OAuth access token from refresh response (for reference)
        oauth_access_token = token_json.get("access_token")
        # Get new refresh token if provided (Discord may rotate refresh tokens)
        new_refresh_token = token_json.get("refresh_token")
        # Calculate expiration time (UTC, tz-aware)
        expires_at = None
        expires_in = token_json.get("expires_in")
        if expires_in:
            now_utc = datetime.now(UTC)
            expires_at = now_utc + timedelta(seconds=int(expires_in))
        # Always use bot token from config (bot tokens don't expire)
        credentials.bot_token = token_encryption.encrypt_token(config.DISCORD_BOT_TOKEN)
        # Encrypt the refreshed OAuth access token now; it is merged into the
        # final config dict below.
        encrypted_oauth_token = (
            token_encryption.encrypt_token(oauth_access_token)
            if oauth_access_token
            else None
        )
        if new_refresh_token:
            credentials.refresh_token = token_encryption.encrypt_token(
                new_refresh_token
            )
        credentials.expires_in = expires_in
        credentials.expires_at = expires_at
        credentials.scope = token_json.get("scope")
        # Preserve guild info if present
        if not credentials.guild_id:
            credentials.guild_id = connector.config.get("guild_id")
        if not credentials.guild_name:
            credentials.guild_name = connector.config.get("guild_name")
        if not credentials.bot_user_id:
            credentials.bot_user_id = connector.config.get("bot_user_id")
        # Update connector config with encrypted tokens
        credentials_dict = credentials.to_dict()
        credentials_dict["_token_encrypted"] = True
        # BUG FIX: previously the new OAuth access token was written into
        # connector.config in place and then immediately discarded when
        # connector.config was reassigned to credentials_dict; merge it into
        # the final dict instead so it is actually persisted.
        if encrypted_oauth_token:
            credentials_dict["oauth_access_token"] = encrypted_oauth_token
        connector.config = credentials_dict
        await session.commit()
        await session.refresh(connector)
        logger.info(
            f"Successfully refreshed Discord OAuth tokens for connector {connector.id}"
        )
        return connector
    except HTTPException:
        raise
    except Exception as e:
        logger.error(
            f"Failed to refresh Discord tokens for connector {connector.id}: {e!s}",
            exc_info=True,
        )
        raise HTTPException(
            status_code=500, detail=f"Failed to refresh Discord tokens: {e!s}"
        ) from e

View file

@ -2,7 +2,6 @@ import os
os.environ["OAUTHLIB_RELAX_TOKEN_SCOPE"] = "1"
import base64
import json
import logging
from uuid import UUID
@ -13,9 +12,9 @@ from google_auth_oauthlib.flow import Flow
from pydantic import ValidationError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.config import config
from app.connectors.google_gmail_connector import fetch_google_user_email
from app.db import (
SearchSourceConnector,
SearchSourceConnectorType,
@ -23,6 +22,11 @@ from app.db import (
get_async_session,
)
from app.users import current_active_user
from app.utils.connector_naming import (
check_duplicate_connector,
generate_unique_connector_name,
)
from app.utils.oauth_security import OAuthStateManager, TokenEncryption
logger = logging.getLogger(__name__)
@ -31,6 +35,30 @@ router = APIRouter()
SCOPES = ["https://www.googleapis.com/auth/calendar.readonly"]
REDIRECT_URI = config.GOOGLE_CALENDAR_REDIRECT_URI
# Initialize security utilities
_state_manager = None
_token_encryption = None
def get_state_manager() -> OAuthStateManager:
"""Get or create OAuth state manager instance."""
global _state_manager
if _state_manager is None:
if not config.SECRET_KEY:
raise ValueError("SECRET_KEY must be set for OAuth security")
_state_manager = OAuthStateManager(config.SECRET_KEY)
return _state_manager
def get_token_encryption() -> TokenEncryption:
"""Get or create token encryption instance."""
global _token_encryption
if _token_encryption is None:
if not config.SECRET_KEY:
raise ValueError("SECRET_KEY must be set for token encryption")
_token_encryption = TokenEncryption(config.SECRET_KEY)
return _token_encryption
def get_google_flow():
try:
@ -59,16 +87,16 @@ async def connect_calendar(space_id: int, user: User = Depends(current_active_us
if not space_id:
raise HTTPException(status_code=400, detail="space_id is required")
if not config.SECRET_KEY:
raise HTTPException(
status_code=500, detail="SECRET_KEY not configured for OAuth security."
)
flow = get_google_flow()
# Encode space_id and user_id in state
state_payload = json.dumps(
{
"space_id": space_id,
"user_id": str(user.id),
}
)
state_encoded = base64.urlsafe_b64encode(state_payload.encode()).decode()
# Generate secure state parameter with HMAC signature
state_manager = get_state_manager()
state_encoded = state_manager.generate_secure_state(space_id, user.id)
auth_url, _ = flow.authorization_url(
access_type="offline",
@ -86,42 +114,116 @@ async def connect_calendar(space_id: int, user: User = Depends(current_active_us
@router.get("/auth/google/calendar/connector/callback")
async def calendar_callback(
request: Request,
code: str,
state: str,
code: str | None = None,
error: str | None = None,
state: str | None = None,
session: AsyncSession = Depends(get_async_session),
):
try:
# Decode and parse the state
decoded_state = base64.urlsafe_b64decode(state.encode()).decode()
data = json.loads(decoded_state)
# Handle OAuth errors (e.g., user denied access)
if error:
logger.warning(f"Google Calendar OAuth error: {error}")
# Try to decode state to get space_id for redirect, but don't fail if it's invalid
space_id = None
if state:
try:
state_manager = get_state_manager()
data = state_manager.validate_state(state)
space_id = data.get("space_id")
except Exception:
# If state is invalid, we'll redirect without space_id
logger.warning("Failed to validate state in error handler")
# Redirect to frontend with error parameter
if space_id:
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=google_calendar_oauth_denied"
)
else:
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=google_calendar_oauth_denied"
)
# Validate required parameters for successful flow
if not code:
raise HTTPException(status_code=400, detail="Missing authorization code")
if not state:
raise HTTPException(status_code=400, detail="Missing state parameter")
# Validate and decode state with signature verification
state_manager = get_state_manager()
try:
data = state_manager.validate_state(state)
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=400, detail=f"Invalid state parameter: {e!s}"
) from e
user_id = UUID(data["user_id"])
space_id = data["space_id"]
# Validate redirect URI (security: ensure it matches configured value)
if not config.GOOGLE_CALENDAR_REDIRECT_URI:
raise HTTPException(
status_code=500, detail="GOOGLE_CALENDAR_REDIRECT_URI not configured"
)
flow = get_google_flow()
flow.fetch_token(code=code)
creds = flow.credentials
creds_dict = json.loads(creds.to_json())
try:
# Check if a connector with the same type already exists for this search space and user
result = await session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR,
)
# Fetch user email
user_email = fetch_google_user_email(creds)
# Encrypt sensitive credentials before storing
token_encryption = get_token_encryption()
# Encrypt sensitive fields: token, refresh_token, client_secret
if creds_dict.get("token"):
creds_dict["token"] = token_encryption.encrypt_token(creds_dict["token"])
if creds_dict.get("refresh_token"):
creds_dict["refresh_token"] = token_encryption.encrypt_token(
creds_dict["refresh_token"]
)
if creds_dict.get("client_secret"):
creds_dict["client_secret"] = token_encryption.encrypt_token(
creds_dict["client_secret"]
)
# Mark that credentials are encrypted for backward compatibility
creds_dict["_token_encrypted"] = True
# Check for duplicate connector (same account already connected)
is_duplicate = await check_duplicate_connector(
session,
SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR,
space_id,
user_id,
user_email,
)
if is_duplicate:
logger.warning(
f"Duplicate Google Calendar connector detected for user {user_id} with email {user_email}"
)
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=google-calendar-connector"
)
try:
# Generate a unique, user-friendly connector name
connector_name = await generate_unique_connector_name(
session,
SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR,
space_id,
user_id,
user_email,
)
existing_connector = result.scalars().first()
if existing_connector:
raise HTTPException(
status_code=409,
detail="A GOOGLE_CALENDAR_CONNECTOR connector already exists in this search space. Each search space can have only one connector of each type per user.",
)
db_connector = SearchSourceConnector(
name="Google Calendar Connector",
name=connector_name,
connector_type=SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR,
config=creds_dict,
search_space_id=space_id,
@ -134,7 +236,7 @@ async def calendar_callback(
# Redirect to the frontend with success params for indexing config
# Using query params to auto-open the popup with config view on new-chat page
return RedirectResponse(
f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-calendar-connector"
f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-calendar-connector&connectorId={db_connector.id}"
)
except ValidationError as e:
await session.rollback()
@ -145,7 +247,7 @@ async def calendar_callback(
await session.rollback()
raise HTTPException(
status_code=409,
detail=f"Integrity error: A connector with this type already exists. {e!s}",
detail=f"Database integrity error: {e!s}",
) from e
except HTTPException:
await session.rollback()

View file

@ -10,7 +10,6 @@ Endpoints:
- GET /connectors/{connector_id}/google-drive/folders - List user's folders (for index-time selection)
"""
import base64
import json
import logging
import os
@ -30,6 +29,7 @@ from app.connectors.google_drive import (
get_start_page_token,
list_folder_contents,
)
from app.connectors.google_gmail_connector import fetch_google_user_email
from app.db import (
SearchSourceConnector,
SearchSourceConnectorType,
@ -37,6 +37,11 @@ from app.db import (
get_async_session,
)
from app.users import current_active_user
from app.utils.connector_naming import (
check_duplicate_connector,
generate_unique_connector_name,
)
from app.utils.oauth_security import OAuthStateManager, TokenEncryption
# Relax token scope validation for Google OAuth
os.environ["OAUTHLIB_RELAX_TOKEN_SCOPE"] = "1"
@ -44,6 +49,31 @@ os.environ["OAUTHLIB_RELAX_TOKEN_SCOPE"] = "1"
logger = logging.getLogger(__name__)
router = APIRouter()
# Initialize security utilities
_state_manager = None
_token_encryption = None
def get_state_manager() -> OAuthStateManager:
"""Get or create OAuth state manager instance."""
global _state_manager
if _state_manager is None:
if not config.SECRET_KEY:
raise ValueError("SECRET_KEY must be set for OAuth security")
_state_manager = OAuthStateManager(config.SECRET_KEY)
return _state_manager
def get_token_encryption() -> TokenEncryption:
"""Get or create token encryption instance."""
global _token_encryption
if _token_encryption is None:
if not config.SECRET_KEY:
raise ValueError("SECRET_KEY must be set for token encryption")
_token_encryption = TokenEncryption(config.SECRET_KEY)
return _token_encryption
# Google Drive OAuth scopes
SCOPES = [
"https://www.googleapis.com/auth/drive.readonly", # Read-only access to Drive
@ -90,16 +120,16 @@ async def connect_drive(space_id: int, user: User = Depends(current_active_user)
if not space_id:
raise HTTPException(status_code=400, detail="space_id is required")
if not config.SECRET_KEY:
raise HTTPException(
status_code=500, detail="SECRET_KEY not configured for OAuth security."
)
flow = get_google_flow()
# Encode space_id and user_id in state parameter
state_payload = json.dumps(
{
"space_id": space_id,
"user_id": str(user.id),
}
)
state_encoded = base64.urlsafe_b64encode(state_payload.encode()).decode()
# Generate secure state parameter with HMAC signature
state_manager = get_state_manager()
state_encoded = state_manager.generate_secure_state(space_id, user.id)
# Generate authorization URL
auth_url, _ = flow.authorization_url(
@ -124,8 +154,9 @@ async def connect_drive(space_id: int, user: User = Depends(current_active_user)
@router.get("/auth/google/drive/connector/callback")
async def drive_callback(
request: Request,
code: str,
state: str,
code: str | None = None,
error: str | None = None,
state: str | None = None,
session: AsyncSession = Depends(get_async_session),
):
"""
@ -133,15 +164,53 @@ async def drive_callback(
Query params:
code: Authorization code from Google
error: OAuth error (if user denied access)
state: Encoded state with space_id and user_id
Returns:
Redirect to frontend success page
"""
try:
# Decode and parse state
decoded_state = base64.urlsafe_b64decode(state.encode()).decode()
data = json.loads(decoded_state)
# Handle OAuth errors (e.g., user denied access)
if error:
logger.warning(f"Google Drive OAuth error: {error}")
# Try to decode state to get space_id for redirect, but don't fail if it's invalid
space_id = None
if state:
try:
state_manager = get_state_manager()
data = state_manager.validate_state(state)
space_id = data.get("space_id")
except Exception:
# If state is invalid, we'll redirect without space_id
logger.warning("Failed to validate state in error handler")
# Redirect to frontend with error parameter
if space_id:
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=google_drive_oauth_denied"
)
else:
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=google_drive_oauth_denied"
)
# Validate required parameters for successful flow
if not code:
raise HTTPException(status_code=400, detail="Missing authorization code")
if not state:
raise HTTPException(status_code=400, detail="Missing state parameter")
# Validate and decode state with signature verification
state_manager = get_state_manager()
try:
data = state_manager.validate_state(state)
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=400, detail=f"Invalid state parameter: {e!s}"
) from e
user_id = UUID(data["user_id"])
space_id = data["space_id"]
@ -150,6 +219,12 @@ async def drive_callback(
f"Processing Google Drive callback for user {user_id}, space {space_id}"
)
# Validate redirect URI (security: ensure it matches configured value)
if not config.GOOGLE_DRIVE_REDIRECT_URI:
raise HTTPException(
status_code=500, detail="GOOGLE_DRIVE_REDIRECT_URI not configured"
)
# Exchange authorization code for tokens
flow = get_google_flow()
flow.fetch_token(code=code)
@ -157,26 +232,54 @@ async def drive_callback(
creds = flow.credentials
creds_dict = json.loads(creds.to_json())
# Check if connector already exists for this space/user
result = await session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR,
# Fetch user email
user_email = fetch_google_user_email(creds)
# Encrypt sensitive credentials before storing
token_encryption = get_token_encryption()
# Encrypt sensitive fields: token, refresh_token, client_secret
if creds_dict.get("token"):
creds_dict["token"] = token_encryption.encrypt_token(creds_dict["token"])
if creds_dict.get("refresh_token"):
creds_dict["refresh_token"] = token_encryption.encrypt_token(
creds_dict["refresh_token"]
)
if creds_dict.get("client_secret"):
creds_dict["client_secret"] = token_encryption.encrypt_token(
creds_dict["client_secret"]
)
# Mark that credentials are encrypted for backward compatibility
creds_dict["_token_encrypted"] = True
# Check for duplicate connector (same account already connected)
is_duplicate = await check_duplicate_connector(
session,
SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR,
space_id,
user_id,
user_email,
)
existing_connector = result.scalars().first()
if existing_connector:
raise HTTPException(
status_code=409,
detail="A GOOGLE_DRIVE_CONNECTOR already exists in this search space. Each search space can have only one connector of each type per user.",
if is_duplicate:
logger.warning(
f"Duplicate Google Drive connector detected for user {user_id} with email {user_email}"
)
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=google-drive-connector"
)
# Create new connector (NO folder selection here - happens at index time)
# Generate a unique, user-friendly connector name
connector_name = await generate_unique_connector_name(
session,
SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR,
space_id,
user_id,
user_email,
)
db_connector = SearchSourceConnector(
name="Google Drive Connector",
name=connector_name,
connector_type=SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR,
config={
**creds_dict,
@ -213,7 +316,7 @@ async def drive_callback(
)
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-drive-connector"
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-drive-connector&connectorId={db_connector.id}"
)
except HTTPException:
@ -230,7 +333,7 @@ async def drive_callback(
logger.error(f"Database integrity error: {e!s}", exc_info=True)
raise HTTPException(
status_code=409,
detail="A connector with this configuration already exists.",
detail=f"Database integrity error: {e!s}",
) from e
except Exception as e:
await session.rollback()

View file

@ -2,7 +2,6 @@ import os
os.environ["OAUTHLIB_RELAX_TOKEN_SCOPE"] = "1"
import base64
import json
import logging
from uuid import UUID
@ -13,9 +12,9 @@ from google_auth_oauthlib.flow import Flow
from pydantic import ValidationError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.config import config
from app.connectors.google_gmail_connector import fetch_google_user_email
from app.db import (
SearchSourceConnector,
SearchSourceConnectorType,
@ -23,51 +22,94 @@ from app.db import (
get_async_session,
)
from app.users import current_active_user
from app.utils.connector_naming import (
check_duplicate_connector,
generate_unique_connector_name,
)
from app.utils.oauth_security import OAuthStateManager, TokenEncryption
logger = logging.getLogger(__name__)
router = APIRouter()
# Initialize security utilities
_state_manager = None
_token_encryption = None
def get_state_manager() -> OAuthStateManager:
"""Get or create OAuth state manager instance."""
global _state_manager
if _state_manager is None:
if not config.SECRET_KEY:
raise ValueError("SECRET_KEY must be set for OAuth security")
_state_manager = OAuthStateManager(config.SECRET_KEY)
return _state_manager
def get_token_encryption() -> TokenEncryption:
"""Get or create token encryption instance."""
global _token_encryption
if _token_encryption is None:
if not config.SECRET_KEY:
raise ValueError("SECRET_KEY must be set for token encryption")
_token_encryption = TokenEncryption(config.SECRET_KEY)
return _token_encryption
def get_google_flow():
"""Create and return a Google OAuth flow for Gmail API."""
flow = Flow.from_client_config(
{
"web": {
"client_id": config.GOOGLE_OAUTH_CLIENT_ID,
"client_secret": config.GOOGLE_OAUTH_CLIENT_SECRET,
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"redirect_uris": [config.GOOGLE_GMAIL_REDIRECT_URI],
}
},
scopes=[
"https://www.googleapis.com/auth/gmail.readonly",
"https://www.googleapis.com/auth/userinfo.email",
"https://www.googleapis.com/auth/userinfo.profile",
"openid",
],
)
flow.redirect_uri = config.GOOGLE_GMAIL_REDIRECT_URI
return flow
try:
flow = Flow.from_client_config(
{
"web": {
"client_id": config.GOOGLE_OAUTH_CLIENT_ID,
"client_secret": config.GOOGLE_OAUTH_CLIENT_SECRET,
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"redirect_uris": [config.GOOGLE_GMAIL_REDIRECT_URI],
}
},
scopes=[
"https://www.googleapis.com/auth/gmail.readonly",
"https://www.googleapis.com/auth/userinfo.email",
"https://www.googleapis.com/auth/userinfo.profile",
"openid",
],
)
flow.redirect_uri = config.GOOGLE_GMAIL_REDIRECT_URI
return flow
except Exception as e:
raise HTTPException(
status_code=500, detail=f"Failed to create Google flow: {e!s}"
) from e
@router.get("/auth/google/gmail/connector/add")
async def connect_gmail(space_id: int, user: User = Depends(current_active_user)):
"""
Initiate Google Gmail OAuth flow.
Query params:
space_id: Search space ID to add connector to
Returns:
JSON with auth_url to redirect user to Google authorization
"""
try:
if not space_id:
raise HTTPException(status_code=400, detail="space_id is required")
if not config.SECRET_KEY:
raise HTTPException(
status_code=500, detail="SECRET_KEY not configured for OAuth security."
)
flow = get_google_flow()
# Encode space_id and user_id in state
state_payload = json.dumps(
{
"space_id": space_id,
"user_id": str(user.id),
}
)
state_encoded = base64.urlsafe_b64encode(state_payload.encode()).decode()
# Generate secure state parameter with HMAC signature
state_manager = get_state_manager()
state_encoded = state_manager.generate_secure_state(space_id, user.id)
auth_url, _ = flow.authorization_url(
access_type="offline",
@ -75,8 +117,13 @@ async def connect_gmail(space_id: int, user: User = Depends(current_active_user)
include_granted_scopes="true",
state=state_encoded,
)
logger.info(
f"Initiating Google Gmail OAuth for user {user.id}, space {space_id}"
)
return {"auth_url": auth_url}
except Exception as e:
logger.error(f"Failed to initiate Google Gmail OAuth: {e!s}", exc_info=True)
raise HTTPException(
status_code=500, detail=f"Failed to initiate Google OAuth: {e!s}"
) from e
@ -85,42 +132,129 @@ async def connect_gmail(space_id: int, user: User = Depends(current_active_user)
@router.get("/auth/google/gmail/connector/callback")
async def gmail_callback(
request: Request,
code: str,
state: str,
code: str | None = None,
error: str | None = None,
state: str | None = None,
session: AsyncSession = Depends(get_async_session),
):
"""
Handle Google Gmail OAuth callback.
Args:
request: FastAPI request object
code: Authorization code from Google (if user granted access)
error: Error code from Google (if user denied access or error occurred)
state: State parameter containing user/space info
session: Database session
Returns:
Redirect response to frontend
"""
try:
# Decode and parse the state
decoded_state = base64.urlsafe_b64decode(state.encode()).decode()
data = json.loads(decoded_state)
# Handle OAuth errors (e.g., user denied access)
if error:
logger.warning(f"Google Gmail OAuth error: {error}")
# Try to decode state to get space_id for redirect, but don't fail if it's invalid
space_id = None
if state:
try:
state_manager = get_state_manager()
data = state_manager.validate_state(state)
space_id = data.get("space_id")
except Exception:
# If state is invalid, we'll redirect without space_id
logger.warning("Failed to validate state in error handler")
# Redirect to frontend with error parameter
if space_id:
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=google_gmail_oauth_denied"
)
else:
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=google_gmail_oauth_denied"
)
# Validate required parameters for successful flow
if not code:
raise HTTPException(status_code=400, detail="Missing authorization code")
if not state:
raise HTTPException(status_code=400, detail="Missing state parameter")
# Validate and decode state with signature verification
state_manager = get_state_manager()
try:
data = state_manager.validate_state(state)
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=400, detail=f"Invalid state parameter: {e!s}"
) from e
user_id = UUID(data["user_id"])
space_id = data["space_id"]
# Validate redirect URI (security: ensure it matches configured value)
if not config.GOOGLE_GMAIL_REDIRECT_URI:
raise HTTPException(
status_code=500, detail="GOOGLE_GMAIL_REDIRECT_URI not configured"
)
flow = get_google_flow()
flow.fetch_token(code=code)
creds = flow.credentials
creds_dict = json.loads(creds.to_json())
try:
# Check if a connector with the same type already exists for this search space and user
result = await session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == space_id,
SearchSourceConnector.user_id == user_id,
SearchSourceConnector.connector_type
== SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
)
# Fetch user email
user_email = fetch_google_user_email(creds)
# Encrypt sensitive credentials before storing
token_encryption = get_token_encryption()
# Encrypt sensitive fields: token, refresh_token, client_secret
if creds_dict.get("token"):
creds_dict["token"] = token_encryption.encrypt_token(creds_dict["token"])
if creds_dict.get("refresh_token"):
creds_dict["refresh_token"] = token_encryption.encrypt_token(
creds_dict["refresh_token"]
)
if creds_dict.get("client_secret"):
creds_dict["client_secret"] = token_encryption.encrypt_token(
creds_dict["client_secret"]
)
# Mark that credentials are encrypted for backward compatibility
creds_dict["_token_encrypted"] = True
# Check for duplicate connector (same account already connected)
is_duplicate = await check_duplicate_connector(
session,
SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
space_id,
user_id,
user_email,
)
if is_duplicate:
logger.warning(
f"Duplicate Gmail connector detected for user {user_id} with email {user_email}"
)
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=google-gmail-connector"
)
try:
# Generate a unique, user-friendly connector name
connector_name = await generate_unique_connector_name(
session,
SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
space_id,
user_id,
user_email,
)
existing_connector = result.scalars().first()
if existing_connector:
raise HTTPException(
status_code=409,
detail="A GOOGLE_GMAIL_CONNECTOR connector already exists in this search space. Each search space can have only one connector of each type per user.",
)
db_connector = SearchSourceConnector(
name="Google Gmail Connector",
name=connector_name,
connector_type=SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR,
config=creds_dict,
search_space_id=space_id,
@ -138,7 +272,7 @@ async def gmail_callback(
# Redirect to the frontend with success params for indexing config
# Using query params to auto-open the popup with config view on new-chat page
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-gmail-connector"
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=google-gmail-connector&connectorId={db_connector.id}"
)
except IntegrityError as e:
@ -146,7 +280,7 @@ async def gmail_callback(
logger.error(f"Database integrity error: {e!s}")
raise HTTPException(
status_code=409,
detail="A connector with this configuration already exists.",
detail=f"Database integrity error: {e!s}",
) from e
except ValidationError as e:
await session.rollback()
@ -160,3 +294,6 @@ async def gmail_callback(
raise
except Exception as e:
logger.error(f"Unexpected error in Gmail callback: {e!s}", exc_info=True)
raise HTTPException(
status_code=500, detail=f"Failed to complete Google Gmail OAuth: {e!s}"
) from e

View file

@ -0,0 +1,510 @@
"""
Jira Connector OAuth Routes.
Handles OAuth 2.0 authentication flow for Jira connector.
Uses Atlassian OAuth 2.0 (3LO) with accessible-resources API to discover Jira instances.
"""
import logging
from datetime import UTC, datetime, timedelta
from uuid import UUID
import httpx
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.responses import RedirectResponse
from pydantic import ValidationError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
from app.db import (
SearchSourceConnector,
SearchSourceConnectorType,
User,
get_async_session,
)
from app.schemas.atlassian_auth_credentials import AtlassianAuthCredentialsBase
from app.users import current_active_user
from app.utils.connector_naming import (
check_duplicate_connector,
extract_identifier_from_credentials,
generate_unique_connector_name,
)
from app.utils.oauth_security import OAuthStateManager, TokenEncryption
logger = logging.getLogger(__name__)
router = APIRouter()
# Atlassian OAuth endpoints
AUTHORIZATION_URL = "https://auth.atlassian.com/authorize"
TOKEN_URL = "https://auth.atlassian.com/oauth/token"
ACCESSIBLE_RESOURCES_URL = "https://api.atlassian.com/oauth/token/accessible-resources"
# OAuth scopes for Jira
SCOPES = [
"read:jira-work",
"read:jira-user",
"offline_access", # Required for refresh tokens
]
# Initialize security utilities
_state_manager = None
_token_encryption = None
def get_state_manager() -> OAuthStateManager:
"""Get or create OAuth state manager instance."""
global _state_manager
if _state_manager is None:
if not config.SECRET_KEY:
raise ValueError("SECRET_KEY must be set for OAuth security")
_state_manager = OAuthStateManager(config.SECRET_KEY)
return _state_manager
def get_token_encryption() -> TokenEncryption:
"""Get or create token encryption instance."""
global _token_encryption
if _token_encryption is None:
if not config.SECRET_KEY:
raise ValueError("SECRET_KEY must be set for token encryption")
_token_encryption = TokenEncryption(config.SECRET_KEY)
return _token_encryption
@router.get("/auth/jira/connector/add")
async def connect_jira(space_id: int, user: User = Depends(current_active_user)):
"""
Initiate Jira OAuth flow.
Args:
space_id: The search space ID
user: Current authenticated user
Returns:
Authorization URL for redirect
"""
try:
if not space_id:
raise HTTPException(status_code=400, detail="space_id is required")
if not config.ATLASSIAN_CLIENT_ID:
raise HTTPException(
status_code=500, detail="Atlassian OAuth not configured."
)
if not config.SECRET_KEY:
raise HTTPException(
status_code=500, detail="SECRET_KEY not configured for OAuth security."
)
# Generate secure state parameter with HMAC signature
state_manager = get_state_manager()
state_encoded = state_manager.generate_secure_state(space_id, user.id)
# Build authorization URL
from urllib.parse import urlencode
auth_params = {
"audience": "api.atlassian.com",
"client_id": config.ATLASSIAN_CLIENT_ID,
"scope": " ".join(SCOPES),
"redirect_uri": config.JIRA_REDIRECT_URI,
"state": state_encoded,
"response_type": "code",
"prompt": "consent", # Force consent screen to get refresh token
}
auth_url = f"{AUTHORIZATION_URL}?{urlencode(auth_params)}"
logger.info(f"Generated Jira OAuth URL for user {user.id}, space {space_id}")
return {"auth_url": auth_url}
except Exception as e:
logger.error(f"Failed to initiate Jira OAuth: {e!s}", exc_info=True)
raise HTTPException(
status_code=500, detail=f"Failed to initiate Jira OAuth: {e!s}"
) from e
@router.get("/auth/jira/connector/callback")
async def jira_callback(
request: Request,
code: str | None = None,
error: str | None = None,
state: str | None = None,
session: AsyncSession = Depends(get_async_session),
):
"""
Handle Jira OAuth callback.
Args:
request: FastAPI request object
code: Authorization code from Atlassian (if user granted access)
error: Error code from Atlassian (if user denied access or error occurred)
state: State parameter containing user/space info
session: Database session
Returns:
Redirect response to frontend
"""
try:
# Handle OAuth errors (e.g., user denied access)
if error:
logger.warning(f"Jira OAuth error: {error}")
# Try to decode state to get space_id for redirect, but don't fail if it's invalid
space_id = None
if state:
try:
state_manager = get_state_manager()
data = state_manager.validate_state(state)
space_id = data.get("space_id")
except Exception:
# If state is invalid, we'll redirect without space_id
logger.warning("Failed to validate state in error handler")
# Redirect to frontend with error parameter
if space_id:
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=jira_oauth_denied"
)
else:
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=jira_oauth_denied"
)
# Validate required parameters for successful flow
if not code:
raise HTTPException(status_code=400, detail="Missing authorization code")
if not state:
raise HTTPException(status_code=400, detail="Missing state parameter")
# Validate and decode state with signature verification
state_manager = get_state_manager()
try:
data = state_manager.validate_state(state)
except HTTPException:
raise
except Exception as e:
raise HTTPException(
status_code=400, detail=f"Invalid state parameter: {e!s}"
) from e
user_id = UUID(data["user_id"])
space_id = data["space_id"]
# Validate redirect URI (security: ensure it matches configured value)
if not config.JIRA_REDIRECT_URI:
raise HTTPException(
status_code=500, detail="JIRA_REDIRECT_URI not configured"
)
# Exchange authorization code for access token
token_data = {
"grant_type": "authorization_code",
"client_id": config.ATLASSIAN_CLIENT_ID,
"client_secret": config.ATLASSIAN_CLIENT_SECRET,
"code": code,
"redirect_uri": config.JIRA_REDIRECT_URI,
}
async with httpx.AsyncClient() as client:
token_response = await client.post(
TOKEN_URL,
data=token_data,
headers={"Content-Type": "application/x-www-form-urlencoded"},
timeout=30.0,
)
if token_response.status_code != 200:
error_detail = token_response.text
try:
error_json = token_response.json()
error_detail = error_json.get(
"error_description", error_json.get("error", error_detail)
)
except Exception:
pass
raise HTTPException(
status_code=400, detail=f"Token exchange failed: {error_detail}"
)
token_json = token_response.json()
# Encrypt sensitive tokens before storing
token_encryption = get_token_encryption()
access_token = token_json.get("access_token")
refresh_token = token_json.get("refresh_token")
if not access_token:
raise HTTPException(
status_code=400, detail="No access token received from Atlassian"
)
# Fetch accessible resources to get Jira instance information
async with httpx.AsyncClient() as client:
resources_response = await client.get(
ACCESSIBLE_RESOURCES_URL,
headers={"Authorization": f"Bearer {access_token}"},
timeout=30.0,
)
if resources_response.status_code != 200:
error_detail = resources_response.text
logger.error(f"Failed to fetch accessible resources: {error_detail}")
raise HTTPException(
status_code=400,
detail=f"Failed to fetch Jira instances: {error_detail}",
)
resources = resources_response.json()
# Filter for Jira instances (resources with type "jira" or id field)
jira_instances = [
r for r in resources if r.get("id") and (r.get("name") or r.get("url"))
]
if not jira_instances:
raise HTTPException(
status_code=400,
detail="No accessible Jira instances found. Please ensure you have access to at least one Jira instance.",
)
# For now, use the first Jira instance
# TODO: Support multiple instances by letting user choose during OAuth
jira_instance = jira_instances[0]
cloud_id = jira_instance["id"]
base_url = jira_instance.get("url")
# If URL is not provided, construct it from cloud_id
if not base_url:
# Try to extract from name or construct default format
instance_name = jira_instance.get("name", "").lower().replace(" ", "")
if instance_name:
base_url = f"https://{instance_name}.atlassian.net"
else:
# Fallback: use cloud_id directly (though this may not work)
base_url = f"https://{cloud_id}.atlassian.net"
# Calculate expiration time (UTC, tz-aware)
expires_at = None
expires_in = token_json.get("expires_in")
if expires_in:
now_utc = datetime.now(UTC)
expires_at = now_utc + timedelta(seconds=int(expires_in))
# Store the encrypted access token and refresh token in connector config
connector_config = {
"access_token": token_encryption.encrypt_token(access_token),
"refresh_token": token_encryption.encrypt_token(refresh_token)
if refresh_token
else None,
"token_type": token_json.get("token_type", "Bearer"),
"expires_in": expires_in,
"expires_at": expires_at.isoformat() if expires_at else None,
"scope": token_json.get("scope"),
"cloud_id": cloud_id,
"base_url": base_url.rstrip("/") if base_url else None,
# Mark that tokens are encrypted for backward compatibility
"_token_encrypted": True,
}
# Extract unique identifier from connector credentials
connector_identifier = extract_identifier_from_credentials(
SearchSourceConnectorType.JIRA_CONNECTOR, connector_config
)
# Check for duplicate connector (same Jira instance already connected)
is_duplicate = await check_duplicate_connector(
session,
SearchSourceConnectorType.JIRA_CONNECTOR,
space_id,
user_id,
connector_identifier,
)
if is_duplicate:
logger.warning(
f"Duplicate Jira connector detected for user {user_id} with instance {connector_identifier}"
)
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=jira-connector"
)
# Generate a unique, user-friendly connector name
connector_name = await generate_unique_connector_name(
session,
SearchSourceConnectorType.JIRA_CONNECTOR,
space_id,
user_id,
connector_identifier,
)
# Create new connector
new_connector = SearchSourceConnector(
name=connector_name,
connector_type=SearchSourceConnectorType.JIRA_CONNECTOR,
is_indexable=True,
config=connector_config,
search_space_id=space_id,
user_id=user_id,
)
session.add(new_connector)
logger.info(
f"Created new Jira connector for user {user_id} in space {space_id}"
)
try:
await session.commit()
logger.info(f"Successfully saved Jira connector for user {user_id}")
# Redirect to the frontend with success params
return RedirectResponse(
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=jira-connector&connectorId={new_connector.id}"
)
except ValidationError as e:
await session.rollback()
raise HTTPException(
status_code=422, detail=f"Validation error: {e!s}"
) from e
except IntegrityError as e:
await session.rollback()
raise HTTPException(
status_code=409,
detail=f"Database integrity error: {e!s}",
) from e
except Exception as e:
logger.error(f"Failed to create search source connector: {e!s}")
await session.rollback()
raise HTTPException(
status_code=500,
detail=f"Failed to create search source connector: {e!s}",
) from e
except HTTPException:
raise
except Exception as e:
logger.error(f"Failed to complete Jira OAuth: {e!s}", exc_info=True)
raise HTTPException(
status_code=500, detail=f"Failed to complete Jira OAuth: {e!s}"
) from e
async def refresh_jira_token(
    session: AsyncSession, connector: SearchSourceConnector
) -> SearchSourceConnector:
    """
    Refresh the Jira (Atlassian) access token for a connector.

    Decrypts the stored refresh token (when the config is marked encrypted),
    exchanges it at the Atlassian token endpoint, then writes the newly
    encrypted access/refresh tokens and expiry back into ``connector.config``
    and commits the session.

    Args:
        session: Async database session used to persist the updated config.
        connector: Jira connector whose config holds the OAuth credentials.

    Returns:
        The refreshed connector, re-loaded from the database.

    Raises:
        HTTPException: 400 when no refresh token is available or the token
            endpoint rejects the request; 500 on decryption failure or any
            unexpected error.
    """
    try:
        logger.info(f"Refreshing Jira token for connector {connector.id}")
        credentials = AtlassianAuthCredentialsBase.from_dict(connector.config)
        # Decrypt tokens if they are encrypted
        token_encryption = get_token_encryption()
        # Legacy configs may hold plaintext tokens; "_token_encrypted" marks
        # the encrypted format written by the OAuth callback above.
        is_encrypted = connector.config.get("_token_encrypted", False)
        refresh_token = credentials.refresh_token
        if is_encrypted and refresh_token:
            try:
                refresh_token = token_encryption.decrypt_token(refresh_token)
            except Exception as e:
                logger.error(f"Failed to decrypt refresh token: {e!s}")
                raise HTTPException(
                    status_code=500, detail="Failed to decrypt stored refresh token"
                ) from e
        if not refresh_token:
            raise HTTPException(
                status_code=400,
                detail="No refresh token available. Please re-authenticate.",
            )
        # Prepare token refresh data
        refresh_data = {
            "grant_type": "refresh_token",
            "client_id": config.ATLASSIAN_CLIENT_ID,
            "client_secret": config.ATLASSIAN_CLIENT_SECRET,
            "refresh_token": refresh_token,
        }
        async with httpx.AsyncClient() as client:
            token_response = await client.post(
                TOKEN_URL,
                data=refresh_data,
                headers={"Content-Type": "application/x-www-form-urlencoded"},
                timeout=30.0,
            )
        if token_response.status_code != 200:
            error_detail = token_response.text
            # Prefer the structured OAuth error fields when the body is JSON;
            # fall back to the raw response text otherwise.
            try:
                error_json = token_response.json()
                error_detail = error_json.get(
                    "error_description", error_json.get("error", error_detail)
                )
            except Exception:
                pass
            raise HTTPException(
                status_code=400, detail=f"Token refresh failed: {error_detail}"
            )
        token_json = token_response.json()
        # Calculate expiration time (UTC, tz-aware)
        expires_at = None
        expires_in = token_json.get("expires_in")
        if expires_in:
            now_utc = datetime.now(UTC)
            expires_at = now_utc + timedelta(seconds=int(expires_in))
        # Encrypt new tokens before storing
        access_token = token_json.get("access_token")
        new_refresh_token = token_json.get("refresh_token")
        if not access_token:
            raise HTTPException(
                status_code=400, detail="No access token received from Jira refresh"
            )
        # Update credentials object with encrypted tokens
        credentials.access_token = token_encryption.encrypt_token(access_token)
        # Presumably the endpoint rotates refresh tokens; when no new one is
        # returned the previous (still-encrypted) value is kept as-is.
        if new_refresh_token:
            credentials.refresh_token = token_encryption.encrypt_token(
                new_refresh_token
            )
        credentials.expires_in = expires_in
        credentials.expires_at = expires_at
        # NOTE(review): this overwrites scope with None if the response omits
        # "scope" — confirm the token endpoint always echoes it back.
        credentials.scope = token_json.get("scope")
        # Preserve cloud_id and base_url
        if not credentials.cloud_id:
            credentials.cloud_id = connector.config.get("cloud_id")
        if not credentials.base_url:
            credentials.base_url = connector.config.get("base_url")
        # Update connector config with encrypted tokens
        credentials_dict = credentials.to_dict()
        credentials_dict["_token_encrypted"] = True
        connector.config = credentials_dict
        await session.commit()
        await session.refresh(connector)
        logger.info(f"Successfully refreshed Jira token for connector {connector.id}")
        return connector
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to refresh Jira token: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to refresh Jira token: {e!s}"
        ) from e

View file

@ -0,0 +1,459 @@
"""
Linear Connector OAuth Routes.
Handles OAuth 2.0 authentication flow for Linear connector.
"""
import logging
from datetime import UTC, datetime, timedelta
from uuid import UUID
import httpx
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.responses import RedirectResponse
from pydantic import ValidationError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
from app.connectors.linear_connector import fetch_linear_organization_name
from app.db import (
SearchSourceConnector,
SearchSourceConnectorType,
User,
get_async_session,
)
from app.schemas.linear_auth_credentials import LinearAuthCredentialsBase
from app.users import current_active_user
from app.utils.connector_naming import (
check_duplicate_connector,
generate_unique_connector_name,
)
from app.utils.oauth_security import OAuthStateManager, TokenEncryption
logger = logging.getLogger(__name__)
router = APIRouter()
# Linear OAuth endpoints
AUTHORIZATION_URL = "https://linear.app/oauth/authorize"
TOKEN_URL = "https://api.linear.app/oauth/token"
# OAuth scopes for Linear
SCOPES = ["read", "write"]
# Initialize security utilities
_state_manager = None
_token_encryption = None
def get_state_manager() -> OAuthStateManager:
    """Return the module-wide OAuth state manager, creating it on first use.

    Raises:
        ValueError: If SECRET_KEY is not configured.
    """
    global _state_manager
    if _state_manager is not None:
        return _state_manager
    if not config.SECRET_KEY:
        raise ValueError("SECRET_KEY must be set for OAuth security")
    _state_manager = OAuthStateManager(config.SECRET_KEY)
    return _state_manager
def get_token_encryption() -> TokenEncryption:
    """Return the module-wide token encryption helper, creating it on first use.

    Raises:
        ValueError: If SECRET_KEY is not configured.
    """
    global _token_encryption
    if _token_encryption is not None:
        return _token_encryption
    if not config.SECRET_KEY:
        raise ValueError("SECRET_KEY must be set for token encryption")
    _token_encryption = TokenEncryption(config.SECRET_KEY)
    return _token_encryption
def make_basic_auth_header(client_id: str, client_secret: str) -> str:
    """Build an HTTP Basic ``Authorization`` header value for Linear OAuth.

    Encodes ``client_id:client_secret`` as base64 per RFC 7617.
    """
    import base64

    raw = f"{client_id}:{client_secret}".encode()
    return "Basic " + base64.b64encode(raw).decode("ascii")
@router.get("/auth/linear/connector/add")
async def connect_linear(space_id: int, user: User = Depends(current_active_user)):
    """
    Initiate the Linear OAuth 2.0 authorization-code flow.

    Args:
        space_id: The search space the new connector will belong to.
        user: Current authenticated user (injected by FastAPI).

    Returns:
        dict with "auth_url": the Linear authorization URL the frontend
        should redirect the user to.

    Raises:
        HTTPException: 400 if space_id is falsy; 500 if OAuth/security
            settings are missing or URL generation fails unexpectedly.
    """
    try:
        if not space_id:
            raise HTTPException(status_code=400, detail="space_id is required")
        if not config.LINEAR_CLIENT_ID:
            raise HTTPException(status_code=500, detail="Linear OAuth not configured.")
        if not config.SECRET_KEY:
            raise HTTPException(
                status_code=500, detail="SECRET_KEY not configured for OAuth security."
            )
        # Generate secure state parameter with HMAC signature so the callback
        # can verify that the flow originated from this server.
        state_manager = get_state_manager()
        state_encoded = state_manager.generate_secure_state(space_id, user.id)
        # Build authorization URL
        from urllib.parse import urlencode

        auth_params = {
            "client_id": config.LINEAR_CLIENT_ID,
            "response_type": "code",
            "redirect_uri": config.LINEAR_REDIRECT_URI,
            "scope": " ".join(SCOPES),
            "state": state_encoded,
        }
        auth_url = f"{AUTHORIZATION_URL}?{urlencode(auth_params)}"
        logger.info(f"Generated Linear OAuth URL for user {user.id}, space {space_id}")
        return {"auth_url": auth_url}
    except HTTPException:
        # Bug fix: re-raise deliberate HTTP errors (the 400/500 above) as-is.
        # Previously the blanket handler below caught them and rewrapped them
        # as opaque 500s, losing the intended status code and message. The
        # callback handlers in this file already follow this pattern.
        raise
    except Exception as e:
        logger.error(f"Failed to initiate Linear OAuth: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to initiate Linear OAuth: {e!s}"
        ) from e
@router.get("/auth/linear/connector/callback")
async def linear_callback(
    request: Request,
    code: str | None = None,
    error: str | None = None,
    state: str | None = None,
    session: AsyncSession = Depends(get_async_session),
):
    """
    Handle the Linear OAuth callback.

    Flow: validate the HMAC-signed state, exchange the authorization code for
    tokens (Basic-auth client credentials), encrypt the tokens, reject
    duplicate connectors for the same Linear organization, then persist a new
    SearchSourceConnector and redirect back to the frontend.

    Args:
        request: FastAPI request object (unused, kept for route signature).
        code: Authorization code from Linear (if user granted access).
        error: Error code from Linear (if user denied access or error occurred).
        state: Signed state parameter containing user/space info.
        session: Database session.

    Returns:
        RedirectResponse to the frontend, carrying success or error query
        parameters.

    Raises:
        HTTPException: 400 for missing/invalid parameters or a failed token
            exchange; 409/422 for database/validation failures; 500 otherwise.
    """
    try:
        # Handle OAuth errors (e.g., user denied access)
        if error:
            logger.warning(f"Linear OAuth error: {error}")
            # Try to decode state to get space_id for redirect, but don't fail if it's invalid
            space_id = None
            if state:
                try:
                    state_manager = get_state_manager()
                    data = state_manager.validate_state(state)
                    space_id = data.get("space_id")
                except Exception:
                    # If state is invalid, we'll redirect without space_id
                    logger.warning("Failed to validate state in error handler")
            # Redirect to frontend with error parameter
            if space_id:
                return RedirectResponse(
                    url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=linear_oauth_denied"
                )
            else:
                return RedirectResponse(
                    url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=linear_oauth_denied"
                )
        # Validate required parameters for successful flow
        if not code:
            raise HTTPException(status_code=400, detail="Missing authorization code")
        if not state:
            raise HTTPException(status_code=400, detail="Missing state parameter")
        # Validate and decode state with signature verification
        state_manager = get_state_manager()
        try:
            data = state_manager.validate_state(state)
        except HTTPException:
            raise
        except Exception as e:
            raise HTTPException(
                status_code=400, detail=f"Invalid state parameter: {e!s}"
            ) from e
        user_id = UUID(data["user_id"])
        space_id = data["space_id"]
        # Validate redirect URI (security: ensure it matches configured value)
        if not config.LINEAR_REDIRECT_URI:
            raise HTTPException(
                status_code=500, detail="LINEAR_REDIRECT_URI not configured"
            )
        # Exchange authorization code for access token
        # Linear authenticates the client via HTTP Basic auth rather than
        # client_id/client_secret fields in the form body.
        auth_header = make_basic_auth_header(
            config.LINEAR_CLIENT_ID, config.LINEAR_CLIENT_SECRET
        )
        token_data = {
            "grant_type": "authorization_code",
            "code": code,
            "redirect_uri": config.LINEAR_REDIRECT_URI,  # Use stored value, not from request
        }
        async with httpx.AsyncClient() as client:
            token_response = await client.post(
                TOKEN_URL,
                data=token_data,
                headers={
                    "Content-Type": "application/x-www-form-urlencoded",
                    "Authorization": auth_header,
                },
                timeout=30.0,
            )
        if token_response.status_code != 200:
            error_detail = token_response.text
            # Prefer the structured OAuth error description when available.
            try:
                error_json = token_response.json()
                error_detail = error_json.get("error_description", error_detail)
            except Exception:
                pass
            raise HTTPException(
                status_code=400, detail=f"Token exchange failed: {error_detail}"
            )
        token_json = token_response.json()
        # Encrypt sensitive tokens before storing
        token_encryption = get_token_encryption()
        access_token = token_json.get("access_token")
        refresh_token = token_json.get("refresh_token")
        if not access_token:
            raise HTTPException(
                status_code=400, detail="No access token received from Linear"
            )
        # Fetch organization name — used below both as the duplicate-detection
        # identifier and to build a friendly connector name.
        org_name = await fetch_linear_organization_name(access_token)
        # Calculate expiration time (UTC, tz-aware)
        expires_at = None
        if token_json.get("expires_in"):
            now_utc = datetime.now(UTC)
            expires_at = now_utc + timedelta(seconds=int(token_json["expires_in"]))
        # Store the encrypted access token and refresh token in connector config
        connector_config = {
            "access_token": token_encryption.encrypt_token(access_token),
            "refresh_token": token_encryption.encrypt_token(refresh_token)
            if refresh_token
            else None,
            "token_type": token_json.get("token_type", "Bearer"),
            "expires_in": token_json.get("expires_in"),
            "expires_at": expires_at.isoformat() if expires_at else None,
            "scope": token_json.get("scope"),
            # Mark that tokens are encrypted for backward compatibility
            "_token_encrypted": True,
        }
        # Check for duplicate connector (same organization already connected)
        is_duplicate = await check_duplicate_connector(
            session,
            SearchSourceConnectorType.LINEAR_CONNECTOR,
            space_id,
            user_id,
            org_name,
        )
        if is_duplicate:
            logger.warning(
                f"Duplicate Linear connector detected for user {user_id} with org {org_name}"
            )
            return RedirectResponse(
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=linear-connector"
            )
        # Generate a unique, user-friendly connector name
        connector_name = await generate_unique_connector_name(
            session,
            SearchSourceConnectorType.LINEAR_CONNECTOR,
            space_id,
            user_id,
            org_name,
        )
        # Create new connector
        new_connector = SearchSourceConnector(
            name=connector_name,
            connector_type=SearchSourceConnectorType.LINEAR_CONNECTOR,
            is_indexable=True,
            config=connector_config,
            search_space_id=space_id,
            user_id=user_id,
        )
        session.add(new_connector)
        logger.info(
            f"Created new Linear connector for user {user_id} in space {space_id}"
        )
        try:
            await session.commit()
            logger.info(f"Successfully saved Linear connector for user {user_id}")
            # Redirect to the frontend with success params
            return RedirectResponse(
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=linear-connector&connectorId={new_connector.id}"
            )
        except ValidationError as e:
            await session.rollback()
            raise HTTPException(
                status_code=422, detail=f"Validation error: {e!s}"
            ) from e
        except IntegrityError as e:
            await session.rollback()
            raise HTTPException(
                status_code=409,
                detail=f"Database integrity error: {e!s}",
            ) from e
        except Exception as e:
            logger.error(f"Failed to create search source connector: {e!s}")
            await session.rollback()
            raise HTTPException(
                status_code=500,
                detail=f"Failed to create search source connector: {e!s}",
            ) from e
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to complete Linear OAuth: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to complete Linear OAuth: {e!s}"
        ) from e
async def refresh_linear_token(
    session: AsyncSession, connector: SearchSourceConnector
) -> SearchSourceConnector:
    """
    Refresh the Linear access token for a connector.

    Decrypts the stored refresh token (when the config is marked encrypted),
    exchanges it at the Linear token endpoint using Basic-auth client
    credentials, then writes the newly encrypted tokens and expiry back into
    ``connector.config`` and commits the session.

    Args:
        session: Async database session used to persist the updated config.
        connector: Linear connector whose config holds the OAuth credentials.

    Returns:
        The refreshed connector, re-loaded from the database.

    Raises:
        HTTPException: 400 when no refresh token is available or the token
            endpoint rejects the request; 500 on decryption failure or any
            unexpected error.
    """
    try:
        logger.info(f"Refreshing Linear token for connector {connector.id}")
        credentials = LinearAuthCredentialsBase.from_dict(connector.config)
        # Decrypt tokens if they are encrypted
        token_encryption = get_token_encryption()
        # Legacy configs may hold plaintext tokens; "_token_encrypted" marks
        # the encrypted format written by the OAuth callback above.
        is_encrypted = connector.config.get("_token_encrypted", False)
        refresh_token = credentials.refresh_token
        if is_encrypted and refresh_token:
            try:
                refresh_token = token_encryption.decrypt_token(refresh_token)
            except Exception as e:
                logger.error(f"Failed to decrypt refresh token: {e!s}")
                raise HTTPException(
                    status_code=500, detail="Failed to decrypt stored refresh token"
                ) from e
        if not refresh_token:
            raise HTTPException(
                status_code=400,
                detail="No refresh token available. Please re-authenticate.",
            )
        auth_header = make_basic_auth_header(
            config.LINEAR_CLIENT_ID, config.LINEAR_CLIENT_SECRET
        )
        # Prepare token refresh data
        refresh_data = {
            "grant_type": "refresh_token",
            "refresh_token": refresh_token,
        }
        async with httpx.AsyncClient() as client:
            token_response = await client.post(
                TOKEN_URL,
                data=refresh_data,
                headers={
                    "Content-Type": "application/x-www-form-urlencoded",
                    "Authorization": auth_header,
                },
                timeout=30.0,
            )
        if token_response.status_code != 200:
            error_detail = token_response.text
            # Prefer the structured OAuth error description when available.
            try:
                error_json = token_response.json()
                error_detail = error_json.get("error_description", error_detail)
            except Exception:
                pass
            raise HTTPException(
                status_code=400, detail=f"Token refresh failed: {error_detail}"
            )
        token_json = token_response.json()
        # Calculate expiration time (UTC, tz-aware)
        expires_at = None
        expires_in = token_json.get("expires_in")
        if expires_in:
            now_utc = datetime.now(UTC)
            expires_at = now_utc + timedelta(seconds=int(expires_in))
        # Encrypt new tokens before storing
        access_token = token_json.get("access_token")
        new_refresh_token = token_json.get("refresh_token")
        if not access_token:
            raise HTTPException(
                status_code=400, detail="No access token received from Linear refresh"
            )
        # Update credentials object with encrypted tokens
        credentials.access_token = token_encryption.encrypt_token(access_token)
        # When no new refresh token is returned, the previous (still-encrypted)
        # value on the credentials object is kept as-is.
        if new_refresh_token:
            credentials.refresh_token = token_encryption.encrypt_token(
                new_refresh_token
            )
        credentials.expires_in = expires_in
        credentials.expires_at = expires_at
        # NOTE(review): this overwrites scope with None if the response omits
        # "scope" — confirm the token endpoint always echoes it back.
        credentials.scope = token_json.get("scope")
        # Update connector config with encrypted tokens
        credentials_dict = credentials.to_dict()
        credentials_dict["_token_encrypted"] = True
        connector.config = credentials_dict
        await session.commit()
        await session.refresh(connector)
        logger.info(f"Successfully refreshed Linear token for connector {connector.id}")
        return connector
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to refresh Linear token: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to refresh Linear token: {e!s}"
        ) from e

View file

@ -0,0 +1,472 @@
"""
Notion Connector OAuth Routes.
Handles OAuth 2.0 authentication flow for Notion connector.
"""
import logging
from datetime import UTC, datetime, timedelta
from uuid import UUID
import httpx
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.responses import RedirectResponse
from pydantic import ValidationError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
from app.db import (
SearchSourceConnector,
SearchSourceConnectorType,
User,
get_async_session,
)
from app.schemas.notion_auth_credentials import NotionAuthCredentialsBase
from app.users import current_active_user
from app.utils.connector_naming import (
check_duplicate_connector,
extract_identifier_from_credentials,
generate_unique_connector_name,
)
from app.utils.oauth_security import OAuthStateManager, TokenEncryption
logger = logging.getLogger(__name__)
router = APIRouter()
# Notion OAuth endpoints
AUTHORIZATION_URL = "https://api.notion.com/v1/oauth/authorize"
TOKEN_URL = "https://api.notion.com/v1/oauth/token"
# Initialize security utilities
_state_manager = None
_token_encryption = None
def get_state_manager() -> OAuthStateManager:
    """Return the module-wide OAuth state manager, creating it on first use.

    Raises:
        ValueError: If SECRET_KEY is not configured.
    """
    global _state_manager
    if _state_manager is not None:
        return _state_manager
    if not config.SECRET_KEY:
        raise ValueError("SECRET_KEY must be set for OAuth security")
    _state_manager = OAuthStateManager(config.SECRET_KEY)
    return _state_manager
def get_token_encryption() -> TokenEncryption:
    """Return the module-wide token encryption helper, creating it on first use.

    Raises:
        ValueError: If SECRET_KEY is not configured.
    """
    global _token_encryption
    if _token_encryption is not None:
        return _token_encryption
    if not config.SECRET_KEY:
        raise ValueError("SECRET_KEY must be set for token encryption")
    _token_encryption = TokenEncryption(config.SECRET_KEY)
    return _token_encryption
def make_basic_auth_header(client_id: str, client_secret: str) -> str:
    """Build an HTTP Basic ``Authorization`` header value for Notion OAuth.

    Encodes ``client_id:client_secret`` as base64 per RFC 7617.
    """
    import base64

    raw = f"{client_id}:{client_secret}".encode()
    return "Basic " + base64.b64encode(raw).decode("ascii")
@router.get("/auth/notion/connector/add")
async def connect_notion(space_id: int, user: User = Depends(current_active_user)):
    """
    Initiate the Notion OAuth 2.0 authorization-code flow.

    Args:
        space_id: The search space the new connector will belong to.
        user: Current authenticated user (injected by FastAPI).

    Returns:
        dict with "auth_url": the Notion authorization URL the frontend
        should redirect the user to.

    Raises:
        HTTPException: 400 if space_id is falsy; 500 if OAuth/security
            settings are missing or URL generation fails unexpectedly.
    """
    try:
        if not space_id:
            raise HTTPException(status_code=400, detail="space_id is required")
        if not config.NOTION_CLIENT_ID:
            raise HTTPException(status_code=500, detail="Notion OAuth not configured.")
        if not config.SECRET_KEY:
            raise HTTPException(
                status_code=500, detail="SECRET_KEY not configured for OAuth security."
            )
        # Generate secure state parameter with HMAC signature so the callback
        # can verify that the flow originated from this server.
        state_manager = get_state_manager()
        state_encoded = state_manager.generate_secure_state(space_id, user.id)
        # Build authorization URL
        from urllib.parse import urlencode

        auth_params = {
            "client_id": config.NOTION_CLIENT_ID,
            "response_type": "code",
            "owner": "user",  # Allows both admins and members to authorize
            "redirect_uri": config.NOTION_REDIRECT_URI,
            "state": state_encoded,
        }
        auth_url = f"{AUTHORIZATION_URL}?{urlencode(auth_params)}"
        logger.info(f"Generated Notion OAuth URL for user {user.id}, space {space_id}")
        return {"auth_url": auth_url}
    except HTTPException:
        # Bug fix: re-raise deliberate HTTP errors (the 400/500 above) as-is.
        # Previously the blanket handler below caught them and rewrapped them
        # as opaque 500s, losing the intended status code and message. The
        # callback handlers in this file already follow this pattern.
        raise
    except Exception as e:
        logger.error(f"Failed to initiate Notion OAuth: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to initiate Notion OAuth: {e!s}"
        ) from e
@router.get("/auth/notion/connector/callback")
async def notion_callback(
    request: Request,
    code: str | None = None,
    error: str | None = None,
    state: str | None = None,
    session: AsyncSession = Depends(get_async_session),
):
    """
    Handle the Notion OAuth callback.

    Flow: validate the HMAC-signed state, exchange the authorization code for
    tokens (JSON body with Basic-auth client credentials, per Notion's token
    endpoint), encrypt the tokens, reject duplicate connectors for the same
    workspace, then persist a new SearchSourceConnector and redirect back to
    the frontend.

    Args:
        request: FastAPI request object (unused, kept for route signature).
        code: Authorization code from Notion (if user granted access).
        error: Error code from Notion (if user denied access or error occurred).
        state: Signed state parameter containing user/space info.
        session: Database session.

    Returns:
        RedirectResponse to the frontend, carrying success or error query
        parameters.

    Raises:
        HTTPException: 400 for missing/invalid parameters or a failed token
            exchange; 409/422 for database/validation failures; 500 otherwise.
    """
    try:
        # Handle OAuth errors (e.g., user denied access)
        if error:
            logger.warning(f"Notion OAuth error: {error}")
            # Try to decode state to get space_id for redirect, but don't fail if it's invalid
            space_id = None
            if state:
                try:
                    state_manager = get_state_manager()
                    data = state_manager.validate_state(state)
                    space_id = data.get("space_id")
                except Exception:
                    # If state is invalid, we'll redirect without space_id
                    logger.warning("Failed to validate state in error handler")
            # Redirect to frontend with error parameter
            if space_id:
                return RedirectResponse(
                    url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=notion_oauth_denied"
                )
            else:
                return RedirectResponse(
                    url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=notion_oauth_denied"
                )
        # Validate required parameters for successful flow
        if not code:
            raise HTTPException(status_code=400, detail="Missing authorization code")
        if not state:
            raise HTTPException(status_code=400, detail="Missing state parameter")
        # Validate and decode state with signature verification
        state_manager = get_state_manager()
        try:
            data = state_manager.validate_state(state)
        except HTTPException:
            raise
        except Exception as e:
            raise HTTPException(
                status_code=400, detail=f"Invalid state parameter: {e!s}"
            ) from e
        user_id = UUID(data["user_id"])
        space_id = data["space_id"]
        # Validate redirect URI (security: ensure it matches configured value)
        # Note: Notion doesn't send redirect_uri in callback, but we validate
        # that we're using the configured one in token exchange
        if not config.NOTION_REDIRECT_URI:
            raise HTTPException(
                status_code=500, detail="NOTION_REDIRECT_URI not configured"
            )
        # Exchange authorization code for access token
        # Notion authenticates the client via HTTP Basic auth and expects a
        # JSON request body (unlike form-encoded OAuth endpoints).
        auth_header = make_basic_auth_header(
            config.NOTION_CLIENT_ID, config.NOTION_CLIENT_SECRET
        )
        token_data = {
            "grant_type": "authorization_code",
            "code": code,
            "redirect_uri": config.NOTION_REDIRECT_URI,  # Use stored value, not from request
        }
        async with httpx.AsyncClient() as client:
            token_response = await client.post(
                TOKEN_URL,
                json=token_data,
                headers={
                    "Content-Type": "application/json",
                    "Authorization": auth_header,
                },
                timeout=30.0,
            )
        if token_response.status_code != 200:
            error_detail = token_response.text
            # Prefer the structured OAuth error description when available.
            try:
                error_json = token_response.json()
                error_detail = error_json.get("error_description", error_detail)
            except Exception:
                pass
            raise HTTPException(
                status_code=400, detail=f"Token exchange failed: {error_detail}"
            )
        token_json = token_response.json()
        # Encrypt sensitive tokens before storing
        token_encryption = get_token_encryption()
        access_token = token_json.get("access_token")
        refresh_token = token_json.get("refresh_token")
        if not access_token:
            raise HTTPException(
                status_code=400, detail="No access token received from Notion"
            )
        # Calculate expiration time (UTC, tz-aware)
        expires_at = None
        expires_in = token_json.get("expires_in")
        if expires_in:
            now_utc = datetime.now(UTC)
            expires_at = now_utc + timedelta(seconds=int(expires_in))
        # Notion returns access_token, refresh_token (if available), and workspace information
        # Store the encrypted tokens and workspace info in connector config
        connector_config = {
            "access_token": token_encryption.encrypt_token(access_token),
            "refresh_token": token_encryption.encrypt_token(refresh_token)
            if refresh_token
            else None,
            "expires_in": expires_in,
            "expires_at": expires_at.isoformat() if expires_at else None,
            "workspace_id": token_json.get("workspace_id"),
            "workspace_name": token_json.get("workspace_name"),
            "workspace_icon": token_json.get("workspace_icon"),
            "bot_id": token_json.get("bot_id"),
            # Mark that token is encrypted for backward compatibility
            "_token_encrypted": True,
        }
        # Extract unique identifier from connector credentials
        connector_identifier = extract_identifier_from_credentials(
            SearchSourceConnectorType.NOTION_CONNECTOR, connector_config
        )
        # Check for duplicate connector (same workspace already connected)
        is_duplicate = await check_duplicate_connector(
            session,
            SearchSourceConnectorType.NOTION_CONNECTOR,
            space_id,
            user_id,
            connector_identifier,
        )
        if is_duplicate:
            logger.warning(
                f"Duplicate Notion connector detected for user {user_id} with workspace {connector_identifier}"
            )
            return RedirectResponse(
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=notion-connector"
            )
        # Generate a unique, user-friendly connector name
        connector_name = await generate_unique_connector_name(
            session,
            SearchSourceConnectorType.NOTION_CONNECTOR,
            space_id,
            user_id,
            connector_identifier,
        )
        # Create new connector
        new_connector = SearchSourceConnector(
            name=connector_name,
            connector_type=SearchSourceConnectorType.NOTION_CONNECTOR,
            is_indexable=True,
            config=connector_config,
            search_space_id=space_id,
            user_id=user_id,
        )
        session.add(new_connector)
        logger.info(
            f"Created new Notion connector for user {user_id} in space {space_id}"
        )
        try:
            await session.commit()
            logger.info(f"Successfully saved Notion connector for user {user_id}")
            # Redirect to the frontend with success params
            return RedirectResponse(
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=notion-connector&connectorId={new_connector.id}"
            )
        except ValidationError as e:
            await session.rollback()
            raise HTTPException(
                status_code=422, detail=f"Validation error: {e!s}"
            ) from e
        except IntegrityError as e:
            await session.rollback()
            raise HTTPException(
                status_code=409,
                detail=f"Database integrity error: {e!s}",
            ) from e
        except Exception as e:
            logger.error(f"Failed to create search source connector: {e!s}")
            await session.rollback()
            raise HTTPException(
                status_code=500,
                detail=f"Failed to create search source connector: {e!s}",
            ) from e
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to complete Notion OAuth: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to complete Notion OAuth: {e!s}"
        ) from e
async def refresh_notion_token(
    session: AsyncSession, connector: SearchSourceConnector
) -> SearchSourceConnector:
    """
    Refresh the Notion access token for a connector.

    Decrypts the stored refresh token, exchanges it at Notion's token
    endpoint (HTTP Basic auth with the app's client credentials) for a new
    access/refresh token pair, re-encrypts the tokens, and commits the
    updated credentials back onto the connector's config.

    Args:
        session: Database session
        connector: Notion connector to refresh

    Returns:
        Updated connector object

    Raises:
        HTTPException:
            - 400 if no refresh token is stored, Notion rejects the refresh,
              or no access token is returned.
            - 500 if the stored refresh token cannot be decrypted or an
              unexpected error occurs.
    """
    try:
        logger.info(f"Refreshing Notion token for connector {connector.id}")
        credentials = NotionAuthCredentialsBase.from_dict(connector.config)

        # Decrypt tokens if they are encrypted; configs without the
        # _token_encrypted marker are treated as holding plaintext tokens.
        token_encryption = get_token_encryption()
        is_encrypted = connector.config.get("_token_encrypted", False)

        refresh_token = credentials.refresh_token
        if is_encrypted and refresh_token:
            try:
                refresh_token = token_encryption.decrypt_token(refresh_token)
            except Exception as e:
                logger.error(f"Failed to decrypt refresh token: {e!s}")
                raise HTTPException(
                    status_code=500, detail="Failed to decrypt stored refresh token"
                ) from e

        if not refresh_token:
            # Without a refresh token the only recovery is a fresh OAuth flow.
            raise HTTPException(
                status_code=400,
                detail="No refresh token available. Please re-authenticate.",
            )

        # Notion's token endpoint authenticates the app via HTTP Basic auth.
        auth_header = make_basic_auth_header(
            config.NOTION_CLIENT_ID, config.NOTION_CLIENT_SECRET
        )

        # Prepare token refresh data (Notion expects a JSON body).
        refresh_data = {
            "grant_type": "refresh_token",
            "refresh_token": refresh_token,
        }

        async with httpx.AsyncClient() as client:
            token_response = await client.post(
                TOKEN_URL,
                json=refresh_data,
                headers={
                    "Content-Type": "application/json",
                    "Authorization": auth_header,
                },
                timeout=30.0,
            )

        if token_response.status_code != 200:
            # Prefer Notion's human-readable error_description when present;
            # fall back to the raw response body.
            error_detail = token_response.text
            try:
                error_json = token_response.json()
                error_detail = error_json.get("error_description", error_detail)
            except Exception:
                pass
            raise HTTPException(
                status_code=400, detail=f"Token refresh failed: {error_detail}"
            )

        token_json = token_response.json()

        # Calculate expiration time (UTC, tz-aware)
        expires_at = None
        expires_in = token_json.get("expires_in")
        if expires_in:
            now_utc = datetime.now(UTC)
            expires_at = now_utc + timedelta(seconds=int(expires_in))

        # Encrypt new tokens before storing
        access_token = token_json.get("access_token")
        new_refresh_token = token_json.get("refresh_token")

        if not access_token:
            raise HTTPException(
                status_code=400, detail="No access token received from Notion refresh"
            )

        # Update credentials object with encrypted tokens. If Notion did not
        # rotate the refresh token, the previously stored (still encrypted)
        # value on `credentials` is kept as-is.
        credentials.access_token = token_encryption.encrypt_token(access_token)
        if new_refresh_token:
            credentials.refresh_token = token_encryption.encrypt_token(
                new_refresh_token
            )
        credentials.expires_in = expires_in
        credentials.expires_at = expires_at

        # Preserve workspace info (the refresh response does not repeat it).
        if not credentials.workspace_id:
            credentials.workspace_id = connector.config.get("workspace_id")
        if not credentials.workspace_name:
            credentials.workspace_name = connector.config.get("workspace_name")
        if not credentials.workspace_icon:
            credentials.workspace_icon = connector.config.get("workspace_icon")
        if not credentials.bot_id:
            credentials.bot_id = connector.config.get("bot_id")

        # Update connector config with encrypted tokens
        credentials_dict = credentials.to_dict()
        credentials_dict["_token_encrypted"] = True
        connector.config = credentials_dict

        await session.commit()
        await session.refresh(connector)

        logger.info(f"Successfully refreshed Notion token for connector {connector.id}")
        return connector

    except HTTPException:
        # Intentional status codes propagate unchanged.
        raise
    except Exception as e:
        logger.error(f"Failed to refresh Notion token: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to refresh Notion token: {e!s}"
        ) from e

View file

@ -7,7 +7,8 @@ PUT /search-source-connectors/{connector_id} - Update a specific connector
DELETE /search-source-connectors/{connector_id} - Delete a specific connector
POST /search-source-connectors/{connector_id}/index - Index content from a connector to a search space
Note: OAuth connectors (Gmail, Drive, Slack, etc.) support multiple accounts per search space.
Non-OAuth connectors (BookStack, GitHub, etc.) are limited to one connector of each type per
user per search space (based on search_space_id, user_id, and connector_type).
"""
import logging
@ -125,6 +126,7 @@ async def create_search_source_connector(
)
# Check if a connector with the same type already exists for this search space
# (for non-OAuth connectors that don't support multiple accounts)
result = await session.execute(
select(SearchSourceConnector).filter(
SearchSourceConnector.search_space_id == search_space_id,

View file

@ -0,0 +1,492 @@
"""
Slack Connector OAuth Routes.
Handles OAuth 2.0 authentication flow for Slack connector.
"""
import logging
from datetime import UTC, datetime, timedelta
from uuid import UUID
import httpx
from fastapi import APIRouter, Depends, HTTPException, Request
from fastapi.responses import RedirectResponse
from pydantic import ValidationError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
from app.db import (
SearchSourceConnector,
SearchSourceConnectorType,
User,
get_async_session,
)
from app.schemas.slack_auth_credentials import SlackAuthCredentialsBase
from app.users import current_active_user
from app.utils.connector_naming import (
check_duplicate_connector,
extract_identifier_from_credentials,
generate_unique_connector_name,
)
from app.utils.oauth_security import OAuthStateManager, TokenEncryption
logger = logging.getLogger(__name__)

router = APIRouter()

# Slack OAuth endpoints
AUTHORIZATION_URL = "https://slack.com/oauth/v2/authorize"
TOKEN_URL = "https://slack.com/api/oauth.v2.access"

# OAuth scopes requested for the Slack bot token.
SCOPES = [
    "channels:history",  # Read messages in public channels
    "channels:read",  # View basic information about public channels
    "groups:history",  # Read messages in private channels
    "groups:read",  # View basic information about private channels
    "im:history",  # Read messages in direct messages
    "mpim:history",  # Read messages in group direct messages
    "users:read",  # Read user information
]

# Module-level singletons, created lazily on first use so that config
# (SECRET_KEY) is fully loaded before construction.
_state_manager = None
_token_encryption = None
def get_state_manager() -> OAuthStateManager:
    """Return the shared OAuth state manager, creating it on first use.

    Raises:
        ValueError: If SECRET_KEY is not configured.
    """
    global _state_manager
    if _state_manager is not None:
        return _state_manager
    secret = config.SECRET_KEY
    if not secret:
        raise ValueError("SECRET_KEY must be set for OAuth security")
    _state_manager = OAuthStateManager(secret)
    return _state_manager
def get_token_encryption() -> TokenEncryption:
    """Return the shared token-encryption helper, creating it on first use.

    Raises:
        ValueError: If SECRET_KEY is not configured.
    """
    global _token_encryption
    if _token_encryption is not None:
        return _token_encryption
    secret = config.SECRET_KEY
    if not secret:
        raise ValueError("SECRET_KEY must be set for token encryption")
    _token_encryption = TokenEncryption(secret)
    return _token_encryption
@router.get("/auth/slack/connector/add")
async def connect_slack(space_id: int, user: User = Depends(current_active_user)):
    """
    Initiate Slack OAuth flow.

    Builds the Slack authorization URL carrying an HMAC-signed state
    parameter that binds the flow to this user and search space.

    Args:
        space_id: The search space ID
        user: Current authenticated user

    Returns:
        Dict with "auth_url": the Slack authorization URL to redirect to.

    Raises:
        HTTPException: 400 if space_id is missing; 500 if Slack OAuth or
            SECRET_KEY is not configured, or on unexpected errors.
    """
    try:
        if not space_id:
            raise HTTPException(status_code=400, detail="space_id is required")
        if not config.SLACK_CLIENT_ID:
            raise HTTPException(status_code=500, detail="Slack OAuth not configured.")
        if not config.SECRET_KEY:
            raise HTTPException(
                status_code=500, detail="SECRET_KEY not configured for OAuth security."
            )

        # Generate secure state parameter with HMAC signature
        state_manager = get_state_manager()
        state_encoded = state_manager.generate_secure_state(space_id, user.id)

        # Build authorization URL
        from urllib.parse import urlencode

        auth_params = {
            "client_id": config.SLACK_CLIENT_ID,
            "scope": ",".join(SCOPES),
            "redirect_uri": config.SLACK_REDIRECT_URI,
            "state": state_encoded,
        }
        auth_url = f"{AUTHORIZATION_URL}?{urlencode(auth_params)}"

        logger.info(f"Generated Slack OAuth URL for user {user.id}, space {space_id}")
        return {"auth_url": auth_url}

    except HTTPException:
        # Bug fix: previously the deliberate 400/500 HTTPExceptions raised
        # above were swallowed by the generic handler below and rewrapped as
        # opaque 500s. Re-raising here preserves the intended status codes
        # and matches the handler ordering used by slack_callback and
        # refresh_slack_token in this module.
        raise
    except Exception as e:
        logger.error(f"Failed to initiate Slack OAuth: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to initiate Slack OAuth: {e!s}"
        ) from e
@router.get("/auth/slack/connector/callback")
async def slack_callback(
    request: Request,
    code: str | None = None,
    error: str | None = None,
    state: str | None = None,
    session: AsyncSession = Depends(get_async_session),
):
    """
    Handle Slack OAuth callback.

    Validates the HMAC-signed state, exchanges the authorization code for a
    bot token, encrypts and stores the credentials as a new SLACK_CONNECTOR
    row, and redirects back to the frontend with success/error query params.

    Args:
        request: FastAPI request object
        code: Authorization code from Slack (if user granted access)
        error: Error code from Slack (if user denied access or error occurred)
        state: State parameter containing user/space info
        session: Database session

    Returns:
        Redirect response to frontend
    """
    try:
        # Handle OAuth errors (e.g., user denied access)
        if error:
            logger.warning(f"Slack OAuth error: {error}")
            # Try to decode state to get space_id for the redirect, but don't
            # fail if it's invalid — we still want to land the user somewhere.
            space_id = None
            if state:
                try:
                    state_manager = get_state_manager()
                    data = state_manager.validate_state(state)
                    space_id = data.get("space_id")
                except Exception:
                    # If state is invalid, we'll redirect without space_id
                    logger.warning("Failed to validate state in error handler")

            # Redirect to frontend with error parameter
            if space_id:
                return RedirectResponse(
                    url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=slack_oauth_denied"
                )
            else:
                return RedirectResponse(
                    url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=slack_oauth_denied"
                )

        # Validate required parameters for successful flow
        if not code:
            raise HTTPException(status_code=400, detail="Missing authorization code")
        if not state:
            raise HTTPException(status_code=400, detail="Missing state parameter")

        # Validate and decode state with signature verification
        state_manager = get_state_manager()
        try:
            data = state_manager.validate_state(state)
        except HTTPException:
            raise
        except Exception as e:
            raise HTTPException(
                status_code=400, detail=f"Invalid state parameter: {e!s}"
            ) from e

        user_id = UUID(data["user_id"])
        space_id = data["space_id"]

        # Validate redirect URI (security: ensure it matches configured value)
        if not config.SLACK_REDIRECT_URI:
            raise HTTPException(
                status_code=500, detail="SLACK_REDIRECT_URI not configured"
            )

        # Exchange authorization code for access token
        token_data = {
            "client_id": config.SLACK_CLIENT_ID,
            "client_secret": config.SLACK_CLIENT_SECRET,
            "code": code,
            "redirect_uri": config.SLACK_REDIRECT_URI,
        }

        async with httpx.AsyncClient() as client:
            token_response = await client.post(
                TOKEN_URL,
                data=token_data,
                headers={"Content-Type": "application/x-www-form-urlencoded"},
                timeout=30.0,
            )

        if token_response.status_code != 200:
            # Prefer Slack's machine-readable "error" field; fall back to body.
            error_detail = token_response.text
            try:
                error_json = token_response.json()
                error_detail = error_json.get("error", error_detail)
            except Exception:
                pass
            raise HTTPException(
                status_code=400, detail=f"Token exchange failed: {error_detail}"
            )

        token_json = token_response.json()

        # Slack OAuth v2 returns success status in the JSON ("ok" flag),
        # even on HTTP 200, so it must be checked explicitly.
        if not token_json.get("ok", False):
            error_msg = token_json.get("error", "Unknown error")
            raise HTTPException(
                status_code=400, detail=f"Slack OAuth error: {error_msg}"
            )

        # Extract bot token from Slack response
        # Slack OAuth v2 returns: { "ok": true, "access_token": "...", "bot": { "bot_user_id": "...", "bot_access_token": "xoxb-..." }, "refresh_token": "...", ... }
        # NOTE(review): the nested bot.bot_access_token shape matches the
        # legacy (v1) payload; oauth.v2.access normally returns the bot token
        # as the top-level access_token, which the fallback below covers —
        # confirm against a live response.
        bot_token = None
        if token_json.get("bot") and token_json["bot"].get("bot_access_token"):
            bot_token = token_json["bot"]["bot_access_token"]
        elif token_json.get("access_token"):
            # Fallback to access_token if bot token not available
            bot_token = token_json["access_token"]
        else:
            raise HTTPException(
                status_code=400, detail="No bot token received from Slack"
            )

        # Extract refresh token if available (for token rotation)
        refresh_token = token_json.get("refresh_token")

        # Encrypt sensitive tokens before storing
        token_encryption = get_token_encryption()

        # Calculate expiration time (UTC, tz-aware)
        # Slack tokens don't expire by default, but we'll store expiration info if provided
        expires_at = None
        if token_json.get("expires_in"):
            now_utc = datetime.now(UTC)
            expires_at = now_utc + timedelta(seconds=int(token_json["expires_in"]))

        # Store the encrypted bot token and refresh token in connector config
        connector_config = {
            "bot_token": token_encryption.encrypt_token(bot_token),
            "refresh_token": token_encryption.encrypt_token(refresh_token)
            if refresh_token
            else None,
            "bot_user_id": token_json.get("bot", {}).get("bot_user_id"),
            "team_id": token_json.get("team", {}).get("id"),
            "team_name": token_json.get("team", {}).get("name"),
            "token_type": token_json.get("token_type", "Bearer"),
            "expires_in": token_json.get("expires_in"),
            "expires_at": expires_at.isoformat() if expires_at else None,
            "scope": token_json.get("scope"),
            # Mark that tokens are encrypted for backward compatibility
            "_token_encrypted": True,
        }

        # Extract unique identifier from connector credentials
        connector_identifier = extract_identifier_from_credentials(
            SearchSourceConnectorType.SLACK_CONNECTOR, connector_config
        )

        # Check for duplicate connector (same workspace already connected)
        is_duplicate = await check_duplicate_connector(
            session,
            SearchSourceConnectorType.SLACK_CONNECTOR,
            space_id,
            user_id,
            connector_identifier,
        )
        if is_duplicate:
            logger.warning(
                f"Duplicate Slack connector detected for user {user_id} with workspace {connector_identifier}"
            )
            return RedirectResponse(
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=slack-connector"
            )

        # Generate a unique, user-friendly connector name
        connector_name = await generate_unique_connector_name(
            session,
            SearchSourceConnectorType.SLACK_CONNECTOR,
            space_id,
            user_id,
            connector_identifier,
        )

        # Create new connector
        new_connector = SearchSourceConnector(
            name=connector_name,
            connector_type=SearchSourceConnectorType.SLACK_CONNECTOR,
            is_indexable=True,
            config=connector_config,
            search_space_id=space_id,
            user_id=user_id,
        )
        session.add(new_connector)
        logger.info(
            f"Created new Slack connector for user {user_id} in space {space_id}"
        )

        try:
            await session.commit()
            logger.info(f"Successfully saved Slack connector for user {user_id}")
            # Redirect to the frontend with success params
            return RedirectResponse(
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=slack-connector&connectorId={new_connector.id}"
            )
        except ValidationError as e:
            await session.rollback()
            raise HTTPException(
                status_code=422, detail=f"Validation error: {e!s}"
            ) from e
        except IntegrityError as e:
            await session.rollback()
            raise HTTPException(
                status_code=409,
                detail=f"Database integrity error: {e!s}",
            ) from e
        except Exception as e:
            logger.error(f"Failed to create search source connector: {e!s}")
            await session.rollback()
            raise HTTPException(
                status_code=500,
                detail=f"Failed to create search source connector: {e!s}",
            ) from e

    except HTTPException:
        # Intentional status codes propagate unchanged.
        raise
    except Exception as e:
        logger.error(f"Failed to complete Slack OAuth: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to complete Slack OAuth: {e!s}"
        ) from e
async def refresh_slack_token(
    session: AsyncSession, connector: SearchSourceConnector
) -> SearchSourceConnector:
    """
    Refresh the Slack bot token for a connector.

    Decrypts the stored refresh token, calls Slack's oauth.v2.access with
    grant_type=refresh_token, re-encrypts the (possibly rotated) tokens,
    and commits the updated credentials onto the connector's config.

    Args:
        session: Database session
        connector: Slack connector to refresh

    Returns:
        Updated connector object

    Raises:
        HTTPException:
            - 400 if no refresh token is stored, Slack rejects the refresh,
              or no bot token is returned.
            - 500 if decryption fails or an unexpected error occurs.
    """
    try:
        logger.info(f"Refreshing Slack token for connector {connector.id}")
        credentials = SlackAuthCredentialsBase.from_dict(connector.config)

        # Decrypt tokens if they are encrypted; configs without the
        # _token_encrypted marker are treated as holding plaintext tokens.
        token_encryption = get_token_encryption()
        is_encrypted = connector.config.get("_token_encrypted", False)

        refresh_token = credentials.refresh_token
        if is_encrypted and refresh_token:
            try:
                refresh_token = token_encryption.decrypt_token(refresh_token)
            except Exception as e:
                logger.error(f"Failed to decrypt refresh token: {e!s}")
                raise HTTPException(
                    status_code=500, detail="Failed to decrypt stored refresh token"
                ) from e

        if not refresh_token:
            # Without a refresh token the only recovery is a new OAuth flow.
            raise HTTPException(
                status_code=400,
                detail="No refresh token available. Please re-authenticate.",
            )

        # Slack uses oauth.v2.access for token refresh with grant_type=refresh_token
        refresh_data = {
            "client_id": config.SLACK_CLIENT_ID,
            "client_secret": config.SLACK_CLIENT_SECRET,
            "grant_type": "refresh_token",
            "refresh_token": refresh_token,
        }

        async with httpx.AsyncClient() as client:
            token_response = await client.post(
                TOKEN_URL,
                data=refresh_data,
                headers={"Content-Type": "application/x-www-form-urlencoded"},
                timeout=30.0,
            )

        if token_response.status_code != 200:
            # Prefer Slack's machine-readable "error" field; fall back to body.
            error_detail = token_response.text
            try:
                error_json = token_response.json()
                error_detail = error_json.get("error", error_detail)
            except Exception:
                pass
            raise HTTPException(
                status_code=400, detail=f"Token refresh failed: {error_detail}"
            )

        token_json = token_response.json()

        # Slack OAuth v2 returns success status in the JSON ("ok" flag),
        # even on HTTP 200, so it must be checked explicitly.
        if not token_json.get("ok", False):
            error_msg = token_json.get("error", "Unknown error")
            raise HTTPException(
                status_code=400, detail=f"Slack OAuth refresh error: {error_msg}"
            )

        # Extract bot token from refresh response
        # NOTE(review): the nested bot.bot_access_token shape matches the
        # legacy (v1) payload; oauth.v2.access normally returns the bot token
        # as the top-level access_token — confirm against a live response.
        bot_token = None
        if token_json.get("bot") and token_json["bot"].get("bot_access_token"):
            bot_token = token_json["bot"]["bot_access_token"]
        elif token_json.get("access_token"):
            bot_token = token_json["access_token"]
        else:
            raise HTTPException(
                status_code=400, detail="No bot token received from Slack refresh"
            )

        # Get new refresh token if provided (Slack may rotate refresh tokens)
        new_refresh_token = token_json.get("refresh_token")

        # Calculate expiration time (UTC, tz-aware)
        expires_at = None
        expires_in = token_json.get("expires_in")
        if expires_in:
            now_utc = datetime.now(UTC)
            expires_at = now_utc + timedelta(seconds=int(expires_in))

        # Update credentials object with encrypted tokens. If Slack did not
        # rotate the refresh token, the previously stored (still encrypted)
        # value on `credentials` is kept as-is.
        credentials.bot_token = token_encryption.encrypt_token(bot_token)
        if new_refresh_token:
            credentials.refresh_token = token_encryption.encrypt_token(
                new_refresh_token
            )
        credentials.expires_in = expires_in
        credentials.expires_at = expires_at
        credentials.scope = token_json.get("scope")

        # Preserve team info (not repeated in the refresh response).
        if not credentials.team_id:
            credentials.team_id = connector.config.get("team_id")
        if not credentials.team_name:
            credentials.team_name = connector.config.get("team_name")
        if not credentials.bot_user_id:
            credentials.bot_user_id = connector.config.get("bot_user_id")

        # Update connector config with encrypted tokens
        credentials_dict = credentials.to_dict()
        credentials_dict["_token_encrypted"] = True
        connector.config = credentials_dict

        await session.commit()
        await session.refresh(connector)

        logger.info(f"Successfully refreshed Slack token for connector {connector.id}")
        return connector

    except HTTPException:
        # Intentional status codes propagate unchanged.
        raise
    except Exception as e:
        logger.error(
            f"Failed to refresh Slack token for connector {connector.id}: {e!s}",
            exc_info=True,
        )
        raise HTTPException(
            status_code=500, detail=f"Failed to refresh Slack token: {e!s}"
        ) from e

View file

@ -0,0 +1,86 @@
"""
Atlassian OAuth 2.0 Authentication Credentials Schema.
Shared schema for both Jira and Confluence OAuth credentials.
Both products use the same Atlassian OAuth 2.0 (3LO) flow and token structure.
"""
from datetime import UTC, datetime
from pydantic import BaseModel, field_validator
class AtlassianAuthCredentialsBase(BaseModel):
    """
    Base model for Atlassian OAuth 2.0 credentials.

    Shared by the Jira and Confluence connectors, which both authenticate
    through Atlassian's OAuth 2.0 (3LO) flow and use identical token payloads.
    """

    access_token: str
    refresh_token: str | None = None
    token_type: str = "Bearer"
    expires_in: int | None = None
    expires_at: datetime | None = None
    scope: str | None = None
    cloud_id: str | None = None
    base_url: str | None = None

    @property
    def is_expired(self) -> bool:
        """True when a known expiry time is in the past (False if unknown)."""
        return self.expires_at is not None and self.expires_at <= datetime.now(UTC)

    @property
    def is_refreshable(self) -> bool:
        """True when a refresh token is stored."""
        return self.refresh_token is not None

    def to_dict(self) -> dict:
        """Serialize to a JSON-friendly dict suitable for config storage."""
        expiry = self.expires_at.isoformat() if self.expires_at is not None else None
        return {
            "access_token": self.access_token,
            "refresh_token": self.refresh_token,
            "token_type": self.token_type,
            "expires_in": self.expires_in,
            "expires_at": expiry,
            "scope": self.scope,
            "cloud_id": self.cloud_id,
            "base_url": self.base_url,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "AtlassianAuthCredentialsBase":
        """Rebuild credentials from a dict produced by to_dict()."""
        raw_expiry = data.get("expires_at")
        return cls(
            access_token=data["access_token"],
            refresh_token=data.get("refresh_token"),
            token_type=data.get("token_type", "Bearer"),
            expires_in=data.get("expires_in"),
            expires_at=datetime.fromisoformat(raw_expiry) if raw_expiry else None,
            scope=data.get("scope"),
            cloud_id=data.get("cloud_id"),
            base_url=data.get("base_url"),
        )

    @field_validator("expires_at", mode="before")
    @classmethod
    def ensure_aware_utc(cls, v):
        """Coerce incoming ISO strings and naive datetimes to tz-aware UTC."""
        if isinstance(v, str):
            # Normalize trailing "Z" to an explicit UTC offset.
            if v.endswith("Z"):
                v = v[:-1] + "+00:00"
            parsed = datetime.fromisoformat(v)
            return parsed if parsed.tzinfo else parsed.replace(tzinfo=UTC)
        if isinstance(v, datetime):
            return v if v.tzinfo else v.replace(tzinfo=UTC)
        return v

View file

@ -0,0 +1,85 @@
from datetime import UTC, datetime
from pydantic import BaseModel, field_validator
class ClickUpAuthCredentialsBase(BaseModel):
    """OAuth credentials for the ClickUp connector, plus user/workspace info."""

    access_token: str
    refresh_token: str | None = None
    expires_in: int | None = None
    expires_at: datetime | None = None
    user_id: str | None = None
    user_email: str | None = None
    user_name: str | None = None
    workspace_id: str | None = None
    workspace_name: str | None = None

    @property
    def is_expired(self) -> bool:
        """True when a known expiry time is in the past; tokens without an
        expiry are long-lived and treated as not expired."""
        return self.expires_at is not None and self.expires_at <= datetime.now(UTC)

    @property
    def is_refreshable(self) -> bool:
        """True when a refresh token is stored."""
        return self.refresh_token is not None

    def to_dict(self) -> dict:
        """Serialize to a JSON-friendly dict suitable for config storage."""
        expiry = self.expires_at.isoformat() if self.expires_at is not None else None
        return {
            "access_token": self.access_token,
            "refresh_token": self.refresh_token,
            "expires_in": self.expires_in,
            "expires_at": expiry,
            "user_id": self.user_id,
            "user_email": self.user_email,
            "user_name": self.user_name,
            "workspace_id": self.workspace_id,
            "workspace_name": self.workspace_name,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "ClickUpAuthCredentialsBase":
        """Rebuild credentials from a dict produced by to_dict().

        user_id / workspace_id may have been stored as integers by older
        versions, so they are coerced to strings here.
        """

        def _as_str(value):
            # Backward compatibility: pass through None/str, stringify the rest.
            return value if value is None or isinstance(value, str) else str(value)

        raw_expiry = data.get("expires_at")
        return cls(
            access_token=data.get("access_token", ""),
            refresh_token=data.get("refresh_token"),
            expires_in=data.get("expires_in"),
            expires_at=datetime.fromisoformat(raw_expiry) if raw_expiry else None,
            user_id=_as_str(data.get("user_id")),
            user_email=data.get("user_email"),
            user_name=data.get("user_name"),
            workspace_id=_as_str(data.get("workspace_id")),
            workspace_name=data.get("workspace_name"),
        )

    @field_validator("expires_at", mode="before")
    @classmethod
    def ensure_aware_utc(cls, v):
        """Coerce incoming ISO strings and naive datetimes to tz-aware UTC."""
        if isinstance(v, str):
            # Normalize trailing "Z" to an explicit UTC offset.
            if v.endswith("Z"):
                v = v[:-1] + "+00:00"
            parsed = datetime.fromisoformat(v)
            return parsed if parsed.tzinfo else parsed.replace(tzinfo=UTC)
        if isinstance(v, datetime):
            return v if v.tzinfo else v.replace(tzinfo=UTC)
        return v

View file

@ -0,0 +1,75 @@
from datetime import UTC, datetime
from pydantic import BaseModel, field_validator
class DiscordAuthCredentialsBase(BaseModel):
    """Bot-token credentials for the Discord connector, plus guild info."""

    bot_token: str
    refresh_token: str | None = None
    token_type: str = "Bearer"
    expires_in: int | None = None
    expires_at: datetime | None = None
    scope: str | None = None
    bot_user_id: str | None = None
    guild_id: str | None = None
    guild_name: str | None = None

    @property
    def is_expired(self) -> bool:
        """True when a known expiry time is in the past; tokens without an
        expiry are long-lived and treated as not expired."""
        return self.expires_at is not None and self.expires_at <= datetime.now(UTC)

    @property
    def is_refreshable(self) -> bool:
        """True when a refresh token is stored."""
        return self.refresh_token is not None

    def to_dict(self) -> dict:
        """Serialize to a JSON-friendly dict suitable for config storage."""
        expiry = self.expires_at.isoformat() if self.expires_at is not None else None
        return {
            "bot_token": self.bot_token,
            "refresh_token": self.refresh_token,
            "token_type": self.token_type,
            "expires_in": self.expires_in,
            "expires_at": expiry,
            "scope": self.scope,
            "bot_user_id": self.bot_user_id,
            "guild_id": self.guild_id,
            "guild_name": self.guild_name,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "DiscordAuthCredentialsBase":
        """Rebuild credentials from a dict produced by to_dict()."""
        raw_expiry = data.get("expires_at")
        return cls(
            bot_token=data.get("bot_token", ""),
            refresh_token=data.get("refresh_token"),
            token_type=data.get("token_type", "Bearer"),
            expires_in=data.get("expires_in"),
            expires_at=datetime.fromisoformat(raw_expiry) if raw_expiry else None,
            scope=data.get("scope"),
            bot_user_id=data.get("bot_user_id"),
            guild_id=data.get("guild_id"),
            guild_name=data.get("guild_name"),
        )

    @field_validator("expires_at", mode="before")
    @classmethod
    def ensure_aware_utc(cls, v):
        """Coerce incoming ISO strings and naive datetimes to tz-aware UTC."""
        if isinstance(v, str):
            # Normalize trailing "Z" to an explicit UTC offset.
            if v.endswith("Z"):
                v = v[:-1] + "+00:00"
            parsed = datetime.fromisoformat(v)
            return parsed if parsed.tzinfo else parsed.replace(tzinfo=UTC)
        if isinstance(v, datetime):
            return v if v.tzinfo else v.replace(tzinfo=UTC)
        return v

View file

@ -0,0 +1,66 @@
from datetime import UTC, datetime
from pydantic import BaseModel, field_validator
class LinearAuthCredentialsBase(BaseModel):
    """OAuth credentials for the Linear connector."""

    access_token: str
    refresh_token: str | None = None
    token_type: str = "Bearer"
    expires_in: int | None = None
    expires_at: datetime | None = None
    scope: str | None = None

    @property
    def is_expired(self) -> bool:
        """True when a known expiry time is in the past (False if unknown)."""
        return self.expires_at is not None and self.expires_at <= datetime.now(UTC)

    @property
    def is_refreshable(self) -> bool:
        """True when a refresh token is stored."""
        return self.refresh_token is not None

    def to_dict(self) -> dict:
        """Serialize to a JSON-friendly dict suitable for config storage."""
        expiry = self.expires_at.isoformat() if self.expires_at is not None else None
        return {
            "access_token": self.access_token,
            "refresh_token": self.refresh_token,
            "token_type": self.token_type,
            "expires_in": self.expires_in,
            "expires_at": expiry,
            "scope": self.scope,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "LinearAuthCredentialsBase":
        """Rebuild credentials from a dict produced by to_dict()."""
        raw_expiry = data.get("expires_at")
        return cls(
            access_token=data["access_token"],
            refresh_token=data.get("refresh_token"),
            token_type=data.get("token_type", "Bearer"),
            expires_in=data.get("expires_in"),
            expires_at=datetime.fromisoformat(raw_expiry) if raw_expiry else None,
            scope=data.get("scope"),
        )

    @field_validator("expires_at", mode="before")
    @classmethod
    def ensure_aware_utc(cls, v):
        """Coerce incoming ISO strings and naive datetimes to tz-aware UTC."""
        if isinstance(v, str):
            # Normalize trailing "Z" to an explicit UTC offset.
            if v.endswith("Z"):
                v = v[:-1] + "+00:00"
            parsed = datetime.fromisoformat(v)
            return parsed if parsed.tzinfo else parsed.replace(tzinfo=UTC)
        if isinstance(v, datetime):
            return v if v.tzinfo else v.replace(tzinfo=UTC)
        return v

View file

@ -0,0 +1,72 @@
from datetime import UTC, datetime
from pydantic import BaseModel, field_validator
class NotionAuthCredentialsBase(BaseModel):
    """OAuth credentials for the Notion connector, plus workspace/bot info."""

    access_token: str
    refresh_token: str | None = None
    expires_in: int | None = None
    expires_at: datetime | None = None
    workspace_id: str | None = None
    workspace_name: str | None = None
    workspace_icon: str | None = None
    bot_id: str | None = None

    @property
    def is_expired(self) -> bool:
        """True when a known expiry time is in the past; tokens without an
        expiry are long-lived and treated as not expired."""
        return self.expires_at is not None and self.expires_at <= datetime.now(UTC)

    @property
    def is_refreshable(self) -> bool:
        """True when a refresh token is stored."""
        return self.refresh_token is not None

    def to_dict(self) -> dict:
        """Serialize to a JSON-friendly dict suitable for config storage."""
        expiry = self.expires_at.isoformat() if self.expires_at is not None else None
        return {
            "access_token": self.access_token,
            "refresh_token": self.refresh_token,
            "expires_in": self.expires_in,
            "expires_at": expiry,
            "workspace_id": self.workspace_id,
            "workspace_name": self.workspace_name,
            "workspace_icon": self.workspace_icon,
            "bot_id": self.bot_id,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "NotionAuthCredentialsBase":
        """Rebuild credentials from a dict produced by to_dict()."""
        raw_expiry = data.get("expires_at")
        return cls(
            access_token=data["access_token"],
            refresh_token=data.get("refresh_token"),
            expires_in=data.get("expires_in"),
            expires_at=datetime.fromisoformat(raw_expiry) if raw_expiry else None,
            workspace_id=data.get("workspace_id"),
            workspace_name=data.get("workspace_name"),
            workspace_icon=data.get("workspace_icon"),
            bot_id=data.get("bot_id"),
        )

    @field_validator("expires_at", mode="before")
    @classmethod
    def ensure_aware_utc(cls, v):
        """Coerce incoming ISO strings and naive datetimes to tz-aware UTC."""
        if isinstance(v, str):
            # Normalize trailing "Z" to an explicit UTC offset.
            if v.endswith("Z"):
                v = v[:-1] + "+00:00"
            parsed = datetime.fromisoformat(v)
            return parsed if parsed.tzinfo else parsed.replace(tzinfo=UTC)
        if isinstance(v, datetime):
            return v if v.tzinfo else v.replace(tzinfo=UTC)
        return v

View file

@ -30,7 +30,12 @@ class SearchSourceConnectorBase(BaseModel):
@model_validator(mode="after")
def validate_periodic_indexing(self):
"""Validate that periodic indexing configuration is consistent.

Supported frequencies: Any positive integer (in minutes).
Common values: 5, 15, 60 (1 hour), 360 (6 hours), 720 (12 hours), 1440 (daily), etc.
The schedule checker will handle any frequency >= 1 minute.
"""
if self.periodic_indexing_enabled:
if not self.is_indexable:
raise ValueError(

View file

@ -0,0 +1,75 @@
from datetime import UTC, datetime
from pydantic import BaseModel, field_validator
class SlackAuthCredentialsBase(BaseModel):
    """Bot-token credentials for the Slack connector, plus team info."""

    bot_token: str
    refresh_token: str | None = None
    token_type: str = "Bearer"
    expires_in: int | None = None
    expires_at: datetime | None = None
    scope: str | None = None
    bot_user_id: str | None = None
    team_id: str | None = None
    team_name: str | None = None

    @property
    def is_expired(self) -> bool:
        """True when a known expiry time is in the past; tokens without an
        expiry are long-lived and treated as not expired."""
        return self.expires_at is not None and self.expires_at <= datetime.now(UTC)

    @property
    def is_refreshable(self) -> bool:
        """True when a refresh token is stored."""
        return self.refresh_token is not None

    def to_dict(self) -> dict:
        """Serialize to a JSON-friendly dict suitable for config storage."""
        expiry = self.expires_at.isoformat() if self.expires_at is not None else None
        return {
            "bot_token": self.bot_token,
            "refresh_token": self.refresh_token,
            "token_type": self.token_type,
            "expires_in": self.expires_in,
            "expires_at": expiry,
            "scope": self.scope,
            "bot_user_id": self.bot_user_id,
            "team_id": self.team_id,
            "team_name": self.team_name,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "SlackAuthCredentialsBase":
        """Rebuild credentials from a dict produced by to_dict()."""
        raw_expiry = data.get("expires_at")
        return cls(
            bot_token=data.get("bot_token", ""),
            refresh_token=data.get("refresh_token"),
            token_type=data.get("token_type", "Bearer"),
            expires_in=data.get("expires_in"),
            expires_at=datetime.fromisoformat(raw_expiry) if raw_expiry else None,
            scope=data.get("scope"),
            bot_user_id=data.get("bot_user_id"),
            team_id=data.get("team_id"),
            team_name=data.get("team_name"),
        )

    @field_validator("expires_at", mode="before")
    @classmethod
    def ensure_aware_utc(cls, v):
        """Coerce incoming ISO strings and naive datetimes to tz-aware UTC."""
        if isinstance(v, str):
            # Normalize trailing "Z" to an explicit UTC offset.
            if v.endswith("Z"):
                v = v[:-1] + "+00:00"
            parsed = datetime.fromisoformat(v)
            return parsed if parsed.tzinfo else parsed.replace(tzinfo=UTC)
        if isinstance(v, datetime):
            return v if v.tzinfo else v.replace(tzinfo=UTC)
        return v

View file

@ -128,42 +128,6 @@ class DoclingService:
logger.error(f"❌ Docling initialization failed: {e}")
raise RuntimeError(f"Docling initialization failed: {e}") from e
def _configure_easyocr_local_models(self):
    """Configure EasyOCR to use pre-downloaded local models.

    Returns an easyocr.Reader on success, or None if EasyOCR could not be
    configured — callers must handle the None case.
    """
    try:
        import os

        import easyocr

        # Set SSL environment for EasyOCR downloads
        # NOTE(review): emptying CURL_CA_BUNDLE / REQUESTS_CA_BUNDLE disables
        # TLS certificate verification for these HTTP clients process-wide,
        # not just for this download — confirm this is acceptable for the
        # deployment environment.
        os.environ["CURL_CA_BUNDLE"] = ""
        os.environ["REQUESTS_CA_BUNDLE"] = ""

        # Try to use local models first, fallback to download if needed
        try:
            reader = easyocr.Reader(
                ["en"],
                download_enabled=False,
                model_storage_directory="/root/.EasyOCR/model",
            )
            logger.info("✅ EasyOCR configured for local models")
            return reader
        except Exception:
            # If local models fail, allow download with SSL bypass
            logger.info(
                "🔄 Local models failed, attempting download with SSL bypass..."
            )
            reader = easyocr.Reader(
                ["en"],
                download_enabled=True,
                model_storage_directory="/root/.EasyOCR/model",
            )
            logger.info("✅ EasyOCR configured with downloaded models")
            return reader
    except Exception as e:
        # Best-effort: OCR is optional, so failures degrade to None.
        logger.warning(f"⚠️ EasyOCR configuration failed: {e}")
        return None
async def process_document(
self, file_path: str, filename: str | None = None
) -> dict[str, Any]:

View file

@ -342,40 +342,7 @@ async def get_document_summary_llm(
)
# Backward-compatible aliases (deprecated - will be removed in future versions)
async def get_user_llm_instance(
    session: AsyncSession, user_id: str, search_space_id: int, role: str
) -> ChatLiteLLM | None:
    """
    Deprecated: Use get_search_space_llm_instance instead.
    LLM preferences are now stored at the search space level, not per-user;
    the user_id parameter is ignored. Emits a DeprecationWarning so callers
    can migrate before this alias is removed.
    """
    import warnings

    warnings.warn(
        "get_user_llm_instance is deprecated; use get_search_space_llm_instance instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return await get_search_space_llm_instance(session, search_space_id, role)
# Legacy aliases for backward compatibility
async def get_long_context_llm(
    session: AsyncSession, search_space_id: int
) -> ChatLiteLLM | None:
    """Deprecated: Use get_document_summary_llm instead.

    Backward-compatible alias; emits a DeprecationWarning so callers can
    migrate before the alias is removed.
    """
    import warnings

    warnings.warn(
        "get_long_context_llm is deprecated; use get_document_summary_llm instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return await get_document_summary_llm(session, search_space_id)
async def get_fast_llm(
    session: AsyncSession, search_space_id: int
) -> ChatLiteLLM | None:
    """Deprecated: Use get_agent_llm instead.

    Backward-compatible alias; emits a DeprecationWarning so callers can
    migrate before the alias is removed.
    """
    import warnings

    warnings.warn(
        "get_fast_llm is deprecated; use get_agent_llm instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return await get_agent_llm(session, search_space_id)
async def get_strategic_llm(
    session: AsyncSession, search_space_id: int
) -> ChatLiteLLM | None:
    """Deprecated: Use get_document_summary_llm instead.

    Backward-compatible alias; emits a DeprecationWarning so callers can
    migrate before the alias is removed.
    """
    import warnings

    warnings.warn(
        "get_strategic_llm is deprecated; use get_document_summary_llm instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return await get_document_summary_llm(session, search_space_id)
# User-based legacy aliases (LLM preferences are now per-search-space, not per-user)
# Backward-compatible alias (LLM preferences are now per-search-space, not per-user)
async def get_user_long_context_llm(
session: AsyncSession, user_id: str, search_space_id: int
) -> ChatLiteLLM | None:
@ -384,23 +351,3 @@ async def get_user_long_context_llm(
The user_id parameter is ignored as LLM preferences are now per-search-space.
"""
return await get_document_summary_llm(session, search_space_id)
async def get_user_fast_llm(
    session: AsyncSession, user_id: str, search_space_id: int
) -> ChatLiteLLM | None:
    """
    Deprecated: Use get_agent_llm instead.
    The user_id parameter is ignored as LLM preferences are now per-search-space.
    Emits a DeprecationWarning so callers can migrate before removal.
    """
    import warnings

    warnings.warn(
        "get_user_fast_llm is deprecated; use get_agent_llm instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return await get_agent_llm(session, search_space_id)
async def get_user_strategic_llm(
    session: AsyncSession, user_id: str, search_space_id: int
) -> ChatLiteLLM | None:
    """
    Deprecated: Use get_document_summary_llm instead.
    The user_id parameter is ignored as LLM preferences are now per-search-space.
    Emits a DeprecationWarning so callers can migrate before removal.
    """
    import warnings

    warnings.warn(
        "get_user_strategic_llm is deprecated; use get_document_summary_llm instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return await get_document_summary_llm(session, search_space_id)

View file

@ -1,114 +0,0 @@
import datetime
import logging
from typing import Any

from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
from sqlalchemy.ext.asyncio import AsyncSession

from app.services.llm_service import get_document_summary_llm
class QueryService:
"""
Service for query-related operations, including reformulation and processing.
"""
@staticmethod
async def reformulate_query_with_chat_history(
user_query: str,
session: AsyncSession,
search_space_id: int,
chat_history_str: str | None = None,
) -> str:
"""
Reformulate the user query using the search space's document summary LLM to make it more
effective for information retrieval and research purposes.
Args:
user_query: The original user query
session: Database session for accessing LLM configs
search_space_id: Search Space ID to get LLM preferences
chat_history_str: Optional chat history string
Returns:
str: The reformulated query
"""
if not user_query or not user_query.strip():
return user_query
try:
# Get the search space's document summary LLM instance
llm = await get_document_summary_llm(session, search_space_id)
if not llm:
print(
f"Warning: No document summary LLM configured for search space {search_space_id}. Using original query."
)
return user_query
# Create system message with instructions
system_message = SystemMessage(
content=f"""
Today's date: {datetime.datetime.now().strftime("%Y-%m-%d")}
You are a highly skilled AI assistant specializing in query optimization for advanced research.
Your primary objective is to transform a user's initial query into a highly effective search query.
This reformulated query will be used to retrieve information from diverse data sources.
**Chat History Context:**
{chat_history_str if chat_history_str else "No prior conversation history is available."}
If chat history is provided, analyze it to understand the user's evolving information needs and the broader context of their request. Use this understanding to refine the current query, ensuring it builds upon or clarifies previous interactions.
**Query Reformulation Guidelines:**
Your reformulated query should:
1. **Enhance Specificity and Detail:** Add precision to narrow the search focus effectively, making the query less ambiguous and more targeted.
2. **Resolve Ambiguities:** Identify and clarify vague terms or phrases. If a term has multiple meanings, orient the query towards the most likely one given the context.
3. **Expand Key Concepts:** Incorporate relevant synonyms, related terms, and alternative phrasings for core concepts. This helps capture a wider range of relevant documents.
4. **Deconstruct Complex Questions:** If the original query is multifaceted, break it down into its core searchable components or rephrase it to address each aspect clearly. The final output must still be a single, coherent query string.
5. **Optimize for Comprehensiveness:** Ensure the query is structured to uncover all essential facets of the original request, aiming for thorough information retrieval suitable for research.
6. **Maintain User Intent:** The reformulated query must stay true to the original intent of the user's query. Do not introduce new topics or shift the focus significantly.
**Crucial Constraints:**
* **Conciseness and Effectiveness:** While aiming for comprehensiveness, the reformulated query MUST be as concise as possible. Eliminate all unnecessary verbosity. Focus on essential keywords, entities, and concepts that directly contribute to effective retrieval.
* **Single, Direct Output:** Return ONLY the reformulated query itself. Do NOT include any explanations, introductory phrases (e.g., "Reformulated query:", "Here is the optimized query:"), or any other surrounding text or markdown formatting.
Your output should be a single, optimized query string, ready for immediate use in a search system.
"""
)
# Create human message with the user query
human_message = HumanMessage(
content=f"Reformulate this query for better research results: {user_query}"
)
# Get the response from the LLM
response = await llm.agenerate(messages=[[system_message, human_message]])
# Extract the reformulated query from the response
reformulated_query = response.generations[0][0].text.strip()
# Return the original query if the reformulation is empty
if not reformulated_query:
return user_query
return reformulated_query
except Exception as e:
# Log the error and return the original query
print(f"Error reformulating query: {e}")
return user_query
@staticmethod
async def langchain_chat_history_to_str(chat_history: list[Any]) -> str:
"""
Convert a list of chat history messages to a string.
"""
chat_history_str = "<chat_history>\n"
for chat_message in chat_history:
if isinstance(chat_message, HumanMessage):
chat_history_str += f"<user>{chat_message.content}</user>\n"
elif isinstance(chat_message, AIMessage):
chat_history_str += f"<assistant>{chat_message.content}</assistant>\n"
elif isinstance(chat_message, SystemMessage):
chat_history_str += f"<system>{chat_message.content}</system>\n"
chat_history_str += "</chat_history>"
return chat_history_str

View file

@ -270,7 +270,8 @@ async def stream_new_chat(
# Track if we just finished a tool (text flows silently after tools)
just_finished_tool: bool = False
# Track write_todos calls to show "Creating plan" vs "Updating plan"
write_todos_call_count: int = 0
# Disabled for now
# write_todos_call_count: int = 0
def next_thinking_step_id() -> str:
nonlocal thinking_step_counter
@ -479,60 +480,60 @@ async def stream_new_chat(
status="in_progress",
items=last_active_step_items,
)
elif tool_name == "write_todos":
# Track write_todos calls for better messaging
write_todos_call_count += 1
todos = (
tool_input.get("todos", [])
if isinstance(tool_input, dict)
else []
)
todo_count = len(todos) if isinstance(todos, list) else 0
# elif tool_name == "write_todos": # Disabled for now
# # Track write_todos calls for better messaging
# write_todos_call_count += 1
# todos = (
# tool_input.get("todos", [])
# if isinstance(tool_input, dict)
# else []
# )
# todo_count = len(todos) if isinstance(todos, list) else 0
if write_todos_call_count == 1:
# First call - creating the plan
last_active_step_title = "Creating plan"
last_active_step_items = [f"Defining {todo_count} tasks..."]
else:
# Subsequent calls - updating the plan
# Try to provide context about what's being updated
in_progress_count = (
sum(
1
for t in todos
if isinstance(t, dict)
and t.get("status") == "in_progress"
)
if isinstance(todos, list)
else 0
)
completed_count = (
sum(
1
for t in todos
if isinstance(t, dict)
and t.get("status") == "completed"
)
if isinstance(todos, list)
else 0
)
# if write_todos_call_count == 1:
# # First call - creating the plan
# last_active_step_title = "Creating plan"
# last_active_step_items = [f"Defining {todo_count} tasks..."]
# else:
# # Subsequent calls - updating the plan
# # Try to provide context about what's being updated
# in_progress_count = (
# sum(
# 1
# for t in todos
# if isinstance(t, dict)
# and t.get("status") == "in_progress"
# )
# if isinstance(todos, list)
# else 0
# )
# completed_count = (
# sum(
# 1
# for t in todos
# if isinstance(t, dict)
# and t.get("status") == "completed"
# )
# if isinstance(todos, list)
# else 0
# )
last_active_step_title = "Updating progress"
last_active_step_items = (
[
f"Progress: {completed_count}/{todo_count} completed",
f"In progress: {in_progress_count} tasks",
]
if completed_count > 0
else [f"Working on {todo_count} tasks"]
)
# last_active_step_title = "Updating progress"
# last_active_step_items = (
# [
# f"Progress: {completed_count}/{todo_count} completed",
# f"In progress: {in_progress_count} tasks",
# ]
# if completed_count > 0
# else [f"Working on {todo_count} tasks"]
# )
yield streaming_service.format_thinking_step(
step_id=tool_step_id,
title=last_active_step_title,
status="in_progress",
items=last_active_step_items,
)
# yield streaming_service.format_thinking_step(
# step_id=tool_step_id,
# title=last_active_step_title,
# status="in_progress",
# items=last_active_step_items,
# )
elif tool_name == "generate_podcast":
podcast_title = (
tool_input.get("podcast_title", "SurfSense Podcast")
@ -596,10 +597,12 @@ async def stream_new_chat(
raw_output = event.get("data", {}).get("output", "")
# Handle deepagents' write_todos Command object specially
if tool_name == "write_todos" and hasattr(raw_output, "update"):
# deepagents returns a Command object - extract todos directly
tool_output = extract_todos_from_deepagents(raw_output)
elif hasattr(raw_output, "content"):
# Disabled for now
# if tool_name == "write_todos" and hasattr(raw_output, "update"):
# # deepagents returns a Command object - extract todos directly
# tool_output = extract_todos_from_deepagents(raw_output)
# elif hasattr(raw_output, "content"):
if hasattr(raw_output, "content"):
# It's a ToolMessage object - extract the content
content = raw_output.content
# If content is a string that looks like JSON, try to parse it
@ -758,63 +761,63 @@ async def stream_new_chat(
status="completed",
items=completed_items,
)
elif tool_name == "write_todos":
# Build completion items for planning/updating
if isinstance(tool_output, dict):
todos = tool_output.get("todos", [])
todo_count = len(todos) if isinstance(todos, list) else 0
completed_count = (
sum(
1
for t in todos
if isinstance(t, dict)
and t.get("status") == "completed"
)
if isinstance(todos, list)
else 0
)
in_progress_count = (
sum(
1
for t in todos
if isinstance(t, dict)
and t.get("status") == "in_progress"
)
if isinstance(todos, list)
else 0
)
# elif tool_name == "write_todos": # Disabled for now
# # Build completion items for planning/updating
# if isinstance(tool_output, dict):
# todos = tool_output.get("todos", [])
# todo_count = len(todos) if isinstance(todos, list) else 0
# completed_count = (
# sum(
# 1
# for t in todos
# if isinstance(t, dict)
# and t.get("status") == "completed"
# )
# if isinstance(todos, list)
# else 0
# )
# in_progress_count = (
# sum(
# 1
# for t in todos
# if isinstance(t, dict)
# and t.get("status") == "in_progress"
# )
# if isinstance(todos, list)
# else 0
# )
# Use context-aware completion message
if last_active_step_title == "Creating plan":
completed_items = [f"Created {todo_count} tasks"]
else:
# Updating progress - show stats
completed_items = [
f"Progress: {completed_count}/{todo_count} completed",
]
if in_progress_count > 0:
# Find the currently in-progress task name
in_progress_task = next(
(
t.get("content", "")[:40]
for t in todos
if isinstance(t, dict)
and t.get("status") == "in_progress"
),
None,
)
if in_progress_task:
completed_items.append(
f"Current: {in_progress_task}..."
)
else:
completed_items = ["Plan updated"]
yield streaming_service.format_thinking_step(
step_id=original_step_id,
title=last_active_step_title,
status="completed",
items=completed_items,
)
# # Use context-aware completion message
# if last_active_step_title == "Creating plan":
# completed_items = [f"Created {todo_count} tasks"]
# else:
# # Updating progress - show stats
# completed_items = [
# f"Progress: {completed_count}/{todo_count} completed",
# ]
# if in_progress_count > 0:
# # Find the currently in-progress task name
# in_progress_task = next(
# (
# t.get("content", "")[:40]
# for t in todos
# if isinstance(t, dict)
# and t.get("status") == "in_progress"
# ),
# None,
# )
# if in_progress_task:
# completed_items.append(
# f"Current: {in_progress_task}..."
# )
# else:
# completed_items = ["Plan updated"]
# yield streaming_service.format_thinking_step(
# step_id=original_step_id,
# title=last_active_step_title,
# status="completed",
# items=completed_items,
# )
elif tool_name == "ls":
# Build completion items showing file names found
if isinstance(tool_output, dict):
@ -992,27 +995,27 @@ async def stream_new_chat(
yield streaming_service.format_terminal_info(
"Knowledge base search completed", "success"
)
elif tool_name == "write_todos":
# Stream the full write_todos result so frontend can render the Plan component
yield streaming_service.format_tool_output_available(
tool_call_id,
tool_output
if isinstance(tool_output, dict)
else {"result": tool_output},
)
# Send terminal message with plan info
if isinstance(tool_output, dict):
todos = tool_output.get("todos", [])
todo_count = len(todos) if isinstance(todos, list) else 0
yield streaming_service.format_terminal_info(
f"Plan created ({todo_count} tasks)",
"success",
)
else:
yield streaming_service.format_terminal_info(
"Plan created",
"success",
)
# elif tool_name == "write_todos": # Disabled for now
# # Stream the full write_todos result so frontend can render the Plan component
# yield streaming_service.format_tool_output_available(
# tool_call_id,
# tool_output
# if isinstance(tool_output, dict)
# else {"result": tool_output},
# )
# # Send terminal message with plan info
# if isinstance(tool_output, dict):
# todos = tool_output.get("todos", [])
# todo_count = len(todos) if isinstance(todos, list) else 0
# yield streaming_service.format_terminal_info(
# f"Plan created ({todo_count} tasks)",
# "success",
# )
# else:
# yield streaming_service.format_terminal_info(
# "Plan created",
# "success",
# )
else:
# Default handling for other tools
yield streaming_service.format_tool_output_available(

View file

@ -6,10 +6,8 @@ from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
from app.connectors.airtable_connector import AirtableConnector
from app.connectors.airtable_history import AirtableHistoryConnector
from app.db import Document, DocumentType, SearchSourceConnectorType
from app.routes.airtable_add_connector_route import refresh_airtable_token
from app.schemas.airtable_auth_credentials import AirtableAuthCredentialsBase
from app.services.llm_service import get_user_long_context_llm
from app.services.task_logging_service import TaskLoggingService
from app.utils.document_converters import (
@ -84,31 +82,11 @@ async def index_airtable_records(
)
return 0, f"Connector with ID {connector_id} not found"
# Create credentials from connector config
config_data = connector.config
try:
credentials = AirtableAuthCredentialsBase.from_dict(config_data)
except Exception as e:
await task_logger.log_task_failure(
log_entry,
f"Invalid Airtable credentials in connector {connector_id}",
str(e),
{"error_type": "InvalidCredentials"},
)
return 0, f"Invalid Airtable credentials: {e!s}"
# Check if credentials are expired
if credentials.is_expired:
await task_logger.log_task_failure(
log_entry,
f"Airtable credentials expired for connector {connector_id}",
"Credentials expired",
{"error_type": "ExpiredCredentials"},
)
connector = await refresh_airtable_token(session, connector)
# return 0, "Airtable credentials have expired. Please re-authenticate."
# Normalize "undefined" strings to None (from frontend)
if start_date == "undefined" or start_date == "":
start_date = None
if end_date == "undefined" or end_date == "":
end_date = None
# Calculate date range for indexing
start_date_str, end_date_str = calculate_date_range(
@ -120,8 +98,9 @@ async def index_airtable_records(
f"from {start_date_str} to {end_date_str}"
)
# Initialize Airtable connector
airtable_connector = AirtableConnector(credentials)
# Initialize Airtable history connector with auto-refresh capability
airtable_history = AirtableHistoryConnector(session, connector_id)
airtable_connector = await airtable_history._get_connector()
total_processed = 0
try:
@ -413,47 +392,56 @@ async def index_airtable_records(
documents_skipped += 1
continue # Skip this message and continue with others
# Update the last_indexed_at timestamp for the connector only if requested
total_processed = documents_indexed
if total_processed > 0:
await update_connector_last_indexed(
session, connector, update_last_indexed
)
# Accumulate total processed across all tables
total_processed += documents_indexed
# Final commit for any remaining documents not yet committed in batches
logger.info(
f"Final commit: Total {documents_indexed} Airtable records processed"
)
await session.commit()
logger.info(
"Successfully committed all Airtable document changes to database"
)
if documents_indexed > 0:
logger.info(
f"Final commit for table {table_name}: {documents_indexed} Airtable records processed"
)
await session.commit()
logger.info(
f"Successfully committed all Airtable document changes for table {table_name}"
)
# Log success
await task_logger.log_task_success(
log_entry,
f"Successfully completed Airtable indexing for connector {connector_id}",
{
"events_processed": total_processed,
"documents_indexed": documents_indexed,
"documents_skipped": documents_skipped,
"skipped_messages_count": len(skipped_messages),
},
)
# Update the last_indexed_at timestamp for the connector only if requested
# (after all tables in all bases are processed)
if total_processed > 0:
await update_connector_last_indexed(
session, connector, update_last_indexed
)
logger.info(
f"Airtable indexing completed: {documents_indexed} new records, {documents_skipped} skipped"
)
return (
total_processed,
None,
) # Return None as the error message to indicate success
# Log success after processing all bases and tables
await task_logger.log_task_success(
log_entry,
f"Successfully completed Airtable indexing for connector {connector_id}",
{
"events_processed": total_processed,
"documents_indexed": total_processed,
},
)
logger.info(
f"Airtable indexing completed: {total_processed} total records processed"
)
return (
total_processed,
None,
) # Return None as the error message to indicate success
except Exception as e:
logger.error(
f"Fetching Airtable bases for connector {connector_id} failed: {e!s}",
exc_info=True,
)
await task_logger.log_task_failure(
log_entry,
f"Failed to fetch Airtable bases for connector {connector_id}",
str(e),
{"error_type": type(e).__name__},
)
return 0, f"Failed to fetch Airtable bases: {e!s}"
except SQLAlchemyError as db_error:
await session.rollback()

View file

@ -2,13 +2,14 @@
ClickUp connector indexer.
"""
import contextlib
from datetime import datetime
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
from app.connectors.clickup_connector import ClickUpConnector
from app.connectors.clickup_history import ClickUpHistoryConnector
from app.db import Document, DocumentType, SearchSourceConnectorType
from app.services.llm_service import get_user_long_context_llm
from app.services.task_logging_service import TaskLoggingService
@ -82,26 +83,30 @@ async def index_clickup_tasks(
)
return 0, error_msg
# Extract ClickUp configuration
clickup_api_token = connector.config.get("CLICKUP_API_TOKEN")
# Check if using OAuth (has access_token in config) or legacy (has CLICKUP_API_TOKEN)
has_oauth = connector.config.get("access_token") is not None
has_legacy = connector.config.get("CLICKUP_API_TOKEN") is not None
if not clickup_api_token:
error_msg = "ClickUp API token not found in connector configuration"
if not has_oauth and not has_legacy:
error_msg = "ClickUp credentials not found in connector configuration (neither OAuth nor API token)"
await task_logger.log_task_failure(
log_entry,
f"ClickUp API token not found in connector config for connector {connector_id}",
"Missing ClickUp token",
{"error_type": "MissingToken"},
f"ClickUp credentials not found in connector config for connector {connector_id}",
"Missing ClickUp credentials",
{"error_type": "MissingCredentials"},
)
return 0, error_msg
await task_logger.log_task_progress(
log_entry,
f"Initializing ClickUp client for connector {connector_id}",
f"Initializing ClickUp client for connector {connector_id} ({'OAuth' if has_oauth else 'API Token'})",
{"stage": "client_initialization"},
)
clickup_client = ClickUpConnector(api_token=clickup_api_token)
# Use history connector which supports both OAuth and legacy API tokens
clickup_client = ClickUpHistoryConnector(
session=session, connector_id=connector_id
)
# Get authorized workspaces
await task_logger.log_task_progress(
@ -110,7 +115,7 @@ async def index_clickup_tasks(
{"stage": "workspace_fetching"},
)
workspaces_response = clickup_client.get_authorized_workspaces()
workspaces_response = await clickup_client.get_authorized_workspaces()
workspaces = workspaces_response.get("teams", [])
if not workspaces:
@ -141,7 +146,7 @@ async def index_clickup_tasks(
# Fetch tasks for date range if provided
if start_date and end_date:
tasks, error = clickup_client.get_tasks_in_date_range(
tasks, error = await clickup_client.get_tasks_in_date_range(
workspace_id=workspace_id,
start_date=start_date,
end_date=end_date,
@ -153,7 +158,7 @@ async def index_clickup_tasks(
)
continue
else:
tasks = clickup_client.get_workspace_tasks(
tasks = await clickup_client.get_workspace_tasks(
workspace_id=workspace_id, include_closed=True
)
@ -393,10 +398,21 @@ async def index_clickup_tasks(
logger.info(
f"clickup indexing completed: {documents_indexed} new tasks, {documents_skipped} skipped"
)
# Close client connection
try:
await clickup_client.close()
except Exception as e:
logger.warning(f"Error closing ClickUp client: {e!s}")
return total_processed, None
except SQLAlchemyError as db_error:
await session.rollback()
# Clean up the connector in case of error
if "clickup_client" in locals():
with contextlib.suppress(Exception):
await clickup_client.close()
await task_logger.log_task_failure(
log_entry,
f"Database error during ClickUp indexing for connector {connector_id}",
@ -407,6 +423,10 @@ async def index_clickup_tasks(
return 0, f"Database error: {db_error!s}"
except Exception as e:
await session.rollback()
# Clean up the connector in case of error
if "clickup_client" in locals():
with contextlib.suppress(Exception):
await clickup_client.close()
await task_logger.log_task_failure(
log_entry,
f"Failed to index ClickUp tasks for connector {connector_id}",

View file

@ -2,13 +2,14 @@
Confluence connector indexer.
"""
import contextlib
from datetime import datetime
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
from app.connectors.confluence_connector import ConfluenceConnector
from app.connectors.confluence_history import ConfluenceHistoryConnector
from app.db import Document, DocumentType, SearchSourceConnectorType
from app.services.llm_service import get_user_long_context_llm
from app.services.task_logging_service import TaskLoggingService
@ -83,31 +84,18 @@ async def index_confluence_pages(
)
return 0, f"Connector with ID {connector_id} not found"
# Get the Confluence credentials from the connector config
confluence_email = connector.config.get("CONFLUENCE_EMAIL")
confluence_api_token = connector.config.get("CONFLUENCE_API_TOKEN")
confluence_base_url = connector.config.get("CONFLUENCE_BASE_URL")
if not confluence_email or not confluence_api_token or not confluence_base_url:
await task_logger.log_task_failure(
log_entry,
f"Confluence credentials not found in connector config for connector {connector_id}",
"Missing Confluence credentials",
{"error_type": "MissingCredentials"},
)
return 0, "Confluence credentials not found in connector config"
# Initialize Confluence client
# Initialize Confluence OAuth client
await task_logger.log_task_progress(
log_entry,
f"Initializing Confluence client for connector {connector_id}",
f"Initializing Confluence OAuth client for connector {connector_id}",
{"stage": "client_initialization"},
)
confluence_client = ConfluenceConnector(
base_url=confluence_base_url,
email=confluence_email,
api_token=confluence_api_token,
confluence_client: ConfluenceHistoryConnector | None = (
ConfluenceHistoryConnector(
session=session,
connector_id=connector_id,
)
)
# Calculate date range
@ -127,7 +115,7 @@ async def index_confluence_pages(
# Get pages within date range
try:
pages, error = confluence_client.get_pages_by_date_range(
pages, error = await confluence_client.get_pages_by_date_range(
start_date=start_date_str, end_date=end_date_str, include_comments=True
)
@ -153,6 +141,10 @@ async def index_confluence_pages(
f"No Confluence pages found in date range {start_date_str} to {end_date_str}",
{"pages_found": 0},
)
# Close client before returning
if confluence_client:
with contextlib.suppress(Exception):
await confluence_client.close()
return 0, None
else:
await task_logger.log_task_failure(
@ -161,12 +153,20 @@ async def index_confluence_pages(
"API Error",
{"error_type": "APIError"},
)
# Close client on error
if confluence_client:
with contextlib.suppress(Exception):
await confluence_client.close()
return 0, f"Failed to get Confluence pages: {error}"
logger.info(f"Retrieved {len(pages)} pages from Confluence API")
except Exception as e:
logger.error(f"Error fetching Confluence pages: {e!s}", exc_info=True)
# Close client on error
if confluence_client:
with contextlib.suppress(Exception):
await confluence_client.close()
return 0, f"Error fetching Confluence pages: {e!s}"
# Process and index each page
@ -418,6 +418,11 @@ async def index_confluence_pages(
logger.info(
f"Confluence indexing completed: {documents_indexed} new pages, {documents_skipped} skipped"
)
# Close the client connection
if confluence_client:
await confluence_client.close()
return (
total_processed,
None,
@ -425,6 +430,10 @@ async def index_confluence_pages(
except SQLAlchemyError as db_error:
await session.rollback()
# Close client if it exists
if confluence_client:
with contextlib.suppress(Exception):
await confluence_client.close()
await task_logger.log_task_failure(
log_entry,
f"Database error during Confluence indexing for connector {connector_id}",
@ -435,6 +444,10 @@ async def index_confluence_pages(
return 0, f"Database error: {db_error!s}"
except Exception as e:
await session.rollback()
# Close client if it exists
if confluence_client:
with contextlib.suppress(Exception):
await confluence_client.close()
await task_logger.log_task_failure(
log_entry,
f"Failed to index Confluence pages for connector {connector_id}",

View file

@ -8,19 +8,18 @@ from datetime import UTC, datetime, timedelta
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
from app.connectors.discord_connector import DiscordConnector
from app.db import Document, DocumentType, SearchSourceConnectorType
from app.services.llm_service import get_user_long_context_llm
from app.services.task_logging_service import TaskLoggingService
from app.utils.document_converters import (
create_document_chunks,
generate_content_hash,
generate_document_summary,
generate_unique_identifier_hash,
)
from .base import (
build_document_metadata_string,
build_document_metadata_markdown,
check_document_by_unique_identifier,
get_connector_by_id,
get_current_timestamp,
@ -69,6 +68,14 @@ async def index_discord_messages(
)
try:
# Normalize date parameters - handle 'undefined' strings from frontend
if start_date and (
start_date.lower() == "undefined" or start_date.strip() == ""
):
start_date = None
if end_date and (end_date.lower() == "undefined" or end_date.strip() == ""):
end_date = None
# Get the connector
await task_logger.log_task_progress(
log_entry,
@ -92,27 +99,55 @@ async def index_discord_messages(
f"Connector with ID {connector_id} not found or is not a Discord connector",
)
# Get the Discord token from the connector config
discord_token = connector.config.get("DISCORD_BOT_TOKEN")
if not discord_token:
await task_logger.log_task_failure(
log_entry,
f"Discord token not found in connector config for connector {connector_id}",
"Missing Discord token",
{"error_type": "MissingToken"},
)
return 0, "Discord token not found in connector config"
logger.info(f"Starting Discord indexing for connector {connector_id}")
# Initialize Discord client
# Initialize Discord client with OAuth credentials support
await task_logger.log_task_progress(
log_entry,
f"Initializing Discord client for connector {connector_id}",
{"stage": "client_initialization"},
)
discord_client = DiscordConnector(token=discord_token)
# Check if using OAuth (has bot_token in config) or legacy (has DISCORD_BOT_TOKEN)
has_oauth = connector.config.get("bot_token") is not None
has_legacy = connector.config.get("DISCORD_BOT_TOKEN") is not None
if has_oauth:
# Use OAuth credentials with auto-refresh
discord_client = DiscordConnector(
session=session, connector_id=connector_id
)
elif has_legacy:
# Backward compatibility: use legacy token format
discord_token = connector.config.get("DISCORD_BOT_TOKEN")
# Decrypt token if it's encrypted (legacy tokens might be encrypted)
token_encrypted = connector.config.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY and discord_token:
try:
from app.utils.oauth_security import TokenEncryption
token_encryption = TokenEncryption(config.SECRET_KEY)
discord_token = token_encryption.decrypt_token(discord_token)
logger.info(
f"Decrypted legacy Discord token for connector {connector_id}"
)
except Exception as e:
logger.warning(
f"Failed to decrypt legacy Discord token for connector {connector_id}: {e!s}. "
"Trying to use token as-is (might be unencrypted)."
)
# Continue with token as-is - might be unencrypted legacy token
discord_client = DiscordConnector(token=discord_token)
else:
await task_logger.log_task_failure(
log_entry,
f"Discord credentials not found in connector config for connector {connector_id}",
"Missing Discord credentials",
{"error_type": "MissingCredentials"},
)
return 0, "Discord credentials not found in connector config"
# Calculate date range
if start_date is None or end_date is None:
@ -135,32 +170,71 @@ async def index_discord_messages(
if start_date is None:
start_date_iso = calculated_start_date.isoformat()
else:
# Convert YYYY-MM-DD to ISO format
# Validate and convert YYYY-MM-DD to ISO format
try:
start_date_iso = (
datetime.strptime(start_date, "%Y-%m-%d")
.replace(tzinfo=UTC)
.isoformat()
)
except ValueError as e:
logger.warning(
f"Invalid start_date format '{start_date}', using calculated start date: {e!s}"
)
start_date_iso = calculated_start_date.isoformat()
if end_date is None:
end_date_iso = calculated_end_date.isoformat()
else:
# Validate and convert YYYY-MM-DD to ISO format
try:
end_date_iso = (
datetime.strptime(end_date, "%Y-%m-%d")
.replace(tzinfo=UTC)
.isoformat()
)
except ValueError as e:
logger.warning(
f"Invalid end_date format '{end_date}', using calculated end date: {e!s}"
)
end_date_iso = calculated_end_date.isoformat()
else:
# Convert provided dates to ISO format for Discord API
try:
start_date_iso = (
datetime.strptime(start_date, "%Y-%m-%d")
.replace(tzinfo=UTC)
.isoformat()
)
except ValueError as e:
await task_logger.log_task_failure(
log_entry,
f"Invalid start_date format: {start_date}",
f"Date parsing error: {e!s}",
{"error_type": "InvalidDateFormat", "start_date": start_date},
)
return (
0,
f"Invalid start_date format: {start_date}. Expected YYYY-MM-DD format.",
)
if end_date is None:
end_date_iso = calculated_end_date.isoformat()
else:
# Convert YYYY-MM-DD to ISO format
try:
end_date_iso = (
datetime.strptime(end_date, "%Y-%m-%d")
.replace(tzinfo=UTC)
.isoformat()
)
else:
# Convert provided dates to ISO format for Discord API
start_date_iso = (
datetime.strptime(start_date, "%Y-%m-%d")
.replace(tzinfo=UTC)
.isoformat()
)
end_date_iso = (
datetime.strptime(end_date, "%Y-%m-%d").replace(tzinfo=UTC).isoformat()
)
except ValueError as e:
await task_logger.log_task_failure(
log_entry,
f"Invalid end_date format: {end_date}",
f"Date parsing error: {e!s}",
{"error_type": "InvalidDateFormat", "end_date": end_date},
)
return (
0,
f"Invalid end_date format: {end_date}. Expected YYYY-MM-DD format.",
)
logger.info(
f"Indexing Discord messages from {start_date_iso} to {end_date_iso}"
@ -271,207 +345,163 @@ async def index_discord_messages(
documents_skipped += 1
continue
# Convert messages to markdown format
channel_content = (
f"# Discord Channel: {guild_name} / {channel_name}\n\n"
)
# Process each message as an individual document (like Slack)
for msg in formatted_messages:
user_name = msg.get("author_name", "Unknown User")
timestamp = msg.get("created_at", "Unknown Time")
text = msg.get("content", "")
channel_content += (
f"## {user_name} ({timestamp})\n\n{text}\n\n---\n\n"
msg_id = msg.get("id", "")
msg_user_name = msg.get("author_name", "Unknown User")
msg_timestamp = msg.get("created_at", "Unknown Time")
msg_text = msg.get("content", "")
# Format document metadata (similar to Slack)
metadata_sections = [
(
"METADATA",
[
f"GUILD_NAME: {guild_name}",
f"GUILD_ID: {guild_id}",
f"CHANNEL_NAME: {channel_name}",
f"CHANNEL_ID: {channel_id}",
f"MESSAGE_TIMESTAMP: {msg_timestamp}",
f"MESSAGE_USER_NAME: {msg_user_name}",
],
),
(
"CONTENT",
[
"FORMAT: markdown",
"TEXT_START",
msg_text,
"TEXT_END",
],
),
]
# Build the document string
combined_document_string = build_document_metadata_markdown(
metadata_sections
)
# Metadata sections
metadata_sections = [
(
"METADATA",
[
f"GUILD_NAME: {guild_name}",
f"GUILD_ID: {guild_id}",
f"CHANNEL_NAME: {channel_name}",
f"CHANNEL_ID: {channel_id}",
f"MESSAGE_COUNT: {len(formatted_messages)}",
],
),
(
"CONTENT",
[
"FORMAT: markdown",
"TEXT_START",
channel_content,
"TEXT_END",
],
),
]
# Generate unique identifier hash for this Discord message
unique_identifier = f"{channel_id}_{msg_id}"
unique_identifier_hash = generate_unique_identifier_hash(
DocumentType.DISCORD_CONNECTOR,
unique_identifier,
search_space_id,
)
combined_document_string = build_document_metadata_string(
metadata_sections
)
# Generate content hash
content_hash = generate_content_hash(
combined_document_string, search_space_id
)
# Generate unique identifier hash for this Discord channel
unique_identifier_hash = generate_unique_identifier_hash(
DocumentType.DISCORD_CONNECTOR, channel_id, search_space_id
)
# Generate content hash
content_hash = generate_content_hash(
combined_document_string, search_space_id
)
# Check if document with this unique identifier already exists
existing_document = await check_document_by_unique_identifier(
session, unique_identifier_hash
)
if existing_document:
# Document exists - check if content has changed
if existing_document.content_hash == content_hash:
logger.info(
f"Document for Discord channel {guild_name}#{channel_name} unchanged. Skipping."
)
documents_skipped += 1
continue
else:
# Content has changed - update the existing document
logger.info(
f"Content changed for Discord channel {guild_name}#{channel_name}. Updating document."
# Check if document with this unique identifier already exists
existing_document = (
await check_document_by_unique_identifier(
session, unique_identifier_hash
)
)
# Get user's long context LLM
user_llm = await get_user_long_context_llm(
session, user_id, search_space_id
)
if not user_llm:
logger.error(
f"No long context LLM configured for user {user_id}"
)
skipped_channels.append(
f"{guild_name}#{channel_name} (no LLM configured)"
if existing_document:
# Document exists - check if content has changed
if existing_document.content_hash == content_hash:
logger.info(
f"Document for Discord message {msg_id} in {guild_name}#{channel_name} unchanged. Skipping."
)
documents_skipped += 1
continue
else:
# Content has changed - update the existing document
logger.info(
f"Content changed for Discord message {msg_id} in {guild_name}#{channel_name}. Updating document."
)
# Generate summary with metadata
document_metadata = {
"guild_name": guild_name,
"channel_name": channel_name,
"message_count": len(formatted_messages),
"document_type": "Discord Channel Messages",
"connector_type": "Discord",
}
(
summary_content,
summary_embedding,
) = await generate_document_summary(
combined_document_string,
user_llm,
document_metadata,
)
# Update chunks and embedding
chunks = await create_document_chunks(
combined_document_string
)
doc_embedding = (
config.embedding_model_instance.embed(
combined_document_string
)
)
# Chunks from channel content
chunks = await create_document_chunks(channel_content)
# Update existing document
existing_document.content = combined_document_string
existing_document.content_hash = content_hash
existing_document.embedding = doc_embedding
existing_document.document_metadata = {
"guild_name": guild_name,
"guild_id": guild_id,
"channel_name": channel_name,
"channel_id": channel_id,
"message_id": msg_id,
"message_timestamp": msg_timestamp,
"message_user_name": msg_user_name,
"indexed_at": datetime.now(UTC).strftime(
"%Y-%m-%d %H:%M:%S"
),
}
# Update existing document
existing_document.title = (
f"Discord - {guild_name}#{channel_name}"
)
existing_document.content = summary_content
existing_document.content_hash = content_hash
existing_document.embedding = summary_embedding
existing_document.document_metadata = {
# Delete old chunks and add new ones
existing_document.chunks = chunks
existing_document.updated_at = (
get_current_timestamp()
)
documents_indexed += 1
logger.info(
f"Successfully updated Discord message {msg_id}"
)
continue
# Document doesn't exist - create new one
# Process chunks
chunks = await create_document_chunks(
combined_document_string
)
doc_embedding = config.embedding_model_instance.embed(
combined_document_string
)
# Create and store new document
document = Document(
search_space_id=search_space_id,
title=f"Discord - {guild_name}#{channel_name}",
document_type=DocumentType.DISCORD_CONNECTOR,
document_metadata={
"guild_name": guild_name,
"guild_id": guild_id,
"channel_name": channel_name,
"channel_id": channel_id,
"message_count": len(formatted_messages),
"start_date": start_date_iso,
"end_date": end_date_iso,
"message_id": msg_id,
"message_timestamp": msg_timestamp,
"message_user_name": msg_user_name,
"indexed_at": datetime.now(UTC).strftime(
"%Y-%m-%d %H:%M:%S"
),
}
existing_document.chunks = chunks
existing_document.updated_at = get_current_timestamp()
},
content=combined_document_string,
embedding=doc_embedding,
chunks=chunks,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
updated_at=get_current_timestamp(),
)
documents_indexed += 1
session.add(document)
documents_indexed += 1
# Batch commit every 10 documents
if documents_indexed % 10 == 0:
logger.info(
f"Successfully updated Discord channel {guild_name}#{channel_name}"
f"Committing batch: {documents_indexed} Discord messages processed so far"
)
continue
await session.commit()
# Document doesn't exist - create new one
# Get user's long context LLM
user_llm = await get_user_long_context_llm(
session, user_id, search_space_id
)
if not user_llm:
logger.error(
f"No long context LLM configured for user {user_id}"
)
skipped_channels.append(
f"{guild_name}#{channel_name} (no LLM configured)"
)
documents_skipped += 1
continue
# Generate summary with metadata
document_metadata = {
"guild_name": guild_name,
"channel_name": channel_name,
"message_count": len(formatted_messages),
"document_type": "Discord Channel Messages",
"connector_type": "Discord",
}
(
summary_content,
summary_embedding,
) = await generate_document_summary(
combined_document_string, user_llm, document_metadata
)
# Chunks from channel content
chunks = await create_document_chunks(channel_content)
# Create and store new document
document = Document(
search_space_id=search_space_id,
title=f"Discord - {guild_name}#{channel_name}",
document_type=DocumentType.DISCORD_CONNECTOR,
document_metadata={
"guild_name": guild_name,
"guild_id": guild_id,
"channel_name": channel_name,
"channel_id": channel_id,
"message_count": len(formatted_messages),
"start_date": start_date_iso,
"end_date": end_date_iso,
"indexed_at": datetime.now(UTC).strftime(
"%Y-%m-%d %H:%M:%S"
),
},
content=summary_content,
content_hash=content_hash,
unique_identifier_hash=unique_identifier_hash,
embedding=summary_embedding,
chunks=chunks,
updated_at=get_current_timestamp(),
)
session.add(document)
documents_indexed += 1
logger.info(
f"Successfully indexed new channel {guild_name}#{channel_name} with {len(formatted_messages)} messages"
f"Successfully indexed channel {guild_name}#{channel_name} with {len(formatted_messages)} messages"
)
# Batch commit every 10 documents
if documents_indexed % 10 == 0:
logger.info(
f"Committing batch: {documents_indexed} Discord channels processed so far"
)
await session.commit()
except Exception as e:
logger.error(
f"Error processing guild {guild_name}: {e!s}", exc_info=True
@ -488,7 +518,7 @@ async def index_discord_messages(
# Final commit for any remaining documents not yet committed in batches
logger.info(
f"Final commit: Total {documents_indexed} Discord channels processed"
f"Final commit: Total {documents_indexed} Discord messages processed"
)
await session.commit()
@ -496,18 +526,18 @@ async def index_discord_messages(
result_message = None
if skipped_channels:
result_message = (
f"Processed {documents_indexed} channels. Skipped {len(skipped_channels)} channels: "
f"Processed {documents_indexed} messages. Skipped {len(skipped_channels)} channels: "
+ ", ".join(skipped_channels)
)
else:
result_message = f"Processed {documents_indexed} channels."
result_message = f"Processed {documents_indexed} messages."
# Log success
await task_logger.log_task_success(
log_entry,
f"Successfully completed Discord indexing for connector {connector_id}",
{
"channels_processed": documents_indexed,
"messages_processed": documents_indexed,
"documents_indexed": documents_indexed,
"documents_skipped": documents_skipped,
"skipped_channels_count": len(skipped_channels),
@ -517,7 +547,7 @@ async def index_discord_messages(
)
logger.info(
f"Discord indexing completed: {documents_indexed} new channels, {documents_skipped} skipped"
f"Discord indexing completed: {documents_indexed} new messages, {documents_skipped} skipped"
)
return documents_indexed, result_message

View file

@ -8,7 +8,6 @@ from google.oauth2.credentials import Credentials
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
from app.connectors.google_calendar_connector import GoogleCalendarConnector
from app.db import Document, DocumentType, SearchSourceConnectorType
from app.services.llm_service import get_user_long_context_llm
@ -84,15 +83,52 @@ async def index_google_calendar_events(
return 0, f"Connector with ID {connector_id} not found"
# Get the Google Calendar credentials from the connector config
exp = connector.config.get("expiry").replace("Z", "")
config_data = connector.config
# Decrypt sensitive credentials if encrypted (for backward compatibility)
from app.config import config
from app.utils.oauth_security import TokenEncryption
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
try:
token_encryption = TokenEncryption(config.SECRET_KEY)
# Decrypt sensitive fields
if config_data.get("token"):
config_data["token"] = token_encryption.decrypt_token(
config_data["token"]
)
if config_data.get("refresh_token"):
config_data["refresh_token"] = token_encryption.decrypt_token(
config_data["refresh_token"]
)
if config_data.get("client_secret"):
config_data["client_secret"] = token_encryption.decrypt_token(
config_data["client_secret"]
)
logger.info(
f"Decrypted Google Calendar credentials for connector {connector_id}"
)
except Exception as e:
await task_logger.log_task_failure(
log_entry,
f"Failed to decrypt Google Calendar credentials for connector {connector_id}: {e!s}",
"Credential decryption failed",
{"error_type": "CredentialDecryptionError"},
)
return 0, f"Failed to decrypt Google Calendar credentials: {e!s}"
exp = config_data.get("expiry", "").replace("Z", "")
credentials = Credentials(
token=connector.config.get("token"),
refresh_token=connector.config.get("refresh_token"),
token_uri=connector.config.get("token_uri"),
client_id=connector.config.get("client_id"),
client_secret=connector.config.get("client_secret"),
scopes=connector.config.get("scopes"),
expiry=datetime.fromisoformat(exp),
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
token_uri=config_data.get("token_uri"),
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes"),
expiry=datetime.fromisoformat(exp) if exp else None,
)
if (
@ -122,6 +158,12 @@ async def index_google_calendar_events(
connector_id=connector_id,
)
# Handle 'undefined' string from frontend (treat as None)
if start_date == "undefined" or start_date == "":
start_date = None
if end_date == "undefined" or end_date == "":
end_date = None
# Calculate date range
if start_date is None or end_date is None:
# Fall back to calculating dates based on last_indexed_at

View file

@ -5,6 +5,7 @@ import logging
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
from app.connectors.google_drive import (
GoogleDriveClient,
categorize_change,
@ -87,6 +88,26 @@ async def index_google_drive_files(
{"stage": "client_initialization"},
)
# Check if credentials are encrypted (only when explicitly marked)
token_encrypted = connector.config.get("_token_encrypted", False)
if token_encrypted:
# Credentials are explicitly marked as encrypted, will be decrypted during client initialization
if not config.SECRET_KEY:
await task_logger.log_task_failure(
log_entry,
f"SECRET_KEY not configured but credentials are marked as encrypted for connector {connector_id}",
"Missing SECRET_KEY for token decryption",
{"error_type": "MissingSecretKey"},
)
return (
0,
"SECRET_KEY not configured but credentials are marked as encrypted",
)
logger.info(
f"Google Drive credentials are encrypted for connector {connector_id}, will decrypt during client initialization"
)
# If _token_encrypted is False or not set, treat credentials as plaintext
drive_client = GoogleDriveClient(session, connector_id)
if not folder_id:
@ -249,6 +270,26 @@ async def index_google_drive_single_file(
{"stage": "client_initialization"},
)
# Check if credentials are encrypted (only when explicitly marked)
token_encrypted = connector.config.get("_token_encrypted", False)
if token_encrypted:
# Credentials are explicitly marked as encrypted, will be decrypted during client initialization
if not config.SECRET_KEY:
await task_logger.log_task_failure(
log_entry,
f"SECRET_KEY not configured but credentials are marked as encrypted for connector {connector_id}",
"Missing SECRET_KEY for token decryption",
{"error_type": "MissingSecretKey"},
)
return (
0,
"SECRET_KEY not configured but credentials are marked as encrypted",
)
logger.info(
f"Google Drive credentials are encrypted for connector {connector_id}, will decrypt during client initialization"
)
# If _token_encrypted is False or not set, treat credentials as plaintext
drive_client = GoogleDriveClient(session, connector_id)
# Fetch the file metadata

View file

@ -8,7 +8,6 @@ from google.oauth2.credentials import Credentials
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
from app.connectors.google_gmail_connector import GoogleGmailConnector
from app.db import (
Document,
@ -88,9 +87,47 @@ async def index_google_gmail_messages(
)
return 0, error_msg
# Create credentials from connector config
# Get the Google Gmail credentials from the connector config
config_data = connector.config
exp = config_data.get("expiry").replace("Z", "")
# Decrypt sensitive credentials if encrypted (for backward compatibility)
from app.config import config
from app.utils.oauth_security import TokenEncryption
token_encrypted = config_data.get("_token_encrypted", False)
if token_encrypted and config.SECRET_KEY:
try:
token_encryption = TokenEncryption(config.SECRET_KEY)
# Decrypt sensitive fields
if config_data.get("token"):
config_data["token"] = token_encryption.decrypt_token(
config_data["token"]
)
if config_data.get("refresh_token"):
config_data["refresh_token"] = token_encryption.decrypt_token(
config_data["refresh_token"]
)
if config_data.get("client_secret"):
config_data["client_secret"] = token_encryption.decrypt_token(
config_data["client_secret"]
)
logger.info(
f"Decrypted Google Gmail credentials for connector {connector_id}"
)
except Exception as e:
await task_logger.log_task_failure(
log_entry,
f"Failed to decrypt Google Gmail credentials for connector {connector_id}: {e!s}",
"Credential decryption failed",
{"error_type": "CredentialDecryptionError"},
)
return 0, f"Failed to decrypt Google Gmail credentials: {e!s}"
exp = config_data.get("expiry", "")
if exp:
exp = exp.replace("Z", "")
credentials = Credentials(
token=config_data.get("token"),
refresh_token=config_data.get("refresh_token"),
@ -98,7 +135,7 @@ async def index_google_gmail_messages(
client_id=config_data.get("client_id"),
client_secret=config_data.get("client_secret"),
scopes=config_data.get("scopes", []),
expiry=datetime.fromisoformat(exp),
expiry=datetime.fromisoformat(exp) if exp else None,
)
if (

View file

@ -2,13 +2,14 @@
Jira connector indexer.
"""
import contextlib
from datetime import datetime
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
from app.connectors.jira_connector import JiraConnector
from app.connectors.jira_history import JiraHistoryConnector
from app.db import Document, DocumentType, SearchSourceConnectorType
from app.services.llm_service import get_user_long_context_llm
from app.services.task_logging_service import TaskLoggingService
@ -83,32 +84,27 @@ async def index_jira_issues(
)
return 0, f"Connector with ID {connector_id} not found"
# Get the Jira credentials from the connector config
jira_email = connector.config.get("JIRA_EMAIL")
jira_api_token = connector.config.get("JIRA_API_TOKEN")
jira_base_url = connector.config.get("JIRA_BASE_URL")
if not jira_email or not jira_api_token or not jira_base_url:
await task_logger.log_task_failure(
log_entry,
f"Jira credentials not found in connector config for connector {connector_id}",
"Missing Jira credentials",
{"error_type": "MissingCredentials"},
)
return 0, "Jira credentials not found in connector config"
# Initialize Jira client
# Initialize Jira client with internal refresh capability
# Token refresh will happen automatically when needed
await task_logger.log_task_progress(
log_entry,
f"Initializing Jira client for connector {connector_id}",
{"stage": "client_initialization"},
)
jira_client = JiraConnector(
base_url=jira_base_url, email=jira_email, api_token=jira_api_token
)
logger.info(f"Initializing Jira client for connector {connector_id}")
# Create connector with session and connector_id for internal refresh
# Token refresh will happen automatically when needed
jira_client = JiraHistoryConnector(session=session, connector_id=connector_id)
# Calculate date range
# Handle "undefined" strings from frontend
if start_date == "undefined" or start_date == "":
start_date = None
if end_date == "undefined" or end_date == "":
end_date = None
start_date_str, end_date_str = calculate_date_range(
connector, start_date, end_date, default_days_back=365
)
@ -125,7 +121,7 @@ async def index_jira_issues(
# Get issues within date range
try:
issues, error = jira_client.get_issues_by_date_range(
issues, error = await jira_client.get_issues_by_date_range(
start_date=start_date_str, end_date=end_date_str, include_comments=True
)
@ -398,6 +394,10 @@ async def index_jira_issues(
logger.info(
f"JIRA indexing completed: {documents_indexed} new issues, {documents_skipped} skipped"
)
# Clean up the connector
await jira_client.close()
return (
total_processed,
None,
@ -412,6 +412,10 @@ async def index_jira_issues(
{"error_type": "SQLAlchemyError"},
)
logger.error(f"Database error: {db_error!s}", exc_info=True)
# Clean up the connector in case of error
if "jira_client" in locals():
with contextlib.suppress(Exception):
await jira_client.close()
return 0, f"Database error: {db_error!s}"
except Exception as e:
await session.rollback()
@ -422,4 +426,8 @@ async def index_jira_issues(
{"error_type": type(e).__name__},
)
logger.error(f"Failed to index JIRA issues: {e!s}", exc_info=True)
# Clean up the connector in case of error
if "jira_client" in locals():
with contextlib.suppress(Exception):
await jira_client.close()
return 0, f"Failed to index JIRA issues: {e!s}"

View file

@ -92,25 +92,34 @@ async def index_linear_issues(
f"Connector with ID {connector_id} not found or is not a Linear connector",
)
# Get the Linear token from the connector config
linear_token = connector.config.get("LINEAR_API_KEY")
if not linear_token:
# Check if access_token exists (support both new OAuth format and old API key format)
if not connector.config.get("access_token") and not connector.config.get(
"LINEAR_API_KEY"
):
await task_logger.log_task_failure(
log_entry,
f"Linear API token not found in connector config for connector {connector_id}",
"Missing Linear token",
f"Linear access token not found in connector config for connector {connector_id}",
"Missing Linear access token",
{"error_type": "MissingToken"},
)
return 0, "Linear API token not found in connector config"
return 0, "Linear access token not found in connector config"
# Initialize Linear client
# Initialize Linear client with internal refresh capability
await task_logger.log_task_progress(
log_entry,
f"Initializing Linear client for connector {connector_id}",
{"stage": "client_initialization"},
)
linear_client = LinearConnector(token=linear_token)
# Create connector with session and connector_id for internal refresh
# Token refresh will happen automatically when needed
linear_client = LinearConnector(session=session, connector_id=connector_id)
# Handle 'undefined' string from frontend (treat as None)
if start_date == "undefined" or start_date == "":
start_date = None
if end_date == "undefined" or end_date == "":
end_date = None
# Calculate date range
start_date_str, end_date_str = calculate_date_range(
@ -131,7 +140,7 @@ async def index_linear_issues(
# Get issues within date range
try:
issues, error = linear_client.get_issues_by_date_range(
issues, error = await linear_client.get_issues_by_date_range(
start_date=start_date_str, end_date=end_date_str, include_comments=True
)

View file

@ -2,7 +2,7 @@
Notion connector indexer.
"""
from datetime import datetime, timedelta
from datetime import datetime
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.ext.asyncio import AsyncSession
@ -20,6 +20,7 @@ from app.utils.document_converters import (
from .base import (
build_document_metadata_string,
calculate_date_range,
check_document_by_unique_identifier,
get_connector_by_id,
get_current_timestamp,
@ -91,18 +92,19 @@ async def index_notion_pages(
f"Connector with ID {connector_id} not found or is not a Notion connector",
)
# Get the Notion token from the connector config
notion_token = connector.config.get("NOTION_INTEGRATION_TOKEN")
if not notion_token:
# Check if access_token exists (support both new OAuth format and old integration token format)
if not connector.config.get("access_token") and not connector.config.get(
"NOTION_INTEGRATION_TOKEN"
):
await task_logger.log_task_failure(
log_entry,
f"Notion integration token not found in connector config for connector {connector_id}",
"Missing Notion token",
f"Notion access token not found in connector config for connector {connector_id}",
"Missing Notion access token",
{"error_type": "MissingToken"},
)
return 0, "Notion integration token not found in connector config"
return 0, "Notion access token not found in connector config"
# Initialize Notion client
# Initialize Notion client with internal refresh capability
await task_logger.log_task_progress(
log_entry,
f"Initializing Notion client for connector {connector_id}",
@ -111,40 +113,30 @@ async def index_notion_pages(
logger.info(f"Initializing Notion client for connector {connector_id}")
# Calculate date range
if start_date is None or end_date is None:
# Fall back to calculating dates
calculated_end_date = datetime.now()
calculated_start_date = calculated_end_date - timedelta(
days=365
) # Check for last 1 year of pages
# Handle 'undefined' string from frontend (treat as None)
if start_date == "undefined" or start_date == "":
start_date = None
if end_date == "undefined" or end_date == "":
end_date = None
# Use calculated dates if not provided
if start_date is None:
start_date_iso = calculated_start_date.strftime("%Y-%m-%dT%H:%M:%SZ")
else:
# Convert YYYY-MM-DD to ISO format
start_date_iso = datetime.strptime(start_date, "%Y-%m-%d").strftime(
"%Y-%m-%dT%H:%M:%SZ"
)
# Calculate date range using the shared utility function
start_date_str, end_date_str = calculate_date_range(
connector, start_date, end_date, default_days_back=365
)
if end_date is None:
end_date_iso = calculated_end_date.strftime("%Y-%m-%dT%H:%M:%SZ")
else:
# Convert YYYY-MM-DD to ISO format
end_date_iso = datetime.strptime(end_date, "%Y-%m-%d").strftime(
"%Y-%m-%dT%H:%M:%SZ"
)
else:
# Convert provided dates to ISO format for Notion API
start_date_iso = datetime.strptime(start_date, "%Y-%m-%d").strftime(
"%Y-%m-%dT%H:%M:%SZ"
)
end_date_iso = datetime.strptime(end_date, "%Y-%m-%d").strftime(
"%Y-%m-%dT%H:%M:%SZ"
)
# Convert YYYY-MM-DD to ISO format for Notion API
start_date_iso = datetime.strptime(start_date_str, "%Y-%m-%d").strftime(
"%Y-%m-%dT%H:%M:%SZ"
)
end_date_iso = datetime.strptime(end_date_str, "%Y-%m-%d").strftime(
"%Y-%m-%dT%H:%M:%SZ"
)
notion_client = NotionHistoryConnector(token=notion_token)
# Create connector with session and connector_id for internal refresh
# Token refresh will happen automatically when needed
notion_client = NotionHistoryConnector(
session=session, connector_id=connector_id
)
logger.info(f"Fetching Notion pages from {start_date_iso} to {end_date_iso}")

View file

@ -92,25 +92,24 @@ async def index_slack_messages(
f"Connector with ID {connector_id} not found or is not a Slack connector",
)
# Get the Slack token from the connector config
slack_token = connector.config.get("SLACK_BOT_TOKEN")
if not slack_token:
await task_logger.log_task_failure(
log_entry,
f"Slack token not found in connector config for connector {connector_id}",
"Missing Slack token",
{"error_type": "MissingToken"},
)
return 0, "Slack token not found in connector config"
# Note: Token handling is now done automatically by SlackHistory
# with auto-refresh support. We just need to pass session and connector_id.
# Initialize Slack client
# Initialize Slack client with auto-refresh support
await task_logger.log_task_progress(
log_entry,
f"Initializing Slack client for connector {connector_id}",
{"stage": "client_initialization"},
)
slack_client = SlackHistory(token=slack_token)
# Use the new pattern with session and connector_id for auto-refresh
slack_client = SlackHistory(session=session, connector_id=connector_id)
# Handle 'undefined' string from frontend (treat as None)
if start_date == "undefined" or start_date == "":
start_date = None
if end_date == "undefined" or end_date == "":
end_date = None
# Calculate date range
await task_logger.log_task_progress(
@ -141,7 +140,7 @@ async def index_slack_messages(
# Get all channels
try:
channels = slack_client.get_all_channels()
channels = await slack_client.get_all_channels()
except Exception as e:
await task_logger.log_task_failure(
log_entry,
@ -190,7 +189,7 @@ async def index_slack_messages(
continue
# Get messages for this channel
messages, error = slack_client.get_history_by_date_range(
messages, error = await slack_client.get_history_by_date_range(
channel_id=channel_id,
start_date=start_date_str,
end_date=end_date_str,
@ -223,7 +222,7 @@ async def index_slack_messages(
]:
continue
formatted_msg = slack_client.format_message(
formatted_msg = await slack_client.format_message(
msg, include_user_info=True
)
formatted_messages.append(formatted_msg)

View file

@ -0,0 +1,189 @@
"""
Connector Naming Utilities.
Provides functions for generating unique, user-friendly connector names.
"""
from typing import Any
from urllib.parse import urlparse
from uuid import UUID
from sqlalchemy import func
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.db import SearchSourceConnector, SearchSourceConnectorType
# Friendly display names for connector types
BASE_NAME_FOR_TYPE = {
SearchSourceConnectorType.GOOGLE_GMAIL_CONNECTOR: "Gmail",
SearchSourceConnectorType.GOOGLE_DRIVE_CONNECTOR: "Google Drive",
SearchSourceConnectorType.GOOGLE_CALENDAR_CONNECTOR: "Google Calendar",
SearchSourceConnectorType.SLACK_CONNECTOR: "Slack",
SearchSourceConnectorType.NOTION_CONNECTOR: "Notion",
SearchSourceConnectorType.LINEAR_CONNECTOR: "Linear",
SearchSourceConnectorType.JIRA_CONNECTOR: "Jira",
SearchSourceConnectorType.DISCORD_CONNECTOR: "Discord",
SearchSourceConnectorType.CONFLUENCE_CONNECTOR: "Confluence",
SearchSourceConnectorType.AIRTABLE_CONNECTOR: "Airtable",
}
def get_base_name_for_type(connector_type: SearchSourceConnectorType) -> str:
"""Get a friendly display name for a connector type."""
return BASE_NAME_FOR_TYPE.get(
connector_type, connector_type.replace("_", " ").title()
)
def extract_identifier_from_credentials(
connector_type: SearchSourceConnectorType,
credentials: dict[str, Any],
) -> str | None:
"""
Extract a unique identifier from connector credentials.
Args:
connector_type: The type of connector
credentials: The connector credentials dict
Returns:
Identifier string (workspace name, email, etc.) or None
"""
if connector_type == SearchSourceConnectorType.SLACK_CONNECTOR:
return credentials.get("team_name")
if connector_type == SearchSourceConnectorType.NOTION_CONNECTOR:
return credentials.get("workspace_name")
if connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR:
return credentials.get("guild_name")
if connector_type in (
SearchSourceConnectorType.JIRA_CONNECTOR,
SearchSourceConnectorType.CONFLUENCE_CONNECTOR,
):
base_url = credentials.get("base_url", "")
if base_url:
try:
parsed = urlparse(base_url)
hostname = parsed.netloc or parsed.path
if ".atlassian.net" in hostname:
return hostname.replace(".atlassian.net", "")
return hostname
except Exception:
pass
return None
# Google, Linear, Airtable require API calls - return None
return None
def generate_connector_name_with_identifier(
    connector_type: SearchSourceConnectorType,
    identifier: str | None,
) -> str:
    """Build a connector display name, optionally tagged with an identifier.

    Args:
        connector_type: The type of connector
        identifier: User identifier (email, workspace name, etc.)

    Returns:
        Name like "Gmail - john@example.com", or just "Gmail" when no
        identifier is available.
    """
    base = get_base_name_for_type(connector_type)
    return f"{base} - {identifier}" if identifier else base
async def count_connectors_of_type(
    session: AsyncSession,
    connector_type: SearchSourceConnectorType,
    search_space_id: int,
    user_id: UUID,
) -> int:
    """Count a user's existing connectors of a type within a search space."""
    stmt = select(func.count(SearchSourceConnector.id)).where(
        SearchSourceConnector.connector_type == connector_type,
        SearchSourceConnector.search_space_id == search_space_id,
        SearchSourceConnector.user_id == user_id,
    )
    result = await session.execute(stmt)
    # scalar() may be None for an empty result; normalize to 0.
    return result.scalar() or 0
async def check_duplicate_connector(
    session: AsyncSession,
    connector_type: SearchSourceConnectorType,
    search_space_id: int,
    user_id: UUID,
    identifier: str | None,
) -> bool:
    """
    Check if a connector with the same identifier already exists.

    Args:
        session: Database session
        connector_type: The type of connector
        search_space_id: The search space ID
        user_id: The user ID
        identifier: User identifier (email, workspace name, etc.)

    Returns:
        True if a duplicate exists, False otherwise
    """
    if not identifier:
        # Without an identifier we cannot distinguish accounts, so never
        # report a duplicate.
        return False

    # Reuse the canonical name builder so this check stays in sync with the
    # names produced by generate_connector_name_with_identifier().
    expected_name = generate_connector_name_with_identifier(connector_type, identifier)
    result = await session.execute(
        select(func.count(SearchSourceConnector.id)).where(
            SearchSourceConnector.connector_type == connector_type,
            SearchSourceConnector.search_space_id == search_space_id,
            SearchSourceConnector.user_id == user_id,
            SearchSourceConnector.name == expected_name,
        )
    )
    return (result.scalar() or 0) > 0
async def generate_unique_connector_name(
    session: AsyncSession,
    connector_type: SearchSourceConnectorType,
    search_space_id: int,
    user_id: UUID,
    identifier: str | None = None,
) -> str:
    """
    Generate a unique connector name.

    If an identifier is provided (email, workspace name, etc.), uses it with
    the base name. Otherwise, falls back to counting existing connectors for
    uniqueness.

    Args:
        session: Database session
        connector_type: The type of connector
        search_space_id: The search space ID
        user_id: The user ID
        identifier: Optional user identifier (email, workspace name, etc.)

    Returns:
        Unique name like "Gmail - john@example.com" or "Gmail (2)"
    """
    if identifier:
        # Reuse the canonical builder so all call sites produce identical names.
        return generate_connector_name_with_identifier(connector_type, identifier)

    # Fallback: suffix with a counter based on how many connectors of this
    # type already exist.
    # NOTE(review): count-based naming can collide under concurrent creation
    # or after deletions; confirm a DB uniqueness constraint backs this up.
    count = await count_connectors_of_type(
        session, connector_type, search_space_id, user_id
    )
    if count == 0:
        return get_base_name_for_type(connector_type)
    return f"{get_base_name_for_type(connector_type)} ({count + 1})"

View file

@ -222,88 +222,6 @@ async def convert_document_to_markdown(elements):
return "".join(markdown_parts)
def convert_chunks_to_langchain_documents(chunks):
    """
    Convert chunks from hybrid search results to LangChain Document objects.

    Each chunk's text is wrapped in an XML envelope carrying an explicit
    ``source_id`` so downstream citation logic can attribute content.

    Args:
        chunks: List of chunk dictionaries from hybrid search results

    Returns:
        List of LangChain Document objects

    Raises:
        ImportError: If langchain-core is not installed.
    """
    try:
        from langchain_core.documents import Document as LangChainDocument
    except ImportError:
        raise ImportError(
            "LangChain is not installed. Please install it with `pip install langchain langchain-core`"
        ) from None

    langchain_docs = []
    for chunk in chunks:
        # Extract content from the chunk
        content = chunk.get("content", "")

        # Create metadata dictionary.
        # .get() already returns None for missing keys, so the previous
        # `chunk.get("rank") if "rank" in chunk else None` was redundant.
        metadata = {
            "chunk_id": chunk.get("chunk_id"),
            "score": chunk.get("score"),
            "rank": chunk.get("rank"),
        }

        # Add document information to metadata
        if "document" in chunk:
            doc = chunk["document"]
            metadata.update(
                {
                    "document_id": doc.get("id"),
                    "document_title": doc.get("title"),
                    "document_type": doc.get("document_type"),
                }
            )

            # Hoist the lookup instead of repeating doc.get("metadata", {}).
            doc_meta = doc.get("metadata", {})
            if "metadata" in doc:
                # Prefix document metadata keys to avoid conflicts
                metadata.update({f"doc_meta_{k}": v for k, v in doc_meta.items()})

            # Add source URL if available in metadata
            if "url" in doc_meta:
                metadata["source"] = doc_meta["url"]
            elif "sourceURL" in doc_meta:
                metadata["source"] = doc_meta["sourceURL"]

        # Ensure source_id is set for citation purposes
        # Use document_id as the source_id if available
        if "document_id" in metadata:
            metadata["source_id"] = metadata["document_id"]

        # Update content for citation mode - format as XML with explicit source_id
        new_content = f"""
<document>
    <metadata>
        <source_id>{metadata.get("source_id", metadata.get("document_id", "unknown"))}</source_id>
    </metadata>
    <content>
        <text>
            {content}
        </text>
    </content>
</document>
"""

        # Create LangChain Document
        langchain_docs.append(
            LangChainDocument(page_content=new_content, metadata=metadata)
        )

    return langchain_docs
def generate_content_hash(content: str, search_space_id: int) -> str:
"""Generate SHA-256 hash for the given content combined with search space ID."""
combined_data = f"{search_space_id}:{content}"

View file

@ -0,0 +1,210 @@
"""
OAuth Security Utilities.
Provides secure state parameter generation/validation and token encryption
for OAuth 2.0 flows.
"""
import base64
import hashlib
import hmac
import json
import logging
import time
from uuid import UUID
from cryptography.fernet import Fernet
from fastapi import HTTPException
logger = logging.getLogger(__name__)
class OAuthStateManager:
    """Create and verify HMAC-signed OAuth ``state`` parameters.

    The state is a base64url-encoded JSON payload carrying the search space
    id, user id, and a timestamp, plus an HMAC-SHA256 signature so the OAuth
    callback can detect tampering and replay.
    """

    def __init__(self, secret_key: str, max_age_seconds: int = 600):
        """
        Initialize OAuth state manager.

        Args:
            secret_key: Secret key for HMAC signing (should be SECRET_KEY from config)
            max_age_seconds: Maximum age of state parameter in seconds (default 10 minutes)
        """
        if not secret_key:
            raise ValueError("secret_key is required for OAuth state management")
        self.secret_key = secret_key
        self.max_age_seconds = max_age_seconds

    def _sign(self, payload: dict) -> str:
        # Canonical JSON (sorted keys) keeps the signature stable regardless
        # of dict insertion order.
        serialized = json.dumps(payload, sort_keys=True)
        return hmac.new(
            self.secret_key.encode(),
            serialized.encode(),
            hashlib.sha256,
        ).hexdigest()

    def generate_secure_state(
        self, space_id: int, user_id: UUID, **extra_fields
    ) -> str:
        """
        Generate cryptographically signed state parameter.

        Args:
            space_id: The search space ID
            user_id: The user ID
            **extra_fields: Additional fields to include in state (e.g. code_verifier for PKCE)

        Returns:
            Base64-encoded state parameter with HMAC signature
        """
        payload = {
            "space_id": space_id,
            "user_id": str(user_id),
            "timestamp": int(time.time()),
        }
        # Extra fields (e.g. code_verifier for PKCE) are signed too.
        payload.update(extra_fields)
        # Sign the payload, then embed the signature alongside it.
        payload["signature"] = self._sign(payload)
        return base64.urlsafe_b64encode(json.dumps(payload).encode()).decode()

    def validate_state(self, state: str) -> dict:
        """
        Validate and decode state parameter with signature verification.

        Args:
            state: The state parameter from OAuth callback

        Returns:
            Decoded state data (space_id, user_id, timestamp)

        Raises:
            HTTPException: If state is invalid, expired, or tampered with
        """
        try:
            data = json.loads(base64.urlsafe_b64decode(state.encode()).decode())
        except Exception as e:
            raise HTTPException(
                status_code=400, detail=f"Invalid state format: {e!s}"
            ) from e

        # Signature is popped so the remaining payload matches what was signed.
        provided_signature = data.pop("signature", None)
        if not provided_signature:
            raise HTTPException(status_code=400, detail="Missing state signature")

        # Constant-time comparison to avoid timing side channels.
        if not hmac.compare_digest(provided_signature, self._sign(data)):
            raise HTTPException(
                status_code=400, detail="Invalid state signature - possible tampering"
            )

        # Reject states from the future or older than max_age (replay defense).
        age = time.time() - data.get("timestamp", 0)
        if age < 0:
            raise HTTPException(status_code=400, detail="Invalid state timestamp")
        if age > self.max_age_seconds:
            raise HTTPException(
                status_code=400,
                detail="State parameter expired. Please try again.",
            )

        return data
class TokenEncryption:
    """Symmetric (Fernet) encryption for OAuth tokens stored at rest."""

    def __init__(self, secret_key: str):
        """
        Initialize token encryption.

        Args:
            secret_key: Secret key for encryption (should be SECRET_KEY from config)
        """
        if not secret_key:
            raise ValueError("secret_key is required for token encryption")
        # Derive a Fernet-compatible key from the secret via SHA-256.
        # Note: In production, consider using HKDF for key derivation
        derived_key = base64.urlsafe_b64encode(
            hashlib.sha256(secret_key.encode()).digest()
        )
        try:
            self.cipher = Fernet(derived_key)
        except Exception as e:
            raise ValueError(f"Failed to initialize encryption cipher: {e!s}") from e

    def encrypt_token(self, token: str) -> str:
        """
        Encrypt a token for storage.

        Args:
            token: Plaintext token to encrypt

        Returns:
            Encrypted token string
        """
        if not token:
            # Empty/None tokens pass through unchanged.
            return token
        try:
            return self.cipher.encrypt(token.encode()).decode()
        except Exception as e:
            logger.error(f"Failed to encrypt token: {e!s}")
            raise ValueError(f"Token encryption failed: {e!s}") from e

    def decrypt_token(self, encrypted_token: str) -> str:
        """
        Decrypt a stored token.

        Args:
            encrypted_token: Encrypted token string

        Returns:
            Decrypted plaintext token
        """
        if not encrypted_token:
            return encrypted_token
        try:
            return self.cipher.decrypt(encrypted_token.encode()).decode()
        except Exception as e:
            logger.error(f"Failed to decrypt token: {e!s}")
            raise ValueError(f"Token decryption failed: {e!s}") from e

    def is_encrypted(self, token: str) -> bool:
        """
        Heuristically check whether a token looks encrypted.

        Args:
            token: Token string to check

        Returns:
            True if token appears encrypted, False otherwise
        """
        if not token:
            return False
        # Heuristic only: encrypted tokens are base64url-decodable and
        # reasonably long. Short or non-base64 strings are treated as plain.
        try:
            base64.urlsafe_b64decode(token.encode())
        except Exception:
            return False
        return len(token) > 20

View file

@ -513,11 +513,22 @@ def validate_connector_config(
],
"validators": {},
},
"SLACK_CONNECTOR": {"required": ["SLACK_BOT_TOKEN"], "validators": {}},
"NOTION_CONNECTOR": {
"required": ["NOTION_INTEGRATION_TOKEN"],
"validators": {},
},
# "SLACK_CONNECTOR": {
# "required": [], # OAuth uses bot_token (encrypted), legacy uses SLACK_BOT_TOKEN
# "optional": [
# "bot_token",
# "SLACK_BOT_TOKEN",
# "bot_user_id",
# "team_id",
# "team_name",
# "token_type",
# "expires_in",
# "expires_at",
# "scope",
# "_token_encrypted",
# ],
# "validators": {},
# },
"GITHUB_CONNECTOR": {
"required": ["GITHUB_PAT", "repo_full_names"],
"validators": {
@ -526,31 +537,21 @@ def validate_connector_config(
)
},
},
"LINEAR_CONNECTOR": {"required": ["LINEAR_API_KEY"], "validators": {}},
"DISCORD_CONNECTOR": {"required": ["DISCORD_BOT_TOKEN"], "validators": {}},
"JIRA_CONNECTOR": {
"required": ["JIRA_EMAIL", "JIRA_API_TOKEN", "JIRA_BASE_URL"],
"validators": {
"JIRA_EMAIL": lambda: validate_email_field("JIRA_EMAIL", "JIRA"),
"JIRA_BASE_URL": lambda: validate_url_field("JIRA_BASE_URL", "JIRA"),
},
},
"CONFLUENCE_CONNECTOR": {
"required": [
"CONFLUENCE_BASE_URL",
"CONFLUENCE_EMAIL",
"CONFLUENCE_API_TOKEN",
],
"validators": {
"CONFLUENCE_EMAIL": lambda: validate_email_field(
"CONFLUENCE_EMAIL", "Confluence"
),
"CONFLUENCE_BASE_URL": lambda: validate_url_field(
"CONFLUENCE_BASE_URL", "Confluence"
),
},
},
"CLICKUP_CONNECTOR": {"required": ["CLICKUP_API_TOKEN"], "validators": {}},
# "DISCORD_CONNECTOR": {"required": ["DISCORD_BOT_TOKEN"], "validators": {}},
# "JIRA_CONNECTOR": {
# "required": ["JIRA_EMAIL", "JIRA_API_TOKEN", "JIRA_BASE_URL"],
# "validators": {
# "JIRA_EMAIL": lambda: validate_email_field("JIRA_EMAIL", "JIRA"),
# "JIRA_BASE_URL": lambda: validate_url_field("JIRA_BASE_URL", "JIRA"),
# },
# },
# "CONFLUENCE_CONNECTOR": {
# "required": [
# "access_token",
# ],
# "validators": {},
# },
# "CLICKUP_CONNECTOR": {"required": ["CLICKUP_API_TOKEN"], "validators": {}},
# "GOOGLE_CALENDAR_CONNECTOR": {
# "required": ["token", "refresh_token", "token_uri", "client_id", "expiry", "scopes", "client_secret"],
# "validators": {},

View file

@ -1,8 +1,7 @@
[project]
name = "surf-new-backend"
version = "0.0.10"
version = "0.0.11"
description = "SurfSense Backend"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
"alembic>=1.13.0",
@ -153,3 +152,11 @@ line-ending = "auto"
known-first-party = ["app"]
force-single-line = false
combine-as-imports = true
[tool.setuptools.packages.find]
where = ["."]
include = ["app*", "alembic*"]
[build-system]
requires = ["setuptools>=61.0", "wheel"]
build-backend = "setuptools.build_meta"

View file

@ -6409,8 +6409,8 @@ wheels = [
[[package]]
name = "surf-new-backend"
version = "0.0.10"
source = { virtual = "." }
version = "0.0.11"
source = { editable = "." }
dependencies = [
{ name = "alembic" },
{ name = "asyncpg" },

View file

@ -1,7 +1,7 @@
{
"name": "surfsense_browser_extension",
"displayName": "Surfsense Browser Extension",
"version": "0.0.10",
"version": "0.0.11",
"description": "Extension to collect Browsing History for SurfSense.",
"author": "https://github.com/MODSetter",
"engines": {

View file

@ -17,6 +17,7 @@ import { activeSearchSpaceIdAtom } from "@/atoms/search-spaces/search-space-quer
import { DocumentUploadDialogProvider } from "@/components/assistant-ui/document-upload-popup";
import { DashboardBreadcrumb } from "@/components/dashboard-breadcrumb";
import { LanguageSwitcher } from "@/components/LanguageSwitcher";
import { OnboardingTour } from "@/components/onboarding-tour";
import { AppSidebarProvider } from "@/components/sidebar/AppSidebarProvider";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { Separator } from "@/components/ui/separator";
@ -242,6 +243,7 @@ export function DashboardClientLayout({
return (
<DocumentUploadDialogProvider>
<OnboardingTour />
<SidebarProvider className="h-full overflow-hidden" open={open} onOpenChange={setOpen}>
{/* Use AppSidebarProvider which fetches user, search space, and recent chats */}
<AppSidebarProvider

View file

@ -25,6 +25,9 @@ import {
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
import type { Document } from "./types";
// Only FILE and NOTE document types can be edited
const EDITABLE_DOCUMENT_TYPES = ["FILE", "NOTE"] as const;
export function RowActions({
document,
deleteDocument,
@ -41,6 +44,10 @@ export function RowActions({
const [isDeleting, setIsDeleting] = useState(false);
const router = useRouter();
const isEditable = EDITABLE_DOCUMENT_TYPES.includes(
document.document_type as (typeof EDITABLE_DOCUMENT_TYPES)[number]
);
const handleDelete = async () => {
setIsDeleting(true);
try {
@ -65,28 +72,30 @@ export function RowActions({
<div className="flex items-center justify-end gap-1">
{/* Desktop Actions */}
<div className="hidden md:flex items-center gap-1">
<Tooltip>
<TooltipTrigger asChild>
<motion.div
whileHover={{ scale: 1.1 }}
whileTap={{ scale: 0.95 }}
transition={{ type: "spring", stiffness: 400, damping: 17 }}
>
<Button
variant="ghost"
size="icon"
className="h-8 w-8 text-muted-foreground hover:text-foreground hover:bg-muted/80"
onClick={handleEdit}
{isEditable && (
<Tooltip>
<TooltipTrigger asChild>
<motion.div
whileHover={{ scale: 1.1 }}
whileTap={{ scale: 0.95 }}
transition={{ type: "spring", stiffness: 400, damping: 17 }}
>
<Pencil className="h-4 w-4" />
<span className="sr-only">Edit Document</span>
</Button>
</motion.div>
</TooltipTrigger>
<TooltipContent side="top">
<p>Edit Document</p>
</TooltipContent>
</Tooltip>
<Button
variant="ghost"
size="icon"
className="h-8 w-8 text-muted-foreground hover:text-foreground hover:bg-muted/80"
onClick={handleEdit}
>
<Pencil className="h-4 w-4" />
<span className="sr-only">Edit Document</span>
</Button>
</motion.div>
</TooltipTrigger>
<TooltipContent side="top">
<p>Edit Document</p>
</TooltipContent>
</Tooltip>
)}
<Tooltip>
<TooltipTrigger asChild>
@ -146,10 +155,12 @@ export function RowActions({
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent align="end" className="w-40">
<DropdownMenuItem onClick={handleEdit}>
<Pencil className="mr-2 h-4 w-4" />
<span>Edit</span>
</DropdownMenuItem>
{isEditable && (
<DropdownMenuItem onClick={handleEdit}>
<Pencil className="mr-2 h-4 w-4" />
<span>Edit</span>
</DropdownMenuItem>
)}
<DropdownMenuItem onClick={() => setIsMetadataOpen(true)}>
<FileText className="mr-2 h-4 w-4" />
<span>Metadata</span>

View file

@ -20,7 +20,7 @@ import {
} from "@/atoms/chat/mentioned-documents.atom";
import {
clearPlanOwnerRegistry,
extractWriteTodosFromContent,
// extractWriteTodosFromContent,
hydratePlanStateAtom,
} from "@/atoms/chat/plan-state.atom";
import { Thread } from "@/components/assistant-ui/thread";
@ -30,7 +30,7 @@ import { DisplayImageToolUI } from "@/components/tool-ui/display-image";
import { GeneratePodcastToolUI } from "@/components/tool-ui/generate-podcast";
import { LinkPreviewToolUI } from "@/components/tool-ui/link-preview";
import { ScrapeWebpageToolUI } from "@/components/tool-ui/scrape-webpage";
import { WriteTodosToolUI } from "@/components/tool-ui/write-todos";
// import { WriteTodosToolUI } from "@/components/tool-ui/write-todos";
import { getBearerToken } from "@/lib/auth-utils";
import { createAttachmentAdapter, extractAttachmentContent } from "@/lib/chat/attachment-adapter";
import {
@ -199,7 +199,7 @@ const TOOLS_WITH_UI = new Set([
"link_preview",
"display_image",
"scrape_webpage",
"write_todos",
// "write_todos", // Disabled for now
]);
/**
@ -291,10 +291,11 @@ export default function NewChatPage() {
restoredThinkingSteps.set(`msg-${msg.id}`, steps);
}
// Hydrate write_todos plan state from persisted tool calls
const writeTodosCalls = extractWriteTodosFromContent(msg.content);
for (const todoData of writeTodosCalls) {
hydratePlanState(todoData);
}
// Disabled for now
// const writeTodosCalls = extractWriteTodosFromContent(msg.content);
// for (const todoData of writeTodosCalls) {
// hydratePlanState(todoData);
// }
}
if (msg.role === "user") {
const docs = extractMentionedDocuments(msg.content);
@ -911,7 +912,7 @@ export default function NewChatPage() {
<LinkPreviewToolUI />
<DisplayImageToolUI />
<ScrapeWebpageToolUI />
<WriteTodosToolUI />
{/* <WriteTodosToolUI /> Disabled for now */}
<div className="flex flex-col h-[calc(100vh-64px)] overflow-hidden">
<Thread
messageThinkingSteps={messageThinkingSteps}

View file

@ -2,7 +2,6 @@
import { Loader2 } from "lucide-react";
import { useEffect, useState } from "react";
import { AnnouncementBanner } from "@/components/announcement-banner";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { getBearerToken, redirectToLogin } from "@/lib/auth-utils";
@ -43,7 +42,6 @@ export default function DashboardLayout({ children }: DashboardLayoutProps) {
return (
<div className="h-full flex flex-col ">
<AnnouncementBanner />
<div className="flex-1 min-h-0">{children}</div>
</div>
);

View file

@ -11,7 +11,17 @@ export default async function Page(props: { params: Promise<{ slug?: string[] }>
const MDX = page.data.body;
return (
<DocsPage toc={page.data.toc} full={page.data.full}>
<DocsPage
toc={page.data.toc}
full={page.data.full}
tableOfContent={{
style: "clerk",
single: false,
}}
tableOfContentPopover={{
style: "clerk",
}}
>
<DocsTitle>{page.data.title}</DocsTitle>
<DocsDescription>{page.data.description}</DocsDescription>
<DocsBody>

View file

@ -1,5 +0,0 @@
import { atomWithStorage } from "jotai/utils";
// Atom to track whether the announcement banner has been dismissed
// Persists to localStorage automatically
export const announcementDismissedAtom = atomWithStorage("surfsense_announcement_dismissed", false);

View file

@ -1,47 +0,0 @@
"use client";
import { useAtom } from "jotai";
import { ExternalLink, Info, X } from "lucide-react";
import { announcementDismissedAtom } from "@/atoms/announcement.atom";
import { Button } from "@/components/ui/button";
export function AnnouncementBanner() {
const [isDismissed, setIsDismissed] = useAtom(announcementDismissedAtom);
const handleDismiss = () => {
setIsDismissed(true);
};
if (isDismissed) return null;
return (
<div className="relative h-[3rem] flex items-center justify-center border bg-gradient-to-r from-blue-600 to-blue-500 dark:from-blue-700 dark:to-blue-600 border-b border-blue-700 dark:border-blue-800">
<div className="container mx-auto px-4">
<div className="flex items-center justify-center gap-3 py-2.5">
<Info className="h-4 w-4 text-blue-50 flex-shrink-0" />
<p className="text-sm text-blue-50 text-center font-medium">
SurfSense is a work in progress.{" "}
<a
href="https://github.com/MODSetter/SurfSense/issues"
target="_blank"
rel="noopener noreferrer"
className="inline-flex items-center gap-1 underline decoration-blue-200 underline-offset-2 hover:decoration-white transition-colors"
>
Report issues on GitHub
<ExternalLink className="h-3 w-3" />
</a>
</p>
<Button
variant="ghost"
size="sm"
className="h-7 w-7 p-0 shrink-0 text-blue-100 hover:text-white hover:bg-blue-700/50 dark:hover:bg-blue-800/50 absolute right-4"
onClick={handleDismiss}
>
<X className="h-3.5 w-3.5" />
<span className="sr-only">Dismiss</span>
</Button>
</div>
</div>
</div>
);
}

View file

@ -19,9 +19,11 @@ import { ConnectorDialogHeader } from "./connector-popup/components/connector-di
import { ConnectorConnectView } from "./connector-popup/connector-configs/views/connector-connect-view";
import { ConnectorEditView } from "./connector-popup/connector-configs/views/connector-edit-view";
import { IndexingConfigurationView } from "./connector-popup/connector-configs/views/indexing-configuration-view";
import { OAUTH_CONNECTORS } from "./connector-popup/constants/connector-constants";
import { useConnectorDialog } from "./connector-popup/hooks/use-connector-dialog";
import { ActiveConnectorsTab } from "./connector-popup/tabs/active-connectors-tab";
import { AllConnectorsTab } from "./connector-popup/tabs/all-connectors-tab";
import { ConnectorAccountsListView } from "./connector-popup/views/connector-accounts-list-view";
import { YouTubeCrawlerView } from "./connector-popup/views/youtube-crawler-view";
export const ConnectorIndicator: FC = () => {
@ -60,6 +62,7 @@ export const ConnectorIndicator: FC = () => {
periodicEnabled,
frequencyMinutes,
allConnectors,
viewingAccountsType,
setSearchQuery,
setStartDate,
setEndDate,
@ -81,6 +84,8 @@ export const ConnectorIndicator: FC = () => {
handleBackFromEdit,
handleBackFromConnect,
handleBackFromYouTube,
handleViewAccountsList,
handleBackFromAccountsList,
handleQuickIndexConnector,
connectorConfig,
setConnectorConfig,
@ -162,6 +167,7 @@ export const ConnectorIndicator: FC = () => {
return (
<Dialog open={isOpen} onOpenChange={handleOpenChange}>
<TooltipIconButton
data-joyride="connector-icon"
tooltip={hasConnectors ? `Manage ${activeConnectorsCount} connectors` : "Connect your data"}
side="bottom"
className={cn(
@ -189,10 +195,29 @@ export const ConnectorIndicator: FC = () => {
)}
</TooltipIconButton>
<DialogContent className="max-w-3xl w-[95vw] sm:w-full h-[90vh] sm:h-[85vh] flex flex-col p-0 gap-0 overflow-hidden border border-border bg-muted text-foreground [&>button]:right-6 sm:[&>button]:right-12 [&>button]:top-8 sm:[&>button]:top-10 [&>button]:opacity-80 hover:[&>button]:opacity-100 [&>button_svg]:size-5">
<DialogContent className="max-w-3xl w-[95vw] sm:w-full h-[75vh] sm:h-[85vh] flex flex-col p-0 gap-0 overflow-hidden border border-border bg-muted text-foreground [&>button]:right-4 sm:[&>button]:right-12 [&>button]:top-6 sm:[&>button]:top-10 [&>button]:opacity-80 hover:[&>button]:opacity-100 [&>button_svg]:size-5">
{/* YouTube Crawler View - shown when adding YouTube videos */}
{isYouTubeView && searchSpaceId ? (
<YouTubeCrawlerView searchSpaceId={searchSpaceId} onBack={handleBackFromYouTube} />
) : viewingAccountsType ? (
<ConnectorAccountsListView
connectorType={viewingAccountsType.connectorType}
connectorTitle={viewingAccountsType.connectorTitle}
connectors={(allConnectors || []) as SearchSourceConnector[]}
indexingConnectorIds={indexingConnectorIds}
logsSummary={logsSummary}
onBack={handleBackFromAccountsList}
onManage={handleStartEdit}
onAddAccount={() => {
const oauthConnector = OAUTH_CONNECTORS.find(
(c) => c.connectorType === viewingAccountsType.connectorType
);
if (oauthConnector) {
handleConnectOAuth(oauthConnector);
}
}}
isConnecting={connectingId !== null}
/>
) : connectingConnectorType ? (
<ConnectorConnectView
connectorType={connectingConnectorType}
@ -272,7 +297,7 @@ export const ConnectorIndicator: FC = () => {
{/* Content */}
<div className="flex-1 min-h-0 relative overflow-hidden">
<div className="h-full overflow-y-auto" onScroll={handleScroll}>
<div className="px-6 sm:px-12 py-6 sm:py-8 pb-16 sm:pb-16">
<div className="px-4 sm:px-12 py-4 sm:py-8 pb-12 sm:pb-16">
<TabsContent value="all" className="m-0">
<AllConnectorsTab
searchQuery={searchQuery}
@ -288,6 +313,7 @@ export const ConnectorIndicator: FC = () => {
onCreateWebcrawler={handleCreateWebcrawler}
onCreateYouTubeCrawler={handleCreateYouTubeCrawler}
onManage={handleStartEdit}
onViewAccountsList={handleViewAccountsList}
/>
</TabsContent>
@ -302,6 +328,7 @@ export const ConnectorIndicator: FC = () => {
searchSpaceId={searchSpaceId}
onTabChange={handleTabChange}
onManage={handleStartEdit}
onViewAccountsList={handleViewAccountsList}
/>
</div>
</div>

View file

@ -1,7 +1,7 @@
"use client";
import { IconBrandYoutube } from "@tabler/icons-react";
import { format } from "date-fns";
import { differenceInDays, differenceInMinutes, format, isToday, isYesterday } from "date-fns";
import { FileText, Loader2 } from "lucide-react";
import type { FC } from "react";
import { Button } from "@/components/ui/button";
@ -17,6 +17,7 @@ interface ConnectorCardProps {
isConnected?: boolean;
isConnecting?: boolean;
documentCount?: number;
accountCount?: number;
lastIndexedAt?: string | null;
isIndexing?: boolean;
activeTask?: LogActiveTask;
@ -49,6 +50,45 @@ function formatDocumentCount(count: number | undefined): string {
return `${m.replace(/\.0$/, "")}M docs`;
}
/**
 * Format a last-indexed timestamp as a contextual, human-friendly string.
 * Examples: "Just now", "10 minutes ago", "Today at 2:30 PM",
 * "Yesterday at 3:45 PM", "3 days ago", "Jan 15, 2026".
 */
function formatLastIndexedDate(dateString: string): string {
	const indexedAt = new Date(dateString);
	const now = new Date();
	const minutesAgo = differenceInMinutes(now, indexedAt);

	// Within the last minute
	if (minutesAgo < 1) {
		return "Just now";
	}
	// Under an hour: relative minutes
	if (minutesAgo < 60) {
		return `${minutesAgo} ${minutesAgo === 1 ? "minute" : "minutes"} ago`;
	}
	// Same calendar day: time of day
	if (isToday(indexedAt)) {
		return `Today at ${format(indexedAt, "h:mm a")}`;
	}
	// Previous calendar day: time of day
	if (isYesterday(indexedAt)) {
		return `Yesterday at ${format(indexedAt, "h:mm a")}`;
	}
	// Under a week: relative days
	const daysAgo = differenceInDays(now, indexedAt);
	if (daysAgo < 7) {
		return `${daysAgo} ${daysAgo === 1 ? "day" : "days"} ago`;
	}
	// Older entries: absolute date
	return format(indexedAt, "MMM d, yyyy");
}
export const ConnectorCard: FC<ConnectorCardProps> = ({
id,
title,
@ -57,6 +97,7 @@ export const ConnectorCard: FC<ConnectorCardProps> = ({
isConnected = false,
isConnecting = false,
documentCount,
accountCount,
lastIndexedAt,
isIndexing = false,
activeTask,
@ -86,13 +127,13 @@ export const ConnectorCard: FC<ConnectorCardProps> = ({
// Show last indexed date for connected connectors
if (lastIndexedAt) {
return (
<span className="whitespace-nowrap">
Last indexed: {format(new Date(lastIndexedAt), "MMM d, yyyy")}
<span className="whitespace-nowrap text-[10px]">
Last indexed: {formatLastIndexedDate(lastIndexedAt)}
</span>
);
}
// Fallback for connected but never indexed
return <span className="whitespace-nowrap">Never indexed</span>;
return <span className="whitespace-nowrap text-[10px]">Never indexed</span>;
}
return description;
@ -100,7 +141,7 @@ export const ConnectorCard: FC<ConnectorCardProps> = ({
return (
<div className="group relative flex items-center gap-4 p-4 rounded-xl text-left transition-all duration-200 w-full border border-border bg-slate-400/5 dark:bg-white/5 hover:bg-slate-400/10 dark:hover:bg-white/10">
<div className="flex h-12 w-12 items-center justify-center rounded-lg transition-colors flex-shrink-0 bg-slate-400/5 dark:bg-white/5 border border-slate-400/5 dark:border-white/5">
<div className="flex h-12 w-12 items-center justify-center rounded-lg transition-colors shrink-0 bg-slate-400/5 dark:bg-white/5 border border-slate-400/5 dark:border-white/5">
{connectorType ? (
getConnectorIcon(connectorType, "size-6")
) : id === "youtube-crawler" ? (
@ -111,12 +152,20 @@ export const ConnectorCard: FC<ConnectorCardProps> = ({
</div>
<div className="flex-1 min-w-0">
<div className="flex items-center gap-2">
<span className="text-[14px] font-semibold leading-tight">{title}</span>
<span className="text-[14px] font-semibold leading-tight truncate">{title}</span>
</div>
<div className="text-[11px] text-muted-foreground mt-1">{getStatusContent()}</div>
<div className="text-[10px] text-muted-foreground mt-1">{getStatusContent()}</div>
{isConnected && documentCount !== undefined && (
<p className="text-[11px] text-muted-foreground mt-0.5">
{formatDocumentCount(documentCount)}
<p className="text-[10px] text-muted-foreground mt-0.5 flex items-center gap-1.5">
<span>{formatDocumentCount(documentCount)}</span>
{accountCount !== undefined && accountCount > 0 && (
<>
<span className="text-muted-foreground/50"></span>
<span>
{accountCount} {accountCount === 1 ? "Account" : "Accounts"}
</span>
</>
)}
</p>
)}
</div>
@ -124,18 +173,16 @@ export const ConnectorCard: FC<ConnectorCardProps> = ({
size="sm"
variant={isConnected ? "secondary" : "default"}
className={cn(
"h-8 text-[11px] px-3 rounded-lg flex-shrink-0 font-medium",
"h-8 text-[11px] px-3 rounded-lg shrink-0 font-medium",
isConnected &&
"bg-white text-slate-700 hover:bg-slate-50 border-0 shadow-xs dark:bg-secondary dark:text-secondary-foreground dark:hover:bg-secondary/80",
!isConnected && "shadow-xs"
)}
onClick={isConnected ? onManage : onConnect}
disabled={isConnecting || isIndexing}
disabled={isConnecting}
>
{isConnecting ? (
<Loader2 className="size-3 animate-spin" />
) : isIndexing ? (
"Syncing..."
) : isConnected ? (
"Manage"
) : id === "youtube-crawler" ? (

View file

@ -24,20 +24,20 @@ export const ConnectorDialogHeader: FC<ConnectorDialogHeaderProps> = ({
return (
<div
className={cn(
"flex-shrink-0 px-6 sm:px-12 pt-8 sm:pt-10 transition-shadow duration-200 relative z-10",
"flex-shrink-0 px-4 sm:px-12 pt-5 sm:pt-10 transition-shadow duration-200 relative z-10",
isScrolled && "shadow-xl bg-muted/50 backdrop-blur-md"
)}
>
<DialogHeader>
<DialogTitle className="text-2xl sm:text-3xl font-semibold tracking-tight">
<DialogTitle className="text-xl sm:text-3xl font-semibold tracking-tight">
Connectors
</DialogTitle>
<DialogDescription className="text-sm sm:text-base text-muted-foreground/80 mt-1 sm:mt-1.5">
<DialogDescription className="text-xs sm:text-base text-muted-foreground/80 mt-1 sm:mt-1.5">
Search across all your apps and data in one place.
</DialogDescription>
</DialogHeader>
<div className="flex flex-col-reverse sm:flex-row sm:items-end justify-between gap-6 sm:gap-8 mt-6 sm:mt-8 border-b border-border/80 dark:border-white/5">
<div className="flex flex-col-reverse sm:flex-row sm:items-end justify-between gap-4 sm:gap-8 mt-4 sm:mt-8 border-b border-border/80 dark:border-white/5">
<TabsList className="bg-transparent p-0 gap-4 sm:gap-8 h-auto w-full sm:w-auto justify-center sm:justify-start">
<TabsTrigger
value="all"
@ -63,7 +63,7 @@ export const ConnectorDialogHeader: FC<ConnectorDialogHeaderProps> = ({
<div className="w-full sm:w-72 sm:pb-1">
<div className="relative">
<Search className="absolute left-3 top-1/2 -translate-y-1/2 size-4 text-muted-foreground/60" />
<Search className="absolute left-3 top-1/2 -translate-y-1/2 size-4 text-gray-500 dark:text-gray-500" />
<input
type="text"
placeholder="Search"
@ -78,7 +78,7 @@ export const ConnectorDialogHeader: FC<ConnectorDialogHeaderProps> = ({
<button
type="button"
onClick={() => onSearchChange("")}
className="absolute right-3 top-1/2 -translate-y-1/2 size-4 text-gray-500 dark:text-gray-400 hover:text-gray-700 dark:hover:text-gray-300 transition-colors"
className="absolute right-3 top-1/2 -translate-y-1/2 size-4 text-gray-500 dark:text-gray-500 hover:text-gray-700 dark:hover:text-gray-300 transition-colors"
aria-label="Clear search"
>
<X className="size-4" />

View file

@ -50,6 +50,9 @@ export const PeriodicSyncConfig: FC<PeriodicSyncConfigProps> = ({
<SelectValue placeholder="Select frequency" />
</SelectTrigger>
<SelectContent className="z-[100]">
<SelectItem value="5" className="text-xs sm:text-sm">
Every 5 minutes
</SelectItem>
<SelectItem value="15" className="text-xs sm:text-sm">
Every 15 minutes
</SelectItem>

View file

@ -256,6 +256,9 @@ export const BookStackConnectForm: FC<ConnectFormProps> = ({ onSubmit, isSubmitt
<SelectValue placeholder="Select frequency" />
</SelectTrigger>
<SelectContent className="z-[100]">
<SelectItem value="5" className="text-xs sm:text-sm">
Every 5 minutes
</SelectItem>
<SelectItem value="15" className="text-xs sm:text-sm">
Every 15 minutes
</SelectItem>

View file

@ -1,7 +1,7 @@
"use client";
import { zodResolver } from "@hookform/resolvers/zod";
import { Info, Webhook } from "lucide-react";
import { Webhook } from "lucide-react";
import type { FC } from "react";
import { useRef } from "react";
import { useForm } from "react-hook-form";

View file

@ -1,382 +0,0 @@
"use client";
import { zodResolver } from "@hookform/resolvers/zod";
import { Info } from "lucide-react";
import type { FC } from "react";
import { useRef, useState } from "react";
import { useForm } from "react-hook-form";
import * as z from "zod";
import {
Accordion,
AccordionContent,
AccordionItem,
AccordionTrigger,
} from "@/components/ui/accordion";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import {
Form,
FormControl,
FormDescription,
FormField,
FormItem,
FormLabel,
FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { Switch } from "@/components/ui/switch";
import { EnumConnectorName } from "@/contracts/enums/connector";
import { DateRangeSelector } from "../../components/date-range-selector";
import { getConnectorBenefits } from "../connector-benefits";
import type { ConnectFormProps } from "../index";
const clickupConnectorFormSchema = z.object({
name: z.string().min(3, {
message: "Connector name must be at least 3 characters.",
}),
api_token: z.string().min(10, {
message: "ClickUp API Token is required and must be valid.",
}),
});
type ClickUpConnectorFormValues = z.infer<typeof clickupConnectorFormSchema>;
export const ClickUpConnectForm: FC<ConnectFormProps> = ({ onSubmit, isSubmitting }) => {
const isSubmittingRef = useRef(false);
const [startDate, setStartDate] = useState<Date | undefined>(undefined);
const [endDate, setEndDate] = useState<Date | undefined>(undefined);
const [periodicEnabled, setPeriodicEnabled] = useState(false);
const [frequencyMinutes, setFrequencyMinutes] = useState("1440");
const form = useForm<ClickUpConnectorFormValues>({
resolver: zodResolver(clickupConnectorFormSchema),
defaultValues: {
name: "ClickUp Connector",
api_token: "",
},
});
const handleSubmit = async (values: ClickUpConnectorFormValues) => {
// Prevent multiple submissions
if (isSubmittingRef.current || isSubmitting) {
return;
}
isSubmittingRef.current = true;
try {
await onSubmit({
name: values.name,
connector_type: EnumConnectorName.CLICKUP_CONNECTOR,
config: {
CLICKUP_API_TOKEN: values.api_token,
},
is_indexable: true,
last_indexed_at: null,
periodic_indexing_enabled: periodicEnabled,
indexing_frequency_minutes: periodicEnabled ? parseInt(frequencyMinutes, 10) : null,
next_scheduled_at: null,
startDate,
endDate,
periodicEnabled,
frequencyMinutes,
});
} finally {
isSubmittingRef.current = false;
}
};
return (
<div className="space-y-6 pb-6">
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 p-2 sm:p-3 flex items-center [&>svg]:relative [&>svg]:left-0 [&>svg]:top-0 [&>svg+div]:translate-y-0">
<Info className="h-3 w-3 sm:h-4 sm:w-4 shrink-0 ml-1" />
<div className="-ml-1">
<AlertTitle className="text-xs sm:text-sm">API Token Required</AlertTitle>
<AlertDescription className="text-[10px] sm:text-xs !pl-0">
You'll need a ClickUp API Token to use this connector. You can create one from{" "}
<a
href="https://app.clickup.com/settings/apps"
target="_blank"
rel="noopener noreferrer"
className="font-medium underline underline-offset-4"
>
ClickUp Settings
</a>
</AlertDescription>
</div>
</Alert>
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
<Form {...form}>
<form
id="clickup-connect-form"
onSubmit={form.handleSubmit(handleSubmit)}
className="space-y-4 sm:space-y-6"
>
<FormField
control={form.control}
name="name"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">Connector Name</FormLabel>
<FormControl>
<Input
placeholder="My ClickUp Connector"
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
A friendly name to identify this connector.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="api_token"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">ClickUp API Token</FormLabel>
<FormControl>
<Input
type="password"
placeholder="pk_..."
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
Your ClickUp API Token will be encrypted and stored securely.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
{/* Indexing Configuration */}
<div className="space-y-4 pt-4 border-t border-slate-400/20">
<h3 className="text-sm sm:text-base font-medium">Indexing Configuration</h3>
{/* Date Range Selector */}
<DateRangeSelector
startDate={startDate}
endDate={endDate}
onStartDateChange={setStartDate}
onEndDateChange={setEndDate}
/>
{/* Periodic Sync Config */}
<div className="rounded-xl bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6">
<div className="flex items-center justify-between">
<div className="space-y-1">
<h3 className="font-medium text-sm sm:text-base">Enable Periodic Sync</h3>
<p className="text-xs sm:text-sm text-muted-foreground">
Automatically re-index at regular intervals
</p>
</div>
<Switch
checked={periodicEnabled}
onCheckedChange={setPeriodicEnabled}
disabled={isSubmitting}
/>
</div>
{periodicEnabled && (
<div className="mt-4 pt-4 border-t border-slate-400/20 space-y-3">
<div className="space-y-2">
<Label htmlFor="frequency" className="text-xs sm:text-sm">
Sync Frequency
</Label>
<Select
value={frequencyMinutes}
onValueChange={setFrequencyMinutes}
disabled={isSubmitting}
>
<SelectTrigger
id="frequency"
className="w-full bg-slate-400/5 dark:bg-slate-400/5 border-slate-400/20 text-xs sm:text-sm"
>
<SelectValue placeholder="Select frequency" />
</SelectTrigger>
<SelectContent className="z-[100]">
<SelectItem value="15" className="text-xs sm:text-sm">
Every 15 minutes
</SelectItem>
<SelectItem value="60" className="text-xs sm:text-sm">
Every hour
</SelectItem>
<SelectItem value="360" className="text-xs sm:text-sm">
Every 6 hours
</SelectItem>
<SelectItem value="720" className="text-xs sm:text-sm">
Every 12 hours
</SelectItem>
<SelectItem value="1440" className="text-xs sm:text-sm">
Daily
</SelectItem>
<SelectItem value="10080" className="text-xs sm:text-sm">
Weekly
</SelectItem>
</SelectContent>
</Select>
</div>
</div>
)}
</div>
</div>
</form>
</Form>
</div>
{/* What you get section */}
{getConnectorBenefits(EnumConnectorName.CLICKUP_CONNECTOR) && (
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 px-3 sm:px-6 py-4 space-y-2">
<h4 className="text-xs sm:text-sm font-medium">What you get with ClickUp integration:</h4>
<ul className="list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
{getConnectorBenefits(EnumConnectorName.CLICKUP_CONNECTOR)?.map((benefit) => (
<li key={benefit}>{benefit}</li>
))}
</ul>
</div>
)}
{/* Documentation Section */}
<Accordion
type="single"
collapsible
className="w-full border border-border rounded-xl bg-slate-400/5 dark:bg-white/5"
>
<AccordionItem value="documentation" className="border-0">
<AccordionTrigger className="text-sm sm:text-base font-medium px-3 sm:px-6 no-underline hover:no-underline">
Documentation
</AccordionTrigger>
<AccordionContent className="px-3 sm:px-6 pb-3 sm:pb-6 space-y-6">
<div>
<h3 className="text-sm sm:text-base font-semibold mb-2">How it works</h3>
<p className="text-[10px] sm:text-xs text-muted-foreground">
The ClickUp connector uses the ClickUp API to fetch all tasks and projects that your
API token has access to within your workspace.
</p>
<ul className="mt-2 list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
<li>
For follow up indexing runs, the connector retrieves tasks that have been updated
since the last indexing attempt.
</li>
<li>
Indexing is configured to run periodically, so updates should appear in your
search results within minutes.
</li>
</ul>
</div>
<div className="space-y-4">
<div>
<h3 className="text-sm sm:text-base font-semibold mb-2">Authorization</h3>
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 mb-4">
<Info className="h-3 w-3 sm:h-4 sm:w-4" />
<AlertTitle className="text-[10px] sm:text-xs">API Token Required</AlertTitle>
<AlertDescription className="text-[9px] sm:text-[10px]">
You need a ClickUp personal API token to use this connector. The token will be
used to read your ClickUp data.
</AlertDescription>
</Alert>
<div className="space-y-4 sm:space-y-6">
<div>
<h4 className="text-[10px] sm:text-xs font-medium mb-2">
Step 1: Get Your API Token
</h4>
<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground">
<li>Log in to your ClickUp account</li>
<li>Click your avatar in the upper-right corner and select "Settings"</li>
<li>In the sidebar, click "Apps"</li>
<li>
Under "API Token", click <strong>Generate</strong> or{" "}
<strong>Regenerate</strong>
</li>
<li>Copy the generated token (it typically starts with "pk_")</li>
<li>
Paste it in the form above. You can also visit{" "}
<a
href="https://app.clickup.com/settings/apps"
target="_blank"
rel="noopener noreferrer"
className="font-medium underline underline-offset-4"
>
ClickUp API Settings
</a>{" "}
directly.
</li>
</ol>
</div>
<div>
<h4 className="text-[10px] sm:text-xs font-medium mb-2">
Step 2: Grant necessary access
</h4>
<p className="text-[10px] sm:text-xs text-muted-foreground mb-3">
The API Token will have access to all tasks and projects that your user
account can see. Make sure your account has appropriate permissions for the
workspaces you want to index.
</p>
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20">
<Info className="h-3 w-3 sm:h-4 sm:w-4" />
<AlertTitle className="text-[10px] sm:text-xs">Data Privacy</AlertTitle>
<AlertDescription className="text-[9px] sm:text-[10px]">
Only tasks, comments, and basic metadata will be indexed. ClickUp
attachments and linked files are not indexed by this connector.
</AlertDescription>
</Alert>
</div>
</div>
</div>
</div>
<div className="space-y-4">
<div>
<h3 className="text-sm sm:text-base font-semibold mb-2">Indexing</h3>
<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground mb-4">
<li>
Navigate to the Connector Dashboard and select the <strong>ClickUp</strong>{" "}
Connector.
</li>
<li>
Place your <strong>API Token</strong> in the form field.
</li>
<li>
Click <strong>Connect</strong> to establish the connection.
</li>
<li>Once connected, your ClickUp tasks will be indexed automatically.</li>
</ol>
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20">
<Info className="h-3 w-3 sm:h-4 sm:w-4" />
<AlertTitle className="text-[10px] sm:text-xs">What Gets Indexed</AlertTitle>
<AlertDescription className="text-[9px] sm:text-[10px]">
<p className="mb-2">The ClickUp connector indexes the following data:</p>
<ul className="list-disc pl-5 space-y-1">
<li>Task names and descriptions</li>
<li>Task comments and discussion threads</li>
<li>Task status, priority, and assignee information</li>
<li>Project and workspace information</li>
</ul>
</AlertDescription>
</Alert>
</div>
</div>
</AccordionContent>
</AccordionItem>
</Accordion>
</div>
);
};

View file

@ -1,448 +0,0 @@
"use client";
import { zodResolver } from "@hookform/resolvers/zod";
import { Info } from "lucide-react";
import type { FC } from "react";
import { useRef, useState } from "react";
import { useForm } from "react-hook-form";
import * as z from "zod";
import {
Accordion,
AccordionContent,
AccordionItem,
AccordionTrigger,
} from "@/components/ui/accordion";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import {
Form,
FormControl,
FormDescription,
FormField,
FormItem,
FormLabel,
FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { Switch } from "@/components/ui/switch";
import { EnumConnectorName } from "@/contracts/enums/connector";
import { DateRangeSelector } from "../../components/date-range-selector";
import { getConnectorBenefits } from "../connector-benefits";
import type { ConnectFormProps } from "../index";
const confluenceConnectorFormSchema = z.object({
name: z.string().min(3, {
message: "Connector name must be at least 3 characters.",
}),
base_url: z.string().url({ message: "Please enter a valid Confluence base URL." }),
email: z.string().email({ message: "Please enter a valid email address." }),
api_token: z.string().min(10, {
message: "Confluence API Token is required and must be valid.",
}),
});
type ConfluenceConnectorFormValues = z.infer<typeof confluenceConnectorFormSchema>;
export const ConfluenceConnectForm: FC<ConnectFormProps> = ({ onSubmit, isSubmitting }) => {
const isSubmittingRef = useRef(false);
const [startDate, setStartDate] = useState<Date | undefined>(undefined);
const [endDate, setEndDate] = useState<Date | undefined>(undefined);
const [periodicEnabled, setPeriodicEnabled] = useState(false);
const [frequencyMinutes, setFrequencyMinutes] = useState("1440");
const form = useForm<ConfluenceConnectorFormValues>({
resolver: zodResolver(confluenceConnectorFormSchema),
defaultValues: {
name: "Confluence Connector",
base_url: "",
email: "",
api_token: "",
},
});
const handleSubmit = async (values: ConfluenceConnectorFormValues) => {
// Prevent multiple submissions
if (isSubmittingRef.current || isSubmitting) {
return;
}
isSubmittingRef.current = true;
try {
await onSubmit({
name: values.name,
connector_type: EnumConnectorName.CONFLUENCE_CONNECTOR,
config: {
CONFLUENCE_BASE_URL: values.base_url,
CONFLUENCE_EMAIL: values.email,
CONFLUENCE_API_TOKEN: values.api_token,
},
is_indexable: true,
last_indexed_at: null,
periodic_indexing_enabled: periodicEnabled,
indexing_frequency_minutes: periodicEnabled ? parseInt(frequencyMinutes, 10) : null,
next_scheduled_at: null,
startDate,
endDate,
periodicEnabled,
frequencyMinutes,
});
} finally {
isSubmittingRef.current = false;
}
};
return (
<div className="space-y-6 pb-6">
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 p-2 sm:p-3 flex items-center [&>svg]:relative [&>svg]:left-0 [&>svg]:top-0 [&>svg+div]:translate-y-0">
<Info className="h-3 w-3 sm:h-4 sm:w-4 shrink-0 ml-1" />
<div className="-ml-1">
<AlertTitle className="text-xs sm:text-sm">API Token Required</AlertTitle>
<AlertDescription className="text-[10px] sm:text-xs !pl-0">
You'll need a Confluence API Token to use this connector. You can create one from{" "}
<a
href="https://id.atlassian.com/manage-profile/security/api-tokens"
target="_blank"
rel="noopener noreferrer"
className="font-medium underline underline-offset-4"
>
Atlassian Account Settings
</a>
</AlertDescription>
</div>
</Alert>
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
<Form {...form}>
<form
id="confluence-connect-form"
onSubmit={form.handleSubmit(handleSubmit)}
className="space-y-4 sm:space-y-6"
>
<FormField
control={form.control}
name="name"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">Connector Name</FormLabel>
<FormControl>
<Input
placeholder="My Confluence Connector"
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
A friendly name to identify this connector.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="base_url"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">Confluence Base URL</FormLabel>
<FormControl>
<Input
type="url"
placeholder="https://your-domain.atlassian.net"
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
The base URL of your Confluence instance (e.g.,
https://your-domain.atlassian.net).
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="email"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">Email Address</FormLabel>
<FormControl>
<Input
type="email"
placeholder="your-email@example.com"
autoComplete="email"
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
The email address associated with your Atlassian account.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="api_token"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">API Token</FormLabel>
<FormControl>
<Input
type="password"
placeholder="Your API Token"
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
Your Confluence API Token will be encrypted and stored securely.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
{/* Indexing Configuration */}
<div className="space-y-4 pt-4 border-t border-slate-400/20">
<h3 className="text-sm sm:text-base font-medium">Indexing Configuration</h3>
{/* Date Range Selector */}
<DateRangeSelector
startDate={startDate}
endDate={endDate}
onStartDateChange={setStartDate}
onEndDateChange={setEndDate}
/>
{/* Periodic Sync Config */}
<div className="rounded-xl bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6">
<div className="flex items-center justify-between">
<div className="space-y-1">
<h3 className="font-medium text-sm sm:text-base">Enable Periodic Sync</h3>
<p className="text-xs sm:text-sm text-muted-foreground">
Automatically re-index at regular intervals
</p>
</div>
<Switch
checked={periodicEnabled}
onCheckedChange={setPeriodicEnabled}
disabled={isSubmitting}
/>
</div>
{periodicEnabled && (
<div className="mt-4 pt-4 border-t border-slate-400/20 space-y-3">
<div className="space-y-2">
<Label htmlFor="frequency" className="text-xs sm:text-sm">
Sync Frequency
</Label>
<Select
value={frequencyMinutes}
onValueChange={setFrequencyMinutes}
disabled={isSubmitting}
>
<SelectTrigger
id="frequency"
className="w-full bg-slate-400/5 dark:bg-slate-400/5 border-slate-400/20 text-xs sm:text-sm"
>
<SelectValue placeholder="Select frequency" />
</SelectTrigger>
<SelectContent className="z-[100]">
<SelectItem value="15" className="text-xs sm:text-sm">
Every 15 minutes
</SelectItem>
<SelectItem value="60" className="text-xs sm:text-sm">
Every hour
</SelectItem>
<SelectItem value="360" className="text-xs sm:text-sm">
Every 6 hours
</SelectItem>
<SelectItem value="720" className="text-xs sm:text-sm">
Every 12 hours
</SelectItem>
<SelectItem value="1440" className="text-xs sm:text-sm">
Daily
</SelectItem>
<SelectItem value="10080" className="text-xs sm:text-sm">
Weekly
</SelectItem>
</SelectContent>
</Select>
</div>
</div>
)}
</div>
</div>
</form>
</Form>
</div>
{/* What you get section */}
{getConnectorBenefits(EnumConnectorName.CONFLUENCE_CONNECTOR) && (
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 px-3 sm:px-6 py-4 space-y-2">
<h4 className="text-xs sm:text-sm font-medium">
What you get with Confluence integration:
</h4>
<ul className="list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
{getConnectorBenefits(EnumConnectorName.CONFLUENCE_CONNECTOR)?.map((benefit) => (
<li key={benefit}>{benefit}</li>
))}
</ul>
</div>
)}
{/* Documentation Section */}
<Accordion
type="single"
collapsible
className="w-full border border-border rounded-xl bg-slate-400/5 dark:bg-white/5"
>
<AccordionItem value="documentation" className="border-0">
<AccordionTrigger className="text-sm sm:text-base font-medium px-3 sm:px-6 no-underline hover:no-underline">
Documentation
</AccordionTrigger>
<AccordionContent className="px-3 sm:px-6 pb-3 sm:pb-6 space-y-6">
<div>
<h3 className="text-sm sm:text-base font-semibold mb-2">How it works</h3>
<p className="text-[10px] sm:text-xs text-muted-foreground">
The Confluence connector uses the Confluence REST API to fetch all pages and
comments that your account has access to within your Confluence instance.
</p>
<ul className="mt-2 list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
<li>
For follow up indexing runs, the connector retrieves pages and comments that have
been updated since the last indexing attempt.
</li>
<li>
Indexing is configured to run periodically, so updates should appear in your
search results within minutes.
</li>
</ul>
</div>
<div className="space-y-4">
<div>
<h3 className="text-sm sm:text-base font-semibold mb-2">Authorization</h3>
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 mb-4">
<Info className="h-3 w-3 sm:h-4 sm:w-4" />
<AlertTitle className="text-[10px] sm:text-xs">
Read-Only Access is Sufficient
</AlertTitle>
<AlertDescription className="text-[9px] sm:text-[10px]">
You only need read access for this connector to work. The API Token will only be
used to read your Confluence data.
</AlertDescription>
</Alert>
<div className="space-y-4 sm:space-y-6">
<div>
<h4 className="text-[10px] sm:text-xs font-medium mb-2">
Step 1: Create an API Token
</h4>
<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground">
<li>Log in to your Atlassian account</li>
<li>
Navigate to{" "}
<a
href="https://id.atlassian.com/manage-profile/security/api-tokens"
target="_blank"
rel="noopener noreferrer"
className="font-medium underline underline-offset-4"
>
https://id.atlassian.com/manage-profile/security/api-tokens
</a>{" "}
in your browser.
</li>
<li>
Click <strong>Create API token</strong>
</li>
<li>Enter a label for your token (like "SurfSense Connector")</li>
<li>
Click <strong>Create</strong>
</li>
<li>Copy the generated token as it will only be shown once</li>
</ol>
</div>
<div>
<h4 className="text-[10px] sm:text-xs font-medium mb-2">
Step 2: Grant necessary access
</h4>
<p className="text-[10px] sm:text-xs text-muted-foreground mb-3">
The API Token will have access to all spaces and pages that your user account
can see. Make sure your account has appropriate permissions for the spaces you
want to index.
</p>
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20">
<Info className="h-3 w-3 sm:h-4 sm:w-4" />
<AlertTitle className="text-[10px] sm:text-xs">Data Privacy</AlertTitle>
<AlertDescription className="text-[9px] sm:text-[10px]">
Only pages, comments, and basic metadata will be indexed. Confluence
attachments and linked files are not indexed by this connector.
</AlertDescription>
</Alert>
</div>
</div>
</div>
</div>
<div className="space-y-4">
<div>
<h3 className="text-sm sm:text-base font-semibold mb-2">Indexing</h3>
<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground mb-4">
<li>
Navigate to the Connector Dashboard and select the <strong>Confluence</strong>{" "}
Connector.
</li>
<li>
Enter your <strong>Confluence Instance URL</strong> (e.g.,
https://yourcompany.atlassian.net)
</li>
<li>
Enter your <strong>Email Address</strong> associated with your Atlassian account
</li>
<li>
Place your <strong>API Token</strong> in the form field.
</li>
<li>
Click <strong>Connect</strong> to establish the connection.
</li>
<li>Once connected, your Confluence pages will be indexed automatically.</li>
</ol>
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20">
<Info className="h-3 w-3 sm:h-4 sm:w-4" />
<AlertTitle className="text-[10px] sm:text-xs">What Gets Indexed</AlertTitle>
<AlertDescription className="text-[9px] sm:text-[10px]">
<p className="mb-2">The Confluence connector indexes the following data:</p>
<ul className="list-disc pl-5 space-y-1">
<li>All pages from accessible spaces</li>
<li>Page content and metadata</li>
<li>Comments on pages (both footer and inline comments)</li>
<li>Page titles and descriptions</li>
</ul>
</AlertDescription>
</Alert>
</div>
</div>
</AccordionContent>
</AccordionItem>
</Accordion>
</div>
);
};

View file

@ -1,406 +0,0 @@
"use client";
import { zodResolver } from "@hookform/resolvers/zod";
import { Info } from "lucide-react";
import type { FC } from "react";
import { useRef, useState } from "react";
import { useForm } from "react-hook-form";
import * as z from "zod";
import {
Accordion,
AccordionContent,
AccordionItem,
AccordionTrigger,
} from "@/components/ui/accordion";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import {
Form,
FormControl,
FormDescription,
FormField,
FormItem,
FormLabel,
FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { Switch } from "@/components/ui/switch";
import { EnumConnectorName } from "@/contracts/enums/connector";
import { DateRangeSelector } from "../../components/date-range-selector";
import { getConnectorBenefits } from "../connector-benefits";
import type { ConnectFormProps } from "../index";
const discordConnectorFormSchema = z.object({
name: z.string().min(3, {
message: "Connector name must be at least 3 characters.",
}),
bot_token: z.string().min(10, {
message: "Discord Bot Token is required and must be valid.",
}),
});
type DiscordConnectorFormValues = z.infer<typeof discordConnectorFormSchema>;
/**
 * Connect form for the Discord connector.
 *
 * Collects a connector name and a Discord bot token, plus optional indexing
 * configuration (start/end date range and a periodic re-sync frequency), and
 * hands the assembled payload to the parent via the `onSubmit` callback.
 * Also renders inline documentation on how to create a Discord bot.
 */
export const DiscordConnectForm: FC<ConnectFormProps> = ({ onSubmit, isSubmitting }) => {
	// Synchronous double-submit guard; a ref flips immediately, unlike the
	// parent's `isSubmitting` prop which only updates on the next render.
	const isSubmittingRef = useRef(false);
	const [startDate, setStartDate] = useState<Date | undefined>(undefined);
	const [endDate, setEndDate] = useState<Date | undefined>(undefined);
	const [periodicEnabled, setPeriodicEnabled] = useState(false);
	// Frequency is kept as a string to match the Select item values; default is daily.
	const [frequencyMinutes, setFrequencyMinutes] = useState("1440");
	const form = useForm<DiscordConnectorFormValues>({
		resolver: zodResolver(discordConnectorFormSchema),
		defaultValues: {
			name: "Discord Connector",
			bot_token: "",
		},
	});
	const handleSubmit = async (values: DiscordConnectorFormValues) => {
		// Prevent multiple submissions
		if (isSubmittingRef.current || isSubmitting) {
			return;
		}
		isSubmittingRef.current = true;
		try {
			await onSubmit({
				name: values.name,
				connector_type: EnumConnectorName.DISCORD_CONNECTOR,
				config: {
					DISCORD_BOT_TOKEN: values.bot_token,
				},
				is_indexable: true,
				last_indexed_at: null,
				periodic_indexing_enabled: periodicEnabled,
				// Only persist a frequency when periodic sync is actually enabled.
				indexing_frequency_minutes: periodicEnabled ? parseInt(frequencyMinutes, 10) : null,
				next_scheduled_at: null,
				startDate,
				endDate,
				periodicEnabled,
				frequencyMinutes,
			});
		} finally {
			// Always release the guard so the form can be resubmitted after a failure.
			isSubmittingRef.current = false;
		}
	};
	return (
		<div className="space-y-6 pb-6">
			<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 p-2 sm:p-3 flex items-center [&>svg]:relative [&>svg]:left-0 [&>svg]:top-0 [&>svg+div]:translate-y-0">
				<Info className="h-3 w-3 sm:h-4 sm:w-4 shrink-0 ml-1" />
				<div className="-ml-1">
					<AlertTitle className="text-xs sm:text-sm">Bot Token Required</AlertTitle>
					<AlertDescription className="text-[10px] sm:text-xs !pl-0">
						You'll need a Discord Bot Token to use this connector. You can create one from{" "}
						<a
							href="https://discord.com/developers/applications"
							target="_blank"
							rel="noopener noreferrer"
							className="font-medium underline underline-offset-4"
						>
							Discord Developer Portal
						</a>
					</AlertDescription>
				</div>
			</Alert>
			<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
				<Form {...form}>
					<form
						id="discord-connect-form"
						onSubmit={form.handleSubmit(handleSubmit)}
						className="space-y-4 sm:space-y-6"
					>
						<FormField
							control={form.control}
							name="name"
							render={({ field }) => (
								<FormItem>
									<FormLabel className="text-xs sm:text-sm">Connector Name</FormLabel>
									<FormControl>
										<Input
											placeholder="My Discord Connector"
											className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
											disabled={isSubmitting}
											{...field}
										/>
									</FormControl>
									<FormDescription className="text-[10px] sm:text-xs">
										A friendly name to identify this connector.
									</FormDescription>
									<FormMessage />
								</FormItem>
							)}
						/>
						<FormField
							control={form.control}
							name="bot_token"
							render={({ field }) => (
								<FormItem>
									<FormLabel className="text-xs sm:text-sm">Discord Bot Token</FormLabel>
									<FormControl>
										<Input
											type="password"
											placeholder="Your Bot Token"
											className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
											disabled={isSubmitting}
											{...field}
										/>
									</FormControl>
									<FormDescription className="text-[10px] sm:text-xs">
										Your Discord Bot Token will be encrypted and stored securely.
									</FormDescription>
									<FormMessage />
								</FormItem>
							)}
						/>
						{/* Indexing Configuration */}
						<div className="space-y-4 pt-4 border-t border-slate-400/20">
							<h3 className="text-sm sm:text-base font-medium">Indexing Configuration</h3>
							{/* Date Range Selector */}
							<DateRangeSelector
								startDate={startDate}
								endDate={endDate}
								onStartDateChange={setStartDate}
								onEndDateChange={setEndDate}
							/>
							{/* Periodic Sync Config */}
							<div className="rounded-xl bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6">
								<div className="flex items-center justify-between">
									<div className="space-y-1">
										<h3 className="font-medium text-sm sm:text-base">Enable Periodic Sync</h3>
										<p className="text-xs sm:text-sm text-muted-foreground">
											Automatically re-index at regular intervals
										</p>
									</div>
									<Switch
										checked={periodicEnabled}
										onCheckedChange={setPeriodicEnabled}
										disabled={isSubmitting}
									/>
								</div>
								{periodicEnabled && (
									<div className="mt-4 pt-4 border-t border-slate-400/20 space-y-3">
										<div className="space-y-2">
											<Label htmlFor="frequency" className="text-xs sm:text-sm">
												Sync Frequency
											</Label>
											<Select
												value={frequencyMinutes}
												onValueChange={setFrequencyMinutes}
												disabled={isSubmitting}
											>
												<SelectTrigger
													id="frequency"
													className="w-full bg-slate-400/5 dark:bg-slate-400/5 border-slate-400/20 text-xs sm:text-sm"
												>
													<SelectValue placeholder="Select frequency" />
												</SelectTrigger>
												{/* Frequency options match the other connector forms. */}
												<SelectContent className="z-[100]">
													<SelectItem value="5" className="text-xs sm:text-sm">
														Every 5 minutes
													</SelectItem>
													<SelectItem value="15" className="text-xs sm:text-sm">
														Every 15 minutes
													</SelectItem>
													<SelectItem value="60" className="text-xs sm:text-sm">
														Every hour
													</SelectItem>
													<SelectItem value="360" className="text-xs sm:text-sm">
														Every 6 hours
													</SelectItem>
													<SelectItem value="720" className="text-xs sm:text-sm">
														Every 12 hours
													</SelectItem>
													<SelectItem value="1440" className="text-xs sm:text-sm">
														Daily
													</SelectItem>
													<SelectItem value="10080" className="text-xs sm:text-sm">
														Weekly
													</SelectItem>
												</SelectContent>
											</Select>
										</div>
									</div>
								)}
							</div>
						</div>
					</form>
				</Form>
			</div>
			{/* What you get section */}
			{getConnectorBenefits(EnumConnectorName.DISCORD_CONNECTOR) && (
				<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 px-3 sm:px-6 py-4 space-y-2">
					<h4 className="text-xs sm:text-sm font-medium">What you get with Discord integration:</h4>
					<ul className="list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
						{getConnectorBenefits(EnumConnectorName.DISCORD_CONNECTOR)?.map((benefit) => (
							<li key={benefit}>{benefit}</li>
						))}
					</ul>
				</div>
			)}
			{/* Documentation Section */}
			<Accordion
				type="single"
				collapsible
				className="w-full border border-border rounded-xl bg-slate-400/5 dark:bg-white/5"
			>
				<AccordionItem value="documentation" className="border-0">
					<AccordionTrigger className="text-sm sm:text-base font-medium px-3 sm:px-6 no-underline hover:no-underline">
						Documentation
					</AccordionTrigger>
					<AccordionContent className="px-3 sm:px-6 pb-3 sm:pb-6 space-y-6">
						<div>
							<h3 className="text-sm sm:text-base font-semibold mb-2">How it works</h3>
							<p className="text-[10px] sm:text-xs text-muted-foreground">
								The Discord connector uses the Discord API to fetch messages from all accessible
								channels that the bot token has access to within a server.
							</p>
							<ul className="mt-2 list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
								<li>
									For follow up indexing runs, the connector retrieves messages that have been
									updated since the last indexing attempt.
								</li>
								<li>
									Indexing is configured to run periodically, so updates should appear in your
									search results within minutes.
								</li>
							</ul>
						</div>
						<div className="space-y-4">
							<div>
								<h3 className="text-sm sm:text-base font-semibold mb-2">Authorization</h3>
								<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 mb-4">
									<Info className="h-3 w-3 sm:h-4 sm:w-4" />
									<AlertTitle className="text-[10px] sm:text-xs">Bot Token Required</AlertTitle>
									<AlertDescription className="text-[9px] sm:text-[10px]">
										You need to create a Discord application and bot to get a bot token. The bot
										needs read access to channels and messages.
									</AlertDescription>
								</Alert>
								<div className="space-y-4 sm:space-y-6">
									<div>
										<h4 className="text-[10px] sm:text-xs font-medium mb-2">
											Step 1: Create a Discord Application
										</h4>
										<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground">
											<li>
												Go to{" "}
												<a
													href="https://discord.com/developers/applications"
													target="_blank"
													rel="noopener noreferrer"
													className="font-medium underline underline-offset-4"
												>
													https://discord.com/developers/applications
												</a>
											</li>
											<li>
												Click <strong>New Application</strong>
											</li>
											<li>
												Enter an application name and click <strong>Create</strong>
											</li>
										</ol>
									</div>
									<div>
										<h4 className="text-[10px] sm:text-xs font-medium mb-2">
											Step 2: Create a Bot
										</h4>
										<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground">
											<li>
												Navigate to <strong>Bot</strong> in the sidebar
											</li>
											<li>
												Click <strong>Add Bot</strong> and confirm
											</li>
											<li>
												Under <strong>Privileged Gateway Intents</strong>, enable:
												<ul className="list-disc pl-5 mt-1 space-y-1">
													<li>
														<code className="bg-muted px-1 py-0.5 rounded">
															MESSAGE CONTENT INTENT
														</code>{" "}
														- Required to read message content
													</li>
												</ul>
											</li>
										</ol>
									</div>
									<div>
										<h4 className="text-[10px] sm:text-xs font-medium mb-2">
											Step 3: Get Bot Token and Invite Bot
										</h4>
										<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground">
											<li>
												Under <strong>Token</strong>, click <strong>Reset Token</strong> and copy
												the token
											</li>
											<li>
												Navigate to <strong>OAuth2 URL Generator</strong>
											</li>
											<li>
												Select <strong>bot</strong> scope and <strong>Read Messages</strong>{" "}
												permission
											</li>
											<li>Copy the generated URL and open it in your browser</li>
											<li>Select your server and authorize the bot</li>
										</ol>
									</div>
								</div>
							</div>
						</div>
						<div className="space-y-4">
							<div>
								<h3 className="text-sm sm:text-base font-semibold mb-2">Indexing</h3>
								<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground mb-4">
									<li>
										Navigate to the Connector Dashboard and select the <strong>Discord</strong>{" "}
										Connector.
									</li>
									<li>
										Place the <strong>Bot Token</strong> in the form field.
									</li>
									<li>
										Click <strong>Connect</strong> to establish the connection.
									</li>
									<li>Once connected, your Discord messages will be indexed automatically.</li>
								</ol>
								<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20">
									<Info className="h-3 w-3 sm:h-4 sm:w-4" />
									<AlertTitle className="text-[10px] sm:text-xs">What Gets Indexed</AlertTitle>
									<AlertDescription className="text-[9px] sm:text-[10px]">
										<p className="mb-2">The Discord connector indexes the following data:</p>
										<ul className="list-disc pl-5 space-y-1">
											<li>Messages from all accessible channels</li>
											<li>Direct messages (if bot has access)</li>
											<li>Message timestamps and metadata</li>
											<li>Thread replies and conversations</li>
										</ul>
									</AlertDescription>
								</Alert>
							</div>
						</div>
					</AccordionContent>
				</AccordionItem>
			</Accordion>
		</div>
	);
};

View file

@ -616,6 +616,9 @@ export const ElasticsearchConnectForm: FC<ConnectFormProps> = ({ onSubmit, isSub
<SelectValue placeholder="Select frequency" />
</SelectTrigger>
<SelectContent className="z-[100]">
<SelectItem value="5" className="text-xs sm:text-sm">
Every 5 minutes
</SelectItem>
<SelectItem value="15" className="text-xs sm:text-sm">
Every 15 minutes
</SelectItem>

View file

@ -269,6 +269,9 @@ export const GithubConnectForm: FC<ConnectFormProps> = ({ onSubmit, isSubmitting
<SelectValue placeholder="Select frequency" />
</SelectTrigger>
<SelectContent className="z-[100]">
<SelectItem value="5" className="text-xs sm:text-sm">
Every 5 minutes
</SelectItem>
<SelectItem value="15" className="text-xs sm:text-sm">
Every 15 minutes
</SelectItem>

View file

@ -1,447 +0,0 @@
"use client";
import { zodResolver } from "@hookform/resolvers/zod";
import { Info } from "lucide-react";
import type { FC } from "react";
import { useRef, useState } from "react";
import { useForm } from "react-hook-form";
import * as z from "zod";
import {
Accordion,
AccordionContent,
AccordionItem,
AccordionTrigger,
} from "@/components/ui/accordion";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import {
Form,
FormControl,
FormDescription,
FormField,
FormItem,
FormLabel,
FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { Switch } from "@/components/ui/switch";
import { EnumConnectorName } from "@/contracts/enums/connector";
import { DateRangeSelector } from "../../components/date-range-selector";
import { getConnectorBenefits } from "../connector-benefits";
import type { ConnectFormProps } from "../index";
// Validation rules for the Jira connector form, with each field's
// constraint extracted into its own named validator.
const jiraConnectorNameRule = z
	.string()
	.min(3, { message: "Connector name must be at least 3 characters." });
const jiraBaseUrlRule = z.string().url({ message: "Please enter a valid Jira base URL." });
const jiraEmailRule = z.string().email({ message: "Please enter a valid email address." });
const jiraApiTokenRule = z
	.string()
	.min(10, { message: "Jira API Token is required and must be valid." });
const jiraConnectorFormSchema = z.object({
	name: jiraConnectorNameRule,
	base_url: jiraBaseUrlRule,
	email: jiraEmailRule,
	api_token: jiraApiTokenRule,
});
// Shape of the submitted form values, derived directly from the schema.
type JiraConnectorFormValues = z.infer<typeof jiraConnectorFormSchema>;
export const JiraConnectForm: FC<ConnectFormProps> = ({ onSubmit, isSubmitting }) => {
const isSubmittingRef = useRef(false);
const [startDate, setStartDate] = useState<Date | undefined>(undefined);
const [endDate, setEndDate] = useState<Date | undefined>(undefined);
const [periodicEnabled, setPeriodicEnabled] = useState(false);
const [frequencyMinutes, setFrequencyMinutes] = useState("1440");
const form = useForm<JiraConnectorFormValues>({
resolver: zodResolver(jiraConnectorFormSchema),
defaultValues: {
name: "Jira Connector",
base_url: "",
email: "",
api_token: "",
},
});
const handleSubmit = async (values: JiraConnectorFormValues) => {
// Prevent multiple submissions
if (isSubmittingRef.current || isSubmitting) {
return;
}
isSubmittingRef.current = true;
try {
await onSubmit({
name: values.name,
connector_type: EnumConnectorName.JIRA_CONNECTOR,
config: {
JIRA_BASE_URL: values.base_url,
JIRA_EMAIL: values.email,
JIRA_API_TOKEN: values.api_token,
},
is_indexable: true,
last_indexed_at: null,
periodic_indexing_enabled: periodicEnabled,
indexing_frequency_minutes: periodicEnabled ? parseInt(frequencyMinutes, 10) : null,
next_scheduled_at: null,
startDate,
endDate,
periodicEnabled,
frequencyMinutes,
});
} finally {
isSubmittingRef.current = false;
}
};
return (
<div className="space-y-6 pb-6">
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 p-2 sm:p-3 flex items-center [&>svg]:relative [&>svg]:left-0 [&>svg]:top-0 [&>svg+div]:translate-y-0">
<Info className="h-3 w-3 sm:h-4 sm:w-4 shrink-0 ml-1" />
<div className="-ml-1">
<AlertTitle className="text-xs sm:text-sm">API Token Required</AlertTitle>
<AlertDescription className="text-[10px] sm:text-xs !pl-0">
You'll need a Jira API Token to use this connector. You can create one from{" "}
<a
href="https://id.atlassian.com/manage-profile/security/api-tokens"
target="_blank"
rel="noopener noreferrer"
className="font-medium underline underline-offset-4"
>
Atlassian Account Settings
</a>
</AlertDescription>
</div>
</Alert>
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
<Form {...form}>
<form
id="jira-connect-form"
onSubmit={form.handleSubmit(handleSubmit)}
className="space-y-4 sm:space-y-6"
>
<FormField
control={form.control}
name="name"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">Connector Name</FormLabel>
<FormControl>
<Input
placeholder="My Jira Connector"
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
A friendly name to identify this connector.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="base_url"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">Jira Base URL</FormLabel>
<FormControl>
<Input
type="url"
placeholder="https://your-domain.atlassian.net"
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
The base URL of your Jira instance (e.g., https://your-domain.atlassian.net).
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="email"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">Email Address</FormLabel>
<FormControl>
<Input
type="email"
placeholder="your-email@example.com"
autoComplete="email"
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
The email address associated with your Atlassian account.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="api_token"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">API Token</FormLabel>
<FormControl>
<Input
type="password"
placeholder="Your API Token"
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
Your Jira API Token will be encrypted and stored securely.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
{/* Indexing Configuration */}
<div className="space-y-4 pt-4 border-t border-slate-400/20">
<h3 className="text-sm sm:text-base font-medium">Indexing Configuration</h3>
{/* Date Range Selector */}
<DateRangeSelector
startDate={startDate}
endDate={endDate}
onStartDateChange={setStartDate}
onEndDateChange={setEndDate}
/>
{/* Periodic Sync Config */}
<div className="rounded-xl bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6">
<div className="flex items-center justify-between">
<div className="space-y-1">
<h3 className="font-medium text-sm sm:text-base">Enable Periodic Sync</h3>
<p className="text-xs sm:text-sm text-muted-foreground">
Automatically re-index at regular intervals
</p>
</div>
<Switch
checked={periodicEnabled}
onCheckedChange={setPeriodicEnabled}
disabled={isSubmitting}
/>
</div>
{periodicEnabled && (
<div className="mt-4 pt-4 border-t border-slate-400/20 space-y-3">
<div className="space-y-2">
<Label htmlFor="frequency" className="text-xs sm:text-sm">
Sync Frequency
</Label>
<Select
value={frequencyMinutes}
onValueChange={setFrequencyMinutes}
disabled={isSubmitting}
>
<SelectTrigger
id="frequency"
className="w-full bg-slate-400/5 dark:bg-slate-400/5 border-slate-400/20 text-xs sm:text-sm"
>
<SelectValue placeholder="Select frequency" />
</SelectTrigger>
<SelectContent className="z-[100]">
<SelectItem value="15" className="text-xs sm:text-sm">
Every 15 minutes
</SelectItem>
<SelectItem value="60" className="text-xs sm:text-sm">
Every hour
</SelectItem>
<SelectItem value="360" className="text-xs sm:text-sm">
Every 6 hours
</SelectItem>
<SelectItem value="720" className="text-xs sm:text-sm">
Every 12 hours
</SelectItem>
<SelectItem value="1440" className="text-xs sm:text-sm">
Daily
</SelectItem>
<SelectItem value="10080" className="text-xs sm:text-sm">
Weekly
</SelectItem>
</SelectContent>
</Select>
</div>
</div>
)}
</div>
</div>
</form>
</Form>
</div>
{/* What you get section */}
{getConnectorBenefits(EnumConnectorName.JIRA_CONNECTOR) && (
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 px-3 sm:px-6 py-4 space-y-2">
<h4 className="text-xs sm:text-sm font-medium">What you get with Jira integration:</h4>
<ul className="list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
{getConnectorBenefits(EnumConnectorName.JIRA_CONNECTOR)?.map((benefit) => (
<li key={benefit}>{benefit}</li>
))}
</ul>
</div>
)}
{/* Documentation Section */}
<Accordion
type="single"
collapsible
className="w-full border border-border rounded-xl bg-slate-400/5 dark:bg-white/5"
>
<AccordionItem value="documentation" className="border-0">
<AccordionTrigger className="text-sm sm:text-base font-medium px-3 sm:px-6 no-underline hover:no-underline">
Documentation
</AccordionTrigger>
<AccordionContent className="px-3 sm:px-6 pb-3 sm:pb-6 space-y-6">
<div>
<h3 className="text-sm sm:text-base font-semibold mb-2">How it works</h3>
<p className="text-[10px] sm:text-xs text-muted-foreground">
The Jira connector uses the Jira REST API with Basic Authentication to fetch all
issues and comments that your account has access to within your Jira instance.
</p>
<ul className="mt-2 list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
<li>
For follow up indexing runs, the connector retrieves issues and comments that have
been updated since the last indexing attempt.
</li>
<li>
Indexing is configured to run periodically, so updates should appear in your
search results within minutes.
</li>
</ul>
</div>
<div className="space-y-4">
<div>
<h3 className="text-sm sm:text-base font-semibold mb-2">Authorization</h3>
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 mb-4">
<Info className="h-3 w-3 sm:h-4 sm:w-4" />
<AlertTitle className="text-[10px] sm:text-xs">
Read-Only Access is Sufficient
</AlertTitle>
<AlertDescription className="text-[9px] sm:text-[10px]">
You only need read access for this connector to work. The API Token will only be
used to read your Jira data.
</AlertDescription>
</Alert>
<div className="space-y-4 sm:space-y-6">
<div>
<h4 className="text-[10px] sm:text-xs font-medium mb-2">
Step 1: Create an API Token
</h4>
<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground">
<li>Log in to your Atlassian account</li>
<li>
Navigate to{" "}
<a
href="https://id.atlassian.com/manage-profile/security/api-tokens"
target="_blank"
rel="noopener noreferrer"
className="font-medium underline underline-offset-4"
>
https://id.atlassian.com/manage-profile/security/api-tokens
</a>{" "}
in your browser.
</li>
<li>
Click <strong>Create API token</strong>
</li>
<li>Enter a label for your token (like "SurfSense Connector")</li>
<li>
Click <strong>Create</strong>
</li>
<li>Copy the generated token as it will only be shown once</li>
</ol>
</div>
<div>
<h4 className="text-[10px] sm:text-xs font-medium mb-2">
Step 2: Grant necessary access
</h4>
<p className="text-[10px] sm:text-xs text-muted-foreground mb-3">
The API Token will have access to all projects and issues that your user
account can see. Make sure your account has appropriate permissions for the
projects you want to index.
</p>
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20">
<Info className="h-3 w-3 sm:h-4 sm:w-4" />
<AlertTitle className="text-[10px] sm:text-xs">Data Privacy</AlertTitle>
<AlertDescription className="text-[9px] sm:text-[10px]">
Only issues, comments, and basic metadata will be indexed. Jira attachments
and linked files are not indexed by this connector.
</AlertDescription>
</Alert>
</div>
</div>
</div>
</div>
<div className="space-y-4">
<div>
<h3 className="text-sm sm:text-base font-semibold mb-2">Indexing</h3>
<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground mb-4">
<li>
Navigate to the Connector Dashboard and select the <strong>Jira</strong>{" "}
Connector.
</li>
<li>
Enter your <strong>Jira Instance URL</strong> (e.g.,
https://yourcompany.atlassian.net)
</li>
<li>
Enter your <strong>Email Address</strong> associated with your Atlassian account
</li>
<li>
Place your <strong>API Token</strong> in the form field.
</li>
<li>
Click <strong>Connect</strong> to establish the connection.
</li>
<li>Once connected, your Jira issues will be indexed automatically.</li>
</ol>
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20">
<Info className="h-3 w-3 sm:h-4 sm:w-4" />
<AlertTitle className="text-[10px] sm:text-xs">What Gets Indexed</AlertTitle>
<AlertDescription className="text-[9px] sm:text-[10px]">
<p className="mb-2">The Jira connector indexes the following data:</p>
<ul className="list-disc pl-5 space-y-1">
<li>Issue keys and summaries (e.g., PROJ-123)</li>
<li>Issue descriptions</li>
<li>Issue comments and discussion threads</li>
<li>Issue status, priority, and type information</li>
<li>Assignee and reporter information</li>
<li>Project information</li>
</ul>
</AlertDescription>
</Alert>
</div>
</div>
</AccordionContent>
</AccordionItem>
</Accordion>
</div>
);
};

View file

@ -1,396 +0,0 @@
"use client";
import { zodResolver } from "@hookform/resolvers/zod";
import { Info } from "lucide-react";
import type { FC } from "react";
import { useRef, useState } from "react";
import { useForm } from "react-hook-form";
import * as z from "zod";
import {
Accordion,
AccordionContent,
AccordionItem,
AccordionTrigger,
} from "@/components/ui/accordion";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import {
Form,
FormControl,
FormDescription,
FormField,
FormItem,
FormLabel,
FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { Switch } from "@/components/ui/switch";
import { EnumConnectorName } from "@/contracts/enums/connector";
import { DateRangeSelector } from "../../components/date-range-selector";
import { getConnectorBenefits } from "../connector-benefits";
import type { ConnectFormProps } from "../index";
// Validation rules for the Linear connector form. The API key must be both
// long enough and carry Linear's "lin_api_" prefix.
const linearConnectorNameRule = z
	.string()
	.min(3, { message: "Connector name must be at least 3 characters." });
const linearApiKeyRule = z
	.string()
	.min(10, { message: "Linear API Key is required and must be valid." })
	.regex(/^lin_api_/, { message: "Linear API Key should start with 'lin_api_'" });
const linearConnectorFormSchema = z.object({
	name: linearConnectorNameRule,
	api_key: linearApiKeyRule,
});
// Shape of the submitted form values, derived directly from the schema.
type LinearConnectorFormValues = z.infer<typeof linearConnectorFormSchema>;
export const LinearConnectForm: FC<ConnectFormProps> = ({ onSubmit, isSubmitting }) => {
const isSubmittingRef = useRef(false);
const [startDate, setStartDate] = useState<Date | undefined>(undefined);
const [endDate, setEndDate] = useState<Date | undefined>(undefined);
const [periodicEnabled, setPeriodicEnabled] = useState(false);
const [frequencyMinutes, setFrequencyMinutes] = useState("1440");
const form = useForm<LinearConnectorFormValues>({
resolver: zodResolver(linearConnectorFormSchema),
defaultValues: {
name: "Linear Connector",
api_key: "",
},
});
const handleSubmit = async (values: LinearConnectorFormValues) => {
// Prevent multiple submissions
if (isSubmittingRef.current || isSubmitting) {
return;
}
isSubmittingRef.current = true;
try {
await onSubmit({
name: values.name,
connector_type: EnumConnectorName.LINEAR_CONNECTOR,
config: {
LINEAR_API_KEY: values.api_key,
},
is_indexable: true,
last_indexed_at: null,
periodic_indexing_enabled: periodicEnabled,
indexing_frequency_minutes: periodicEnabled ? parseInt(frequencyMinutes, 10) : null,
next_scheduled_at: null,
startDate,
endDate,
periodicEnabled,
frequencyMinutes,
});
} finally {
isSubmittingRef.current = false;
}
};
return (
<div className="space-y-6 pb-6">
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 p-2 sm:p-3 flex items-center [&>svg]:relative [&>svg]:left-0 [&>svg]:top-0 [&>svg+div]:translate-y-0">
<Info className="h-3 w-3 sm:h-4 sm:w-4 shrink-0 ml-1" />
<div className="-ml-1">
<AlertTitle className="text-xs sm:text-sm">API Key Required</AlertTitle>
<AlertDescription className="text-[10px] sm:text-xs !pl-0">
You'll need a Linear API Key to use this connector. You can create one from{" "}
<a
href="https://linear.app/settings/api"
target="_blank"
rel="noopener noreferrer"
className="font-medium underline underline-offset-4"
>
Linear API Settings
</a>
</AlertDescription>
</div>
</Alert>
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
<Form {...form}>
<form
id="linear-connect-form"
onSubmit={form.handleSubmit(handleSubmit)}
className="space-y-4 sm:space-y-6"
>
<FormField
control={form.control}
name="name"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">Connector Name</FormLabel>
<FormControl>
<Input
placeholder="My Linear Connector"
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
A friendly name to identify this connector.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="api_key"
render={({ field }) => (
<FormItem>
<FormLabel className="text-xs sm:text-sm">Linear API Key</FormLabel>
<FormControl>
<Input
type="password"
placeholder="lin_api_..."
className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
disabled={isSubmitting}
{...field}
/>
</FormControl>
<FormDescription className="text-[10px] sm:text-xs">
Your Linear API Key will be encrypted and stored securely. It typically starts
with "lin_api_".
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
{/* Indexing Configuration */}
<div className="space-y-4 pt-4 border-t border-slate-400/20">
<h3 className="text-sm sm:text-base font-medium">Indexing Configuration</h3>
{/* Date Range Selector */}
<DateRangeSelector
startDate={startDate}
endDate={endDate}
onStartDateChange={setStartDate}
onEndDateChange={setEndDate}
/>
{/* Periodic Sync Config */}
<div className="rounded-xl bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6">
<div className="flex items-center justify-between">
<div className="space-y-1">
<h3 className="font-medium text-sm sm:text-base">Enable Periodic Sync</h3>
<p className="text-xs sm:text-sm text-muted-foreground">
Automatically re-index at regular intervals
</p>
</div>
<Switch
checked={periodicEnabled}
onCheckedChange={setPeriodicEnabled}
disabled={isSubmitting}
/>
</div>
{periodicEnabled && (
<div className="mt-4 pt-4 border-t border-slate-400/20 space-y-3">
<div className="space-y-2">
<Label htmlFor="frequency" className="text-xs sm:text-sm">
Sync Frequency
</Label>
<Select
value={frequencyMinutes}
onValueChange={setFrequencyMinutes}
disabled={isSubmitting}
>
<SelectTrigger
id="frequency"
className="w-full bg-slate-400/5 dark:bg-slate-400/5 border-slate-400/20 text-xs sm:text-sm"
>
<SelectValue placeholder="Select frequency" />
</SelectTrigger>
<SelectContent className="z-[100]">
<SelectItem value="15" className="text-xs sm:text-sm">
Every 15 minutes
</SelectItem>
<SelectItem value="60" className="text-xs sm:text-sm">
Every hour
</SelectItem>
<SelectItem value="360" className="text-xs sm:text-sm">
Every 6 hours
</SelectItem>
<SelectItem value="720" className="text-xs sm:text-sm">
Every 12 hours
</SelectItem>
<SelectItem value="1440" className="text-xs sm:text-sm">
Daily
</SelectItem>
<SelectItem value="10080" className="text-xs sm:text-sm">
Weekly
</SelectItem>
</SelectContent>
</Select>
</div>
</div>
)}
</div>
</div>
</form>
</Form>
</div>
{/* What you get section */}
{getConnectorBenefits(EnumConnectorName.LINEAR_CONNECTOR) && (
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 px-3 sm:px-6 py-4 space-y-2">
<h4 className="text-xs sm:text-sm font-medium">What you get with Linear integration:</h4>
<ul className="list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
{getConnectorBenefits(EnumConnectorName.LINEAR_CONNECTOR)?.map((benefit) => (
<li key={benefit}>{benefit}</li>
))}
</ul>
</div>
)}
{/* Documentation Section */}
<Accordion
type="single"
collapsible
className="w-full border border-border rounded-xl bg-slate-400/5 dark:bg-white/5"
>
<AccordionItem value="documentation" className="border-0">
<AccordionTrigger className="text-sm sm:text-base font-medium px-3 sm:px-6 no-underline hover:no-underline">
Documentation
</AccordionTrigger>
<AccordionContent className="px-3 sm:px-6 pb-3 sm:pb-6 space-y-6">
<div>
<h3 className="text-sm sm:text-base font-semibold mb-2">How it works</h3>
<p className="text-[10px] sm:text-xs text-muted-foreground">
The Linear connector uses the Linear GraphQL API to fetch all issues and comments
that the API key has access to within a workspace.
</p>
<ul className="mt-2 list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
<li>
For follow up indexing runs, the connector retrieves issues and comments that have
been updated since the last indexing attempt.
</li>
<li>
Indexing is configured to run periodically, so updates should appear in your
search results within minutes.
</li>
</ul>
</div>
<div className="space-y-4">
<div>
<h3 className="text-sm sm:text-base font-semibold mb-2">Authorization</h3>
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 mb-4">
<Info className="h-3 w-3 sm:h-4 sm:w-4" />
<AlertTitle className="text-[10px] sm:text-xs">
Read-Only Access is Sufficient
</AlertTitle>
<AlertDescription className="text-[9px] sm:text-[10px]">
You only need a read-only API key for this connector to work. This limits the
permissions to just reading your Linear data.
</AlertDescription>
</Alert>
<div className="space-y-4 sm:space-y-6">
<div>
<h4 className="text-[10px] sm:text-xs font-medium mb-2">
Step 1: Create an API key
</h4>
<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground">
<li>Log in to your Linear account</li>
<li>
Navigate to{" "}
<a
href="https://linear.app/settings/api"
target="_blank"
rel="noopener noreferrer"
className="font-medium underline underline-offset-4"
>
https://linear.app/settings/api
</a>{" "}
in your browser.
</li>
<li>Alternatively, click on your profile picture Settings API</li>
<li>
Click the <strong>+ New API key</strong> button.
</li>
<li>Enter a description for your key (like "Search Connector").</li>
<li>Select "Read-only" as the permission.</li>
<li>
Click <strong>Create</strong> to generate the API key.
</li>
<li>
Copy the generated API key that starts with 'lin_api_' as it will only be
shown once.
</li>
</ol>
</div>
<div>
<h4 className="text-[10px] sm:text-xs font-medium mb-2">
Step 2: Grant necessary access
</h4>
<p className="text-[10px] sm:text-xs text-muted-foreground mb-3">
The API key will have access to all issues and comments that your user account
can see. If you're creating the key as an admin, it will have access to all
issues in the workspace.
</p>
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20">
<Info className="h-3 w-3 sm:h-4 sm:w-4" />
<AlertTitle className="text-[10px] sm:text-xs">Data Privacy</AlertTitle>
<AlertDescription className="text-[9px] sm:text-[10px]">
Only issues and comments will be indexed. Linear attachments and linked
files are not indexed by this connector.
</AlertDescription>
</Alert>
</div>
</div>
</div>
</div>
<div className="space-y-4">
<div>
<h3 className="text-sm sm:text-base font-semibold mb-2">Indexing</h3>
<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground mb-4">
<li>
Navigate to the Connector Dashboard and select the <strong>Linear</strong>{" "}
Connector.
</li>
<li>
Place the <strong>API Key</strong> in the form field.
</li>
<li>
Click <strong>Connect</strong> to establish the connection.
</li>
<li>Once connected, your Linear issues will be indexed automatically.</li>
</ol>
<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20">
<Info className="h-3 w-3 sm:h-4 sm:w-4" />
<AlertTitle className="text-[10px] sm:text-xs">What Gets Indexed</AlertTitle>
<AlertDescription className="text-[9px] sm:text-[10px]">
<p className="mb-2">The Linear connector indexes the following data:</p>
<ul className="list-disc pl-5 space-y-1">
<li>Issue titles and identifiers (e.g., PROJ-123)</li>
<li>Issue descriptions</li>
<li>Issue comments</li>
<li>Issue status and metadata</li>
</ul>
</AlertDescription>
</Alert>
</div>
</div>
</AccordionContent>
</AccordionItem>
</Accordion>
</div>
);
};

View file

@ -209,6 +209,9 @@ export const LumaConnectForm: FC<ConnectFormProps> = ({ onSubmit, isSubmitting }
<SelectValue placeholder="Select frequency" />
</SelectTrigger>
<SelectContent className="z-[100]">
<SelectItem value="5" className="text-xs sm:text-sm">
Every 5 minutes
</SelectItem>
<SelectItem value="15" className="text-xs sm:text-sm">
Every 15 minutes
</SelectItem>

View file

@ -1,396 +0,0 @@
"use client";
import { zodResolver } from "@hookform/resolvers/zod";
import { Info } from "lucide-react";
import type { FC } from "react";
import { useRef, useState } from "react";
import { useForm } from "react-hook-form";
import * as z from "zod";
import {
Accordion,
AccordionContent,
AccordionItem,
AccordionTrigger,
} from "@/components/ui/accordion";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import {
Form,
FormControl,
FormDescription,
FormField,
FormItem,
FormLabel,
FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { Switch } from "@/components/ui/switch";
import { EnumConnectorName } from "@/contracts/enums/connector";
import { DateRangeSelector } from "../../components/date-range-selector";
import { getConnectorBenefits } from "../connector-benefits";
import type { ConnectFormProps } from "../index";
/**
 * Zod validation schema for the Notion connector form.
 * - `name`: human-friendly connector label, minimum 3 characters.
 * - `integration_token`: Notion internal integration token (typically starts
 *   with "ntn_"); the 10-character minimum is only a basic sanity check, not
 *   a format validation.
 */
const notionConnectorFormSchema = z.object({
	name: z.string().min(3, {
		message: "Connector name must be at least 3 characters.",
	}),
	integration_token: z.string().min(10, {
		message: "Notion Integration Token is required and must be valid.",
	}),
});
// Form value shape inferred from the schema above.
type NotionConnectorFormValues = z.infer<typeof notionConnectorFormSchema>;
/**
 * Connect form for the Notion connector.
 *
 * Collects a connector name and a Notion internal integration token, plus
 * optional indexing configuration (date range and periodic sync frequency),
 * and forwards everything to the parent-supplied `onSubmit` as a
 * NOTION_CONNECTOR payload. Also renders static in-page documentation
 * (setup steps and what gets indexed).
 */
export const NotionConnectForm: FC<ConnectFormProps> = ({ onSubmit, isSubmitting }) => {
	// Ref-based re-entrancy guard: blocks a second submit even before the
	// parent-controlled `isSubmitting` prop has had a chance to update.
	const isSubmittingRef = useRef(false);
	// Optional date range restricting what gets indexed.
	const [startDate, setStartDate] = useState<Date | undefined>(undefined);
	const [endDate, setEndDate] = useState<Date | undefined>(undefined);
	// Periodic re-indexing toggle and its frequency in minutes. Kept as a
	// string because it backs a <Select>; default "1440" = daily.
	const [periodicEnabled, setPeriodicEnabled] = useState(false);
	const [frequencyMinutes, setFrequencyMinutes] = useState("1440");
	const form = useForm<NotionConnectorFormValues>({
		resolver: zodResolver(notionConnectorFormSchema),
		defaultValues: {
			name: "Notion Connector",
			integration_token: "",
		},
	});
	// Builds the connector payload and delegates to the parent. The guard is
	// always released in `finally`, even if `onSubmit` rejects.
	const handleSubmit = async (values: NotionConnectorFormValues) => {
		// Prevent multiple submissions
		if (isSubmittingRef.current || isSubmitting) {
			return;
		}
		isSubmittingRef.current = true;
		try {
			await onSubmit({
				name: values.name,
				connector_type: EnumConnectorName.NOTION_CONNECTOR,
				config: {
					NOTION_INTEGRATION_TOKEN: values.integration_token,
				},
				is_indexable: true,
				last_indexed_at: null,
				periodic_indexing_enabled: periodicEnabled,
				// Frequency only applies when periodic sync is on.
				indexing_frequency_minutes: periodicEnabled ? parseInt(frequencyMinutes, 10) : null,
				next_scheduled_at: null,
				// NOTE(review): the payload carries both the snake_case
				// persistence fields above and these raw camelCase UI values —
				// presumably the parent handler consumes the camelCase ones for
				// the initial indexing run; confirm against ConnectFormProps.
				startDate,
				endDate,
				periodicEnabled,
				frequencyMinutes,
			});
		} finally {
			isSubmittingRef.current = false;
		}
	};
	return (
		<div className="space-y-6 pb-6">
			{/* Heads-up banner: where to obtain the integration token */}
			<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 p-2 sm:p-3 flex items-center [&>svg]:relative [&>svg]:left-0 [&>svg]:top-0 [&>svg+div]:translate-y-0">
				<Info className="h-3 w-3 sm:h-4 sm:w-4 shrink-0 ml-1" />
				<div className="-ml-1">
					<AlertTitle className="text-xs sm:text-sm">Integration Token Required</AlertTitle>
					<AlertDescription className="text-[10px] sm:text-xs !pl-0">
						You'll need a Notion Integration Token to use this connector. You can create one from{" "}
						<a
							href="https://www.notion.so/my-integrations"
							target="_blank"
							rel="noopener noreferrer"
							className="font-medium underline underline-offset-4"
						>
							Notion Integrations
						</a>
					</AlertDescription>
				</div>
			</Alert>
			{/* Main form: name + token + indexing configuration */}
			<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
				<Form {...form}>
					<form
						id="notion-connect-form"
						onSubmit={form.handleSubmit(handleSubmit)}
						className="space-y-4 sm:space-y-6"
					>
						<FormField
							control={form.control}
							name="name"
							render={({ field }) => (
								<FormItem>
									<FormLabel className="text-xs sm:text-sm">Connector Name</FormLabel>
									<FormControl>
										<Input
											placeholder="My Notion Connector"
											className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
											disabled={isSubmitting}
											{...field}
										/>
									</FormControl>
									<FormDescription className="text-[10px] sm:text-xs">
										A friendly name to identify this connector.
									</FormDescription>
									<FormMessage />
								</FormItem>
							)}
						/>
						<FormField
							control={form.control}
							name="integration_token"
							render={({ field }) => (
								<FormItem>
									<FormLabel className="text-xs sm:text-sm">Notion Integration Token</FormLabel>
									<FormControl>
										<Input
											type="password"
											placeholder="ntn_..."
											className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
											disabled={isSubmitting}
											{...field}
										/>
									</FormControl>
									<FormDescription className="text-[10px] sm:text-xs">
										Your Notion Integration Token will be encrypted and stored securely. It
										typically starts with "ntn_".
									</FormDescription>
									<FormMessage />
								</FormItem>
							)}
						/>
						{/* Indexing Configuration */}
						<div className="space-y-4 pt-4 border-t border-slate-400/20">
							<h3 className="text-sm sm:text-base font-medium">Indexing Configuration</h3>
							{/* Date Range Selector */}
							<DateRangeSelector
								startDate={startDate}
								endDate={endDate}
								onStartDateChange={setStartDate}
								onEndDateChange={setEndDate}
							/>
							{/* Periodic Sync Config */}
							<div className="rounded-xl bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6">
								<div className="flex items-center justify-between">
									<div className="space-y-1">
										<h3 className="font-medium text-sm sm:text-base">Enable Periodic Sync</h3>
										<p className="text-xs sm:text-sm text-muted-foreground">
											Automatically re-index at regular intervals
										</p>
									</div>
									<Switch
										checked={periodicEnabled}
										onCheckedChange={setPeriodicEnabled}
										disabled={isSubmitting}
									/>
								</div>
								{/* Frequency picker only shown once periodic sync is enabled */}
								{periodicEnabled && (
									<div className="mt-4 pt-4 border-t border-slate-400/20 space-y-3">
										<div className="space-y-2">
											<Label htmlFor="frequency" className="text-xs sm:text-sm">
												Sync Frequency
											</Label>
											<Select
												value={frequencyMinutes}
												onValueChange={setFrequencyMinutes}
												disabled={isSubmitting}
											>
												<SelectTrigger
													id="frequency"
													className="w-full bg-slate-400/5 dark:bg-slate-400/5 border-slate-400/20 text-xs sm:text-sm"
												>
													<SelectValue placeholder="Select frequency" />
												</SelectTrigger>
												<SelectContent className="z-[100]">
													<SelectItem value="15" className="text-xs sm:text-sm">
														Every 15 minutes
													</SelectItem>
													<SelectItem value="60" className="text-xs sm:text-sm">
														Every hour
													</SelectItem>
													<SelectItem value="360" className="text-xs sm:text-sm">
														Every 6 hours
													</SelectItem>
													<SelectItem value="720" className="text-xs sm:text-sm">
														Every 12 hours
													</SelectItem>
													<SelectItem value="1440" className="text-xs sm:text-sm">
														Daily
													</SelectItem>
													<SelectItem value="10080" className="text-xs sm:text-sm">
														Weekly
													</SelectItem>
												</SelectContent>
											</Select>
										</div>
									</div>
								)}
							</div>
						</div>
					</form>
				</Form>
			</div>
			{/* What you get section */}
			{getConnectorBenefits(EnumConnectorName.NOTION_CONNECTOR) && (
				<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 px-3 sm:px-6 py-4 space-y-2">
					<h4 className="text-xs sm:text-sm font-medium">What you get with Notion integration:</h4>
					<ul className="list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
						{getConnectorBenefits(EnumConnectorName.NOTION_CONNECTOR)?.map((benefit) => (
							<li key={benefit}>{benefit}</li>
						))}
					</ul>
				</div>
			)}
			{/* Documentation Section */}
			<Accordion
				type="single"
				collapsible
				className="w-full border border-border rounded-xl bg-slate-400/5 dark:bg-white/5"
			>
				<AccordionItem value="documentation" className="border-0">
					<AccordionTrigger className="text-sm sm:text-base font-medium px-3 sm:px-6 no-underline hover:no-underline">
						Documentation
					</AccordionTrigger>
					<AccordionContent className="px-3 sm:px-6 pb-3 sm:pb-6 space-y-6">
						<div>
							<h3 className="text-sm sm:text-base font-semibold mb-2">How it works</h3>
							<p className="text-[10px] sm:text-xs text-muted-foreground">
								The Notion connector uses the Notion API to fetch pages from all accessible
								workspaces that the integration token has access to.
							</p>
							<ul className="mt-2 list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
								<li>
									For follow up indexing runs, the connector retrieves pages that have been updated
									since the last indexing attempt.
								</li>
								<li>
									Indexing is configured to run periodically, so updates should appear in your
									search results within minutes.
								</li>
							</ul>
						</div>
						<div className="space-y-4">
							<div>
								<h3 className="text-sm sm:text-base font-semibold mb-2">Authorization</h3>
								<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 mb-4">
									<Info className="h-3 w-3 sm:h-4 sm:w-4" />
									<AlertTitle className="text-[10px] sm:text-xs">
										Integration Token Required
									</AlertTitle>
									<AlertDescription className="text-[9px] sm:text-[10px]">
										You need to create a Notion integration and share pages with it to get access.
										The integration needs read access to pages.
									</AlertDescription>
								</Alert>
								<div className="space-y-4 sm:space-y-6">
									<div>
										<h4 className="text-[10px] sm:text-xs font-medium mb-2">
											Step 1: Create a Notion Integration
										</h4>
										<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground">
											<li>
												Go to{" "}
												<a
													href="https://www.notion.so/my-integrations"
													target="_blank"
													rel="noopener noreferrer"
													className="font-medium underline underline-offset-4"
												>
													https://www.notion.so/my-integrations
												</a>
											</li>
											<li>
												Click <strong>+ New integration</strong>
											</li>
											<li>Enter a name for your integration (e.g., "Search Connector")</li>
											<li>Select your workspace</li>
											<li>
												Under <strong>Capabilities</strong>, enable <strong>Read content</strong>
											</li>
											<li>
												Click <strong>Submit</strong> to create the integration
											</li>
											<li>
												Copy the <strong>Internal Integration Token</strong> (starts with "ntn_")
											</li>
										</ol>
									</div>
									<div>
										<h4 className="text-[10px] sm:text-xs font-medium mb-2">
											Step 2: Share Pages with Integration
										</h4>
										<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground">
											<li>Open the Notion pages or databases you want to index</li>
											<li>
												Click the <strong></strong> (three dots) menu in the top right
											</li>
											<li>
												Select <strong>Add connections</strong> or <strong>Connections</strong>
											</li>
											<li>Search for and select your integration</li>
											<li>Repeat for all pages you want to index</li>
										</ol>
										<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 mt-3">
											<Info className="h-3 w-3 sm:h-4 sm:w-4" />
											<AlertTitle className="text-[10px] sm:text-xs">Important</AlertTitle>
											<AlertDescription className="text-[9px] sm:text-[10px]">
												The integration can only access pages that have been explicitly shared with
												it. Make sure to share all pages you want to index.
											</AlertDescription>
										</Alert>
									</div>
								</div>
							</div>
						</div>
						<div className="space-y-4">
							<div>
								<h3 className="text-sm sm:text-base font-semibold mb-2">Indexing</h3>
								<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground mb-4">
									<li>
										Navigate to the Connector Dashboard and select the <strong>Notion</strong>{" "}
										Connector.
									</li>
									<li>
										Place the <strong>Integration Token</strong> in the form field.
									</li>
									<li>
										Click <strong>Connect</strong> to establish the connection.
									</li>
									<li>Once connected, your Notion pages will be indexed automatically.</li>
								</ol>
								<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20">
									<Info className="h-3 w-3 sm:h-4 sm:w-4" />
									<AlertTitle className="text-[10px] sm:text-xs">What Gets Indexed</AlertTitle>
									<AlertDescription className="text-[9px] sm:text-[10px]">
										<p className="mb-2">The Notion connector indexes the following data:</p>
										<ul className="list-disc pl-5 space-y-1">
											<li>Page titles and content</li>
											<li>Database entries and properties</li>
											<li>Page metadata and properties</li>
											<li>Nested pages and sub-pages</li>
										</ul>
									</AlertDescription>
								</Alert>
							</div>
						</div>
					</AccordionContent>
				</AccordionItem>
			</Accordion>
		</div>
	);
};

View file

@ -1,426 +0,0 @@
"use client";
import { zodResolver } from "@hookform/resolvers/zod";
import { Info } from "lucide-react";
import type { FC } from "react";
import { useRef, useState } from "react";
import { useForm } from "react-hook-form";
import * as z from "zod";
import {
Accordion,
AccordionContent,
AccordionItem,
AccordionTrigger,
} from "@/components/ui/accordion";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import {
Form,
FormControl,
FormDescription,
FormField,
FormItem,
FormLabel,
FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
import { Switch } from "@/components/ui/switch";
import { EnumConnectorName } from "@/contracts/enums/connector";
import { DateRangeSelector } from "../../components/date-range-selector";
import { getConnectorBenefits } from "../connector-benefits";
import type { ConnectFormProps } from "../index";
/**
 * Zod validation schema for the Slack connector form.
 * - `name`: human-friendly connector label, minimum 3 characters.
 * - `bot_token`: Slack Bot User OAuth Token (typically starts with "xoxb-");
 *   the 10-character minimum is only a basic sanity check, not a format
 *   validation.
 */
const slackConnectorFormSchema = z.object({
	name: z.string().min(3, {
		message: "Connector name must be at least 3 characters.",
	}),
	bot_token: z.string().min(10, {
		message: "Slack Bot Token is required and must be valid.",
	}),
});
// Form value shape inferred from the schema above.
type SlackConnectorFormValues = z.infer<typeof slackConnectorFormSchema>;
/**
 * Connect form for the Slack connector.
 *
 * Collects a connector name and a Slack Bot User OAuth Token, plus optional
 * indexing configuration (date range and periodic sync frequency), and
 * forwards everything to the parent-supplied `onSubmit` as a SLACK_CONNECTOR
 * payload. Also renders static in-page documentation (app setup, scopes,
 * what gets indexed).
 */
export const SlackConnectForm: FC<ConnectFormProps> = ({ onSubmit, isSubmitting }) => {
	// Ref-based re-entrancy guard: blocks a second submit even before the
	// parent-controlled `isSubmitting` prop has had a chance to update.
	const isSubmittingRef = useRef(false);
	// Optional date range restricting what gets indexed.
	const [startDate, setStartDate] = useState<Date | undefined>(undefined);
	const [endDate, setEndDate] = useState<Date | undefined>(undefined);
	// Periodic re-indexing toggle and its frequency in minutes. Kept as a
	// string because it backs a <Select>; default "1440" = daily.
	const [periodicEnabled, setPeriodicEnabled] = useState(false);
	const [frequencyMinutes, setFrequencyMinutes] = useState("1440");
	const form = useForm<SlackConnectorFormValues>({
		resolver: zodResolver(slackConnectorFormSchema),
		defaultValues: {
			name: "Slack Connector",
			bot_token: "",
		},
	});
	// Builds the connector payload and delegates to the parent. The guard is
	// always released in `finally`, even if `onSubmit` rejects.
	const handleSubmit = async (values: SlackConnectorFormValues) => {
		// Prevent multiple submissions
		if (isSubmittingRef.current || isSubmitting) {
			return;
		}
		isSubmittingRef.current = true;
		try {
			await onSubmit({
				name: values.name,
				connector_type: EnumConnectorName.SLACK_CONNECTOR,
				config: {
					SLACK_BOT_TOKEN: values.bot_token,
				},
				is_indexable: true,
				last_indexed_at: null,
				periodic_indexing_enabled: periodicEnabled,
				// Frequency only applies when periodic sync is on.
				indexing_frequency_minutes: periodicEnabled ? parseInt(frequencyMinutes, 10) : null,
				next_scheduled_at: null,
				// NOTE(review): the payload carries both the snake_case
				// persistence fields above and these raw camelCase UI values —
				// presumably the parent handler consumes the camelCase ones for
				// the initial indexing run; confirm against ConnectFormProps.
				startDate,
				endDate,
				periodicEnabled,
				frequencyMinutes,
			});
		} finally {
			isSubmittingRef.current = false;
		}
	};
	return (
		<div className="space-y-6 pb-6">
			{/* Heads-up banner: where to obtain the bot token */}
			<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 p-2 sm:p-3 flex items-center [&>svg]:relative [&>svg]:left-0 [&>svg]:top-0 [&>svg+div]:translate-y-0">
				<Info className="h-3 w-3 sm:h-4 sm:w-4 shrink-0 ml-1" />
				<div className="-ml-1">
					<AlertTitle className="text-xs sm:text-sm">Bot User OAuth Token Required</AlertTitle>
					<AlertDescription className="text-[10px] sm:text-xs !pl-0">
						You'll need a Slack Bot User OAuth Token to use this connector. You can create a Slack
						app and get the token from{" "}
						<a
							href="https://api.slack.com/apps"
							target="_blank"
							rel="noopener noreferrer"
							className="font-medium underline underline-offset-4"
						>
							Slack API Dashboard
						</a>
					</AlertDescription>
				</div>
			</Alert>
			{/* Main form: name + token + indexing configuration */}
			<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
				<Form {...form}>
					<form
						id="slack-connect-form"
						onSubmit={form.handleSubmit(handleSubmit)}
						className="space-y-4 sm:space-y-6"
					>
						<FormField
							control={form.control}
							name="name"
							render={({ field }) => (
								<FormItem>
									<FormLabel className="text-xs sm:text-sm">Connector Name</FormLabel>
									<FormControl>
										<Input
											placeholder="My Slack Connector"
											className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
											disabled={isSubmitting}
											{...field}
										/>
									</FormControl>
									<FormDescription className="text-[10px] sm:text-xs">
										A friendly name to identify this connector.
									</FormDescription>
									<FormMessage />
								</FormItem>
							)}
						/>
						<FormField
							control={form.control}
							name="bot_token"
							render={({ field }) => (
								<FormItem>
									<FormLabel className="text-xs sm:text-sm">Slack Bot User OAuth Token</FormLabel>
									<FormControl>
										<Input
											type="password"
											placeholder="xoxb-..."
											className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
											disabled={isSubmitting}
											{...field}
										/>
									</FormControl>
									<FormDescription className="text-[10px] sm:text-xs">
										Your Bot User OAuth Token will be encrypted and stored securely. It typically
										starts with "xoxb-".
									</FormDescription>
									<FormMessage />
								</FormItem>
							)}
						/>
						{/* Indexing Configuration */}
						<div className="space-y-4 pt-4 border-t border-slate-400/20">
							<h3 className="text-sm sm:text-base font-medium">Indexing Configuration</h3>
							{/* Date Range Selector */}
							<DateRangeSelector
								startDate={startDate}
								endDate={endDate}
								onStartDateChange={setStartDate}
								onEndDateChange={setEndDate}
							/>
							{/* Periodic Sync Config */}
							<div className="rounded-xl bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6">
								<div className="flex items-center justify-between">
									<div className="space-y-1">
										<h3 className="font-medium text-sm sm:text-base">Enable Periodic Sync</h3>
										<p className="text-xs sm:text-sm text-muted-foreground">
											Automatically re-index at regular intervals
										</p>
									</div>
									<Switch
										checked={periodicEnabled}
										onCheckedChange={setPeriodicEnabled}
										disabled={isSubmitting}
									/>
								</div>
								{/* Frequency picker only shown once periodic sync is enabled */}
								{periodicEnabled && (
									<div className="mt-4 pt-4 border-t border-slate-400/20 space-y-3">
										<div className="space-y-2">
											<Label htmlFor="frequency" className="text-xs sm:text-sm">
												Sync Frequency
											</Label>
											<Select
												value={frequencyMinutes}
												onValueChange={setFrequencyMinutes}
												disabled={isSubmitting}
											>
												<SelectTrigger
													id="frequency"
													className="w-full bg-slate-400/5 dark:bg-slate-400/5 border-slate-400/20 text-xs sm:text-sm"
												>
													<SelectValue placeholder="Select frequency" />
												</SelectTrigger>
												<SelectContent className="z-[100]">
													<SelectItem value="15" className="text-xs sm:text-sm">
														Every 15 minutes
													</SelectItem>
													<SelectItem value="60" className="text-xs sm:text-sm">
														Every hour
													</SelectItem>
													<SelectItem value="360" className="text-xs sm:text-sm">
														Every 6 hours
													</SelectItem>
													<SelectItem value="720" className="text-xs sm:text-sm">
														Every 12 hours
													</SelectItem>
													<SelectItem value="1440" className="text-xs sm:text-sm">
														Daily
													</SelectItem>
													<SelectItem value="10080" className="text-xs sm:text-sm">
														Weekly
													</SelectItem>
												</SelectContent>
											</Select>
										</div>
									</div>
								)}
							</div>
						</div>
					</form>
				</Form>
			</div>
			{/* What you get section */}
			{getConnectorBenefits(EnumConnectorName.SLACK_CONNECTOR) && (
				<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 px-3 sm:px-6 py-4 space-y-2">
					<h4 className="text-xs sm:text-sm font-medium">What you get with Slack integration:</h4>
					<ul className="list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
						{getConnectorBenefits(EnumConnectorName.SLACK_CONNECTOR)?.map((benefit) => (
							<li key={benefit}>{benefit}</li>
						))}
					</ul>
				</div>
			)}
			{/* Documentation Section */}
			<Accordion
				type="single"
				collapsible
				className="w-full border border-border rounded-xl bg-slate-400/5 dark:bg-white/5"
			>
				<AccordionItem value="documentation" className="border-0">
					<AccordionTrigger className="text-sm sm:text-base font-medium px-3 sm:px-6 no-underline hover:no-underline">
						Documentation
					</AccordionTrigger>
					<AccordionContent className="px-3 sm:px-6 pb-3 sm:pb-6 space-y-6">
						<div>
							<h3 className="text-sm sm:text-base font-semibold mb-2">How it works</h3>
							<p className="text-[10px] sm:text-xs text-muted-foreground">
								The Slack connector uses the Slack Web API to fetch messages from all accessible
								channels that the bot token has access to within a workspace.
							</p>
							<ul className="mt-2 list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
								<li>
									For follow up indexing runs, the connector retrieves messages that have been
									updated since the last indexing attempt.
								</li>
								<li>
									Indexing is configured to run periodically, so updates should appear in your
									search results within minutes.
								</li>
							</ul>
						</div>
						<div className="space-y-4">
							<div>
								<h3 className="text-sm sm:text-base font-semibold mb-2">Authorization</h3>
								<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 mb-4">
									<Info className="h-3 w-3 sm:h-4 sm:w-4" />
									<AlertTitle className="text-[10px] sm:text-xs">
										Bot User OAuth Token Required
									</AlertTitle>
									<AlertDescription className="text-[9px] sm:text-[10px]">
										You need to create a Slack app and install it to your workspace to get a Bot
										User OAuth Token. The bot needs read access to channels and messages.
									</AlertDescription>
								</Alert>
								<div className="space-y-4 sm:space-y-6">
									<div>
										<h4 className="text-[10px] sm:text-xs font-medium mb-2">
											Step 1: Create a Slack App
										</h4>
										<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground">
											<li>
												Go to{" "}
												<a
													href="https://api.slack.com/apps"
													target="_blank"
													rel="noopener noreferrer"
													className="font-medium underline underline-offset-4"
												>
													https://api.slack.com/apps
												</a>
											</li>
											<li>
												Click <strong>Create New App</strong> and choose "From scratch"
											</li>
											<li>Enter an app name and select your workspace</li>
											<li>
												Click <strong>Create App</strong>
											</li>
										</ol>
									</div>
									<div>
										<h4 className="text-[10px] sm:text-xs font-medium mb-2">
											Step 2: Configure Bot Scopes
										</h4>
										<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground">
											<li>
												Navigate to <strong>OAuth & Permissions</strong> in the sidebar
											</li>
											<li>
												Under <strong>Bot Token Scopes</strong>, add the following scopes:
												<ul className="list-disc pl-5 mt-1 space-y-1">
													<li>
														<code className="bg-muted px-1 py-0.5 rounded">channels:read</code> -
														View basic information about public channels
													</li>
													<li>
														<code className="bg-muted px-1 py-0.5 rounded">channels:history</code> -
														View messages in public channels
													</li>
													<li>
														<code className="bg-muted px-1 py-0.5 rounded">groups:read</code> - View
														basic information about private channels
													</li>
													<li>
														<code className="bg-muted px-1 py-0.5 rounded">groups:history</code> -
														View messages in private channels
													</li>
													<li>
														<code className="bg-muted px-1 py-0.5 rounded">im:read</code> - View
														basic information about direct messages
													</li>
													<li>
														<code className="bg-muted px-1 py-0.5 rounded">im:history</code> - View
														messages in direct messages
													</li>
												</ul>
											</li>
										</ol>
									</div>
									<div>
										<h4 className="text-[10px] sm:text-xs font-medium mb-2">
											Step 3: Install App to Workspace
										</h4>
										<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground">
											<li>
												Go to <strong>Install App</strong> in the sidebar
											</li>
											<li>
												Click <strong>Install to Workspace</strong>
											</li>
											<li>
												Review the permissions and click <strong>Allow</strong>
											</li>
											<li>
												Copy the <strong>Bot User OAuth Token</strong> from the "OAuth &
												Permissions" page (starts with "xoxb-")
											</li>
										</ol>
									</div>
								</div>
							</div>
						</div>
						<div className="space-y-4">
							<div>
								<h3 className="text-sm sm:text-base font-semibold mb-2">Indexing</h3>
								<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground mb-4">
									<li>
										Navigate to the Connector Dashboard and select the <strong>Slack</strong>{" "}
										Connector.
									</li>
									<li>
										Place the <strong>Bot User OAuth Token</strong> in the form field.
									</li>
									<li>
										Click <strong>Connect</strong> to establish the connection.
									</li>
									<li>Once connected, your Slack messages will be indexed automatically.</li>
								</ol>
								<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20">
									<Info className="h-3 w-3 sm:h-4 sm:w-4" />
									<AlertTitle className="text-[10px] sm:text-xs">What Gets Indexed</AlertTitle>
									<AlertDescription className="text-[9px] sm:text-[10px]">
										<p className="mb-2">The Slack connector indexes the following data:</p>
										<ul className="list-disc pl-5 space-y-1">
											<li>Messages from all accessible channels (public and private)</li>
											<li>Direct messages (if bot has access)</li>
											<li>Message timestamps and metadata</li>
											<li>Thread replies and conversations</li>
										</ul>
									</AlertDescription>
								</Alert>
							</div>
						</div>
					</AccordionContent>
				</AccordionItem>
			</Accordion>
		</div>
	);
};

View file

@ -2,18 +2,11 @@ import type { FC } from "react";
import { BaiduSearchApiConnectForm } from "./components/baidu-search-api-connect-form";
import { BookStackConnectForm } from "./components/bookstack-connect-form";
import { CirclebackConnectForm } from "./components/circleback-connect-form";
import { ClickUpConnectForm } from "./components/clickup-connect-form";
import { ConfluenceConnectForm } from "./components/confluence-connect-form";
import { DiscordConnectForm } from "./components/discord-connect-form";
import { ElasticsearchConnectForm } from "./components/elasticsearch-connect-form";
import { GithubConnectForm } from "./components/github-connect-form";
import { JiraConnectForm } from "./components/jira-connect-form";
import { LinearConnectForm } from "./components/linear-connect-form";
import { LinkupApiConnectForm } from "./components/linkup-api-connect-form";
import { LumaConnectForm } from "./components/luma-connect-form";
import { NotionConnectForm } from "./components/notion-connect-form";
import { SearxngConnectForm } from "./components/searxng-connect-form";
import { SlackConnectForm } from "./components/slack-connect-form";
import { TavilyApiConnectForm } from "./components/tavily-api-connect-form";
export interface ConnectFormProps {
@ -51,26 +44,12 @@ export function getConnectFormComponent(connectorType: string): ConnectFormCompo
return LinkupApiConnectForm;
case "BAIDU_SEARCH_API":
return BaiduSearchApiConnectForm;
case "LINEAR_CONNECTOR":
return LinearConnectForm;
case "ELASTICSEARCH_CONNECTOR":
return ElasticsearchConnectForm;
case "SLACK_CONNECTOR":
return SlackConnectForm;
case "DISCORD_CONNECTOR":
return DiscordConnectForm;
case "NOTION_CONNECTOR":
return NotionConnectForm;
case "CONFLUENCE_CONNECTOR":
return ConfluenceConnectForm;
case "BOOKSTACK_CONNECTOR":
return BookStackConnectForm;
case "GITHUB_CONNECTOR":
return GithubConnectForm;
case "JIRA_CONNECTOR":
return JiraConnectForm;
case "CLICKUP_CONNECTOR":
return ClickUpConnectForm;
case "LUMA_CONNECTOR":
return LumaConnectForm;
case "CIRCLEBACK_CONNECTOR":

View file

@ -1,6 +1,6 @@
"use client";
import { KeyRound } from "lucide-react";
import { Info, KeyRound } from "lucide-react";
import type { FC } from "react";
import { useEffect, useState } from "react";
import { Input } from "@/components/ui/input";
@ -16,17 +16,22 @@ export const ClickUpConfig: FC<ClickUpConfigProps> = ({
onConfigChange,
onNameChange,
}) => {
// Check if this is an OAuth connector (has access_token or _token_encrypted flag)
const isOAuth = !!(connector.config?.access_token || connector.config?._token_encrypted);
const [apiToken, setApiToken] = useState<string>(
(connector.config?.CLICKUP_API_TOKEN as string) || ""
);
const [name, setName] = useState<string>(connector.name || "");
// Update API token and name when connector changes
// Update values when connector changes (only for legacy connectors)
useEffect(() => {
const token = (connector.config?.CLICKUP_API_TOKEN as string) || "";
setApiToken(token);
if (!isOAuth) {
const token = (connector.config?.CLICKUP_API_TOKEN as string) || "";
setApiToken(token);
}
setName(connector.name || "");
}, [connector.config, connector.name]);
}, [connector.config, connector.name, isOAuth]);
const handleApiTokenChange = (value: string) => {
setApiToken(value);
@ -45,6 +50,32 @@ export const ClickUpConfig: FC<ClickUpConfigProps> = ({
}
};
// For OAuth connectors, show simple info message
if (isOAuth) {
const workspaceName = (connector.config?.workspace_name as string) || "Unknown Workspace";
return (
<div className="space-y-6">
{/* OAuth Info */}
<div className="rounded-xl border border-border bg-primary/5 p-4 flex items-start gap-3">
<div className="flex h-8 w-8 items-center justify-center rounded-lg bg-primary/10 shrink-0 mt-0.5">
<Info className="size-4" />
</div>
<div className="text-xs sm:text-sm">
<p className="font-medium text-xs sm:text-sm">Connected via OAuth</p>
<p className="text-muted-foreground mt-1 text-[10px] sm:text-sm">
Workspace:{" "}
<code className="bg-muted px-1 py-0.5 rounded text-inherit">{workspaceName}</code>
</p>
<p className="text-muted-foreground mt-1 text-[10px] sm:text-sm">
To update your connection, reconnect this connector.
</p>
</div>
</div>
</div>
);
}
// For legacy API token connectors, show the form
return (
<div className="space-y-6">
{/* Connector Name */}
@ -82,7 +113,8 @@ export const ClickUpConfig: FC<ClickUpConfigProps> = ({
className="border-slate-400/20 focus-visible:border-slate-400/40"
/>
<p className="text-[10px] sm:text-xs text-muted-foreground">
Update your ClickUp API Token if needed.
Update your ClickUp API Token if needed. For better security and automatic token
refresh, consider disconnecting and reconnecting using OAuth 2.0.
</p>
</div>
</div>

View file

@ -1,6 +1,6 @@
"use client";
import { KeyRound } from "lucide-react";
import { Info, KeyRound } from "lucide-react";
import type { FC } from "react";
import { useEffect, useState } from "react";
import { Input } from "@/components/ui/input";
@ -16,6 +16,9 @@ export const ConfluenceConfig: FC<ConfluenceConfigProps> = ({
onConfigChange,
onNameChange,
}) => {
// Check if this is an OAuth connector (has access_token or _token_encrypted flag)
const isOAuth = !!(connector.config?.access_token || connector.config?._token_encrypted);
const [baseUrl, setBaseUrl] = useState<string>(
(connector.config?.CONFLUENCE_BASE_URL as string) || ""
);
@ -25,16 +28,18 @@ export const ConfluenceConfig: FC<ConfluenceConfigProps> = ({
);
const [name, setName] = useState<string>(connector.name || "");
// Update values when connector changes
// Update values when connector changes (only for legacy connectors)
useEffect(() => {
const url = (connector.config?.CONFLUENCE_BASE_URL as string) || "";
const emailVal = (connector.config?.CONFLUENCE_EMAIL as string) || "";
const token = (connector.config?.CONFLUENCE_API_TOKEN as string) || "";
setBaseUrl(url);
setEmail(emailVal);
setApiToken(token);
if (!isOAuth) {
const url = (connector.config?.CONFLUENCE_BASE_URL as string) || "";
const emailVal = (connector.config?.CONFLUENCE_EMAIL as string) || "";
const token = (connector.config?.CONFLUENCE_API_TOKEN as string) || "";
setBaseUrl(url);
setEmail(emailVal);
setApiToken(token);
}
setName(connector.name || "");
}, [connector.config, connector.name]);
}, [connector.config, connector.name, isOAuth]);
const handleBaseUrlChange = (value: string) => {
setBaseUrl(value);
@ -73,6 +78,35 @@ export const ConfluenceConfig: FC<ConfluenceConfigProps> = ({
}
};
// For OAuth connectors, show simple info message
if (isOAuth) {
const siteUrl =
(connector.config?.base_url as string) || (connector.config?.site_url as string) || "Unknown";
return (
<div className="space-y-6">
{/* OAuth Info */}
<div className="rounded-xl border border-border bg-primary/5 p-4 flex items-start gap-3">
<div className="flex h-8 w-8 items-center justify-center rounded-lg bg-primary/10 shrink-0 mt-0.5">
<Info className="size-4" />
</div>
<div className="text-xs sm:text-sm">
<p className="font-medium text-xs sm:text-sm">Connected via OAuth</p>
<p className="text-muted-foreground mt-1 text-[10px] sm:text-sm">
This connector is authenticated using OAuth 2.0. Your Confluence instance is:
</p>
<p className="text-muted-foreground mt-1 text-[10px] sm:text-sm">
<code className="bg-muted px-1 py-0.5 rounded text-inherit">{siteUrl}</code>
</p>
<p className="text-muted-foreground mt-1 text-[10px] sm:text-sm">
To update your connection, reconnect this connector.
</p>
</div>
</div>
</div>
);
}
// For legacy API token connectors, show the form
return (
<div className="space-y-6">
{/* Connector Name */}

View file

@ -1,88 +1,26 @@
"use client";
import { KeyRound } from "lucide-react";
import { Info } from "lucide-react";
import type { FC } from "react";
import { useEffect, useState } from "react";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import type { ConnectorConfigProps } from "../index";
export interface DiscordConfigProps extends ConnectorConfigProps {
onNameChange?: (name: string) => void;
}
export const DiscordConfig: FC<DiscordConfigProps> = ({
connector,
onConfigChange,
onNameChange,
}) => {
const [botToken, setBotToken] = useState<string>(
(connector.config?.DISCORD_BOT_TOKEN as string) || ""
);
const [name, setName] = useState<string>(connector.name || "");
// Update bot token and name when connector changes
useEffect(() => {
const token = (connector.config?.DISCORD_BOT_TOKEN as string) || "";
setBotToken(token);
setName(connector.name || "");
}, [connector.config, connector.name]);
const handleBotTokenChange = (value: string) => {
setBotToken(value);
if (onConfigChange) {
onConfigChange({
...connector.config,
DISCORD_BOT_TOKEN: value,
});
}
};
const handleNameChange = (value: string) => {
setName(value);
if (onNameChange) {
onNameChange(value);
}
};
export const DiscordConfig: FC<DiscordConfigProps> = () => {
return (
<div className="space-y-6">
{/* Connector Name */}
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
<div className="space-y-2">
<Label className="text-xs sm:text-sm">Connector Name</Label>
<Input
value={name}
onChange={(e) => handleNameChange(e.target.value)}
placeholder="My Discord Connector"
className="border-slate-400/20 focus-visible:border-slate-400/40"
/>
<p className="text-[10px] sm:text-xs text-muted-foreground">
A friendly name to identify this connector.
</p>
<div className="rounded-xl border border-border bg-primary/5 p-4 flex items-start gap-3">
<div className="flex h-8 w-8 items-center justify-center rounded-lg bg-primary/10 shrink-0 mt-0.5">
<Info className="size-4" />
</div>
</div>
{/* Configuration */}
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
<div className="space-y-1 sm:space-y-2">
<h3 className="font-medium text-sm sm:text-base">Configuration</h3>
</div>
<div className="space-y-2">
<Label className="flex items-center gap-2 text-xs sm:text-sm">
<KeyRound className="h-4 w-4" />
Discord Bot Token
</Label>
<Input
type="password"
value={botToken}
onChange={(e) => handleBotTokenChange(e.target.value)}
placeholder="Your Bot Token"
className="border-slate-400/20 focus-visible:border-slate-400/40"
/>
<p className="text-[10px] sm:text-xs text-muted-foreground">
Update your Discord Bot Token if needed.
<div className="text-xs sm:text-sm">
<p className="font-medium text-xs sm:text-sm">Add Bot to Servers</p>
<p className="text-muted-foreground mt-1 text-[10px] sm:text-sm">
Before indexing, make sure the Discord bot has been added to the servers (guilds) you
want to index. The bot can only access messages from servers it's been added to. Use the
OAuth authorization flow to add the bot to your servers.
</p>
</div>
</div>

View file

@ -1,6 +1,6 @@
"use client";
import { KeyRound } from "lucide-react";
import { Info, KeyRound } from "lucide-react";
import type { FC } from "react";
import { useEffect, useState } from "react";
import { Input } from "@/components/ui/input";
@ -12,6 +12,9 @@ export interface JiraConfigProps extends ConnectorConfigProps {
}
export const JiraConfig: FC<JiraConfigProps> = ({ connector, onConfigChange, onNameChange }) => {
// Check if this is an OAuth connector (has access_token or _token_encrypted flag)
const isOAuth = !!(connector.config?.access_token || connector.config?._token_encrypted);
const [baseUrl, setBaseUrl] = useState<string>((connector.config?.JIRA_BASE_URL as string) || "");
const [email, setEmail] = useState<string>((connector.config?.JIRA_EMAIL as string) || "");
const [apiToken, setApiToken] = useState<string>(
@ -19,16 +22,18 @@ export const JiraConfig: FC<JiraConfigProps> = ({ connector, onConfigChange, onN
);
const [name, setName] = useState<string>(connector.name || "");
// Update values when connector changes
// Update values when connector changes (only for legacy connectors)
useEffect(() => {
const url = (connector.config?.JIRA_BASE_URL as string) || "";
const emailVal = (connector.config?.JIRA_EMAIL as string) || "";
const token = (connector.config?.JIRA_API_TOKEN as string) || "";
setBaseUrl(url);
setEmail(emailVal);
setApiToken(token);
if (!isOAuth) {
const url = (connector.config?.JIRA_BASE_URL as string) || "";
const emailVal = (connector.config?.JIRA_EMAIL as string) || "";
const token = (connector.config?.JIRA_API_TOKEN as string) || "";
setBaseUrl(url);
setEmail(emailVal);
setApiToken(token);
}
setName(connector.name || "");
}, [connector.config, connector.name]);
}, [connector.config, connector.name, isOAuth]);
const handleBaseUrlChange = (value: string) => {
setBaseUrl(value);
@ -67,6 +72,34 @@ export const JiraConfig: FC<JiraConfigProps> = ({ connector, onConfigChange, onN
}
};
// For OAuth connectors, show simple info message
if (isOAuth) {
const baseUrl = (connector.config?.base_url as string) || "Unknown";
return (
<div className="space-y-6">
{/* OAuth Info */}
<div className="rounded-xl border border-border bg-primary/5 p-4 flex items-start gap-3">
<div className="flex h-8 w-8 items-center justify-center rounded-lg bg-primary/10 shrink-0 mt-0.5">
<Info className="size-4" />
</div>
<div className="text-xs sm:text-sm">
<p className="font-medium text-xs sm:text-sm">Connected via OAuth</p>
<p className="text-muted-foreground mt-1 text-[10px] sm:text-sm">
This connector is authenticated using OAuth 2.0. Your Jira instance is:
</p>
<p className="text-muted-foreground mt-1 text-[10px] sm:text-sm">
<code className="bg-muted px-1 py-0.5 rounded text-inherit">{baseUrl}</code>
</p>
<p className="text-muted-foreground mt-1 text-[10px] sm:text-sm">
To update your connection, reconnect this connector.
</p>
</div>
</div>
</div>
);
}
// For legacy API token connectors, show the form
return (
<div className="space-y-6">
{/* Connector Name */}

View file

@ -1,89 +0,0 @@
"use client";
import { KeyRound } from "lucide-react";
import type { FC } from "react";
import { useEffect, useState } from "react";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import type { ConnectorConfigProps } from "../index";
export interface LinearConfigProps extends ConnectorConfigProps {
onNameChange?: (name: string) => void;
}
export const LinearConfig: FC<LinearConfigProps> = ({
connector,
onConfigChange,
onNameChange,
}) => {
const [apiKey, setApiKey] = useState<string>((connector.config?.LINEAR_API_KEY as string) || "");
const [name, setName] = useState<string>(connector.name || "");
// Update API key and name when connector changes
useEffect(() => {
const key = (connector.config?.LINEAR_API_KEY as string) || "";
setApiKey(key);
setName(connector.name || "");
}, [connector.config, connector.name]);
const handleApiKeyChange = (value: string) => {
setApiKey(value);
if (onConfigChange) {
onConfigChange({
...connector.config,
LINEAR_API_KEY: value,
});
}
};
const handleNameChange = (value: string) => {
setName(value);
if (onNameChange) {
onNameChange(value);
}
};
return (
<div className="space-y-6">
{/* Connector Name */}
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
<div className="space-y-2">
<Label className="text-xs sm:text-sm">Connector Name</Label>
<Input
value={name}
onChange={(e) => handleNameChange(e.target.value)}
placeholder="My Linear Connector"
className="border-slate-400/20 focus-visible:border-slate-400/40"
/>
<p className="text-[10px] sm:text-xs text-muted-foreground">
A friendly name to identify this connector.
</p>
</div>
</div>
{/* Configuration */}
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
<div className="space-y-1 sm:space-y-2">
<h3 className="font-medium text-sm sm:text-base">Configuration</h3>
</div>
<div className="space-y-2">
<Label className="flex items-center gap-2 text-xs sm:text-sm">
<KeyRound className="h-4 w-4" />
Linear API Key
</Label>
<Input
type="password"
value={apiKey}
onChange={(e) => handleApiKeyChange(e.target.value)}
placeholder="Begins with lin_api_..."
className="border-slate-400/20 focus-visible:border-slate-400/40"
/>
<p className="text-[10px] sm:text-xs text-muted-foreground">
Update your Linear API Key if needed.
</p>
</div>
</div>
</div>
);
};

View file

@ -1,91 +0,0 @@
"use client";
import { KeyRound } from "lucide-react";
import type { FC } from "react";
import { useEffect, useState } from "react";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import type { ConnectorConfigProps } from "../index";
export interface NotionConfigProps extends ConnectorConfigProps {
onNameChange?: (name: string) => void;
}
export const NotionConfig: FC<NotionConfigProps> = ({
connector,
onConfigChange,
onNameChange,
}) => {
const [integrationToken, setIntegrationToken] = useState<string>(
(connector.config?.NOTION_INTEGRATION_TOKEN as string) || ""
);
const [name, setName] = useState<string>(connector.name || "");
// Update integration token and name when connector changes
useEffect(() => {
const token = (connector.config?.NOTION_INTEGRATION_TOKEN as string) || "";
setIntegrationToken(token);
setName(connector.name || "");
}, [connector.config, connector.name]);
const handleIntegrationTokenChange = (value: string) => {
setIntegrationToken(value);
if (onConfigChange) {
onConfigChange({
...connector.config,
NOTION_INTEGRATION_TOKEN: value,
});
}
};
const handleNameChange = (value: string) => {
setName(value);
if (onNameChange) {
onNameChange(value);
}
};
return (
<div className="space-y-6">
{/* Connector Name */}
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
<div className="space-y-2">
<Label className="text-xs sm:text-sm">Connector Name</Label>
<Input
value={name}
onChange={(e) => handleNameChange(e.target.value)}
placeholder="My Notion Connector"
className="border-slate-400/20 focus-visible:border-slate-400/40"
/>
<p className="text-[10px] sm:text-xs text-muted-foreground">
A friendly name to identify this connector.
</p>
</div>
</div>
{/* Configuration */}
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
<div className="space-y-1 sm:space-y-2">
<h3 className="font-medium text-sm sm:text-base">Configuration</h3>
</div>
<div className="space-y-2">
<Label className="flex items-center gap-2 text-xs sm:text-sm">
<KeyRound className="h-4 w-4" />
Notion Integration Token
</Label>
<Input
type="password"
value={integrationToken}
onChange={(e) => handleIntegrationTokenChange(e.target.value)}
placeholder="Begins with secret_..."
className="border-slate-400/20 focus-visible:border-slate-400/40"
/>
<p className="text-[10px] sm:text-xs text-muted-foreground">
Update your Notion Integration Token if needed.
</p>
</div>
</div>
</div>
);
};

View file

@ -1,84 +1,27 @@
"use client";
import { KeyRound } from "lucide-react";
import { Info } from "lucide-react";
import type { FC } from "react";
import { useEffect, useState } from "react";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import type { ConnectorConfigProps } from "../index";
export interface SlackConfigProps extends ConnectorConfigProps {
onNameChange?: (name: string) => void;
}
export const SlackConfig: FC<SlackConfigProps> = ({ connector, onConfigChange, onNameChange }) => {
const [botToken, setBotToken] = useState<string>(
(connector.config?.SLACK_BOT_TOKEN as string) || ""
);
const [name, setName] = useState<string>(connector.name || "");
// Update bot token and name when connector changes
useEffect(() => {
const token = (connector.config?.SLACK_BOT_TOKEN as string) || "";
setBotToken(token);
setName(connector.name || "");
}, [connector.config, connector.name]);
const handleBotTokenChange = (value: string) => {
setBotToken(value);
if (onConfigChange) {
onConfigChange({
...connector.config,
SLACK_BOT_TOKEN: value,
});
}
};
const handleNameChange = (value: string) => {
setName(value);
if (onNameChange) {
onNameChange(value);
}
};
export const SlackConfig: FC<SlackConfigProps> = () => {
return (
<div className="space-y-6">
{/* Connector Name */}
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
<div className="space-y-2">
<Label className="text-xs sm:text-sm">Connector Name</Label>
<Input
value={name}
onChange={(e) => handleNameChange(e.target.value)}
placeholder="My Slack Connector"
className="border-slate-400/20 focus-visible:border-slate-400/40"
/>
<p className="text-[10px] sm:text-xs text-muted-foreground">
A friendly name to identify this connector.
</p>
<div className="rounded-xl border border-border bg-primary/5 p-4 flex items-start gap-3">
<div className="flex h-8 w-8 items-center justify-center rounded-lg bg-primary/10 shrink-0 mt-0.5">
<Info className="size-4" />
</div>
</div>
{/* Configuration */}
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
<div className="space-y-1 sm:space-y-2">
<h3 className="font-medium text-sm sm:text-base">Configuration</h3>
</div>
<div className="space-y-2">
<Label className="flex items-center gap-2 text-xs sm:text-sm">
<KeyRound className="h-4 w-4" />
Slack Bot User OAuth Token
</Label>
<Input
type="password"
value={botToken}
onChange={(e) => handleBotTokenChange(e.target.value)}
placeholder="Begins with xoxb-..."
className="border-slate-400/20 focus-visible:border-slate-400/40"
/>
<p className="text-[10px] sm:text-xs text-muted-foreground">
Update your Bot User OAuth Token if needed.
<div className="text-xs sm:text-sm">
<p className="font-medium text-xs sm:text-sm">Add Bot to Channels</p>
<p className="text-muted-foreground mt-1 text-[10px] sm:text-sm">
Before indexing, add the SurfSense bot to each channel you want to index. The bot can
only access messages from channels it's been added to. Type{" "}
<code className="bg-muted px-1 py-0.5 rounded text-[9px]">/invite @SurfSense</code> in
any channel to add it.
</p>
</div>
</div>

View file

@ -12,10 +12,8 @@ import { ElasticsearchConfig } from "./components/elasticsearch-config";
import { GithubConfig } from "./components/github-config";
import { GoogleDriveConfig } from "./components/google-drive-config";
import { JiraConfig } from "./components/jira-config";
import { LinearConfig } from "./components/linear-config";
import { LinkupApiConfig } from "./components/linkup-api-config";
import { LumaConfig } from "./components/luma-config";
import { NotionConfig } from "./components/notion-config";
import { SearxngConfig } from "./components/searxng-config";
import { SlackConfig } from "./components/slack-config";
import { TavilyApiConfig } from "./components/tavily-api-config";
@ -46,8 +44,6 @@ export function getConnectorConfigComponent(
return LinkupApiConfig;
case "BAIDU_SEARCH_API":
return BaiduSearchApiConfig;
case "LINEAR_CONNECTOR":
return LinearConfig;
case "WEBCRAWLER_CONNECTOR":
return WebcrawlerConfig;
case "ELASTICSEARCH_CONNECTOR":
@ -56,8 +52,6 @@ export function getConnectorConfigComponent(
return SlackConfig;
case "DISCORD_CONNECTOR":
return DiscordConfig;
case "NOTION_CONNECTOR":
return NotionConfig;
case "CONFLUENCE_CONNECTOR":
return ConfluenceConfig;
case "BOOKSTACK_CONNECTOR":
@ -72,7 +66,7 @@ export function getConnectorConfigComponent(
return LumaConfig;
case "CIRCLEBACK_CONNECTOR":
return CirclebackConfig;
// OAuth connectors (Gmail, Calendar, Airtable) and others don't need special config UI
// OAuth connectors (Gmail, Calendar, Airtable, Notion) and others don't need special config UI
default:
return null;
}

View file

@ -51,16 +51,9 @@ export const ConnectorConnectView: FC<ConnectorConnectViewProps> = ({
SEARXNG_API: "searxng-connect-form",
LINKUP_API: "linkup-api-connect-form",
BAIDU_SEARCH_API: "baidu-search-api-connect-form",
LINEAR_CONNECTOR: "linear-connect-form",
ELASTICSEARCH_CONNECTOR: "elasticsearch-connect-form",
SLACK_CONNECTOR: "slack-connect-form",
DISCORD_CONNECTOR: "discord-connect-form",
NOTION_CONNECTOR: "notion-connect-form",
CONFLUENCE_CONNECTOR: "confluence-connect-form",
BOOKSTACK_CONNECTOR: "bookstack-connect-form",
GITHUB_CONNECTOR: "github-connect-form",
JIRA_CONNECTOR: "jira-connect-form",
CLICKUP_CONNECTOR: "clickup-connect-form",
LUMA_CONNECTOR: "luma-connect-form",
CIRCLEBACK_CONNECTOR: "circleback-connect-form",
};

View file

@ -59,6 +59,7 @@ export const ConnectorEditView: FC<ConnectorEditViewProps> = ({
const [isScrolled, setIsScrolled] = useState(false);
const [hasMoreContent, setHasMoreContent] = useState(false);
const [showDisconnectConfirm, setShowDisconnectConfirm] = useState(false);
const [isQuickIndexing, setIsQuickIndexing] = useState(false);
const scrollContainerRef = useRef<HTMLDivElement>(null);
const checkScrollState = useCallback(() => {
@ -94,6 +95,13 @@ export const ConnectorEditView: FC<ConnectorEditViewProps> = ({
};
}, [checkScrollState]);
// Reset local quick indexing state when indexing completes
useEffect(() => {
if (!isIndexing) {
setIsQuickIndexing(false);
}
}, [isIndexing]);
const handleDisconnectClick = () => {
setShowDisconnectConfirm(true);
};
@ -107,6 +115,13 @@ export const ConnectorEditView: FC<ConnectorEditViewProps> = ({
setShowDisconnectConfirm(false);
};
const handleQuickIndex = useCallback(() => {
if (onQuickIndex) {
setIsQuickIndexing(true);
onQuickIndex();
}
}, [onQuickIndex]);
return (
<div className="flex-1 flex flex-col min-h-0 overflow-hidden">
{/* Fixed Header */}
@ -128,12 +143,14 @@ export const ConnectorEditView: FC<ConnectorEditViewProps> = ({
{/* Connector header */}
<div className="flex flex-col sm:flex-row items-start sm:items-center gap-4 mb-6">
<div className="flex items-center gap-4 flex-1 w-full sm:w-auto">
<div className="flex h-14 w-14 items-center justify-center rounded-xl bg-primary/10 border border-primary/20 flex-shrink-0">
<div className="flex gap-4 flex-1 w-full sm:w-auto">
<div className="flex h-14 w-14 items-center justify-center rounded-xl bg-primary/10 border border-primary/20 shrink-0">
{getConnectorIcon(connector.connector_type, "size-7")}
</div>
<div className="flex-1 min-w-0">
<h2 className="text-xl sm:text-2xl font-semibold tracking-tight">{connector.name}</h2>
<h2 className="text-xl sm:text-2xl font-semibold tracking-tight text-wrap whitespace-normal wrap-break-word">
{connector.name}
</h2>
<p className="text-xs sm:text-base text-muted-foreground mt-1">
Manage your connector settings and sync configuration
</p>
@ -146,11 +163,11 @@ export const ConnectorEditView: FC<ConnectorEditViewProps> = ({
<Button
variant="secondary"
size="sm"
onClick={onQuickIndex}
disabled={isIndexing || isSaving || isDisconnecting}
onClick={handleQuickIndex}
disabled={isQuickIndexing || isIndexing || isSaving || isDisconnecting}
className="text-xs sm:text-sm bg-slate-400/10 dark:bg-white/10 hover:bg-slate-400/20 dark:hover:bg-white/20 border-slate-400/20 dark:border-white/20 w-full sm:w-auto"
>
{isIndexing ? (
{isQuickIndexing || isIndexing ? (
<>
<RefreshCw className="mr-2 h-4 w-4 animate-spin" />
Indexing...

View file

@ -1,13 +1,16 @@
"use client";
import { ArrowLeft, Check, Info, Loader2 } from "lucide-react";
import { useSearchParams } from "next/navigation";
import { type FC, useCallback, useEffect, useMemo, useRef, useState } from "react";
import { Button } from "@/components/ui/button";
import type { SearchSourceConnector } from "@/contracts/types/connector.types";
import { getConnectorTypeDisplay } from "@/lib/connectors/utils";
import { cn } from "@/lib/utils";
import { DateRangeSelector } from "../../components/date-range-selector";
import { PeriodicSyncConfig } from "../../components/periodic-sync-config";
import type { IndexingConfigState } from "../../constants/connector-constants";
import { type IndexingConfigState, OAUTH_CONNECTORS } from "../../constants/connector-constants";
import { getConnectorDisplayName } from "../../tabs/all-connectors-tab";
import { getConnectorConfigComponent } from "../index";
interface IndexingConfigurationViewProps {
@ -43,6 +46,9 @@ export const IndexingConfigurationView: FC<IndexingConfigurationViewProps> = ({
onStartIndexing,
onSkip,
}) => {
const searchParams = useSearchParams();
const isFromOAuth = searchParams.get("view") === "configure";
// Get connector-specific config component
const ConnectorConfigComponent = useMemo(
() => (connector ? getConnectorConfigComponent(connector.connector_type) : null),
@ -85,34 +91,43 @@ export const IndexingConfigurationView: FC<IndexingConfigurationViewProps> = ({
};
}, [checkScrollState]);
const authConnector = OAUTH_CONNECTORS.find((c) => c.connectorType === connector?.connector_type);
return (
<div className="flex-1 flex flex-col min-h-0 overflow-hidden">
{/* Fixed Header */}
<div
className={cn(
"flex-shrink-0 px-6 sm:px-12 pt-8 sm:pt-10 transition-shadow duration-200 relative z-10",
"shrink-0 px-6 sm:px-12 pt-8 sm:pt-10 transition-shadow duration-200 relative z-10",
isScrolled && "shadow-sm"
)}
>
{/* Back button */}
<button
type="button"
onClick={onSkip}
className="flex items-center gap-2 text-xs sm:text-sm text-muted-foreground hover:text-foreground mb-6 w-fit"
>
<ArrowLeft className="size-4" />
Back to connectors
</button>
{/* Back button - only show if not from OAuth */}
{!isFromOAuth && (
<button
type="button"
onClick={onSkip}
className="flex items-center gap-2 text-xs sm:text-sm text-muted-foreground hover:text-foreground mb-6 w-fit"
>
<ArrowLeft className="size-4" />
Back to connectors
</button>
)}
{/* Success header */}
<div className="flex items-center gap-4 mb-6">
<div className="flex gap-4 mb-6">
<div className="flex h-14 w-14 items-center justify-center rounded-xl bg-green-500/10 border border-green-500/20">
<Check className="size-7 text-green-500" />
</div>
<div>
<h2 className="text-xl sm:text-2xl font-semibold tracking-tight">
{config.connectorTitle} Connected!
</h2>
<div className="flex flex-col">
<span className="text-xl sm:text-2xl font-semibold tracking-tight text-wrap whitespace-normal wrap-break-word">
{getConnectorTypeDisplay(connector?.connector_type || "")} Connected !
</span>{" "}
<span className="text-xl sm:text-xl font-semibold text-muted-foreground tracking-tight text-wrap whitespace-normal wrap-break-word">
{getConnectorDisplayName(connector?.name || "")}
</span>
</div>
<p className="text-xs sm:text-base text-muted-foreground mt-1">
Configure when to start syncing your data
</p>
@ -187,15 +202,7 @@ export const IndexingConfigurationView: FC<IndexingConfigurationViewProps> = ({
</div>
{/* Fixed Footer - Action buttons */}
<div className="flex-shrink-0 flex items-center justify-between px-6 sm:px-12 py-6 bg-muted">
<Button
variant="ghost"
onClick={onSkip}
disabled={isStartingIndexing}
className="text-xs sm:text-sm"
>
Skip for now
</Button>
<div className="flex-shrink-0 flex items-center justify-end px-6 sm:px-12 py-6 bg-muted">
<Button
onClick={onStartIndexing}
disabled={isStartingIndexing}

View file

@ -30,6 +30,55 @@ export const OAUTH_CONNECTORS = [
connectorType: EnumConnectorName.AIRTABLE_CONNECTOR,
authEndpoint: "/api/v1/auth/airtable/connector/add/",
},
{
id: "notion-connector",
title: "Notion",
description: "Search your Notion pages",
connectorType: EnumConnectorName.NOTION_CONNECTOR,
authEndpoint: "/api/v1/auth/notion/connector/add/",
},
{
id: "linear-connector",
title: "Linear",
description: "Search issues & projects",
connectorType: EnumConnectorName.LINEAR_CONNECTOR,
authEndpoint: "/api/v1/auth/linear/connector/add/",
},
{
id: "slack-connector",
title: "Slack",
description: "Search Slack messages",
connectorType: EnumConnectorName.SLACK_CONNECTOR,
authEndpoint: "/api/v1/auth/slack/connector/add/",
},
{
id: "discord-connector",
title: "Discord",
description: "Search Discord messages",
connectorType: EnumConnectorName.DISCORD_CONNECTOR,
authEndpoint: "/api/v1/auth/discord/connector/add/",
},
{
id: "jira-connector",
title: "Jira",
description: "Search Jira issues",
connectorType: EnumConnectorName.JIRA_CONNECTOR,
authEndpoint: "/api/v1/auth/jira/connector/add/",
},
{
id: "confluence-connector",
title: "Confluence",
description: "Search documentation",
connectorType: EnumConnectorName.CONFLUENCE_CONNECTOR,
authEndpoint: "/api/v1/auth/confluence/connector/add/",
},
{
id: "clickup-connector",
title: "ClickUp",
description: "Search ClickUp tasks",
connectorType: EnumConnectorName.CLICKUP_CONNECTOR,
authEndpoint: "/api/v1/auth/clickup/connector/add/",
},
] as const;
// Content Sources (tools that extract and import content from external sources)
@ -50,30 +99,6 @@ export const CRAWLERS = [
// Non-OAuth Connectors (redirect to old connector config pages)
export const OTHER_CONNECTORS = [
{
id: "slack-connector",
title: "Slack",
description: "Search Slack messages",
connectorType: EnumConnectorName.SLACK_CONNECTOR,
},
{
id: "discord-connector",
title: "Discord",
description: "Search Discord messages",
connectorType: EnumConnectorName.DISCORD_CONNECTOR,
},
{
id: "notion-connector",
title: "Notion",
description: "Search Notion pages",
connectorType: EnumConnectorName.NOTION_CONNECTOR,
},
{
id: "confluence-connector",
title: "Confluence",
description: "Search documentation",
connectorType: EnumConnectorName.CONFLUENCE_CONNECTOR,
},
{
id: "bookstack-connector",
title: "BookStack",
@ -86,24 +111,6 @@ export const OTHER_CONNECTORS = [
description: "Search repositories",
connectorType: EnumConnectorName.GITHUB_CONNECTOR,
},
{
id: "linear-connector",
title: "Linear",
description: "Search issues & projects",
connectorType: EnumConnectorName.LINEAR_CONNECTOR,
},
{
id: "jira-connector",
title: "Jira",
description: "Search Jira issues",
connectorType: EnumConnectorName.JIRA_CONNECTOR,
},
{
id: "clickup-connector",
title: "ClickUp",
description: "Search ClickUp tasks",
connectorType: EnumConnectorName.CLICKUP_CONNECTOR,
},
{
id: "luma-connector",
title: "Luma",
@ -143,7 +150,7 @@ export const OTHER_CONNECTORS = [
{
id: "circleback-connector",
title: "Circleback",
description: "Receive meeting notes via webhook",
description: "Receive meeting notes, transcripts",
connectorType: EnumConnectorName.CIRCLEBACK_CONNECTOR,
},
] as const;

Some files were not shown because too many files have changed in this diff Show more