mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-05-01 03:46:25 +02:00
Merge pull request #670 from AnishSarkar22/fix/connector
feat: Clickup OAuth Connector, fixed Airtable OAuth Connector
This commit is contained in:
commit
fabbae2b48
20 changed files with 1277 additions and 561 deletions
|
|
@ -117,6 +117,11 @@ class Config:
|
|||
DISCORD_REDIRECT_URI = os.getenv("DISCORD_REDIRECT_URI")
|
||||
DISCORD_BOT_TOKEN = os.getenv("DISCORD_BOT_TOKEN")
|
||||
|
||||
# ClickUp OAuth
|
||||
CLICKUP_CLIENT_ID = os.getenv("CLICKUP_CLIENT_ID")
|
||||
CLICKUP_CLIENT_SECRET = os.getenv("CLICKUP_CLIENT_SECRET")
|
||||
CLICKUP_REDIRECT_URI = os.getenv("CLICKUP_REDIRECT_URI")
|
||||
|
||||
# LLM instances are now managed per-user through the LLMConfig system
|
||||
# Legacy environment variables removed in favor of user-specific configurations
|
||||
|
||||
|
|
|
|||
|
|
@ -294,6 +294,12 @@ class AirtableConnector:
|
|||
Tuple of (records, error_message)
|
||||
"""
|
||||
try:
|
||||
# Validate date strings before parsing
|
||||
if not start_date or start_date.lower() in ("undefined", "null", "none"):
|
||||
return [], "Invalid start_date: date string is required"
|
||||
if not end_date or end_date.lower() in ("undefined", "null", "none"):
|
||||
return [], "Invalid end_date: date string is required"
|
||||
|
||||
# Parse and validate dates
|
||||
start_dt = isoparse(start_date)
|
||||
end_dt = isoparse(end_date)
|
||||
|
|
|
|||
175
surfsense_backend/app/connectors/airtable_history.py
Normal file
175
surfsense_backend/app/connectors/airtable_history.py
Normal file
|
|
@ -0,0 +1,175 @@
|
|||
"""
|
||||
Airtable OAuth Connector.
|
||||
|
||||
Handles OAuth-based authentication and token refresh for Airtable API access.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.future import select
|
||||
|
||||
from app.config import config
|
||||
from app.connectors.airtable_connector import AirtableConnector
|
||||
from app.db import SearchSourceConnector
|
||||
from app.routes.airtable_add_connector_route import refresh_airtable_token
|
||||
from app.schemas.airtable_auth_credentials import AirtableAuthCredentialsBase
|
||||
from app.utils.oauth_security import TokenEncryption
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AirtableHistoryConnector:
    """
    Airtable connector with OAuth support and automatic token refresh.

    This connector uses OAuth 2.0 access tokens to authenticate with the
    Airtable API. It automatically refreshes expired tokens when needed.
    """

    def __init__(
        self,
        session: AsyncSession,
        connector_id: int,
        credentials: AirtableAuthCredentialsBase | None = None,
    ):
        """
        Initialize the AirtableHistoryConnector with auto-refresh capability.

        Args:
            session: Database session for updating connector
            connector_id: Connector ID for direct updates
            credentials: Airtable OAuth credentials (optional, will be loaded
                from DB if not provided)
        """
        self._session = session
        self._connector_id = connector_id
        self._credentials = credentials
        # Cached API client; invalidated whenever the access token changes.
        self._airtable_connector: AirtableConnector | None = None

    async def _fetch_connector_row(self) -> SearchSourceConnector | None:
        """Load this connector's database row, or None if it does not exist."""
        result = await self._session.execute(
            select(SearchSourceConnector).filter(
                SearchSourceConnector.id == self._connector_id
            )
        )
        return result.scalars().first()

    @staticmethod
    def _decrypt_token_fields(config_data: dict) -> None:
        """
        Decrypt the access/refresh token fields of *config_data* in place.

        Only acts when the config is marked encrypted (``_token_encrypted``)
        and a SECRET_KEY is configured; otherwise the fields are left as-is.
        Shared by the initial-load and post-refresh paths, which previously
        duplicated this logic.
        """
        if config_data.get("_token_encrypted", False) and config.SECRET_KEY:
            token_encryption = TokenEncryption(config.SECRET_KEY)
            for field in ("access_token", "refresh_token"):
                if config_data.get(field):
                    config_data[field] = token_encryption.decrypt_token(
                        config_data[field]
                    )

    async def _get_valid_token(self) -> str:
        """
        Get valid Airtable access token, refreshing if needed.

        Returns:
            Valid access token

        Raises:
            ValueError: If credentials are missing or invalid
            Exception: If token refresh fails
        """
        # Load credentials from DB if not provided
        if self._credentials is None:
            connector = await self._fetch_connector_row()
            if not connector:
                raise ValueError(f"Connector {self._connector_id} not found")

            config_data = connector.config.copy()

            # Decrypt credentials if they are encrypted
            if config_data.get("_token_encrypted", False) and config.SECRET_KEY:
                try:
                    self._decrypt_token_fields(config_data)
                    logger.info(
                        f"Decrypted Airtable credentials for connector {self._connector_id}"
                    )
                except Exception as e:
                    logger.error(
                        f"Failed to decrypt Airtable credentials for connector {self._connector_id}: {e!s}"
                    )
                    raise ValueError(
                        f"Failed to decrypt Airtable credentials: {e!s}"
                    ) from e

            try:
                self._credentials = AirtableAuthCredentialsBase.from_dict(config_data)
            except Exception as e:
                raise ValueError(f"Invalid Airtable credentials: {e!s}") from e

        # Check if token is expired and refreshable
        if self._credentials.is_expired and self._credentials.is_refreshable:
            try:
                logger.info(
                    f"Airtable token expired for connector {self._connector_id}, refreshing..."
                )

                # Get connector for refresh
                connector = await self._fetch_connector_row()
                if not connector:
                    raise RuntimeError(
                        f"Connector {self._connector_id} not found; cannot refresh token."
                    )

                # Refresh token, then reload (possibly encrypted) credentials
                connector = await refresh_airtable_token(self._session, connector)
                config_data = connector.config.copy()
                self._decrypt_token_fields(config_data)
                self._credentials = AirtableAuthCredentialsBase.from_dict(config_data)

                # Invalidate cached connector so it's recreated with new token
                self._airtable_connector = None

                logger.info(
                    f"Successfully refreshed Airtable token for connector {self._connector_id}"
                )
            except Exception as e:
                logger.error(
                    f"Failed to refresh Airtable token for connector {self._connector_id}: {e!s}"
                )
                raise Exception(
                    f"Failed to refresh Airtable OAuth credentials: {e!s}"
                ) from e

        return self._credentials.access_token

    async def _get_connector(self) -> AirtableConnector:
        """
        Get or create AirtableConnector with valid token.

        Returns:
            AirtableConnector instance
        """
        if self._airtable_connector is None:
            # Ensure we have valid credentials (this will refresh if needed)
            await self._get_valid_token()
            # Use the credentials object which is now guaranteed to be valid
            if not self._credentials:
                raise ValueError("Credentials not loaded")
            self._airtable_connector = AirtableConnector(self._credentials)
        return self._airtable_connector
|
||||
349
surfsense_backend/app/connectors/clickup_history.py
Normal file
349
surfsense_backend/app/connectors/clickup_history.py
Normal file
|
|
@ -0,0 +1,349 @@
|
|||
"""
|
||||
ClickUp History Module
|
||||
|
||||
A module for retrieving data from ClickUp with OAuth support and backward compatibility.
|
||||
Allows fetching tasks from workspaces and lists with automatic token refresh.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.future import select
|
||||
|
||||
from app.config import config
|
||||
from app.connectors.clickup_connector import ClickUpConnector
|
||||
from app.db import SearchSourceConnector
|
||||
from app.routes.clickup_add_connector_route import refresh_clickup_token
|
||||
from app.schemas.clickup_auth_credentials import ClickUpAuthCredentialsBase
|
||||
from app.utils.oauth_security import TokenEncryption
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ClickUpHistoryConnector:
    """
    Class for retrieving data from ClickUp with OAuth support and backward
    compatibility with legacy personal API tokens.
    """

    def __init__(
        self,
        session: AsyncSession,
        connector_id: int,
        credentials: ClickUpAuthCredentialsBase | None = None,
        api_token: str | None = None,  # For backward compatibility
    ):
        """
        Initialize the ClickUpHistoryConnector.

        Args:
            session: Database session for token refresh
            connector_id: Connector ID for direct updates
            credentials: ClickUp OAuth credentials (optional, will be loaded
                from DB if not provided)
            api_token: Legacy API token for backward compatibility (optional)
        """
        self._session = session
        self._connector_id = connector_id
        self._credentials = credentials
        self._api_token = api_token  # Legacy API token
        # FIX: if OAuth credentials are injected directly, mark OAuth mode
        # immediately — otherwise _get_valid_token() skips the DB-load branch
        # (which is where the flag used to be set) and falls through to
        # returning self._api_token, i.e. None.
        self._use_oauth = credentials is not None
        self._use_legacy = api_token is not None
        self._clickup_client: ClickUpConnector | None = None

    async def _fetch_connector_row(self) -> SearchSourceConnector | None:
        """Load this connector's database row, or None if it does not exist."""
        result = await self._session.execute(
            select(SearchSourceConnector).filter(
                SearchSourceConnector.id == self._connector_id
            )
        )
        return result.scalars().first()

    @staticmethod
    def _decrypt_token_fields(config_data: dict) -> bool:
        """
        Decrypt the access/refresh token fields of *config_data* in place.

        Returns True if decryption was applied, False if the config is not
        marked encrypted or no SECRET_KEY is configured.
        """
        if not (config_data.get("_token_encrypted", False) and config.SECRET_KEY):
            return False
        token_encryption = TokenEncryption(config.SECRET_KEY)
        for field in ("access_token", "refresh_token"):
            if config_data.get(field):
                config_data[field] = token_encryption.decrypt_token(
                    config_data[field]
                )
        return True

    async def _get_valid_token(self) -> str:
        """
        Get valid ClickUp access token, refreshing if needed.
        For legacy API tokens, returns the token directly.

        Returns:
            Valid access token or API token

        Raises:
            ValueError: If credentials are missing or invalid
            Exception: If token refresh fails
        """
        # If using legacy API token, return it directly
        if self._use_legacy and self._api_token:
            return self._api_token

        # Load credentials from DB if not provided
        if self._credentials is None:
            connector = await self._fetch_connector_row()
            if not connector:
                raise ValueError(f"Connector {self._connector_id} not found")

            config_data = connector.config.copy()

            # FIX: detect OAuth by the presence of an access_token rather than
            # the `_token_encrypted` marker — legacy API tokens may also be
            # stored encrypted (handled below), and must not be routed down
            # the OAuth path, where from_dict() would reject them.
            is_oauth = config_data.get("access_token") is not None
            has_legacy_token = config_data.get("CLICKUP_API_TOKEN") is not None

            if is_oauth:
                # OAuth 2.0 authentication
                self._use_oauth = True
                # Decrypt credentials if they are encrypted
                try:
                    if self._decrypt_token_fields(config_data):
                        logger.info(
                            f"Decrypted ClickUp OAuth credentials for connector {self._connector_id}"
                        )
                except Exception as e:
                    logger.error(
                        f"Failed to decrypt ClickUp OAuth credentials for connector {self._connector_id}: {e!s}"
                    )
                    raise ValueError(
                        f"Failed to decrypt ClickUp OAuth credentials: {e!s}"
                    ) from e

                try:
                    self._credentials = ClickUpAuthCredentialsBase.from_dict(
                        config_data
                    )
                except Exception as e:
                    raise ValueError(f"Invalid ClickUp OAuth credentials: {e!s}") from e
            elif has_legacy_token:
                # Legacy API token authentication (backward compatibility)
                self._use_legacy = True
                self._api_token = config_data.get("CLICKUP_API_TOKEN")

                # Decrypt token if it's encrypted (legacy tokens might be encrypted)
                token_encrypted = config_data.get("_token_encrypted", False)
                if token_encrypted and config.SECRET_KEY and self._api_token:
                    try:
                        token_encryption = TokenEncryption(config.SECRET_KEY)
                        self._api_token = token_encryption.decrypt_token(
                            self._api_token
                        )
                        logger.info(
                            f"Decrypted legacy ClickUp API token for connector {self._connector_id}"
                        )
                    except Exception as e:
                        logger.warning(
                            f"Failed to decrypt legacy ClickUp API token for connector {self._connector_id}: {e!s}. "
                            "Trying to use token as-is (might be unencrypted)."
                        )
                        # Continue with token as-is - might be unencrypted legacy token

                if not self._api_token:
                    raise ValueError("ClickUp API token not found in connector config")

                # Return legacy token directly (no refresh needed)
                return self._api_token
            else:
                raise ValueError(
                    "ClickUp credentials not found in connector config (neither OAuth nor API token)"
                )

        # Check if token is expired and refreshable (only for OAuth)
        if (
            self._use_oauth
            and self._credentials.is_expired
            and self._credentials.is_refreshable
        ):
            try:
                logger.info(
                    f"ClickUp token expired for connector {self._connector_id}, refreshing..."
                )

                # Get connector for refresh
                connector = await self._fetch_connector_row()
                if not connector:
                    raise RuntimeError(
                        f"Connector {self._connector_id} not found; cannot refresh token."
                    )

                # Refresh token, then reload (possibly encrypted) credentials
                connector = await refresh_clickup_token(self._session, connector)
                config_data = connector.config.copy()
                self._decrypt_token_fields(config_data)
                self._credentials = ClickUpAuthCredentialsBase.from_dict(config_data)

                # Invalidate cached client so it's recreated with new token
                self._clickup_client = None

                logger.info(
                    f"Successfully refreshed ClickUp token for connector {self._connector_id}"
                )
            except Exception as e:
                logger.error(
                    f"Failed to refresh ClickUp token for connector {self._connector_id}: {e!s}"
                )
                raise Exception(
                    f"Failed to refresh ClickUp OAuth credentials: {e!s}"
                ) from e

        if self._use_oauth:
            return self._credentials.access_token
        else:
            return self._api_token

    async def _get_client(self) -> ClickUpConnector:
        """
        Get or create ClickUpConnector with valid token.

        Returns:
            ClickUpConnector instance
        """
        if self._clickup_client is None:
            token = await self._get_valid_token()
            # ClickUp API uses Bearer token for OAuth, or direct token for legacy
            if self._use_oauth:
                # For OAuth, use Bearer token format (ClickUp OAuth expects "Bearer {token}")
                self._clickup_client = ClickUpConnector(api_token=f"Bearer {token}")
            else:
                # For legacy API token, use token directly (format: "pk_...")
                self._clickup_client = ClickUpConnector(api_token=token)
        return self._clickup_client

    async def close(self):
        """Close any open connections."""
        self._clickup_client = None

    async def __aenter__(self):
        """Async context manager entry."""
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Async context manager exit."""
        await self.close()

    async def get_authorized_workspaces(self) -> dict[str, Any]:
        """
        Fetch authorized workspaces (teams) from ClickUp.

        Returns:
            Dictionary containing teams data

        Raises:
            ValueError: If credentials have not been set
            Exception: If the API request fails
        """
        client = await self._get_client()
        return client.get_authorized_workspaces()

    async def get_workspace_tasks(
        self, workspace_id: str, include_closed: bool = False
    ) -> list[dict[str, Any]]:
        """
        Fetch all tasks from a ClickUp workspace.

        Args:
            workspace_id: ClickUp workspace (team) ID
            include_closed: Whether to include closed tasks (default: False)

        Returns:
            List of task objects

        Raises:
            ValueError: If credentials have not been set
            Exception: If the API request fails
        """
        client = await self._get_client()
        return client.get_workspace_tasks(
            workspace_id=workspace_id, include_closed=include_closed
        )

    async def get_tasks_in_date_range(
        self,
        workspace_id: str,
        start_date: str,
        end_date: str,
        include_closed: bool = False,
    ) -> tuple[list[dict[str, Any]], str | None]:
        """
        Fetch tasks from ClickUp within a specific date range.

        Args:
            workspace_id: ClickUp workspace (team) ID
            start_date: Start date in YYYY-MM-DD format
            end_date: End date in YYYY-MM-DD format
            include_closed: Whether to include closed tasks (default: False)

        Returns:
            Tuple containing (tasks list, error message or None)
        """
        client = await self._get_client()
        return client.get_tasks_in_date_range(
            workspace_id=workspace_id,
            start_date=start_date,
            end_date=end_date,
            include_closed=include_closed,
        )

    async def get_task_details(self, task_id: str) -> dict[str, Any]:
        """
        Fetch detailed information about a specific task.

        Args:
            task_id: ClickUp task ID

        Returns:
            Task details

        Raises:
            ValueError: If credentials have not been set
            Exception: If the API request fails
        """
        client = await self._get_client()
        return client.get_task_details(task_id)

    async def get_task_comments(self, task_id: str) -> dict[str, Any]:
        """
        Fetch comments for a specific task.

        Args:
            task_id: ClickUp task ID

        Returns:
            Task comments

        Raises:
            ValueError: If credentials have not been set
            Exception: If the API request fails
        """
        client = await self._get_client()
        return client.get_task_comments(task_id)
|
||||
|
|
@ -377,7 +377,7 @@ class SlackHistory:
|
|||
else:
|
||||
raise # Re-raise to outer handler for not_in_channel or other SlackApiErrors
|
||||
|
||||
if not current_api_call_successful:
|
||||
if not current_api_call_successful or result is None:
|
||||
continue # Retry the current page fetch due to handled rate limit
|
||||
|
||||
# Process result if successful
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ from .airtable_add_connector_route import (
|
|||
router as airtable_add_connector_router,
|
||||
)
|
||||
from .circleback_webhook_route import router as circleback_webhook_router
|
||||
from .clickup_add_connector_route import router as clickup_add_connector_router
|
||||
from .confluence_add_connector_route import router as confluence_add_connector_router
|
||||
from .discord_add_connector_route import router as discord_add_connector_router
|
||||
from .documents_routes import router as documents_router
|
||||
|
|
@ -52,6 +53,7 @@ router.include_router(slack_add_connector_router)
|
|||
router.include_router(discord_add_connector_router)
|
||||
router.include_router(jira_add_connector_router)
|
||||
router.include_router(confluence_add_connector_router)
|
||||
router.include_router(clickup_add_connector_router)
|
||||
router.include_router(new_llm_config_router) # LLM configs with prompt configuration
|
||||
router.include_router(logs_router)
|
||||
router.include_router(circleback_webhook_router) # Circleback meeting webhooks
|
||||
|
|
|
|||
|
|
@ -381,7 +381,7 @@ async def airtable_callback(
|
|||
|
||||
async def refresh_airtable_token(
|
||||
session: AsyncSession, connector: SearchSourceConnector
|
||||
):
|
||||
) -> SearchSourceConnector:
|
||||
"""
|
||||
Refresh the Airtable access token for a connector.
|
||||
|
||||
|
|
@ -411,6 +411,12 @@ async def refresh_airtable_token(
|
|||
status_code=500, detail="Failed to decrypt stored refresh token"
|
||||
) from e
|
||||
|
||||
if not refresh_token:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="No refresh token available. Please re-authenticate.",
|
||||
)
|
||||
|
||||
auth_header = make_basic_auth_header(
|
||||
config.AIRTABLE_CLIENT_ID, config.AIRTABLE_CLIENT_SECRET
|
||||
)
|
||||
|
|
@ -435,8 +441,14 @@ async def refresh_airtable_token(
|
|||
)
|
||||
|
||||
if token_response.status_code != 200:
|
||||
error_detail = token_response.text
|
||||
try:
|
||||
error_json = token_response.json()
|
||||
error_detail = error_json.get("error_description", error_detail)
|
||||
except Exception:
|
||||
pass
|
||||
raise HTTPException(
|
||||
status_code=400, detail="Token refresh failed: {token_response.text}"
|
||||
status_code=400, detail=f"Token refresh failed: {error_detail}"
|
||||
)
|
||||
|
||||
token_json = token_response.json()
|
||||
|
|
@ -478,6 +490,8 @@ async def refresh_airtable_token(
|
|||
)
|
||||
|
||||
return connector
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to refresh Airtable token: {e!s}"
|
||||
|
|
|
|||
481
surfsense_backend/app/routes/clickup_add_connector_route.py
Normal file
481
surfsense_backend/app/routes/clickup_add_connector_route.py
Normal file
|
|
@ -0,0 +1,481 @@
|
|||
"""
|
||||
ClickUp Connector OAuth Routes.
|
||||
|
||||
Handles OAuth 2.0 authentication flow for ClickUp connector.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from uuid import UUID
|
||||
|
||||
import httpx
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request
|
||||
from fastapi.responses import RedirectResponse
|
||||
from pydantic import ValidationError
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.future import select
|
||||
|
||||
from app.config import config
|
||||
from app.db import (
|
||||
SearchSourceConnector,
|
||||
SearchSourceConnectorType,
|
||||
User,
|
||||
get_async_session,
|
||||
)
|
||||
from app.schemas.clickup_auth_credentials import ClickUpAuthCredentialsBase
|
||||
from app.users import current_active_user
|
||||
from app.utils.oauth_security import OAuthStateManager, TokenEncryption
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
# ClickUp OAuth endpoints
|
||||
AUTHORIZATION_URL = "https://app.clickup.com/api"
|
||||
TOKEN_URL = "https://api.clickup.com/api/v2/oauth/token"
|
||||
|
||||
# Initialize security utilities
|
||||
_state_manager = None
|
||||
_token_encryption = None
|
||||
|
||||
|
||||
def get_state_manager() -> OAuthStateManager:
    """Return the lazily-created, module-wide OAuthStateManager singleton."""
    global _state_manager
    # Fast path: already initialized.
    if _state_manager is not None:
        return _state_manager
    secret = config.SECRET_KEY
    if not secret:
        raise ValueError("SECRET_KEY must be set for OAuth security")
    _state_manager = OAuthStateManager(secret)
    return _state_manager
|
||||
|
||||
|
||||
def get_token_encryption() -> TokenEncryption:
    """Return the lazily-created, module-wide TokenEncryption singleton."""
    global _token_encryption
    # Fast path: already initialized.
    if _token_encryption is not None:
        return _token_encryption
    secret = config.SECRET_KEY
    if not secret:
        raise ValueError("SECRET_KEY must be set for token encryption")
    _token_encryption = TokenEncryption(secret)
    return _token_encryption
|
||||
|
||||
|
||||
@router.get("/auth/clickup/connector/add")
async def connect_clickup(space_id: int, user: User = Depends(current_active_user)):
    """
    Initiate ClickUp OAuth flow.

    Args:
        space_id: The search space ID
        user: Current authenticated user

    Returns:
        Authorization URL for redirect

    Raises:
        HTTPException: 400 when space_id is missing; 500 when OAuth is not
            configured or an unexpected error occurs.
    """
    try:
        if not space_id:
            raise HTTPException(status_code=400, detail="space_id is required")

        if not config.CLICKUP_CLIENT_ID:
            raise HTTPException(status_code=500, detail="ClickUp OAuth not configured.")

        if not config.SECRET_KEY:
            raise HTTPException(
                status_code=500, detail="SECRET_KEY not configured for OAuth security."
            )

        # Generate secure state parameter with HMAC signature
        state_manager = get_state_manager()
        state_encoded = state_manager.generate_secure_state(space_id, user.id)

        # Build authorization URL
        from urllib.parse import urlencode

        auth_params = {
            "client_id": config.CLICKUP_CLIENT_ID,
            "redirect_uri": config.CLICKUP_REDIRECT_URI,
            "state": state_encoded,
        }

        auth_url = f"{AUTHORIZATION_URL}?{urlencode(auth_params)}"

        logger.info(f"Generated ClickUp OAuth URL for user {user.id}, space {space_id}")
        return {"auth_url": auth_url}

    except HTTPException:
        # FIX: let the deliberate 400/500 errors raised above propagate
        # unchanged — the bare `except Exception` below used to catch them
        # and re-wrap everything as a generic 500.
        raise
    except Exception as e:
        logger.error(f"Failed to initiate ClickUp OAuth: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to initiate ClickUp OAuth: {e!s}"
        ) from e
|
||||
|
||||
|
||||
@router.get("/auth/clickup/connector/callback")
|
||||
async def clickup_callback(
|
||||
request: Request,
|
||||
code: str | None = None,
|
||||
error: str | None = None,
|
||||
state: str | None = None,
|
||||
session: AsyncSession = Depends(get_async_session),
|
||||
):
|
||||
"""
|
||||
Handle ClickUp OAuth callback.
|
||||
|
||||
Args:
|
||||
request: FastAPI request object
|
||||
code: Authorization code from ClickUp (if user granted access)
|
||||
error: Error code from ClickUp (if user denied access or error occurred)
|
||||
state: State parameter containing user/space info
|
||||
session: Database session
|
||||
|
||||
Returns:
|
||||
Redirect response to frontend
|
||||
"""
|
||||
try:
|
||||
# Handle OAuth errors (e.g., user denied access)
|
||||
if error:
|
||||
logger.warning(f"ClickUp OAuth error: {error}")
|
||||
# Try to decode state to get space_id for redirect, but don't fail if it's invalid
|
||||
space_id = None
|
||||
if state:
|
||||
try:
|
||||
state_manager = get_state_manager()
|
||||
data = state_manager.validate_state(state)
|
||||
space_id = data.get("space_id")
|
||||
except Exception:
|
||||
# If state is invalid, we'll redirect without space_id
|
||||
logger.warning("Failed to validate state in error handler")
|
||||
|
||||
# Redirect to frontend with error parameter
|
||||
if space_id:
|
||||
return RedirectResponse(
|
||||
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=clickup_oauth_denied"
|
||||
)
|
||||
else:
|
||||
return RedirectResponse(
|
||||
url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=clickup_oauth_denied"
|
||||
)
|
||||
|
||||
# Validate required parameters for successful flow
|
||||
if not code:
|
||||
raise HTTPException(status_code=400, detail="Missing authorization code")
|
||||
if not state:
|
||||
raise HTTPException(status_code=400, detail="Missing state parameter")
|
||||
|
||||
# Validate and decode state with signature verification
|
||||
state_manager = get_state_manager()
|
||||
try:
|
||||
data = state_manager.validate_state(state)
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=400, detail=f"Invalid state parameter: {e!s}"
|
||||
) from e
|
||||
|
||||
user_id = UUID(data["user_id"])
|
||||
space_id = data["space_id"]
|
||||
|
||||
# Validate redirect URI (security: ensure it matches configured value)
|
||||
if not config.CLICKUP_REDIRECT_URI:
|
||||
raise HTTPException(
|
||||
status_code=500, detail="CLICKUP_REDIRECT_URI not configured"
|
||||
)
|
||||
|
||||
# Exchange authorization code for access token
|
||||
token_data = {
|
||||
"client_id": config.CLICKUP_CLIENT_ID,
|
||||
"client_secret": config.CLICKUP_CLIENT_SECRET,
|
||||
"code": code,
|
||||
}
|
||||
|
||||
async with httpx.AsyncClient() as client:
|
||||
token_response = await client.post(
|
||||
TOKEN_URL,
|
||||
json=token_data,
|
||||
headers={"Content-Type": "application/json"},
|
||||
timeout=30.0,
|
||||
)
|
||||
|
||||
if token_response.status_code != 200:
|
||||
error_detail = token_response.text
|
||||
try:
|
||||
error_json = token_response.json()
|
||||
error_detail = error_json.get("error", error_detail)
|
||||
except Exception:
|
||||
pass
|
||||
raise HTTPException(
|
||||
status_code=400, detail=f"Token exchange failed: {error_detail}"
|
||||
)
|
||||
|
||||
token_json = token_response.json()
|
||||
|
||||
# Extract access token
|
||||
access_token = token_json.get("access_token")
|
||||
if not access_token:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="No access token received from ClickUp"
|
||||
)
|
||||
|
||||
# Extract refresh token if available
|
||||
refresh_token = token_json.get("refresh_token")
|
||||
|
||||
# Encrypt sensitive tokens before storing
|
||||
token_encryption = get_token_encryption()
|
||||
|
||||
# Calculate expiration time (UTC, tz-aware)
|
||||
expires_at = None
|
||||
expires_in = token_json.get("expires_in")
|
||||
if expires_in:
|
||||
now_utc = datetime.now(UTC)
|
||||
expires_at = now_utc + timedelta(seconds=int(expires_in))
|
||||
|
||||
# Get user information and workspace information from ClickUp API
|
||||
user_info = {}
|
||||
workspace_info = {}
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
# Get user info
|
||||
user_response = await client.get(
|
||||
"https://api.clickup.com/api/v2/user",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
timeout=30.0,
|
||||
)
|
||||
if user_response.status_code == 200:
|
||||
user_data = user_response.json().get("user", {})
|
||||
user_info = {
|
||||
"user_id": str(user_data.get("id"))
|
||||
if user_data.get("id") is not None
|
||||
else None,
|
||||
"user_email": user_data.get("email"),
|
||||
"user_name": user_data.get("username"),
|
||||
}
|
||||
|
||||
# Get workspace (team) info - get the first workspace
|
||||
team_response = await client.get(
|
||||
"https://api.clickup.com/api/v2/team",
|
||||
headers={"Authorization": f"Bearer {access_token}"},
|
||||
timeout=30.0,
|
||||
)
|
||||
if team_response.status_code == 200:
|
||||
teams_data = team_response.json().get("teams", [])
|
||||
if teams_data and len(teams_data) > 0:
|
||||
first_team = teams_data[0]
|
||||
workspace_info = {
|
||||
"workspace_id": str(first_team.get("id"))
|
||||
if first_team.get("id") is not None
|
||||
else None,
|
||||
"workspace_name": first_team.get("name"),
|
||||
}
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to fetch user/workspace info from ClickUp: {e!s}")
|
||||
|
||||
# Store the encrypted tokens and user/workspace info in connector config
|
||||
connector_config = {
|
||||
"access_token": token_encryption.encrypt_token(access_token),
|
||||
"refresh_token": token_encryption.encrypt_token(refresh_token)
|
||||
if refresh_token
|
||||
else None,
|
||||
"expires_in": expires_in,
|
||||
"expires_at": expires_at.isoformat() if expires_at else None,
|
||||
"user_id": user_info.get("user_id"),
|
||||
"user_email": user_info.get("user_email"),
|
||||
"user_name": user_info.get("user_name"),
|
||||
"workspace_id": workspace_info.get("workspace_id"),
|
||||
"workspace_name": workspace_info.get("workspace_name"),
|
||||
# Mark that token is encrypted for backward compatibility
|
||||
"_token_encrypted": True,
|
||||
}
|
||||
|
||||
# Check if connector already exists for this search space and user
|
||||
existing_connector_result = await session.execute(
|
||||
select(SearchSourceConnector).filter(
|
||||
SearchSourceConnector.search_space_id == space_id,
|
||||
SearchSourceConnector.user_id == user_id,
|
||||
SearchSourceConnector.connector_type
|
||||
== SearchSourceConnectorType.CLICKUP_CONNECTOR,
|
||||
)
|
||||
)
|
||||
existing_connector = existing_connector_result.scalars().first()
|
||||
|
||||
if existing_connector:
|
||||
# Update existing connector
|
||||
existing_connector.config = connector_config
|
||||
existing_connector.name = "ClickUp Connector"
|
||||
existing_connector.is_indexable = True
|
||||
logger.info(
|
||||
f"Updated existing ClickUp connector for user {user_id} in space {space_id}"
|
||||
)
|
||||
else:
|
||||
# Create new connector
|
||||
new_connector = SearchSourceConnector(
|
||||
name="ClickUp Connector",
|
||||
connector_type=SearchSourceConnectorType.CLICKUP_CONNECTOR,
|
||||
is_indexable=True,
|
||||
config=connector_config,
|
||||
search_space_id=space_id,
|
||||
user_id=user_id,
|
||||
)
|
||||
session.add(new_connector)
|
||||
logger.info(
|
||||
f"Created new ClickUp connector for user {user_id} in space {space_id}"
|
||||
)
|
||||
|
||||
try:
|
||||
await session.commit()
|
||||
logger.info(f"Successfully saved ClickUp connector for user {user_id}")
|
||||
|
||||
# Redirect to the frontend with success params
|
||||
return RedirectResponse(
|
||||
url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=clickup-connector"
|
||||
)
|
||||
|
||||
except ValidationError as e:
|
||||
await session.rollback()
|
||||
raise HTTPException(
|
||||
status_code=422, detail=f"Validation error: {e!s}"
|
||||
) from e
|
||||
except IntegrityError as e:
|
||||
await session.rollback()
|
||||
raise HTTPException(
|
||||
status_code=409,
|
||||
detail=f"Integrity error: A connector with this type already exists. {e!s}",
|
||||
) from e
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to create search source connector: {e!s}")
|
||||
await session.rollback()
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Failed to create search source connector: {e!s}",
|
||||
) from e
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to complete ClickUp OAuth: {e!s}", exc_info=True)
|
||||
raise HTTPException(
|
||||
status_code=500, detail=f"Failed to complete ClickUp OAuth: {e!s}"
|
||||
) from e
|
||||
|
||||
|
||||
async def refresh_clickup_token(
    session: AsyncSession, connector: SearchSourceConnector
) -> SearchSourceConnector:
    """Obtain a fresh ClickUp access token for *connector* via its refresh token.

    The stored refresh token is decrypted when the config is flagged as
    encrypted, exchanged at the ClickUp token endpoint, and the resulting
    access/refresh tokens are re-encrypted and persisted back onto the
    connector's config.

    Args:
        session: Database session used to persist the updated config.
        connector: ClickUp connector whose token should be refreshed.

    Returns:
        The refreshed (and re-loaded) connector object.

    Raises:
        HTTPException: If no refresh token exists, decryption fails, the
            token endpoint rejects the request, or persistence fails.
    """
    try:
        logger.info(f"Refreshing ClickUp token for connector {connector.id}")

        creds = ClickUpAuthCredentialsBase.from_dict(connector.config)

        # Tokens are stored encrypted only when explicitly flagged in config.
        crypto = get_token_encryption()
        tokens_encrypted = connector.config.get("_token_encrypted", False)

        refresh_token = creds.refresh_token
        if tokens_encrypted and refresh_token:
            try:
                refresh_token = crypto.decrypt_token(refresh_token)
            except Exception as e:
                logger.error(f"Failed to decrypt refresh token: {e!s}")
                raise HTTPException(
                    status_code=500, detail="Failed to decrypt stored refresh token"
                ) from e

        if not refresh_token:
            raise HTTPException(
                status_code=400,
                detail="No refresh token available. Please re-authenticate.",
            )

        # Exchange the refresh token at ClickUp's token endpoint.
        payload = {
            "client_id": config.CLICKUP_CLIENT_ID,
            "client_secret": config.CLICKUP_CLIENT_SECRET,
            "refresh_token": refresh_token,
        }

        async with httpx.AsyncClient() as client:
            token_response = await client.post(
                TOKEN_URL,
                json=payload,
                headers={"Content-Type": "application/json"},
                timeout=30.0,
            )

        if token_response.status_code != 200:
            error_detail = token_response.text
            try:
                # Prefer the structured "error" field when the body is JSON.
                error_detail = token_response.json().get("error", error_detail)
            except Exception:
                pass
            raise HTTPException(
                status_code=400, detail=f"Token refresh failed: {error_detail}"
            )

        token_json = token_response.json()

        # Derive an absolute, tz-aware (UTC) expiry from the relative TTL.
        expires_in = token_json.get("expires_in")
        expires_at = (
            datetime.now(UTC) + timedelta(seconds=int(expires_in))
            if expires_in
            else None
        )

        access_token = token_json.get("access_token")
        new_refresh_token = token_json.get("refresh_token")

        if not access_token:
            raise HTTPException(
                status_code=400, detail="No access token received from ClickUp refresh"
            )

        # Re-encrypt the fresh tokens before they go back into storage.
        creds.access_token = crypto.encrypt_token(access_token)
        if new_refresh_token:
            creds.refresh_token = crypto.encrypt_token(new_refresh_token)
        creds.expires_in = expires_in
        creds.expires_at = expires_at

        # Carry forward user/workspace metadata that the refresh response
        # does not include, falling back to the previously stored config.
        for attr in (
            "user_id",
            "user_email",
            "user_name",
            "workspace_id",
            "workspace_name",
        ):
            if not getattr(creds, attr):
                setattr(creds, attr, connector.config.get(attr))

        # Persist the updated config, flagged so readers know to decrypt.
        updated_config = creds.to_dict()
        updated_config["_token_encrypted"] = True
        connector.config = updated_config
        await session.commit()
        await session.refresh(connector)

        logger.info(
            f"Successfully refreshed ClickUp token for connector {connector.id}"
        )

        return connector
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to refresh ClickUp token: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to refresh ClickUp token: {e!s}"
        ) from e
|
||||
85
surfsense_backend/app/schemas/clickup_auth_credentials.py
Normal file
85
surfsense_backend/app/schemas/clickup_auth_credentials.py
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
from datetime import UTC, datetime
|
||||
|
||||
from pydantic import BaseModel, field_validator
|
||||
|
||||
|
||||
class ClickUpAuthCredentialsBase(BaseModel):
    """OAuth credential bundle for a ClickUp connector.

    Mirrors the dict stored in the connector's config. The token fields may
    hold plaintext or encrypted values depending on the ``_token_encrypted``
    flag managed by the calling code (the flag itself is not part of this
    model).
    """

    access_token: str
    refresh_token: str | None = None
    # Relative lifetime (seconds) reported by the token endpoint, if any.
    expires_in: int | None = None
    # Absolute expiry; normalized to tz-aware UTC by ensure_aware_utc below.
    expires_at: datetime | None = None
    user_id: str | None = None
    user_email: str | None = None
    user_name: str | None = None
    workspace_id: str | None = None
    workspace_name: str | None = None

    @property
    def is_expired(self) -> bool:
        """Check if the credentials have expired."""
        if self.expires_at is None:
            return False  # Long-lived token, treat as not expired
        return self.expires_at <= datetime.now(UTC)

    @property
    def is_refreshable(self) -> bool:
        """Check if the credentials can be refreshed."""
        return self.refresh_token is not None

    def to_dict(self) -> dict:
        """Convert credentials to dictionary for storage."""
        return {
            "access_token": self.access_token,
            "refresh_token": self.refresh_token,
            "expires_in": self.expires_in,
            "expires_at": self.expires_at.isoformat() if self.expires_at else None,
            "user_id": self.user_id,
            "user_email": self.user_email,
            "user_name": self.user_name,
            "workspace_id": self.workspace_id,
            "workspace_name": self.workspace_name,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "ClickUpAuthCredentialsBase":
        """Create credentials from dictionary.

        ``expires_at`` is handed to pydantic unparsed so the
        ``ensure_aware_utc`` validator normalizes it uniformly; parsing it
        here with ``datetime.fromisoformat`` would reject "Z"-suffixed ISO
        strings on Python < 3.11.
        """

        def _as_str(value):
            # IDs were historically stored as ints; coerce for backward compat.
            return value if value is None or isinstance(value, str) else str(value)

        return cls(
            access_token=data.get("access_token", ""),
            refresh_token=data.get("refresh_token"),
            expires_in=data.get("expires_in"),
            # Falsy values (None, "") become None, matching prior behavior.
            expires_at=data.get("expires_at") or None,
            user_id=_as_str(data.get("user_id")),
            user_email=data.get("user_email"),
            user_name=data.get("user_name"),
            workspace_id=_as_str(data.get("workspace_id")),
            workspace_name=data.get("workspace_name"),
        )

    @field_validator("expires_at", mode="before")
    @classmethod
    def ensure_aware_utc(cls, v):
        """Normalize ISO strings and naive datetimes to tz-aware UTC."""
        # Strings like "2025-08-26T14:46:57.367184" or "...Z"
        if isinstance(v, str):
            # fromisoformat rejects a trailing "Z" on Python < 3.11.
            if v.endswith("Z"):
                return datetime.fromisoformat(v.replace("Z", "+00:00"))
            dt = datetime.fromisoformat(v)
            return dt if dt.tzinfo else dt.replace(tzinfo=UTC)
        # datetime objects: attach UTC if naive.
        if isinstance(v, datetime):
            return v if v.tzinfo else v.replace(tzinfo=UTC)
        return v
|
||||
|
|
@ -6,10 +6,8 @@ from sqlalchemy.exc import SQLAlchemyError
|
|||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.config import config
|
||||
from app.connectors.airtable_connector import AirtableConnector
|
||||
from app.connectors.airtable_history import AirtableHistoryConnector
|
||||
from app.db import Document, DocumentType, SearchSourceConnectorType
|
||||
from app.routes.airtable_add_connector_route import refresh_airtable_token
|
||||
from app.schemas.airtable_auth_credentials import AirtableAuthCredentialsBase
|
||||
from app.services.llm_service import get_user_long_context_llm
|
||||
from app.services.task_logging_service import TaskLoggingService
|
||||
from app.utils.document_converters import (
|
||||
|
|
@ -18,7 +16,6 @@ from app.utils.document_converters import (
|
|||
generate_document_summary,
|
||||
generate_unique_identifier_hash,
|
||||
)
|
||||
from app.utils.oauth_security import TokenEncryption
|
||||
|
||||
from .base import (
|
||||
calculate_date_range,
|
||||
|
|
@ -85,76 +82,11 @@ async def index_airtable_records(
|
|||
)
|
||||
return 0, f"Connector with ID {connector_id} not found"
|
||||
|
||||
# Create credentials from connector config
|
||||
config_data = (
|
||||
connector.config.copy()
|
||||
) # Work with a copy to avoid modifying original
|
||||
|
||||
# Decrypt tokens if they are encrypted (only when explicitly marked)
|
||||
token_encrypted = config_data.get("_token_encrypted", False)
|
||||
if token_encrypted:
|
||||
# Tokens are explicitly marked as encrypted, attempt decryption
|
||||
if not config.SECRET_KEY:
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
f"SECRET_KEY not configured but tokens are marked as encrypted for connector {connector_id}",
|
||||
"Missing SECRET_KEY for token decryption",
|
||||
{"error_type": "MissingSecretKey"},
|
||||
)
|
||||
return 0, "SECRET_KEY not configured but tokens are marked as encrypted"
|
||||
try:
|
||||
token_encryption = TokenEncryption(config.SECRET_KEY)
|
||||
|
||||
# Decrypt access_token
|
||||
if config_data.get("access_token"):
|
||||
config_data["access_token"] = token_encryption.decrypt_token(
|
||||
config_data["access_token"]
|
||||
)
|
||||
logger.info(
|
||||
f"Decrypted Airtable access token for connector {connector_id}"
|
||||
)
|
||||
|
||||
# Decrypt refresh_token if present
|
||||
if config_data.get("refresh_token"):
|
||||
config_data["refresh_token"] = token_encryption.decrypt_token(
|
||||
config_data["refresh_token"]
|
||||
)
|
||||
logger.info(
|
||||
f"Decrypted Airtable refresh token for connector {connector_id}"
|
||||
)
|
||||
except Exception as e:
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
f"Failed to decrypt Airtable tokens for connector {connector_id}: {e!s}",
|
||||
"Token decryption failed",
|
||||
{"error_type": "TokenDecryptionError"},
|
||||
)
|
||||
return 0, f"Failed to decrypt Airtable tokens: {e!s}"
|
||||
# If _token_encrypted is False or not set, treat tokens as plaintext
|
||||
|
||||
try:
|
||||
credentials = AirtableAuthCredentialsBase.from_dict(config_data)
|
||||
except Exception as e:
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
f"Invalid Airtable credentials in connector {connector_id}",
|
||||
str(e),
|
||||
{"error_type": "InvalidCredentials"},
|
||||
)
|
||||
return 0, f"Invalid Airtable credentials: {e!s}"
|
||||
|
||||
# Check if credentials are expired
|
||||
if credentials.is_expired:
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
f"Airtable credentials expired for connector {connector_id}",
|
||||
"Credentials expired",
|
||||
{"error_type": "ExpiredCredentials"},
|
||||
)
|
||||
|
||||
connector = await refresh_airtable_token(session, connector)
|
||||
|
||||
# return 0, "Airtable credentials have expired. Please re-authenticate."
|
||||
# Normalize "undefined" strings to None (from frontend)
|
||||
if start_date == "undefined" or start_date == "":
|
||||
start_date = None
|
||||
if end_date == "undefined" or end_date == "":
|
||||
end_date = None
|
||||
|
||||
# Calculate date range for indexing
|
||||
start_date_str, end_date_str = calculate_date_range(
|
||||
|
|
@ -166,8 +98,9 @@ async def index_airtable_records(
|
|||
f"from {start_date_str} to {end_date_str}"
|
||||
)
|
||||
|
||||
# Initialize Airtable connector
|
||||
airtable_connector = AirtableConnector(credentials)
|
||||
# Initialize Airtable history connector with auto-refresh capability
|
||||
airtable_history = AirtableHistoryConnector(session, connector_id)
|
||||
airtable_connector = await airtable_history._get_connector()
|
||||
total_processed = 0
|
||||
|
||||
try:
|
||||
|
|
@ -459,47 +392,56 @@ async def index_airtable_records(
|
|||
documents_skipped += 1
|
||||
continue # Skip this message and continue with others
|
||||
|
||||
# Update the last_indexed_at timestamp for the connector only if requested
|
||||
total_processed = documents_indexed
|
||||
if total_processed > 0:
|
||||
await update_connector_last_indexed(
|
||||
session, connector, update_last_indexed
|
||||
)
|
||||
# Accumulate total processed across all tables
|
||||
total_processed += documents_indexed
|
||||
|
||||
# Final commit for any remaining documents not yet committed in batches
|
||||
logger.info(
|
||||
f"Final commit: Total {documents_indexed} Airtable records processed"
|
||||
)
|
||||
await session.commit()
|
||||
logger.info(
|
||||
"Successfully committed all Airtable document changes to database"
|
||||
)
|
||||
if documents_indexed > 0:
|
||||
logger.info(
|
||||
f"Final commit for table {table_name}: {documents_indexed} Airtable records processed"
|
||||
)
|
||||
await session.commit()
|
||||
logger.info(
|
||||
f"Successfully committed all Airtable document changes for table {table_name}"
|
||||
)
|
||||
|
||||
# Log success
|
||||
await task_logger.log_task_success(
|
||||
log_entry,
|
||||
f"Successfully completed Airtable indexing for connector {connector_id}",
|
||||
{
|
||||
"events_processed": total_processed,
|
||||
"documents_indexed": documents_indexed,
|
||||
"documents_skipped": documents_skipped,
|
||||
"skipped_messages_count": len(skipped_messages),
|
||||
},
|
||||
)
|
||||
# Update the last_indexed_at timestamp for the connector only if requested
|
||||
# (after all tables in all bases are processed)
|
||||
if total_processed > 0:
|
||||
await update_connector_last_indexed(
|
||||
session, connector, update_last_indexed
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Airtable indexing completed: {documents_indexed} new records, {documents_skipped} skipped"
|
||||
)
|
||||
return (
|
||||
total_processed,
|
||||
None,
|
||||
) # Return None as the error message to indicate success
|
||||
# Log success after processing all bases and tables
|
||||
await task_logger.log_task_success(
|
||||
log_entry,
|
||||
f"Successfully completed Airtable indexing for connector {connector_id}",
|
||||
{
|
||||
"events_processed": total_processed,
|
||||
"documents_indexed": total_processed,
|
||||
},
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Airtable indexing completed: {total_processed} total records processed"
|
||||
)
|
||||
return (
|
||||
total_processed,
|
||||
None,
|
||||
) # Return None as the error message to indicate success
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Fetching Airtable bases for connector {connector_id} failed: {e!s}",
|
||||
exc_info=True,
|
||||
)
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
f"Failed to fetch Airtable bases for connector {connector_id}",
|
||||
str(e),
|
||||
{"error_type": type(e).__name__},
|
||||
)
|
||||
return 0, f"Failed to fetch Airtable bases: {e!s}"
|
||||
|
||||
except SQLAlchemyError as db_error:
|
||||
await session.rollback()
|
||||
|
|
|
|||
|
|
@ -2,13 +2,14 @@
|
|||
ClickUp connector indexer.
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.config import config
|
||||
from app.connectors.clickup_connector import ClickUpConnector
|
||||
from app.connectors.clickup_history import ClickUpHistoryConnector
|
||||
from app.db import Document, DocumentType, SearchSourceConnectorType
|
||||
from app.services.llm_service import get_user_long_context_llm
|
||||
from app.services.task_logging_service import TaskLoggingService
|
||||
|
|
@ -82,26 +83,30 @@ async def index_clickup_tasks(
|
|||
)
|
||||
return 0, error_msg
|
||||
|
||||
# Extract ClickUp configuration
|
||||
clickup_api_token = connector.config.get("CLICKUP_API_TOKEN")
|
||||
# Check if using OAuth (has access_token in config) or legacy (has CLICKUP_API_TOKEN)
|
||||
has_oauth = connector.config.get("access_token") is not None
|
||||
has_legacy = connector.config.get("CLICKUP_API_TOKEN") is not None
|
||||
|
||||
if not clickup_api_token:
|
||||
error_msg = "ClickUp API token not found in connector configuration"
|
||||
if not has_oauth and not has_legacy:
|
||||
error_msg = "ClickUp credentials not found in connector configuration (neither OAuth nor API token)"
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
f"ClickUp API token not found in connector config for connector {connector_id}",
|
||||
"Missing ClickUp token",
|
||||
{"error_type": "MissingToken"},
|
||||
f"ClickUp credentials not found in connector config for connector {connector_id}",
|
||||
"Missing ClickUp credentials",
|
||||
{"error_type": "MissingCredentials"},
|
||||
)
|
||||
return 0, error_msg
|
||||
|
||||
await task_logger.log_task_progress(
|
||||
log_entry,
|
||||
f"Initializing ClickUp client for connector {connector_id}",
|
||||
f"Initializing ClickUp client for connector {connector_id} ({'OAuth' if has_oauth else 'API Token'})",
|
||||
{"stage": "client_initialization"},
|
||||
)
|
||||
|
||||
clickup_client = ClickUpConnector(api_token=clickup_api_token)
|
||||
# Use history connector which supports both OAuth and legacy API tokens
|
||||
clickup_client = ClickUpHistoryConnector(
|
||||
session=session, connector_id=connector_id
|
||||
)
|
||||
|
||||
# Get authorized workspaces
|
||||
await task_logger.log_task_progress(
|
||||
|
|
@ -110,7 +115,7 @@ async def index_clickup_tasks(
|
|||
{"stage": "workspace_fetching"},
|
||||
)
|
||||
|
||||
workspaces_response = clickup_client.get_authorized_workspaces()
|
||||
workspaces_response = await clickup_client.get_authorized_workspaces()
|
||||
workspaces = workspaces_response.get("teams", [])
|
||||
|
||||
if not workspaces:
|
||||
|
|
@ -141,7 +146,7 @@ async def index_clickup_tasks(
|
|||
|
||||
# Fetch tasks for date range if provided
|
||||
if start_date and end_date:
|
||||
tasks, error = clickup_client.get_tasks_in_date_range(
|
||||
tasks, error = await clickup_client.get_tasks_in_date_range(
|
||||
workspace_id=workspace_id,
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
|
|
@ -153,7 +158,7 @@ async def index_clickup_tasks(
|
|||
)
|
||||
continue
|
||||
else:
|
||||
tasks = clickup_client.get_workspace_tasks(
|
||||
tasks = await clickup_client.get_workspace_tasks(
|
||||
workspace_id=workspace_id, include_closed=True
|
||||
)
|
||||
|
||||
|
|
@ -393,10 +398,21 @@ async def index_clickup_tasks(
|
|||
logger.info(
|
||||
f"clickup indexing completed: {documents_indexed} new tasks, {documents_skipped} skipped"
|
||||
)
|
||||
|
||||
# Close client connection
|
||||
try:
|
||||
await clickup_client.close()
|
||||
except Exception as e:
|
||||
logger.warning(f"Error closing ClickUp client: {e!s}")
|
||||
|
||||
return total_processed, None
|
||||
|
||||
except SQLAlchemyError as db_error:
|
||||
await session.rollback()
|
||||
# Clean up the connector in case of error
|
||||
if "clickup_client" in locals():
|
||||
with contextlib.suppress(Exception):
|
||||
await clickup_client.close()
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
f"Database error during ClickUp indexing for connector {connector_id}",
|
||||
|
|
@ -407,6 +423,10 @@ async def index_clickup_tasks(
|
|||
return 0, f"Database error: {db_error!s}"
|
||||
except Exception as e:
|
||||
await session.rollback()
|
||||
# Clean up the connector in case of error
|
||||
if "clickup_client" in locals():
|
||||
with contextlib.suppress(Exception):
|
||||
await clickup_client.close()
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
f"Failed to index ClickUp tasks for connector {connector_id}",
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
Confluence connector indexer.
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
|
|
@ -142,10 +143,8 @@ async def index_confluence_pages(
|
|||
)
|
||||
# Close client before returning
|
||||
if confluence_client:
|
||||
try:
|
||||
with contextlib.suppress(Exception):
|
||||
await confluence_client.close()
|
||||
except Exception:
|
||||
pass
|
||||
return 0, None
|
||||
else:
|
||||
await task_logger.log_task_failure(
|
||||
|
|
@ -156,10 +155,8 @@ async def index_confluence_pages(
|
|||
)
|
||||
# Close client on error
|
||||
if confluence_client:
|
||||
try:
|
||||
with contextlib.suppress(Exception):
|
||||
await confluence_client.close()
|
||||
except Exception:
|
||||
pass
|
||||
return 0, f"Failed to get Confluence pages: {error}"
|
||||
|
||||
logger.info(f"Retrieved {len(pages)} pages from Confluence API")
|
||||
|
|
@ -168,10 +165,8 @@ async def index_confluence_pages(
|
|||
logger.error(f"Error fetching Confluence pages: {e!s}", exc_info=True)
|
||||
# Close client on error
|
||||
if confluence_client:
|
||||
try:
|
||||
with contextlib.suppress(Exception):
|
||||
await confluence_client.close()
|
||||
except Exception:
|
||||
pass
|
||||
return 0, f"Error fetching Confluence pages: {e!s}"
|
||||
|
||||
# Process and index each page
|
||||
|
|
@ -437,10 +432,8 @@ async def index_confluence_pages(
|
|||
await session.rollback()
|
||||
# Close client if it exists
|
||||
if confluence_client:
|
||||
try:
|
||||
with contextlib.suppress(Exception):
|
||||
await confluence_client.close()
|
||||
except Exception:
|
||||
pass
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
f"Database error during Confluence indexing for connector {connector_id}",
|
||||
|
|
@ -453,10 +446,8 @@ async def index_confluence_pages(
|
|||
await session.rollback()
|
||||
# Close client if it exists
|
||||
if confluence_client:
|
||||
try:
|
||||
with contextlib.suppress(Exception):
|
||||
await confluence_client.close()
|
||||
except Exception:
|
||||
pass
|
||||
await task_logger.log_task_failure(
|
||||
log_entry,
|
||||
f"Failed to index Confluence pages for connector {connector_id}",
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
Jira connector indexer.
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
|
|
@ -413,10 +414,8 @@ async def index_jira_issues(
|
|||
logger.error(f"Database error: {db_error!s}", exc_info=True)
|
||||
# Clean up the connector in case of error
|
||||
if "jira_client" in locals():
|
||||
try:
|
||||
with contextlib.suppress(Exception):
|
||||
await jira_client.close()
|
||||
except Exception:
|
||||
pass
|
||||
return 0, f"Database error: {db_error!s}"
|
||||
except Exception as e:
|
||||
await session.rollback()
|
||||
|
|
@ -429,8 +428,6 @@ async def index_jira_issues(
|
|||
logger.error(f"Failed to index JIRA issues: {e!s}", exc_info=True)
|
||||
# Clean up the connector in case of error
|
||||
if "jira_client" in locals():
|
||||
try:
|
||||
with contextlib.suppress(Exception):
|
||||
await jira_client.close()
|
||||
except Exception:
|
||||
pass
|
||||
return 0, f"Failed to index JIRA issues: {e!s}"
|
||||
|
|
|
|||
|
|
@ -551,7 +551,7 @@ def validate_connector_config(
|
|||
# ],
|
||||
# "validators": {},
|
||||
# },
|
||||
"CLICKUP_CONNECTOR": {"required": ["CLICKUP_API_TOKEN"], "validators": {}},
|
||||
# "CLICKUP_CONNECTOR": {"required": ["CLICKUP_API_TOKEN"], "validators": {}},
|
||||
# "GOOGLE_CALENDAR_CONNECTOR": {
|
||||
# "required": ["token", "refresh_token", "token_uri", "client_id", "expiry", "scopes", "client_secret"],
|
||||
# "validators": {},
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue