mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-05-15 18:25:18 +02:00
Merge remote-tracking branch 'upstream/dev' into feat/inbox
This commit is contained in:
commit
3d4a8f981c
40 changed files with 7263 additions and 3110 deletions
|
|
@ -85,6 +85,11 @@ TEAMS_CLIENT_ID=your_teams_client_id_here
|
|||
TEAMS_CLIENT_SECRET=your_teams_client_secret_here
|
||||
TEAMS_REDIRECT_URI=http://localhost:8000/api/v1/auth/teams/connector/callback
|
||||
|
||||
# Composio Connector
|
||||
COMPOSIO_API_KEY=your_api_key_here
|
||||
COMPOSIO_ENABLED=TRUE
|
||||
COMPOSIO_REDIRECT_URI=http://localhost:8000/api/v1/auth/composio/connector/callback
|
||||
|
||||
# Embedding Model
|
||||
# Examples:
|
||||
# # Get sentence transformers embeddings
|
||||
|
|
|
|||
135
surfsense_backend/alembic/versions/73_add_user_memories_table.py
Normal file
135
surfsense_backend/alembic/versions/73_add_user_memories_table.py
Normal file
|
|
@ -0,0 +1,135 @@
|
|||
"""Add user_memories table for AI memory feature
|
||||
|
||||
Revision ID: 73
|
||||
Revises: 72
|
||||
Create Date: 2026-01-20
|
||||
|
||||
This migration adds the user_memories table which enables Claude-like memory
|
||||
functionality - allowing the AI to remember facts, preferences, and context
|
||||
about users across conversations.
|
||||
"""
|
||||
|
||||
from collections.abc import Sequence
|
||||
|
||||
from alembic import op
|
||||
from app.config import config
|
||||
|
||||
# revision identifiers, used by Alembic.
revision: str = "73"
down_revision: str | None = "72"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None

# Get embedding dimension from config.
# NOTE(review): resolved at import time — the migration bakes in whatever
# dimension the configured embedding model reports when alembic loads this
# file, so changing the model later will not alter an already-created column.
EMBEDDING_DIM = config.embedding_model_instance.dimension
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create user_memories table and MemoryCategory enum.

    Every DDL statement is wrapped in an existence check (DO $$ ... $$ or
    IF NOT EXISTS) so the migration is idempotent and safe to re-run.
    """

    # Create the MemoryCategory enum type (guarded: only if it does not exist yet)
    op.execute(
        """
        DO $$
        BEGIN
            IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'memorycategory') THEN
                CREATE TYPE memorycategory AS ENUM (
                    'preference',
                    'fact',
                    'instruction',
                    'context'
                );
            END IF;
        END$$;
        """
    )

    # Create user_memories table
    # search_space_id is nullable: NULL rows act as "global" memories that
    # apply across search spaces (see the recall filters in user_memory.py).
    op.execute(
        f"""
        DO $$
        BEGIN
            IF NOT EXISTS (
                SELECT FROM information_schema.tables
                WHERE table_name = 'user_memories'
            ) THEN
                CREATE TABLE user_memories (
                    id SERIAL PRIMARY KEY,
                    created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
                    user_id UUID NOT NULL REFERENCES "user"(id) ON DELETE CASCADE,
                    search_space_id INTEGER REFERENCES searchspaces(id) ON DELETE CASCADE,
                    memory_text TEXT NOT NULL,
                    category memorycategory NOT NULL DEFAULT 'fact',
                    embedding vector({EMBEDDING_DIM}),
                    updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
                );
            END IF;
        END$$;
        """
    )

    # Create indexes for efficient querying
    op.execute(
        """
        DO $$
        BEGIN
            -- Index on user_id for filtering memories by user
            IF NOT EXISTS (
                SELECT 1 FROM pg_indexes
                WHERE tablename = 'user_memories' AND indexname = 'ix_user_memories_user_id'
            ) THEN
                CREATE INDEX ix_user_memories_user_id ON user_memories(user_id);
            END IF;

            -- Index on search_space_id for filtering memories by search space
            IF NOT EXISTS (
                SELECT 1 FROM pg_indexes
                WHERE tablename = 'user_memories' AND indexname = 'ix_user_memories_search_space_id'
            ) THEN
                CREATE INDEX ix_user_memories_search_space_id ON user_memories(search_space_id);
            END IF;

            -- Index on updated_at for ordering by recency
            IF NOT EXISTS (
                SELECT 1 FROM pg_indexes
                WHERE tablename = 'user_memories' AND indexname = 'ix_user_memories_updated_at'
            ) THEN
                CREATE INDEX ix_user_memories_updated_at ON user_memories(updated_at);
            END IF;

            -- Index on category for filtering by memory type
            IF NOT EXISTS (
                SELECT 1 FROM pg_indexes
                WHERE tablename = 'user_memories' AND indexname = 'ix_user_memories_category'
            ) THEN
                CREATE INDEX ix_user_memories_category ON user_memories(category);
            END IF;

            -- Composite index for common query pattern (user + search space)
            IF NOT EXISTS (
                SELECT 1 FROM pg_indexes
                WHERE tablename = 'user_memories' AND indexname = 'ix_user_memories_user_search_space'
            ) THEN
                CREATE INDEX ix_user_memories_user_search_space ON user_memories(user_id, search_space_id);
            END IF;
        END$$;
        """
    )

    # Create vector index for semantic search
    # NOTE(review): pgvector HNSW indexes have a dimension cap (2000 in
    # current releases) — confirm EMBEDDING_DIM is within it, otherwise this
    # statement fails and the migration aborts.
    op.execute(
        """
        CREATE INDEX IF NOT EXISTS user_memories_vector_index
        ON user_memories USING hnsw (embedding public.vector_cosine_ops);
        """
    )
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop user_memories table and MemoryCategory enum."""

    # Drop the table first; CASCADE removes dependent objects (indexes),
    # and IF EXISTS keeps the downgrade idempotent.
    op.execute("DROP TABLE IF EXISTS user_memories CASCADE;")

    # Drop the enum type only after the table, so no column references it.
    op.execute("DROP TYPE IF EXISTS memorycategory;")
|
||||
|
|
@ -0,0 +1,81 @@
|
|||
"""Add COMPOSIO_CONNECTOR to SearchSourceConnectorType and DocumentType enums
|
||||
|
||||
Revision ID: 74
|
||||
Revises: 73
|
||||
Create Date: 2026-01-21
|
||||
|
||||
This migration adds the COMPOSIO_CONNECTOR enum value to both:
|
||||
- searchsourceconnectortype (for connector type tracking)
|
||||
- documenttype (for document type tracking)
|
||||
|
||||
Composio is a managed OAuth integration service that allows connecting
|
||||
to various third-party services (Google Drive, Gmail, Calendar, etc.)
|
||||
without requiring separate OAuth app verification.
|
||||
"""
|
||||
|
||||
from collections.abc import Sequence
|
||||
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
revision: str = "74"
down_revision: str | None = "73"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None

# Define the ENUM type names and the new value.
# Both Postgres enums receive the same COMPOSIO_CONNECTOR label: one tags the
# connector row, the other tags documents ingested through that connector.
CONNECTOR_ENUM = "searchsourceconnectortype"
CONNECTOR_NEW_VALUE = "COMPOSIO_CONNECTOR"
DOCUMENT_ENUM = "documenttype"
DOCUMENT_NEW_VALUE = "COMPOSIO_CONNECTOR"
|
||||
|
||||
|
||||
def _add_enum_value_if_missing(enum_name: str, new_value: str) -> None:
    """Idempotently append ``new_value`` to the PostgreSQL enum ``enum_name``.

    Uses a DO block so re-running the migration is safe: the ALTER TYPE only
    executes when pg_enum does not already contain the label for that type.
    """
    op.execute(
        f"""
        DO $$
        BEGIN
            IF NOT EXISTS (
                SELECT 1 FROM pg_enum
                WHERE enumlabel = '{new_value}'
                AND enumtypid = (SELECT oid FROM pg_type WHERE typname = '{enum_name}')
            ) THEN
                ALTER TYPE {enum_name} ADD VALUE '{new_value}';
            END IF;
        END$$;
        """
    )


def upgrade() -> None:
    """Upgrade schema - add COMPOSIO_CONNECTOR to connector and document enums safely.

    The original implementation duplicated the same guarded ALTER TYPE block
    twice; both additions now go through one helper that emits identical SQL.
    """
    # Add COMPOSIO_CONNECTOR to searchsourceconnectortype only if not exists
    _add_enum_value_if_missing(CONNECTOR_ENUM, CONNECTOR_NEW_VALUE)

    # Add COMPOSIO_CONNECTOR to documenttype only if not exists
    _add_enum_value_if_missing(DOCUMENT_ENUM, DOCUMENT_NEW_VALUE)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema - remove COMPOSIO_CONNECTOR from connector and document enums.

    Note: PostgreSQL does not support removing enum values directly.
    To properly downgrade, you would need to:
    1. Delete any rows using the COMPOSIO_CONNECTOR value
    2. Create new enums without COMPOSIO_CONNECTOR
    3. Alter the columns to use the new enums
    4. Drop the old enums

    This is left as a no-op since removing enum values is complex
    and typically not needed in practice.
    """
    # Intentional no-op: see the docstring for the manual downgrade recipe.
    pass
|
||||
|
|
@ -34,6 +34,7 @@ async def create_surfsense_deep_agent(
|
|||
db_session: AsyncSession,
|
||||
connector_service: ConnectorService,
|
||||
checkpointer: Checkpointer,
|
||||
user_id: str | None = None,
|
||||
agent_config: AgentConfig | None = None,
|
||||
enabled_tools: list[str] | None = None,
|
||||
disabled_tools: list[str] | None = None,
|
||||
|
|
@ -49,6 +50,8 @@ async def create_surfsense_deep_agent(
|
|||
- link_preview: Fetch rich previews for URLs
|
||||
- display_image: Display images in chat
|
||||
- scrape_webpage: Extract content from webpages
|
||||
- save_memory: Store facts/preferences about the user
|
||||
- recall_memory: Retrieve relevant user memories
|
||||
|
||||
The agent also includes TodoListMiddleware by default (via create_deep_agent) which provides:
|
||||
- write_todos: Create and update planning/todo lists for complex tasks
|
||||
|
|
@ -64,6 +67,7 @@ async def create_surfsense_deep_agent(
|
|||
connector_service: Initialized connector service for knowledge base search
|
||||
checkpointer: LangGraph checkpointer for conversation state persistence.
|
||||
Use AsyncPostgresSaver for production or MemorySaver for testing.
|
||||
user_id: The current user's UUID string (required for memory tools)
|
||||
agent_config: Optional AgentConfig from NewLLMConfig for prompt configuration.
|
||||
If None, uses default system prompt with citations enabled.
|
||||
enabled_tools: Explicit list of tool names to enable. If None, all default tools
|
||||
|
|
@ -118,6 +122,7 @@ async def create_surfsense_deep_agent(
|
|||
"db_session": db_session,
|
||||
"connector_service": connector_service,
|
||||
"firecrawl_api_key": firecrawl_api_key,
|
||||
"user_id": user_id, # Required for memory tools
|
||||
}
|
||||
|
||||
# Build tools using the async registry (includes MCP tools)
|
||||
|
|
|
|||
|
|
@ -116,6 +116,45 @@ You have access to the following tools:
|
|||
* This makes your response more visual and engaging.
|
||||
* Prioritize showing: diagrams, charts, infographics, key illustrations, or images that help explain the content.
|
||||
* Don't show every image - just the most relevant 1-3 images that enhance understanding.
|
||||
|
||||
6. save_memory: Save facts, preferences, or context about the user for personalized responses.
|
||||
- Use this when the user explicitly or implicitly shares information worth remembering.
|
||||
- Trigger scenarios:
|
||||
* User says "remember this", "keep this in mind", "note that", or similar
|
||||
* User shares personal preferences (e.g., "I prefer Python over JavaScript")
|
||||
* User shares facts about themselves (e.g., "I'm a senior developer at Company X")
|
||||
* User gives standing instructions (e.g., "always respond in bullet points")
|
||||
* User shares project context (e.g., "I'm working on migrating our codebase to TypeScript")
|
||||
- Args:
|
||||
- content: The fact/preference to remember. Phrase it clearly:
|
||||
* "User prefers dark mode for all interfaces"
|
||||
* "User is a senior Python developer"
|
||||
* "User wants responses in bullet point format"
|
||||
* "User is working on project called ProjectX"
|
||||
- category: Type of memory:
|
||||
* "preference": User preferences (coding style, tools, formats)
|
||||
* "fact": Facts about the user (role, expertise, background)
|
||||
* "instruction": Standing instructions (response format, communication style)
|
||||
* "context": Current context (ongoing projects, goals, challenges)
|
||||
- Returns: Confirmation of saved memory
|
||||
- IMPORTANT: Only save information that would be genuinely useful for future conversations.
|
||||
Don't save trivial or temporary information.
|
||||
|
||||
7. recall_memory: Retrieve relevant memories about the user for personalized responses.
|
||||
- Use this to access stored information about the user.
|
||||
- Trigger scenarios:
|
||||
* You need user context to give a better, more personalized answer
|
||||
* User references something they mentioned before
|
||||
* User asks "what do you know about me?" or similar
|
||||
* Personalization would significantly improve response quality
|
||||
* Before making recommendations that should consider user preferences
|
||||
- Args:
|
||||
- query: Optional search query to find specific memories (e.g., "programming preferences")
|
||||
- category: Optional filter by category ("preference", "fact", "instruction", "context")
|
||||
- top_k: Number of memories to retrieve (default: 5)
|
||||
- Returns: Relevant memories formatted as context
|
||||
- IMPORTANT: Use the recalled memories naturally in your response without explicitly
|
||||
stating "Based on your memory..." - integrate the context seamlessly.
|
||||
</tools>
|
||||
<tool_call_examples>
|
||||
- User: "How do I install SurfSense?"
|
||||
|
|
@ -136,6 +175,23 @@ You have access to the following tools:
|
|||
- User: "What did I discuss on Slack last week about the React migration?"
|
||||
- Call: `search_knowledge_base(query="React migration", connectors_to_search=["SLACK_CONNECTOR"], start_date="YYYY-MM-DD", end_date="YYYY-MM-DD")`
|
||||
|
||||
- User: "Remember that I prefer TypeScript over JavaScript"
|
||||
- Call: `save_memory(content="User prefers TypeScript over JavaScript for development", category="preference")`
|
||||
|
||||
- User: "I'm a data scientist working on ML pipelines"
|
||||
- Call: `save_memory(content="User is a data scientist working on ML pipelines", category="fact")`
|
||||
|
||||
- User: "Always give me code examples in Python"
|
||||
- Call: `save_memory(content="User wants code examples to be written in Python", category="instruction")`
|
||||
|
||||
- User: "What programming language should I use for this project?"
|
||||
- First recall: `recall_memory(query="programming language preferences")`
|
||||
- Then provide a personalized recommendation based on their preferences
|
||||
|
||||
- User: "What do you know about me?"
|
||||
- Call: `recall_memory(top_k=10)`
|
||||
- Then summarize the stored memories
|
||||
|
||||
- User: "Give me a podcast about AI trends based on what we discussed"
|
||||
- First search for relevant content, then call: `generate_podcast(source_content="Based on our conversation and search results: [detailed summary of chat + search findings]", podcast_title="AI Trends Podcast")`
|
||||
|
||||
|
|
|
|||
|
|
@ -11,6 +11,8 @@ Available tools:
|
|||
- link_preview: Fetch rich previews for URLs
|
||||
- display_image: Display images in chat
|
||||
- scrape_webpage: Extract content from webpages
|
||||
- save_memory: Store facts/preferences about the user
|
||||
- recall_memory: Retrieve relevant user memories
|
||||
"""
|
||||
|
||||
# Registry exports
|
||||
|
|
@ -33,6 +35,7 @@ from .registry import (
|
|||
)
|
||||
from .scrape_webpage import create_scrape_webpage_tool
|
||||
from .search_surfsense_docs import create_search_surfsense_docs_tool
|
||||
from .user_memory import create_recall_memory_tool, create_save_memory_tool
|
||||
|
||||
__all__ = [
|
||||
# Registry
|
||||
|
|
@ -43,6 +46,8 @@ __all__ = [
|
|||
"create_display_image_tool",
|
||||
"create_generate_podcast_tool",
|
||||
"create_link_preview_tool",
|
||||
"create_recall_memory_tool",
|
||||
"create_save_memory_tool",
|
||||
"create_scrape_webpage_tool",
|
||||
"create_search_knowledge_base_tool",
|
||||
"create_search_surfsense_docs_tool",
|
||||
|
|
|
|||
|
|
@ -50,6 +50,7 @@ from .mcp_tool import load_mcp_tools
|
|||
from .podcast import create_generate_podcast_tool
|
||||
from .scrape_webpage import create_scrape_webpage_tool
|
||||
from .search_surfsense_docs import create_search_surfsense_docs_tool
|
||||
from .user_memory import create_recall_memory_tool, create_save_memory_tool
|
||||
|
||||
# =============================================================================
|
||||
# Tool Definition
|
||||
|
|
@ -138,6 +139,31 @@ BUILTIN_TOOLS: list[ToolDefinition] = [
|
|||
requires=["db_session"],
|
||||
),
|
||||
# =========================================================================
|
||||
# USER MEMORY TOOLS - Claude-like memory feature
|
||||
# =========================================================================
|
||||
# Save memory tool - stores facts/preferences about the user
|
||||
ToolDefinition(
|
||||
name="save_memory",
|
||||
description="Save facts, preferences, or context about the user for personalized responses",
|
||||
factory=lambda deps: create_save_memory_tool(
|
||||
user_id=deps["user_id"],
|
||||
search_space_id=deps["search_space_id"],
|
||||
db_session=deps["db_session"],
|
||||
),
|
||||
requires=["user_id", "search_space_id", "db_session"],
|
||||
),
|
||||
# Recall memory tool - retrieves relevant user memories
|
||||
ToolDefinition(
|
||||
name="recall_memory",
|
||||
description="Recall user memories for personalized and contextual responses",
|
||||
factory=lambda deps: create_recall_memory_tool(
|
||||
user_id=deps["user_id"],
|
||||
search_space_id=deps["search_space_id"],
|
||||
db_session=deps["db_session"],
|
||||
),
|
||||
requires=["user_id", "search_space_id", "db_session"],
|
||||
),
|
||||
# =========================================================================
|
||||
# ADD YOUR CUSTOM TOOLS BELOW
|
||||
# =========================================================================
|
||||
# Example:
|
||||
|
|
|
|||
352
surfsense_backend/app/agents/new_chat/tools/user_memory.py
Normal file
352
surfsense_backend/app/agents/new_chat/tools/user_memory.py
Normal file
|
|
@ -0,0 +1,352 @@
|
|||
"""
|
||||
User memory tools for the SurfSense agent.
|
||||
|
||||
This module provides tools for storing and retrieving user memories,
|
||||
enabling personalized AI responses similar to Claude's memory feature.
|
||||
|
||||
Features:
|
||||
- save_memory: Store facts, preferences, and context about the user
|
||||
- recall_memory: Retrieve relevant memories using semantic search
|
||||
"""
|
||||
|
||||
import logging
from typing import Any
from uuid import UUID

from langchain_core.tools import tool
from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession

from app.config import config
from app.db import MemoryCategory, UserMemory
|
||||
|
||||
# Module-level logger; used by the tool closures below for failure reporting.
logger = logging.getLogger(__name__)


# =============================================================================
# Constants
# =============================================================================

# Default number of memories to retrieve when the caller does not specify top_k
DEFAULT_RECALL_TOP_K = 5

# Maximum number of memories per user (to prevent unbounded growth).
# When the cap is reached, save_memory evicts the oldest memory before
# inserting a new one.
MAX_MEMORIES_PER_USER = 100
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Helper Functions
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def _to_uuid(user_id: str) -> UUID:
|
||||
"""Convert a string user_id to a UUID object."""
|
||||
if isinstance(user_id, UUID):
|
||||
return user_id
|
||||
return UUID(user_id)
|
||||
|
||||
|
||||
async def get_user_memory_count(
    db_session: AsyncSession,
    user_id: str,
    search_space_id: int | None = None,
) -> int:
    """Get the count of memories for a user.

    When ``search_space_id`` is given, counts memories scoped to that space
    plus "global" memories whose search_space_id is NULL (same filter as the
    recall/delete helpers); otherwise counts every memory the user owns.

    Args:
        db_session: Async SQLAlchemy session used to run the query.
        user_id: The user's UUID (string or ``UUID``).
        search_space_id: Optional search-space scope.

    Returns:
        Number of matching ``UserMemory`` rows.
    """
    uuid_user_id = _to_uuid(user_id)
    # Count in the database with COUNT(*). The previous implementation
    # fetched every row (embeddings included) and took len() of the list,
    # which is O(n) rows of transfer just to obtain an integer.
    query = (
        select(func.count())
        .select_from(UserMemory)
        .where(UserMemory.user_id == uuid_user_id)
    )
    if search_space_id is not None:
        query = query.where(
            (UserMemory.search_space_id == search_space_id)
            | (UserMemory.search_space_id.is_(None))
        )
    result = await db_session.execute(query)
    return result.scalar_one()
|
||||
|
||||
|
||||
async def delete_oldest_memory(
    db_session: AsyncSession,
    user_id: str,
    search_space_id: int | None = None,
) -> None:
    """Delete the oldest memory for a user to make room for new ones.

    "Oldest" means the row with the smallest updated_at. When a search space
    is given, the candidate set is that space's memories plus global ones
    (search_space_id IS NULL).
    """
    # Assemble all filter criteria up front, then build a single statement.
    criteria = [UserMemory.user_id == _to_uuid(user_id)]
    if search_space_id is not None:
        criteria.append(
            (UserMemory.search_space_id == search_space_id)
            | (UserMemory.search_space_id.is_(None))
        )

    stmt = (
        select(UserMemory)
        .where(*criteria)
        .order_by(UserMemory.updated_at.asc())
        .limit(1)
    )

    victim = (await db_session.execute(stmt)).scalars().first()
    if victim is not None:
        await db_session.delete(victim)
        await db_session.commit()
|
||||
|
||||
|
||||
def format_memories_for_context(memories: list[dict[str, Any]]) -> str:
    """Render retrieved memories as a pseudo-XML context string for the LLM.

    Returns a fixed sentence when the list is empty; otherwise wraps each
    entry in a <memory> element inside a <user_memories> envelope. Missing
    keys degrade gracefully ('unknown' category, empty text/timestamp).
    """
    if not memories:
        return "No relevant memories found for this user."

    def render(entry: dict[str, Any]) -> str:
        category = entry.get("category", "unknown")
        text = entry.get("memory_text", "")
        updated = entry.get("updated_at", "")
        return f"  <memory category='{category}' updated='{updated}'>{text}</memory>"

    lines = ["<user_memories>", *map(render, memories), "</user_memories>"]
    return "\n".join(lines)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Tool Factory Functions
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def create_save_memory_tool(
    user_id: str,
    search_space_id: int,
    db_session: AsyncSession,
):
    """
    Factory function to create the save_memory tool.

    Args:
        user_id: The user's UUID
        search_space_id: The search space ID (for space-specific memories)
        db_session: Database session for executing queries

    Returns:
        A configured tool function for saving user memories
    """
    # The inner tool closes over user_id/search_space_id/db_session, so each
    # agent run gets a tool pre-bound to its user and space.
    # NOTE(review): the @tool decorator derives the tool description from the
    # inner docstring, so that docstring is runtime behavior — do not edit it
    # casually.
    # NOTE(review): db_session is captured for the tool's lifetime — confirm
    # the session outlives the agent run that holds this tool.

    @tool
    async def save_memory(
        content: str,
        category: str = "fact",
    ) -> dict[str, Any]:
        """
        Save a fact, preference, or context about the user for future reference.

        Use this tool when:
        - User explicitly says "remember this", "keep this in mind", or similar
        - User shares personal preferences (e.g., "I prefer Python over JavaScript")
        - User shares important facts about themselves (name, role, interests, projects)
        - User gives standing instructions (e.g., "always respond in bullet points")
        - User shares relevant context (e.g., "I'm working on project X")

        The saved information will be available in future conversations to provide
        more personalized and contextual responses.

        Args:
            content: The fact/preference/context to remember.
                Phrase it clearly, e.g., "User prefers dark mode",
                "User is a senior Python developer", "User is working on an AI project"
            category: Type of memory. One of:
                - "preference": User preferences (e.g., coding style, tools, formats)
                - "fact": Facts about the user (e.g., name, role, expertise)
                - "instruction": Standing instructions (e.g., response format preferences)
                - "context": Current context (e.g., ongoing projects, goals)

        Returns:
            A dictionary with the save status and memory details
        """
        # Normalize and validate category (LLMs may send uppercase)
        # Unknown categories silently fall back to "fact" rather than erroring.
        category = category.lower() if category else "fact"
        valid_categories = ["preference", "fact", "instruction", "context"]
        if category not in valid_categories:
            category = "fact"

        try:
            # Convert user_id to UUID
            uuid_user_id = _to_uuid(user_id)

            # Check if we've hit the memory limit
            memory_count = await get_user_memory_count(
                db_session, user_id, search_space_id
            )
            if memory_count >= MAX_MEMORIES_PER_USER:
                # Delete oldest memory to make room
                # (evicts exactly one row; the cap can only be exceeded by one)
                await delete_oldest_memory(db_session, user_id, search_space_id)

            # Generate embedding for the memory
            # NOTE(review): embed() is called synchronously inside an async
            # tool — confirm it is cheap enough not to block the event loop.
            embedding = config.embedding_model_instance.embed(content)

            # Create new memory using ORM
            # The pgvector Vector column type handles embedding conversion automatically
            new_memory = UserMemory(
                user_id=uuid_user_id,
                search_space_id=search_space_id,
                memory_text=content,
                category=MemoryCategory(category),  # Convert string to enum
                embedding=embedding,  # Pass embedding directly (list or numpy array)
            )

            db_session.add(new_memory)
            await db_session.commit()
            # Refresh so new_memory.id (DB-generated) is available below.
            await db_session.refresh(new_memory)

            return {
                "status": "saved",
                "memory_id": new_memory.id,
                "memory_text": content,
                "category": category,
                "message": f"I'll remember: {content}",
            }

        except Exception as e:
            logger.exception(f"Failed to save memory for user {user_id}: {e}")
            # Rollback the session to clear any failed transaction state
            await db_session.rollback()
            # Errors are reported back to the LLM as a structured result
            # rather than raised, so the agent run continues.
            return {
                "status": "error",
                "error": str(e),
                "message": "Failed to save memory. Please try again.",
            }

    return save_memory
|
||||
|
||||
|
||||
def create_recall_memory_tool(
    user_id: str,
    search_space_id: int,
    db_session: AsyncSession,
):
    """
    Factory function to create the recall_memory tool.

    Args:
        user_id: The user's UUID
        search_space_id: The search space ID
        db_session: Database session for executing queries

    Returns:
        A configured tool function for recalling user memories
    """
    # The inner tool closes over user_id/search_space_id/db_session.
    # NOTE(review): the @tool decorator derives the tool description from the
    # inner docstring, so that docstring is runtime behavior — do not edit it
    # casually.

    @tool
    async def recall_memory(
        query: str | None = None,
        category: str | None = None,
        top_k: int = DEFAULT_RECALL_TOP_K,
    ) -> dict[str, Any]:
        """
        Recall relevant memories about the user to provide personalized responses.

        Use this tool when:
        - You need user context to give a better, more personalized answer
        - User asks about their preferences or past information they shared
        - User references something they told you before
        - Personalization would significantly improve the response quality
        - User asks "what do you know about me?" or similar

        Args:
            query: Optional search query to find specific memories.
                If not provided, returns the most recent memories.
                Example: "programming preferences", "current projects"
            category: Optional category filter. One of:
                "preference", "fact", "instruction", "context"
                If not provided, searches all categories.
            top_k: Number of memories to retrieve (default: 5, max: 20)

        Returns:
            A dictionary containing relevant memories and formatted context
        """
        top_k = min(max(top_k, 1), 20)  # Clamp between 1 and 20

        try:
            # Convert user_id to UUID
            uuid_user_id = _to_uuid(user_id)

            if query:
                # Semantic search using embeddings
                # NOTE(review): embed() is called synchronously inside an
                # async tool — confirm it does not block the event loop.
                query_embedding = config.embedding_model_instance.embed(query)

                # Build query with vector similarity; scope is this search
                # space plus "global" memories (search_space_id IS NULL).
                stmt = (
                    select(UserMemory)
                    .where(UserMemory.user_id == uuid_user_id)
                    .where(
                        (UserMemory.search_space_id == search_space_id)
                        | (UserMemory.search_space_id.is_(None))
                    )
                )

                # Add category filter if specified
                # NOTE(review): this whitelist is duplicated in both branches
                # and in save_memory — consider hoisting to a module constant.
                if category and category in [
                    "preference",
                    "fact",
                    "instruction",
                    "context",
                ]:
                    stmt = stmt.where(UserMemory.category == MemoryCategory(category))

                # Order by vector similarity.
                # '<=>' is the pgvector distance operator; migration 73 builds
                # the HNSW index with vector_cosine_ops, so smallest distance
                # (most similar) sorts first.
                stmt = stmt.order_by(
                    UserMemory.embedding.op("<=>")(query_embedding)
                ).limit(top_k)

            else:
                # No query - return most recent memories
                stmt = (
                    select(UserMemory)
                    .where(UserMemory.user_id == uuid_user_id)
                    .where(
                        (UserMemory.search_space_id == search_space_id)
                        | (UserMemory.search_space_id.is_(None))
                    )
                )

                # Add category filter if specified
                if category and category in [
                    "preference",
                    "fact",
                    "instruction",
                    "context",
                ]:
                    stmt = stmt.where(UserMemory.category == MemoryCategory(category))

                stmt = stmt.order_by(UserMemory.updated_at.desc()).limit(top_k)

            result = await db_session.execute(stmt)
            memories = result.scalars().all()

            # Format memories for response (plain dicts so the result is
            # JSON-serializable for the tool output)
            memory_list = [
                {
                    "id": m.id,
                    "memory_text": m.memory_text,
                    "category": m.category.value if m.category else "unknown",
                    "updated_at": m.updated_at.isoformat() if m.updated_at else None,
                }
                for m in memories
            ]

            formatted_context = format_memories_for_context(memory_list)

            return {
                "status": "success",
                "count": len(memory_list),
                "memories": memory_list,
                "formatted_context": formatted_context,
            }

        except Exception as e:
            logger.exception(f"Failed to recall memories for user {user_id}: {e}")
            await db_session.rollback()
            # Errors are reported as a structured result so the agent run
            # continues instead of crashing.
            return {
                "status": "error",
                "error": str(e),
                "memories": [],
                "formatted_context": "Failed to recall memories.",
            }

    return recall_memory
|
||||
|
|
@ -127,6 +127,12 @@ class Config:
|
|||
CLICKUP_CLIENT_SECRET = os.getenv("CLICKUP_CLIENT_SECRET")
|
||||
CLICKUP_REDIRECT_URI = os.getenv("CLICKUP_REDIRECT_URI")
|
||||
|
||||
# Composio Configuration (for managed OAuth integrations)
|
||||
# Get your API key from https://app.composio.dev
|
||||
COMPOSIO_API_KEY = os.getenv("COMPOSIO_API_KEY")
|
||||
COMPOSIO_ENABLED = os.getenv("COMPOSIO_ENABLED", "FALSE").upper() == "TRUE"
|
||||
COMPOSIO_REDIRECT_URI = os.getenv("COMPOSIO_REDIRECT_URI")
|
||||
|
||||
# LLM instances are now managed per-user through the LLMConfig system
|
||||
# Legacy environment variables removed in favor of user-specific configurations
|
||||
|
||||
|
|
|
|||
388
surfsense_backend/app/connectors/composio_connector.py
Normal file
388
surfsense_backend/app/connectors/composio_connector.py
Normal file
|
|
@ -0,0 +1,388 @@
|
|||
"""
|
||||
Composio Connector Module.
|
||||
|
||||
Provides a unified interface for interacting with various services via Composio,
|
||||
primarily used during indexing operations.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.future import select
|
||||
|
||||
from app.db import SearchSourceConnector
|
||||
from app.services.composio_service import ComposioService, INDEXABLE_TOOLKITS
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ComposioConnector:
|
||||
"""
|
||||
Generic Composio connector for data retrieval.
|
||||
|
||||
Wraps the ComposioService to provide toolkit-specific data access
|
||||
for indexing operations.
|
||||
"""
|
||||
|
||||
    def __init__(
        self,
        session: AsyncSession,
        connector_id: int,
    ):
        """
        Initialize the Composio connector.

        Args:
            session: Database session for updating connector.
            connector_id: ID of the SearchSourceConnector.
        """
        self._session = session
        self._connector_id = connector_id
        # Lazily-populated caches: the Composio service client, the connector
        # row, and its config dict are loaded on first use, not here.
        self._service: ComposioService | None = None
        self._connector: SearchSourceConnector | None = None
        self._config: dict[str, Any] | None = None
|
||||
|
||||
async def _load_connector(self) -> SearchSourceConnector:
|
||||
"""Load connector from database."""
|
||||
if self._connector is None:
|
||||
result = await self._session.execute(
|
||||
select(SearchSourceConnector).filter(
|
||||
SearchSourceConnector.id == self._connector_id
|
||||
)
|
||||
)
|
||||
self._connector = result.scalars().first()
|
||||
if not self._connector:
|
||||
raise ValueError(f"Connector {self._connector_id} not found")
|
||||
self._config = self._connector.config or {}
|
||||
return self._connector
|
||||
|
||||
async def _get_service(self) -> ComposioService:
|
||||
"""Get or create the Composio service instance."""
|
||||
if self._service is None:
|
||||
self._service = ComposioService()
|
||||
return self._service
|
||||
|
||||
async def get_config(self) -> dict[str, Any]:
|
||||
"""Get the connector configuration."""
|
||||
await self._load_connector()
|
||||
return self._config or {}
|
||||
|
||||
async def get_toolkit_id(self) -> str:
|
||||
"""Get the toolkit ID for this connector."""
|
||||
config = await self.get_config()
|
||||
return config.get("toolkit_id", "")
|
||||
|
||||
async def get_connected_account_id(self) -> str | None:
|
||||
"""Get the Composio connected account ID."""
|
||||
config = await self.get_config()
|
||||
return config.get("composio_connected_account_id")
|
||||
|
||||
async def get_entity_id(self) -> str:
|
||||
"""Get the Composio entity ID (user identifier)."""
|
||||
await self._load_connector()
|
||||
# Entity ID is constructed from the connector's user_id
|
||||
return f"surfsense_{self._connector.user_id}"
|
||||
|
||||
async def is_indexable(self) -> bool:
|
||||
"""Check if this connector's toolkit supports indexing."""
|
||||
toolkit_id = await self.get_toolkit_id()
|
||||
return toolkit_id in INDEXABLE_TOOLKITS
|
||||
|
||||
# ===== Google Drive Methods =====
|
||||
|
||||
async def list_drive_files(
|
||||
self,
|
||||
folder_id: str | None = None,
|
||||
page_token: str | None = None,
|
||||
page_size: int = 100,
|
||||
) -> tuple[list[dict[str, Any]], str | None, str | None]:
|
||||
"""
|
||||
List files from Google Drive via Composio.
|
||||
|
||||
Args:
|
||||
folder_id: Optional folder ID to list contents of.
|
||||
page_token: Pagination token.
|
||||
page_size: Number of files per page.
|
||||
|
||||
Returns:
|
||||
Tuple of (files list, next_page_token, error message).
|
||||
"""
|
||||
connected_account_id = await self.get_connected_account_id()
|
||||
if not connected_account_id:
|
||||
return [], None, "No connected account ID found"
|
||||
|
||||
entity_id = await self.get_entity_id()
|
||||
service = await self._get_service()
|
||||
return await service.get_drive_files(
|
||||
connected_account_id=connected_account_id,
|
||||
entity_id=entity_id,
|
||||
folder_id=folder_id,
|
||||
page_token=page_token,
|
||||
page_size=page_size,
|
||||
)
|
||||
|
||||
async def get_drive_file_content(
|
||||
self, file_id: str
|
||||
) -> tuple[bytes | None, str | None]:
|
||||
"""
|
||||
Download file content from Google Drive via Composio.
|
||||
|
||||
Args:
|
||||
file_id: Google Drive file ID.
|
||||
|
||||
Returns:
|
||||
Tuple of (file content bytes, error message).
|
||||
"""
|
||||
connected_account_id = await self.get_connected_account_id()
|
||||
if not connected_account_id:
|
||||
return None, "No connected account ID found"
|
||||
|
||||
entity_id = await self.get_entity_id()
|
||||
service = await self._get_service()
|
||||
return await service.get_drive_file_content(
|
||||
connected_account_id=connected_account_id,
|
||||
entity_id=entity_id,
|
||||
file_id=file_id,
|
||||
)
|
||||
|
||||
# ===== Gmail Methods =====
|
||||
|
||||
async def list_gmail_messages(
|
||||
self,
|
||||
query: str = "",
|
||||
max_results: int = 100,
|
||||
) -> tuple[list[dict[str, Any]], str | None]:
|
||||
"""
|
||||
List Gmail messages via Composio.
|
||||
|
||||
Args:
|
||||
query: Gmail search query.
|
||||
max_results: Maximum number of messages.
|
||||
|
||||
Returns:
|
||||
Tuple of (messages list, error message).
|
||||
"""
|
||||
connected_account_id = await self.get_connected_account_id()
|
||||
if not connected_account_id:
|
||||
return [], "No connected account ID found"
|
||||
|
||||
entity_id = await self.get_entity_id()
|
||||
service = await self._get_service()
|
||||
return await service.get_gmail_messages(
|
||||
connected_account_id=connected_account_id,
|
||||
entity_id=entity_id,
|
||||
query=query,
|
||||
max_results=max_results,
|
||||
)
|
||||
|
||||
async def get_gmail_message_detail(
|
||||
self, message_id: str
|
||||
) -> tuple[dict[str, Any] | None, str | None]:
|
||||
"""
|
||||
Get full details of a Gmail message via Composio.
|
||||
|
||||
Args:
|
||||
message_id: Gmail message ID.
|
||||
|
||||
Returns:
|
||||
Tuple of (message details, error message).
|
||||
"""
|
||||
connected_account_id = await self.get_connected_account_id()
|
||||
if not connected_account_id:
|
||||
return None, "No connected account ID found"
|
||||
|
||||
entity_id = await self.get_entity_id()
|
||||
service = await self._get_service()
|
||||
return await service.get_gmail_message_detail(
|
||||
connected_account_id=connected_account_id,
|
||||
entity_id=entity_id,
|
||||
message_id=message_id,
|
||||
)
|
||||
|
||||
# ===== Google Calendar Methods =====
|
||||
|
||||
async def list_calendar_events(
|
||||
self,
|
||||
time_min: str | None = None,
|
||||
time_max: str | None = None,
|
||||
max_results: int = 250,
|
||||
) -> tuple[list[dict[str, Any]], str | None]:
|
||||
"""
|
||||
List Google Calendar events via Composio.
|
||||
|
||||
Args:
|
||||
time_min: Start time (RFC3339 format).
|
||||
time_max: End time (RFC3339 format).
|
||||
max_results: Maximum number of events.
|
||||
|
||||
Returns:
|
||||
Tuple of (events list, error message).
|
||||
"""
|
||||
connected_account_id = await self.get_connected_account_id()
|
||||
if not connected_account_id:
|
||||
return [], "No connected account ID found"
|
||||
|
||||
entity_id = await self.get_entity_id()
|
||||
service = await self._get_service()
|
||||
return await service.get_calendar_events(
|
||||
connected_account_id=connected_account_id,
|
||||
entity_id=entity_id,
|
||||
time_min=time_min,
|
||||
time_max=time_max,
|
||||
max_results=max_results,
|
||||
)
|
||||
|
||||
# ===== Utility Methods =====
|
||||
|
||||
def format_gmail_message_to_markdown(self, message: dict[str, Any]) -> str:
|
||||
"""
|
||||
Format a Gmail message to markdown.
|
||||
|
||||
Args:
|
||||
message: Message object from Composio's GMAIL_FETCH_EMAILS response.
|
||||
Composio structure: messageId, messageText, messageTimestamp,
|
||||
payload.headers, labelIds, attachmentList
|
||||
|
||||
Returns:
|
||||
Formatted markdown string.
|
||||
"""
|
||||
try:
|
||||
# Composio uses 'messageId' (camelCase)
|
||||
message_id = message.get("messageId", "") or message.get("id", "")
|
||||
label_ids = message.get("labelIds", [])
|
||||
|
||||
# Extract headers from payload
|
||||
payload = message.get("payload", {})
|
||||
headers = payload.get("headers", [])
|
||||
|
||||
# Parse headers into a dict
|
||||
header_dict = {}
|
||||
for header in headers:
|
||||
name = header.get("name", "").lower()
|
||||
value = header.get("value", "")
|
||||
header_dict[name] = value
|
||||
|
||||
# Extract key information
|
||||
subject = header_dict.get("subject", "No Subject")
|
||||
from_email = header_dict.get("from", "Unknown Sender")
|
||||
to_email = header_dict.get("to", "Unknown Recipient")
|
||||
# Composio provides messageTimestamp directly
|
||||
date_str = message.get("messageTimestamp", "") or header_dict.get("date", "Unknown Date")
|
||||
|
||||
# Build markdown content
|
||||
markdown_content = f"# {subject}\n\n"
|
||||
markdown_content += f"**From:** {from_email}\n"
|
||||
markdown_content += f"**To:** {to_email}\n"
|
||||
markdown_content += f"**Date:** {date_str}\n"
|
||||
|
||||
if label_ids:
|
||||
markdown_content += f"**Labels:** {', '.join(label_ids)}\n"
|
||||
|
||||
markdown_content += "\n---\n\n"
|
||||
|
||||
# Composio provides full message text in 'messageText'
|
||||
message_text = message.get("messageText", "")
|
||||
if message_text:
|
||||
markdown_content += f"## Content\n\n{message_text}\n\n"
|
||||
else:
|
||||
# Fallback to snippet if no messageText
|
||||
snippet = message.get("snippet", "")
|
||||
if snippet:
|
||||
markdown_content += f"## Preview\n\n{snippet}\n\n"
|
||||
|
||||
# Add attachment info if present
|
||||
attachments = message.get("attachmentList", [])
|
||||
if attachments:
|
||||
markdown_content += "## Attachments\n\n"
|
||||
for att in attachments:
|
||||
att_name = att.get("filename", att.get("name", "Unknown"))
|
||||
markdown_content += f"- {att_name}\n"
|
||||
markdown_content += "\n"
|
||||
|
||||
# Add message metadata
|
||||
markdown_content += "## Message Details\n\n"
|
||||
markdown_content += f"- **Message ID:** {message_id}\n"
|
||||
|
||||
return markdown_content
|
||||
|
||||
except Exception as e:
|
||||
return f"Error formatting message to markdown: {e!s}"
|
||||
|
||||
def format_calendar_event_to_markdown(self, event: dict[str, Any]) -> str:
|
||||
"""
|
||||
Format a Google Calendar event to markdown.
|
||||
|
||||
Args:
|
||||
event: Event object from Google Calendar API.
|
||||
|
||||
Returns:
|
||||
Formatted markdown string.
|
||||
"""
|
||||
from datetime import datetime
|
||||
|
||||
try:
|
||||
# Extract basic event information
|
||||
summary = event.get("summary", "No Title")
|
||||
description = event.get("description", "")
|
||||
location = event.get("location", "")
|
||||
|
||||
# Extract start and end times
|
||||
start = event.get("start", {})
|
||||
end = event.get("end", {})
|
||||
|
||||
start_time = start.get("dateTime") or start.get("date", "")
|
||||
end_time = end.get("dateTime") or end.get("date", "")
|
||||
|
||||
# Format times for display
|
||||
def format_time(time_str: str) -> str:
|
||||
if not time_str:
|
||||
return "Unknown"
|
||||
try:
|
||||
if "T" in time_str:
|
||||
dt = datetime.fromisoformat(time_str.replace("Z", "+00:00"))
|
||||
return dt.strftime("%Y-%m-%d %H:%M")
|
||||
return time_str
|
||||
except Exception:
|
||||
return time_str
|
||||
|
||||
start_formatted = format_time(start_time)
|
||||
end_formatted = format_time(end_time)
|
||||
|
||||
# Extract attendees
|
||||
attendees = event.get("attendees", [])
|
||||
attendee_list = []
|
||||
for attendee in attendees:
|
||||
email = attendee.get("email", "")
|
||||
display_name = attendee.get("displayName", email)
|
||||
response_status = attendee.get("responseStatus", "")
|
||||
attendee_list.append(f"- {display_name} ({response_status})")
|
||||
|
||||
# Build markdown content
|
||||
markdown_content = f"# {summary}\n\n"
|
||||
markdown_content += f"**Start:** {start_formatted}\n"
|
||||
markdown_content += f"**End:** {end_formatted}\n"
|
||||
|
||||
if location:
|
||||
markdown_content += f"**Location:** {location}\n"
|
||||
|
||||
markdown_content += "\n"
|
||||
|
||||
if description:
|
||||
markdown_content += f"## Description\n\n{description}\n\n"
|
||||
|
||||
if attendee_list:
|
||||
markdown_content += "## Attendees\n\n"
|
||||
markdown_content += "\n".join(attendee_list)
|
||||
markdown_content += "\n\n"
|
||||
|
||||
# Add event metadata
|
||||
markdown_content += "## Event Details\n\n"
|
||||
markdown_content += f"- **Event ID:** {event.get('id', 'Unknown')}\n"
|
||||
markdown_content += f"- **Created:** {event.get('created', 'Unknown')}\n"
|
||||
markdown_content += f"- **Updated:** {event.get('updated', 'Unknown')}\n"
|
||||
|
||||
return markdown_content
|
||||
|
||||
except Exception as e:
|
||||
return f"Error formatting event to markdown: {e!s}"
|
||||
|
|
@ -54,6 +54,7 @@ class DocumentType(str, Enum):
|
|||
BOOKSTACK_CONNECTOR = "BOOKSTACK_CONNECTOR"
|
||||
CIRCLEBACK = "CIRCLEBACK"
|
||||
NOTE = "NOTE"
|
||||
COMPOSIO_CONNECTOR = "COMPOSIO_CONNECTOR" # Generic Composio integration
|
||||
|
||||
|
||||
class SearchSourceConnectorType(str, Enum):
|
||||
|
|
@ -81,6 +82,7 @@ class SearchSourceConnectorType(str, Enum):
|
|||
BOOKSTACK_CONNECTOR = "BOOKSTACK_CONNECTOR"
|
||||
CIRCLEBACK_CONNECTOR = "CIRCLEBACK_CONNECTOR"
|
||||
MCP_CONNECTOR = "MCP_CONNECTOR" # Model Context Protocol - User-defined API tools
|
||||
COMPOSIO_CONNECTOR = "COMPOSIO_CONNECTOR" # Generic Composio integration (Google, Slack, etc.)
|
||||
|
||||
|
||||
class LiteLLMProvider(str, Enum):
|
||||
|
|
@ -472,6 +474,66 @@ class ChatCommentMention(BaseModel, TimestampMixin):
|
|||
mentioned_user = relationship("User")
|
||||
|
||||
|
||||
class MemoryCategory(str, Enum):
    """Categories for user memories."""

    # Using lowercase keys to match PostgreSQL enum values
    preference = "preference"  # User preferences (e.g., "prefers dark mode")
    fact = "fact"  # Facts about the user (e.g., "is a Python developer")
    # Standing instructions (e.g., "always respond in bullet points")
    instruction = "instruction"
    context = "context"  # Contextual information (e.g., "working on project X")
|
||||
|
||||
|
||||
class UserMemory(BaseModel, TimestampMixin):
    """
    Stores facts, preferences, and context about users for personalized AI responses.
    Similar to Claude's memory feature - enables the AI to remember user information
    across conversations.
    """

    __tablename__ = "user_memories"

    # Owning user; row is removed when the user is deleted (CASCADE).
    user_id = Column(
        UUID(as_uuid=True),
        ForeignKey("user.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # Optional association with a search space (if memory is space-specific)
    search_space_id = Column(
        Integer,
        ForeignKey("searchspaces.id", ondelete="CASCADE"),
        nullable=True,
        index=True,
    )

    # The actual memory content
    memory_text = Column(Text, nullable=False)
    # Category for organization and filtering
    category = Column(
        SQLAlchemyEnum(MemoryCategory),
        nullable=False,
        default=MemoryCategory.fact,
    )
    # Vector embedding for semantic search; dimension follows the configured
    # embedding model.
    embedding = Column(Vector(config.embedding_model_instance.dimension))

    # Track when memory was last updated (auto-refreshed on every UPDATE)
    updated_at = Column(
        TIMESTAMP(timezone=True),
        nullable=False,
        default=lambda: datetime.now(UTC),
        onupdate=lambda: datetime.now(UTC),
        index=True,
    )

    # Relationships
    user = relationship("User", back_populates="memories")
    search_space = relationship("SearchSpace", back_populates="user_memories")
|
||||
|
||||
|
||||
class Document(BaseModel, TimestampMixin):
|
||||
__tablename__ = "documents"
|
||||
|
||||
|
|
@ -659,6 +721,14 @@ class SearchSpace(BaseModel, TimestampMixin):
|
|||
cascade="all, delete-orphan",
|
||||
)
|
||||
|
||||
# User memories associated with this search space
|
||||
user_memories = relationship(
|
||||
"UserMemory",
|
||||
back_populates="search_space",
|
||||
order_by="UserMemory.updated_at.desc()",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
|
||||
|
||||
class SearchSourceConnector(BaseModel, TimestampMixin):
|
||||
__tablename__ = "search_source_connectors"
|
||||
|
|
@ -967,6 +1037,14 @@ if config.AUTH_TYPE == "GOOGLE":
|
|||
passive_deletes=True,
|
||||
)
|
||||
|
||||
# User memories for personalized AI responses
|
||||
memories = relationship(
|
||||
"UserMemory",
|
||||
back_populates="user",
|
||||
order_by="UserMemory.updated_at.desc()",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
|
||||
# Page usage tracking for ETL services
|
||||
pages_limit = Column(
|
||||
Integer,
|
||||
|
|
@ -1010,6 +1088,14 @@ else:
|
|||
passive_deletes=True,
|
||||
)
|
||||
|
||||
# User memories for personalized AI responses
|
||||
memories = relationship(
|
||||
"UserMemory",
|
||||
back_populates="user",
|
||||
order_by="UserMemory.updated_at.desc()",
|
||||
cascade="all, delete-orphan",
|
||||
)
|
||||
|
||||
# Page usage tracking for ETL services
|
||||
pages_limit = Column(
|
||||
Integer,
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ from .airtable_add_connector_route import (
|
|||
from .chat_comments_routes import router as chat_comments_router
|
||||
from .circleback_webhook_route import router as circleback_webhook_router
|
||||
from .clickup_add_connector_route import router as clickup_add_connector_router
|
||||
from .composio_routes import router as composio_router
|
||||
from .confluence_add_connector_route import router as confluence_add_connector_router
|
||||
from .discord_add_connector_route import router as discord_add_connector_router
|
||||
from .documents_routes import router as documents_router
|
||||
|
|
@ -65,3 +66,4 @@ router.include_router(logs_router)
|
|||
router.include_router(circleback_webhook_router) # Circleback meeting webhooks
|
||||
router.include_router(surfsense_docs_router) # Surfsense documentation for citations
|
||||
router.include_router(notifications_router) # Notifications with Electric SQL sync
|
||||
router.include_router(composio_router) # Composio OAuth and toolkit management
|
||||
|
|
|
|||
333
surfsense_backend/app/routes/composio_routes.py
Normal file
333
surfsense_backend/app/routes/composio_routes.py
Normal file
|
|
@ -0,0 +1,333 @@
|
|||
"""
Composio Connector OAuth Routes.

Handles OAuth flow for Composio-based integrations (Google Drive, Gmail, Calendar, etc.).
This provides a single connector that can connect to any Composio toolkit.

Endpoints:
    - GET /composio/toolkits - List available Composio toolkits
    - GET /auth/composio/connector/add - Initiate OAuth for a specific toolkit
    - GET /auth/composio/connector/callback - Handle OAuth callback
"""

import asyncio
import logging
from uuid import UUID

from fastapi import APIRouter, Depends, HTTPException, Query
from fastapi.responses import RedirectResponse
from pydantic import ValidationError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession

from app.config import config
from app.db import (
    SearchSourceConnector,
    SearchSourceConnectorType,
    User,
    get_async_session,
)
from app.services.composio_service import (
    COMPOSIO_TOOLKIT_NAMES,
    INDEXABLE_TOOLKITS,
    ComposioService,
)
from app.users import current_active_user
from app.utils.connector_naming import (
    check_duplicate_connector,
    generate_unique_connector_name,
)
from app.utils.oauth_security import OAuthStateManager

logger = logging.getLogger(__name__)

router = APIRouter()

# Module-level singleton for the OAuth state manager; created lazily so that
# SECRET_KEY validation happens on first use rather than at import time.
_state_manager = None


def get_state_manager() -> OAuthStateManager:
    """Get or create OAuth state manager instance.

    Raises:
        ValueError: If SECRET_KEY is not configured.
    """
    global _state_manager
    if _state_manager is None:
        if not config.SECRET_KEY:
            raise ValueError("SECRET_KEY must be set for OAuth security")
        _state_manager = OAuthStateManager(config.SECRET_KEY)
    return _state_manager
|
||||
|
||||
|
||||
@router.get("/composio/toolkits")
async def list_composio_toolkits(user: User = Depends(current_active_user)):
    """
    List available Composio toolkits.

    Returns:
        JSON with list of available toolkits and their metadata.

    Raises:
        HTTPException: 503 if the integration is disabled, 500 on SDK errors.
    """
    if not ComposioService.is_enabled():
        raise HTTPException(
            status_code=503,
            detail="Composio integration is not enabled. Set COMPOSIO_ENABLED=TRUE and provide COMPOSIO_API_KEY.",
        )

    try:
        service = ComposioService()
        toolkits = service.list_available_toolkits()
        return {"toolkits": toolkits}
    except Exception as e:
        logger.error(f"Failed to list Composio toolkits: {e!s}")
        raise HTTPException(
            status_code=500, detail=f"Failed to list toolkits: {e!s}"
        ) from e
|
||||
|
||||
|
||||
@router.get("/auth/composio/connector/add")
async def initiate_composio_auth(
    space_id: int,
    toolkit_id: str = Query(..., description="Composio toolkit ID (e.g., 'googledrive', 'gmail')"),
    user: User = Depends(current_active_user),
):
    """
    Initiate Composio OAuth flow for a specific toolkit.

    Query params:
        space_id: Search space ID to add connector to
        toolkit_id: Composio toolkit ID (e.g., "googledrive", "gmail", "googlecalendar")

    Returns:
        JSON with auth_url to redirect user to Composio authorization

    Raises:
        HTTPException: 503 if disabled, 400 on bad params, 500 on config/SDK errors.
    """
    if not ComposioService.is_enabled():
        raise HTTPException(
            status_code=503,
            detail="Composio integration is not enabled.",
        )

    if not space_id:
        raise HTTPException(status_code=400, detail="space_id is required")

    if toolkit_id not in COMPOSIO_TOOLKIT_NAMES:
        raise HTTPException(
            status_code=400,
            detail=f"Unknown toolkit: {toolkit_id}. Available: {list(COMPOSIO_TOOLKIT_NAMES.keys())}",
        )

    if not config.SECRET_KEY:
        raise HTTPException(
            status_code=500, detail="SECRET_KEY not configured for OAuth security."
        )

    try:
        # Generate secure state parameter with HMAC signature
        state_manager = get_state_manager()
        state_encoded = state_manager.generate_secure_state(
            space_id, user.id, toolkit_id=toolkit_id
        )

        # Build callback URL: explicit config wins, otherwise derive from BACKEND_URL
        callback_url = config.COMPOSIO_REDIRECT_URI
        if not callback_url:
            backend_url = config.BACKEND_URL or "http://localhost:8000"
            callback_url = f"{backend_url}/api/v1/auth/composio/connector/callback"

        # Initiate Composio OAuth
        service = ComposioService()
        # Use user.id as the entity ID in Composio (converted to string for Composio)
        entity_id = f"surfsense_{user.id}"

        connection_result = await service.initiate_connection(
            user_id=entity_id,
            toolkit_id=toolkit_id,
            # State rides along as a query param so the callback can verify it.
            redirect_uri=f"{callback_url}?state={state_encoded}",
        )

        auth_url = connection_result.get("redirect_url")
        if not auth_url:
            raise HTTPException(
                status_code=500, detail="Failed to get authorization URL from Composio"
            )

        logger.info(
            f"Initiating Composio OAuth for user {user.id}, toolkit {toolkit_id}, space {space_id}"
        )
        return {"auth_url": auth_url}

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to initiate Composio OAuth: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to initiate Composio OAuth: {e!s}"
        ) from e
|
||||
|
||||
|
||||
@router.get("/auth/composio/connector/callback")
async def composio_callback(
    state: str | None = None,
    connectedAccountId: str | None = None,  # Composio sends camelCase
    connected_account_id: str | None = None,  # Fallback snake_case
    error: str | None = None,
    session: AsyncSession = Depends(get_async_session),
):
    """
    Handle Composio OAuth callback.

    Query params:
        state: Encoded state with space_id, user_id, and toolkit_id
        connected_account_id: Composio connected account ID (may not be present)
        error: OAuth error (if user denied access or error occurred)

    Returns:
        Redirect to frontend success page (or an error page on failure)
    """
    try:
        # Handle OAuth errors (user denied access or provider-side failure)
        if error:
            logger.warning(f"Composio OAuth error: {error}")
            # Best-effort state decode so we can redirect back into the space.
            space_id = None
            if state:
                try:
                    state_manager = get_state_manager()
                    data = state_manager.validate_state(state)
                    space_id = data.get("space_id")
                except Exception:
                    logger.warning("Failed to validate state in error handler")

            if space_id:
                return RedirectResponse(
                    url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=composio_oauth_denied"
                )
            else:
                return RedirectResponse(
                    url=f"{config.NEXT_FRONTEND_URL}/dashboard?error=composio_oauth_denied"
                )

        # Validate required parameters
        if not state:
            raise HTTPException(status_code=400, detail="Missing state parameter")

        # Validate and decode state with signature verification
        state_manager = get_state_manager()
        try:
            data = state_manager.validate_state(state)
        except HTTPException:
            raise
        except Exception as e:
            raise HTTPException(
                status_code=400, detail=f"Invalid state parameter: {e!s}"
            ) from e

        user_id = UUID(data["user_id"])
        space_id = data["space_id"]
        toolkit_id = data.get("toolkit_id")

        if not toolkit_id:
            raise HTTPException(status_code=400, detail="Missing toolkit_id in state")

        toolkit_name = COMPOSIO_TOOLKIT_NAMES.get(toolkit_id, toolkit_id)

        logger.info(
            f"Processing Composio callback for user {user_id}, toolkit {toolkit_id}, space {space_id}"
        )

        # Initialize Composio service (constructor validates COMPOSIO_API_KEY)
        service = ComposioService()
        entity_id = f"surfsense_{user_id}"

        # Use camelCase param if provided (Composio's format), fallback to snake_case
        final_connected_account_id = connectedAccountId or connected_account_id

        # Trace which query-parameter variant Composio actually sent.
        logger.debug(
            f"Callback received - connectedAccountId: {connectedAccountId}, "
            f"connected_account_id: {connected_account_id}, using: {final_connected_account_id}"
        )

        # If we still don't have a connected_account_id, warn but continue
        # (the connector will be created but indexing won't work until updated)
        if not final_connected_account_id:
            logger.warning(
                f"Could not find connected_account_id for toolkit {toolkit_id}. "
                "The connector will be created but indexing may not work."
            )
        else:
            logger.info(f"Successfully got connected_account_id: {final_connected_account_id}")

        # Build connector config
        connector_config = {
            "composio_connected_account_id": final_connected_account_id,
            "toolkit_id": toolkit_id,
            "toolkit_name": toolkit_name,
            "is_indexable": toolkit_id in INDEXABLE_TOOLKITS,
        }

        # Check for duplicate connector
        # For Composio, we use toolkit_id + connected_account_id as unique identifier
        identifier = final_connected_account_id or f"{toolkit_id}_{user_id}"

        is_duplicate = await check_duplicate_connector(
            session,
            SearchSourceConnectorType.COMPOSIO_CONNECTOR,
            space_id,
            user_id,
            identifier,
        )
        if is_duplicate:
            logger.warning(
                f"Duplicate Composio connector detected for user {user_id} with toolkit {toolkit_id}"
            )
            return RedirectResponse(
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&error=duplicate_account&connector=composio-connector"
            )

        try:
            # Generate a unique, user-friendly connector name
            connector_name = await generate_unique_connector_name(
                session,
                SearchSourceConnectorType.COMPOSIO_CONNECTOR,
                space_id,
                user_id,
                f"{toolkit_name} (Composio)",
            )

            db_connector = SearchSourceConnector(
                name=connector_name,
                connector_type=SearchSourceConnectorType.COMPOSIO_CONNECTOR,
                config=connector_config,
                search_space_id=space_id,
                user_id=user_id,
                is_indexable=toolkit_id in INDEXABLE_TOOLKITS,
            )

            session.add(db_connector)
            await session.commit()
            await session.refresh(db_connector)

            logger.info(
                f"Successfully created Composio connector {db_connector.id} for user {user_id}, toolkit {toolkit_id}"
            )

            return RedirectResponse(
                url=f"{config.NEXT_FRONTEND_URL}/dashboard/{space_id}/new-chat?modal=connectors&tab=all&success=true&connector=composio-connector&connectorId={db_connector.id}"
            )

        except IntegrityError as e:
            await session.rollback()
            logger.error(f"Database integrity error: {e!s}")
            raise HTTPException(
                status_code=409,
                detail=f"Database integrity error: {e!s}",
            ) from e
        except ValidationError as e:
            await session.rollback()
            logger.error(f"Validation error: {e!s}")
            raise HTTPException(
                status_code=400, detail=f"Invalid connector configuration: {e!s}"
            ) from e

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Unexpected error in Composio callback: {e!s}", exc_info=True)
        raise HTTPException(
            status_code=500, detail=f"Failed to complete Composio OAuth: {e!s}"
        ) from e
|
||||
|
|
@ -990,6 +990,7 @@ async def handle_new_chat(
|
|||
search_space_id=request.search_space_id,
|
||||
chat_id=request.chat_id,
|
||||
session=session,
|
||||
user_id=str(user.id), # Pass user ID for memory tools
|
||||
llm_config_id=llm_config_id,
|
||||
attachments=request.attachments,
|
||||
mentioned_document_ids=request.mentioned_document_ids,
|
||||
|
|
|
|||
|
|
@ -868,6 +868,19 @@ async def index_connector_content(
|
|||
)
|
||||
response_message = "Web page indexing started in the background."
|
||||
|
||||
elif connector.connector_type == SearchSourceConnectorType.COMPOSIO_CONNECTOR:
|
||||
from app.tasks.celery_tasks.connector_tasks import (
|
||||
index_composio_connector_task,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Triggering Composio connector indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}"
|
||||
)
|
||||
index_composio_connector_task.delay(
|
||||
connector_id, search_space_id, str(user.id), indexing_from, indexing_to
|
||||
)
|
||||
response_message = "Composio connector indexing started in the background."
|
||||
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
|
|
|
|||
607
surfsense_backend/app/services/composio_service.py
Normal file
607
surfsense_backend/app/services/composio_service.py
Normal file
|
|
@ -0,0 +1,607 @@
|
|||
"""
Composio Service Module.

Provides a wrapper around the Composio SDK for managing OAuth connections
and executing tools for various integrations (Google Drive, Gmail, Calendar, etc.).
"""

import logging
from typing import Any

from composio import Composio

from app.config import config

logger = logging.getLogger(__name__)


# Mapping of toolkit IDs to their Composio auth config IDs
# These use Composio's managed OAuth (no custom credentials needed)
COMPOSIO_TOOLKIT_AUTH_CONFIGS = {
    "googledrive": "default",  # Uses Composio's managed Google OAuth
    "gmail": "default",
    "googlecalendar": "default",
    "slack": "default",
    "notion": "default",
    "github": "default",
}

# Mapping of toolkit IDs to their display names
COMPOSIO_TOOLKIT_NAMES = {
    "googledrive": "Google Drive",
    "gmail": "Gmail",
    "googlecalendar": "Google Calendar",
    "slack": "Slack",
    "notion": "Notion",
    "github": "GitHub",
}

# Toolkits that support indexing (Phase 1: Google services only)
INDEXABLE_TOOLKITS = {"googledrive", "gmail", "googlecalendar"}
|
||||
|
||||
|
||||
class ComposioService:
    """Thin wrapper around the Composio SDK.

    Manages OAuth connections (via Composio auth configs) and executes
    Composio tools for the integrations declared in
    ``COMPOSIO_TOOLKIT_NAMES`` (Google Drive, Gmail, Calendar, etc.).
    """

    def __init__(self, api_key: str | None = None):
        """Initialize the Composio client.

        Args:
            api_key: Composio API key. Falls back to ``config.COMPOSIO_API_KEY``.

        Raises:
            ValueError: If no API key is available from either source.
        """
        self.api_key = api_key or config.COMPOSIO_API_KEY
        if not self.api_key:
            raise ValueError("COMPOSIO_API_KEY is required but not configured")
        self.client = Composio(api_key=self.api_key)

    @staticmethod
    def is_enabled() -> bool:
        """Return True when Composio is enabled and an API key is configured."""
        return config.COMPOSIO_ENABLED and bool(config.COMPOSIO_API_KEY)

    @staticmethod
    def _toolkit_slug(toolkit: Any) -> str | None:
        """Extract a toolkit identifier from an SDK value.

        The SDK may represent a toolkit as a plain string or as an object
        carrying ``slug``, ``name`` or ``id``; fall back to ``str()``.
        Centralizes logic that was previously duplicated (with divergent
        fallbacks) across several methods.
        """
        if toolkit is None:
            return None
        if isinstance(toolkit, str):
            return toolkit
        for attr in ("slug", "name", "id"):
            value = getattr(toolkit, attr, None)
            if value:
                return value if isinstance(value, str) else str(value)
        return str(toolkit)

    @staticmethod
    def _accounts_from_response(accounts_response: Any) -> list[Any] | None:
        """Unwrap a connected-accounts listing (paginated object or iterable).

        Returns None (after logging) for unrecognized response shapes.
        """
        if hasattr(accounts_response, "items"):
            return accounts_response.items
        if hasattr(accounts_response, "__iter__"):
            return list(accounts_response)
        logger.warning(f"Unexpected accounts response type: {type(accounts_response)}")
        return None

    def list_available_toolkits(self) -> list[dict[str, Any]]:
        """Return UI metadata for every supported toolkit.

        Returns:
            List of dicts with id, name, is_indexable and description.
        """
        return [
            {
                "id": toolkit_id,
                "name": display_name,
                "is_indexable": toolkit_id in INDEXABLE_TOOLKITS,
                "description": f"Connect to {display_name} via Composio",
            }
            for toolkit_id, display_name in COMPOSIO_TOOLKIT_NAMES.items()
        ]

    def _get_auth_config_for_toolkit(self, toolkit_id: str) -> str | None:
        """Find the Composio auth_config_id for a toolkit.

        Args:
            toolkit_id: The toolkit ID (e.g., "googledrive", "gmail").

        Returns:
            The auth_config_id, or None if no config matches or listing fails.
        """
        try:
            auth_configs = self.client.auth_configs.list()
            for auth_config in auth_configs.items:
                slug = self._toolkit_slug(getattr(auth_config, "toolkit", None))
                # Compare case-insensitively: dashboard slugs vary in casing.
                if slug and slug.lower() == toolkit_id.lower():
                    logger.info(
                        f"Found auth config {auth_config.id} for toolkit {toolkit_id}"
                    )
                    return auth_config.id

            # Nothing matched — log what exists to aid debugging.
            logger.warning(
                f"No auth config found for toolkit '{toolkit_id}'. Available auth configs:"
            )
            for auth_config in auth_configs.items:
                config_toolkit = getattr(auth_config, "toolkit", None)
                logger.warning(f" - {auth_config.id}: toolkit={config_toolkit}")
            return None
        except Exception as e:
            logger.error(f"Failed to list auth configs: {e!s}")
            return None

    async def initiate_connection(
        self,
        user_id: str,
        toolkit_id: str,
        redirect_uri: str,
    ) -> dict[str, Any]:
        """Initiate the OAuth flow for a Composio toolkit.

        Args:
            user_id: Unique user identifier (used as entity_id in Composio).
            toolkit_id: The toolkit to connect (e.g., "googledrive", "gmail").
            redirect_uri: URL to redirect to after OAuth completion.

        Returns:
            Dict containing "redirect_url" and "connection_id".

        Raises:
            ValueError: For unknown toolkits or when no auth config exists.
        """
        if toolkit_id not in COMPOSIO_TOOLKIT_NAMES:
            raise ValueError(f"Unknown toolkit: {toolkit_id}")

        try:
            auth_config_id = self._get_auth_config_for_toolkit(toolkit_id)
            if not auth_config_id:
                raise ValueError(
                    f"No auth config found for toolkit '{toolkit_id}'. "
                    f"Please create an auth config for {COMPOSIO_TOOLKIT_NAMES.get(toolkit_id, toolkit_id)} "
                    f"in your Composio dashboard at https://app.composio.dev"
                )

            # allow_multiple=True lets a user connect several accounts of the
            # same toolkit (e.g., two different Google accounts).
            connection_request = self.client.connected_accounts.initiate(
                user_id=user_id,
                auth_config_id=auth_config_id,
                callback_url=redirect_uri,
                allow_multiple=True,
            )

            logger.info(
                f"Initiated Composio connection for user {user_id}, toolkit {toolkit_id}, auth_config {auth_config_id}"
            )

            return {
                "redirect_url": connection_request.redirect_url,
                "connection_id": getattr(connection_request, "id", None),
            }
        except Exception as e:
            logger.error(f"Failed to initiate Composio connection: {e!s}")
            raise

    async def get_connected_account(
        self, connected_account_id: str
    ) -> dict[str, Any] | None:
        """Get details of a connected account.

        Args:
            connected_account_id: The Composio connected account ID.

        Returns:
            Connected account details, or None if lookup fails.
        """
        try:
            # connected_account_id must be positional (SDK signature).
            account = self.client.connected_accounts.get(connected_account_id)
            return {
                "id": account.id,
                "status": getattr(account, "status", None),
                "toolkit": getattr(account, "toolkit", None),
                "user_id": getattr(account, "user_id", None),
            }
        except Exception as e:
            logger.error(f"Failed to get connected account {connected_account_id}: {e!s}")
            return None

    async def list_all_connections(self) -> list[dict[str, Any]]:
        """List ALL connected accounts in the Composio project (debug aid).

        Returns:
            List of account dicts; empty on any failure.
        """
        try:
            accounts = self._accounts_from_response(
                self.client.connected_accounts.list()
            )
            if accounts is None:
                return []

            result = [
                {
                    "id": acc.id,
                    "status": getattr(acc, "status", None),
                    "toolkit": self._toolkit_slug(getattr(acc, "toolkit", None)),
                    "user_id": getattr(acc, "user_id", None),
                }
                for acc in accounts
            ]
            logger.debug(f"Found {len(result)} total connections in Composio")
            return result
        except Exception as e:
            logger.error(f"Failed to list all connections: {e!s}")
            return []

    async def list_user_connections(self, user_id: str) -> list[dict[str, Any]]:
        """List all connected accounts for a user.

        Args:
            user_id: The user's unique identifier.

        Returns:
            List of account dicts (id, status, toolkit); empty on failure.
        """
        try:
            accounts = self._accounts_from_response(
                self.client.connected_accounts.list(user_id=user_id)
            )
            if accounts is None:
                return []

            result = [
                {
                    "id": acc.id,
                    "status": getattr(acc, "status", None),
                    "toolkit": self._toolkit_slug(getattr(acc, "toolkit", None)),
                }
                for acc in accounts
            ]
            # Log only the count — account payloads may identify users.
            logger.info(f"Found {len(result)} connections for user {user_id}")
            return result
        except Exception as e:
            logger.error(f"Failed to list connections for user {user_id}: {e!s}")
            return []

    async def execute_tool(
        self,
        connected_account_id: str,
        tool_name: str,
        params: dict[str, Any] | None = None,
        entity_id: str | None = None,
    ) -> dict[str, Any]:
        """Execute a Composio tool.

        Args:
            connected_account_id: The connected account to use.
            tool_name: Tool slug (e.g., "GOOGLEDRIVE_LIST_FILES").
            params: Parameters for the tool.
            entity_id: The entity/user ID that owns the connected account.

        Returns:
            {"success": True, "data": ...} on success, otherwise
            {"success": False, "error": "..."} — never raises.
        """
        try:
            logger.debug(f"Executing tool {tool_name}")
            result = self.client.tools.execute(
                slug=tool_name,
                connected_account_id=connected_account_id,
                user_id=entity_id,  # SDK uses user_id, not entity_id
                arguments=params or {},
                # Skip version pinning for manually-invoked tools.
                dangerously_skip_version_check=True,
            )
            # Payloads may contain user data — keep raw dumps at DEBUG only.
            logger.debug(f"Tool {tool_name} returned result of type {type(result)}")
            return {"success": True, "data": result}
        except Exception as e:
            logger.error(f"Failed to execute tool {tool_name}: {e!s}")
            return {"success": False, "error": str(e)}

    # ===== Google Drive specific methods =====

    async def get_drive_files(
        self,
        connected_account_id: str,
        entity_id: str,
        folder_id: str | None = None,
        page_token: str | None = None,
        page_size: int = 100,
    ) -> tuple[list[dict[str, Any]], str | None, str | None]:
        """List files from Google Drive via Composio.

        Args:
            connected_account_id: Composio connected account ID.
            entity_id: The entity/user ID that owns the connected account.
            folder_id: Optional folder ID to list contents of.
            page_token: Pagination token from a previous call.
            page_size: Files per page (capped at 100 by the API).

        Returns:
            Tuple of (files list, next_page_token, error message or None).
        """
        try:
            # Composio uses snake_case parameter names.
            params: dict[str, Any] = {"page_size": min(page_size, 100)}
            if folder_id:
                params["folder_id"] = folder_id
            if page_token:
                params["page_token"] = page_token

            result = await self.execute_tool(
                connected_account_id=connected_account_id,
                tool_name="GOOGLEDRIVE_LIST_FILES",
                params=params,
                entity_id=entity_id,
            )

            if not result.get("success"):
                return [], None, result.get("error", "Unknown error")

            data = result.get("data", {})

            # Composio responses may nest the payload under a "data" key.
            files: list[dict[str, Any]] = []
            next_token = None
            if isinstance(data, dict):
                files = data.get("files", []) or data.get("data", {}).get("files", [])
                next_token = (
                    data.get("nextPageToken")
                    or data.get("next_page_token")
                    or data.get("data", {}).get("nextPageToken")
                )
            elif isinstance(data, list):
                files = data

            logger.debug(f"Extracted {len(files)} drive files")
            return files, next_token, None

        except Exception as e:
            logger.error(f"Failed to list Drive files: {e!s}")
            return [], None, str(e)

    async def get_drive_file_content(
        self, connected_account_id: str, entity_id: str, file_id: str
    ) -> tuple[bytes | None, str | None]:
        """Download file content from Google Drive via Composio.

        Args:
            connected_account_id: Composio connected account ID.
            entity_id: The entity/user ID that owns the connected account.
            file_id: Google Drive file ID.

        Returns:
            Tuple of (file content bytes, error message or None).
        """
        try:
            result = await self.execute_tool(
                connected_account_id=connected_account_id,
                tool_name="GOOGLEDRIVE_DOWNLOAD_FILE",
                params={"file_id": file_id},  # snake_case
                entity_id=entity_id,
            )

            if not result.get("success"):
                return None, result.get("error", "Unknown error")

            content = result.get("data")
            if isinstance(content, str):
                content = content.encode("utf-8")

            return content, None

        except Exception as e:
            logger.error(f"Failed to get Drive file content: {e!s}")
            return None, str(e)

    # ===== Gmail specific methods =====

    async def get_gmail_messages(
        self,
        connected_account_id: str,
        entity_id: str,
        query: str = "",
        max_results: int = 100,
    ) -> tuple[list[dict[str, Any]], str | None]:
        """List Gmail messages via Composio.

        Args:
            connected_account_id: Composio connected account ID.
            entity_id: The entity/user ID that owns the connected account.
            query: Gmail search query (Composio uses 'query', not 'q').
            max_results: Maximum messages to return (API cap: 500).

        Returns:
            Tuple of (messages list, error message or None).
        """
        try:
            params: dict[str, Any] = {"max_results": min(max_results, 500)}
            if query:
                params["query"] = query

            result = await self.execute_tool(
                connected_account_id=connected_account_id,
                tool_name="GMAIL_FETCH_EMAILS",
                params=params,
                entity_id=entity_id,
            )

            if not result.get("success"):
                return [], result.get("error", "Unknown error")

            data = result.get("data", {})

            # Response shape varies; do NOT log the payload itself — it
            # contains email contents (PII).
            messages: list[dict[str, Any]] = []
            if isinstance(data, dict):
                messages = (
                    data.get("messages", [])
                    or data.get("data", {}).get("messages", [])
                    or data.get("emails", [])
                )
            elif isinstance(data, list):
                messages = data

            logger.debug(f"Extracted {len(messages)} messages")
            return messages, None

        except Exception as e:
            logger.error(f"Failed to list Gmail messages: {e!s}")
            return [], str(e)

    async def get_gmail_message_detail(
        self, connected_account_id: str, entity_id: str, message_id: str
    ) -> tuple[dict[str, Any] | None, str | None]:
        """Get full details of a Gmail message via Composio.

        Args:
            connected_account_id: Composio connected account ID.
            entity_id: The entity/user ID that owns the connected account.
            message_id: Gmail message ID.

        Returns:
            Tuple of (message details, error message or None).
        """
        try:
            result = await self.execute_tool(
                connected_account_id=connected_account_id,
                tool_name="GMAIL_GET_MESSAGE_BY_MESSAGE_ID",
                params={"message_id": message_id},  # snake_case
                entity_id=entity_id,
            )

            if not result.get("success"):
                return None, result.get("error", "Unknown error")

            return result.get("data"), None

        except Exception as e:
            logger.error(f"Failed to get Gmail message detail: {e!s}")
            return None, str(e)

    # ===== Google Calendar specific methods =====

    async def get_calendar_events(
        self,
        connected_account_id: str,
        entity_id: str,
        time_min: str | None = None,
        time_max: str | None = None,
        max_results: int = 250,
    ) -> tuple[list[dict[str, Any]], str | None]:
        """List Google Calendar events via Composio.

        Args:
            connected_account_id: Composio connected account ID.
            entity_id: The entity/user ID that owns the connected account.
            time_min: Start time (RFC3339 format).
            time_max: End time (RFC3339 format).
            max_results: Maximum events to return (API cap: 250).

        Returns:
            Tuple of (events list, error message or None).
        """
        try:
            # Composio uses snake_case parameter names.
            params: dict[str, Any] = {
                "max_results": min(max_results, 250),
                "single_events": True,
                "order_by": "startTime",
            }
            if time_min:
                params["time_min"] = time_min
            if time_max:
                params["time_max"] = time_max

            result = await self.execute_tool(
                connected_account_id=connected_account_id,
                tool_name="GOOGLECALENDAR_EVENTS_LIST",
                params=params,
                entity_id=entity_id,
            )

            if not result.get("success"):
                return [], result.get("error", "Unknown error")

            data = result.get("data", {})

            # Response shape varies; keep payload out of logs (PII).
            events: list[dict[str, Any]] = []
            if isinstance(data, dict):
                events = (
                    data.get("items", [])
                    or data.get("data", {}).get("items", [])
                    or data.get("events", [])
                )
            elif isinstance(data, list):
                events = data

            logger.debug(f"Extracted {len(events)} calendar events")
            return events, None

        except Exception as e:
            logger.error(f"Failed to list Calendar events: {e!s}")
            return [], str(e)
||||
# Lazily-created module-level singleton.
_composio_service: ComposioService | None = None


def get_composio_service() -> ComposioService:
    """Return the shared ComposioService instance, creating it on first use.

    Returns:
        The process-wide ComposioService.

    Raises:
        ValueError: If Composio is not properly configured (no API key).
    """
    global _composio_service
    if _composio_service is None:
        _composio_service = ComposioService()
    return _composio_service
|
||||
|
|
@ -759,3 +759,45 @@ async def _index_bookstack_pages(
|
|||
await run_bookstack_indexing(
|
||||
session, connector_id, search_space_id, user_id, start_date, end_date
|
||||
)
|
||||
|
||||
|
||||
@celery_app.task(name="index_composio_connector", bind=True)
def index_composio_connector_task(
    self,
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str,
    end_date: str,
):
    """Celery task to index Composio connector content (Google Drive, Gmail, Calendar via Composio).

    Args:
        connector_id: ID of the Composio connector to index.
        search_space_id: Target search space.
        user_id: Owning user's ID.
        start_date: Start of the indexing window (YYYY-MM-DD).
        end_date: End of the indexing window (YYYY-MM-DD).
    """
    import asyncio

    # asyncio.run() creates, runs and closes a fresh event loop, and also
    # shuts down async generators/executors — unlike the manual
    # new_event_loop()/close() pattern it replaces.
    asyncio.run(
        _index_composio_connector(
            connector_id, search_space_id, user_id, start_date, end_date
        )
    )
|
||||
|
||||
|
||||
async def _index_composio_connector(
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str,
    end_date: str,
):
    """Run Composio indexing inside a fresh Celery-scoped DB session."""
    # Imported from app.tasks (not connector_indexers) to avoid a
    # circular import with the connector_indexers package.
    from app.tasks.composio_indexer import index_composio_connector

    session_maker = get_celery_session_maker()
    async with session_maker() as session:
        await index_composio_connector(
            session, connector_id, search_space_id, user_id, start_date, end_date
        )
|
||||
|
|
|
|||
|
|
@ -149,6 +149,7 @@ async def stream_new_chat(
|
|||
search_space_id: int,
|
||||
chat_id: int,
|
||||
session: AsyncSession,
|
||||
user_id: str | None = None,
|
||||
llm_config_id: int = -1,
|
||||
attachments: list[ChatAttachment] | None = None,
|
||||
mentioned_document_ids: list[int] | None = None,
|
||||
|
|
@ -166,6 +167,7 @@ async def stream_new_chat(
|
|||
search_space_id: The search space ID
|
||||
chat_id: The chat ID (used as LangGraph thread_id for memory)
|
||||
session: The database session
|
||||
user_id: The current user's UUID string (for memory tools)
|
||||
llm_config_id: The LLM configuration ID (default: -1 for first global config)
|
||||
messages: Optional chat history from frontend (list of ChatMessage)
|
||||
attachments: Optional attachments with extracted content
|
||||
|
|
@ -243,6 +245,7 @@ async def stream_new_chat(
|
|||
db_session=session,
|
||||
connector_service=connector_service,
|
||||
checkpointer=checkpointer,
|
||||
user_id=user_id, # Pass user ID for memory tools
|
||||
agent_config=agent_config, # Pass prompt configuration
|
||||
firecrawl_api_key=firecrawl_api_key, # Pass Firecrawl API key if configured
|
||||
)
|
||||
|
|
|
|||
878
surfsense_backend/app/tasks/composio_indexer.py
Normal file
878
surfsense_backend/app/tasks/composio_indexer.py
Normal file
|
|
@ -0,0 +1,878 @@
|
|||
"""
|
||||
Composio connector indexer.
|
||||
|
||||
Routes indexing requests to toolkit-specific handlers (Google Drive, Gmail, Calendar).
|
||||
|
||||
Note: This module is intentionally placed in app/tasks/ (not in connector_indexers/)
|
||||
to avoid circular import issues with the connector_indexers package.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import UTC, datetime
|
||||
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.future import select
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from app.config import config
|
||||
from app.connectors.composio_connector import ComposioConnector
|
||||
from app.db import (
|
||||
Document,
|
||||
DocumentType,
|
||||
SearchSourceConnector,
|
||||
SearchSourceConnectorType,
|
||||
)
|
||||
from app.services.composio_service import INDEXABLE_TOOLKITS
|
||||
from app.services.llm_service import get_user_long_context_llm
|
||||
from app.services.task_logging_service import TaskLoggingService
|
||||
from app.utils.document_converters import (
|
||||
create_document_chunks,
|
||||
generate_content_hash,
|
||||
generate_document_summary,
|
||||
generate_unique_identifier_hash,
|
||||
)
|
||||
|
||||
# Set up logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# ============ Utility functions (copied from connector_indexers.base to avoid circular imports) ============
|
||||
|
||||
|
||||
def get_current_timestamp() -> datetime:
|
||||
"""Get the current timestamp with timezone for updated_at field."""
|
||||
return datetime.now(UTC)
|
||||
|
||||
|
||||
async def check_document_by_unique_identifier(
    session: AsyncSession, unique_identifier_hash: str
) -> Document | None:
    """Return the existing document with this unique identifier hash, or None.

    Eagerly loads the document's chunks (selectinload) so callers can
    inspect them without triggering lazy loads outside the session.
    """
    stmt = (
        select(Document)
        .options(selectinload(Document.chunks))
        .where(Document.unique_identifier_hash == unique_identifier_hash)
    )
    existing_doc_result = await session.execute(stmt)
    return existing_doc_result.scalars().first()
|
||||
|
||||
|
||||
async def get_connector_by_id(
    session: AsyncSession, connector_id: int, connector_type: SearchSourceConnectorType
) -> SearchSourceConnector | None:
    """Fetch a connector row matching both the given ID and connector type.

    Returns None when no connector of that type exists with that ID.
    """
    stmt = select(SearchSourceConnector).filter(
        SearchSourceConnector.id == connector_id,
        SearchSourceConnector.connector_type == connector_type,
    )
    result = await session.execute(stmt)
    return result.scalars().first()
|
||||
|
||||
|
||||
async def update_connector_last_indexed(
    session: AsyncSession,
    connector: SearchSourceConnector,
    update_last_indexed: bool = True,
) -> None:
    """Stamp the connector's last_indexed_at with the current time.

    Args:
        session: Database session (caller is responsible for committing).
        connector: Connector row to update.
        update_last_indexed: When False, this is a no-op.
    """
    if update_last_indexed:
        # Use an aware UTC timestamp, consistent with get_current_timestamp();
        # the previous naive datetime.now() mixed naive and aware values.
        connector.last_indexed_at = datetime.now(UTC)
        logger.info(f"Updated last_indexed_at to {connector.last_indexed_at}")
|
||||
|
||||
|
||||
# ============ Main indexer function ============
|
||||
|
||||
|
||||
async def index_composio_connector(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str | None = None,
    end_date: str | None = None,
    update_last_indexed: bool = True,
    max_items: int = 1000,
) -> tuple[int, str]:
    """Index content from a Composio connector.

    Looks up the connector, validates its toolkit, and dispatches to the
    toolkit-specific indexer (Google Drive, Gmail or Google Calendar).

    Args:
        session: Database session.
        connector_id: ID of the Composio connector.
        search_space_id: ID of the search space.
        user_id: ID of the user.
        start_date: Start date filter (YYYY-MM-DD); used by Gmail/Calendar.
        end_date: End date filter (YYYY-MM-DD); used by Gmail/Calendar.
        update_last_indexed: Whether to update last_indexed_at on success.
        max_items: Maximum number of items to fetch.

    Returns:
        Tuple of (number_of_indexed_items, error_message or None).
    """
    task_logger = TaskLoggingService(session, search_space_id)

    log_entry = await task_logger.log_task_start(
        task_name="composio_connector_indexing",
        source="connector_indexing_task",
        message=f"Starting Composio connector indexing for connector {connector_id}",
        metadata={
            "connector_id": connector_id,
            "user_id": str(user_id),
            "max_items": max_items,
            "start_date": start_date,
            "end_date": end_date,
        },
    )

    try:
        connector = await get_connector_by_id(
            session, connector_id, SearchSourceConnectorType.COMPOSIO_CONNECTOR
        )
        if not connector:
            error_msg = f"Composio connector with ID {connector_id} not found"
            # NOTE(review): the validation failures below pass the metadata
            # dict as the third positional arg, while the except-blocks pass
            # four args (message, error string, metadata) — confirm
            # TaskLoggingService.log_task_failure's signature accepts both.
            await task_logger.log_task_failure(
                log_entry, error_msg, {"error_type": "ConnectorNotFound"}
            )
            return 0, error_msg

        # The toolkit drives which indexer handles this connector.
        toolkit_id = connector.config.get("toolkit_id")
        if not toolkit_id:
            error_msg = f"Composio connector {connector_id} has no toolkit_id configured"
            await task_logger.log_task_failure(
                log_entry, error_msg, {"error_type": "MissingToolkitId"}
            )
            return 0, error_msg

        if toolkit_id not in INDEXABLE_TOOLKITS:
            error_msg = f"Toolkit '{toolkit_id}' does not support indexing yet"
            await task_logger.log_task_failure(
                log_entry, error_msg, {"error_type": "ToolkitNotIndexable"}
            )
            return 0, error_msg

        # Arguments shared by every toolkit-specific indexer.
        shared_kwargs = dict(
            session=session,
            connector=connector,
            connector_id=connector_id,
            search_space_id=search_space_id,
            user_id=user_id,
            task_logger=task_logger,
            log_entry=log_entry,
            update_last_indexed=update_last_indexed,
            max_items=max_items,
        )

        if toolkit_id == "googledrive":
            # Drive indexing does not use the date window.
            return await _index_composio_google_drive(**shared_kwargs)
        if toolkit_id == "gmail":
            return await _index_composio_gmail(
                start_date=start_date, end_date=end_date, **shared_kwargs
            )
        if toolkit_id == "googlecalendar":
            return await _index_composio_google_calendar(
                start_date=start_date, end_date=end_date, **shared_kwargs
            )

        # Indexable per INDEXABLE_TOOLKITS but no handler wired up yet.
        error_msg = f"No indexer implemented for toolkit: {toolkit_id}"
        await task_logger.log_task_failure(
            log_entry, error_msg, {"error_type": "NoIndexerImplemented"}
        )
        return 0, error_msg

    except SQLAlchemyError as db_error:
        await session.rollback()
        await task_logger.log_task_failure(
            log_entry,
            f"Database error during Composio indexing for connector {connector_id}",
            str(db_error),
            {"error_type": "SQLAlchemyError"},
        )
        logger.error(f"Database error: {db_error!s}", exc_info=True)
        return 0, f"Database error: {db_error!s}"
    except Exception as e:
        await session.rollback()
        await task_logger.log_task_failure(
            log_entry,
            f"Failed to index Composio connector {connector_id}",
            str(e),
            {"error_type": type(e).__name__},
        )
        logger.error(f"Failed to index Composio connector: {e!s}", exc_info=True)
        return 0, f"Failed to index Composio connector: {e!s}"
|
||||
|
||||
|
||||
async def _index_composio_google_drive(
    session: AsyncSession,
    connector,
    connector_id: int,
    search_space_id: int,
    user_id: str,
    task_logger: TaskLoggingService,
    log_entry,
    update_last_indexed: bool = True,
    max_items: int = 1000,
) -> tuple[int, str | None]:
    """Index Google Drive files via Composio.

    Pages through the user's Drive files (up to ``max_items``), downloads each
    file's content where possible, and upserts one Document per file keyed by
    a stable ``drive_<file_id>`` identifier hash.

    Args:
        session: Async SQLAlchemy session used for all reads and writes.
        connector: Connector ORM row; its last-indexed timestamp may be bumped.
        connector_id: ID of the Composio connector being indexed.
        search_space_id: Search space the documents belong to.
        user_id: Owner user ID (used to resolve the long-context LLM).
        task_logger: Service that records task progress/success/failure.
        log_entry: Log row passed through to the task-logger calls.
        update_last_indexed: When True, update the connector's last-indexed time.
        max_items: Upper bound on the number of Drive files fetched.

    Returns:
        ``(documents_indexed, message)`` — ``message`` is None after indexing
        documents, or a status/error string otherwise.
    """
    try:
        composio_connector = ComposioConnector(session, connector_id)

        await task_logger.log_task_progress(
            log_entry,
            f"Fetching Google Drive files via Composio for connector {connector_id}",
            {"stage": "fetching_files"},
        )

        # Fetch files page by page until max_items is reached or pages run out.
        all_files = []
        page_token = None

        while len(all_files) < max_items:
            files, next_token, error = await composio_connector.list_drive_files(
                page_token=page_token,
                page_size=min(100, max_items - len(all_files)),
            )

            if error:
                await task_logger.log_task_failure(
                    log_entry, f"Failed to fetch Drive files: {error}", {}
                )
                return 0, f"Failed to fetch Drive files: {error}"

            all_files.extend(files)

            if not next_token:
                break
            page_token = next_token

        if not all_files:
            success_msg = "No Google Drive files found"
            await task_logger.log_task_success(
                log_entry, success_msg, {"files_count": 0}
            )
            return 0, success_msg

        logger.info(f"Found {len(all_files)} Google Drive files to index via Composio")

        documents_indexed = 0
        documents_skipped = 0

        for file_info in all_files:
            try:
                # Handle both standard Google API and potential Composio variations.
                file_id = file_info.get("id", "") or file_info.get("fileId", "")
                file_name = file_info.get("name", "") or file_info.get("fileName", "") or "Untitled"
                mime_type = file_info.get("mimeType", "") or file_info.get("mime_type", "")

                if not file_id:
                    documents_skipped += 1
                    continue

                # Skip folders — they have no indexable content.
                if mime_type == "application/vnd.google-apps.folder":
                    continue

                # Stable identity for upsert: same file maps to the same hash.
                unique_identifier_hash = generate_unique_identifier_hash(
                    DocumentType.COMPOSIO_CONNECTOR, f"drive_{file_id}", search_space_id
                )

                existing_document = await check_document_by_unique_identifier(
                    session, unique_identifier_hash
                )

                # Get file content (bytes); may fail for restricted/binary files.
                content, content_error = await composio_connector.get_drive_file_content(
                    file_id
                )

                if content_error or not content:
                    logger.warning(f"Could not get content for file {file_name}: {content_error}")
                    # Use metadata as content fallback so the file is still indexed.
                    markdown_content = f"# {file_name}\n\n"
                    markdown_content += f"**File ID:** {file_id}\n"
                    markdown_content += f"**Type:** {mime_type}\n"
                else:
                    try:
                        markdown_content = content.decode("utf-8")
                    except UnicodeDecodeError:
                        # Binary content — keep a placeholder body.
                        markdown_content = f"# {file_name}\n\n[Binary file content]\n"

                content_hash = generate_content_hash(markdown_content, search_space_id)

                if existing_document:
                    if existing_document.content_hash == content_hash:
                        # Unchanged since last run — nothing to do.
                        documents_skipped += 1
                        continue

                    # Update existing document in place.
                    user_llm = await get_user_long_context_llm(
                        session, user_id, search_space_id
                    )

                    if user_llm:
                        document_metadata = {
                            "file_id": file_id,
                            "file_name": file_name,
                            "mime_type": mime_type,
                            "document_type": "Google Drive File (Composio)",
                        }
                        summary_content, summary_embedding = await generate_document_summary(
                            markdown_content, user_llm, document_metadata
                        )
                    else:
                        # No LLM configured: fall back to a plain metadata summary.
                        summary_content = f"Google Drive File: {file_name}\n\nType: {mime_type}"
                        summary_embedding = config.embedding_model_instance.embed(summary_content)

                    chunks = await create_document_chunks(markdown_content)

                    existing_document.title = f"Drive: {file_name}"
                    existing_document.content = summary_content
                    existing_document.content_hash = content_hash
                    existing_document.embedding = summary_embedding
                    existing_document.document_metadata = {
                        "file_id": file_id,
                        "file_name": file_name,
                        "mime_type": mime_type,
                        "connector_id": connector_id,
                        "source": "composio",
                    }
                    existing_document.chunks = chunks
                    existing_document.updated_at = get_current_timestamp()

                    documents_indexed += 1
                    continue

                # Create a new document.
                user_llm = await get_user_long_context_llm(
                    session, user_id, search_space_id
                )

                if user_llm:
                    document_metadata = {
                        "file_id": file_id,
                        "file_name": file_name,
                        "mime_type": mime_type,
                        "document_type": "Google Drive File (Composio)",
                    }
                    summary_content, summary_embedding = await generate_document_summary(
                        markdown_content, user_llm, document_metadata
                    )
                else:
                    summary_content = f"Google Drive File: {file_name}\n\nType: {mime_type}"
                    summary_embedding = config.embedding_model_instance.embed(summary_content)

                chunks = await create_document_chunks(markdown_content)

                document = Document(
                    search_space_id=search_space_id,
                    title=f"Drive: {file_name}",
                    document_type=DocumentType.COMPOSIO_CONNECTOR,
                    document_metadata={
                        "file_id": file_id,
                        "file_name": file_name,
                        "mime_type": mime_type,
                        "connector_id": connector_id,
                        "toolkit_id": "googledrive",
                        "source": "composio",
                    },
                    content=summary_content,
                    content_hash=content_hash,
                    unique_identifier_hash=unique_identifier_hash,
                    embedding=summary_embedding,
                    chunks=chunks,
                    updated_at=get_current_timestamp(),
                )
                session.add(document)
                documents_indexed += 1

                # Periodic commit to bound transaction size on large syncs.
                if documents_indexed % 10 == 0:
                    await session.commit()

            except Exception as e:
                # Per-file failures are logged and skipped; the sync continues.
                logger.error(f"Error processing Drive file: {e!s}", exc_info=True)
                documents_skipped += 1
                continue

        if documents_indexed > 0:
            await update_connector_last_indexed(session, connector, update_last_indexed)

        await session.commit()

        await task_logger.log_task_success(
            log_entry,
            f"Successfully completed Google Drive indexing via Composio for connector {connector_id}",
            {
                "documents_indexed": documents_indexed,
                "documents_skipped": documents_skipped,
            },
        )

        return documents_indexed, None

    except Exception as e:
        logger.error(f"Failed to index Google Drive via Composio: {e!s}", exc_info=True)
        return 0, f"Failed to index Google Drive via Composio: {e!s}"
|
||||
|
||||
|
||||
async def _index_composio_gmail(
    session: AsyncSession,
    connector,
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str | None,
    end_date: str | None,
    task_logger: TaskLoggingService,
    log_entry,
    update_last_indexed: bool = True,
    max_items: int = 1000,
) -> tuple[int, str | None]:
    """Index Gmail messages via Composio.

    Fetches messages in the optional ``[start_date, end_date]`` window
    (``YYYY-MM-DD``, translated to Gmail's ``after:``/``before:`` query
    operators), converts each to markdown, and upserts one Document per
    message keyed by a stable ``gmail_<message_id>`` identifier hash.

    Args:
        session: Async SQLAlchemy session used for all reads and writes.
        connector: Connector ORM row; its last-indexed timestamp may be bumped.
        connector_id: ID of the Composio connector being indexed.
        search_space_id: Search space the documents belong to.
        user_id: Owner user ID (used to resolve the long-context LLM).
        start_date: Optional inclusive start date (``YYYY-MM-DD``).
        end_date: Optional exclusive end date (``YYYY-MM-DD``).
        task_logger: Service that records task progress/success/failure.
        log_entry: Log row passed through to the task-logger calls.
        update_last_indexed: When True, update the connector's last-indexed time.
        max_items: Upper bound on the number of messages fetched.

    Returns:
        ``(documents_indexed, message)`` — ``message`` is None after indexing
        documents, or a status/error string otherwise.
    """
    try:
        composio_connector = ComposioConnector(session, connector_id)

        await task_logger.log_task_progress(
            log_entry,
            f"Fetching Gmail messages via Composio for connector {connector_id}",
            {"stage": "fetching_messages"},
        )

        # Build search query with date range; Gmail expects YYYY/MM/DD.
        query_parts = []
        if start_date:
            query_parts.append(f"after:{start_date.replace('-', '/')}")
        if end_date:
            query_parts.append(f"before:{end_date.replace('-', '/')}")
        query = " ".join(query_parts)

        messages, error = await composio_connector.list_gmail_messages(
            query=query,
            max_results=max_items,
        )

        if error:
            await task_logger.log_task_failure(
                log_entry, f"Failed to fetch Gmail messages: {error}", {}
            )
            return 0, f"Failed to fetch Gmail messages: {error}"

        if not messages:
            success_msg = "No Gmail messages found in the specified date range"
            await task_logger.log_task_success(
                log_entry, success_msg, {"messages_count": 0}
            )
            return 0, success_msg

        logger.info(f"Found {len(messages)} Gmail messages to index via Composio")

        documents_indexed = 0
        documents_skipped = 0

        for message in messages:
            try:
                # Composio uses 'messageId' (camelCase), not 'id'.
                message_id = message.get("messageId", "") or message.get("id", "")
                if not message_id:
                    documents_skipped += 1
                    continue

                # Composio's GMAIL_FETCH_EMAILS already returns full message
                # content, so no separate detail API call is needed. Structure:
                # messageId, messageText, messageTimestamp, payload.headers, labelIds.
                payload = message.get("payload", {})
                headers = payload.get("headers", [])

                subject = "No Subject"
                sender = "Unknown Sender"
                date_str = message.get("messageTimestamp", "Unknown Date")

                # Pull subject/sender/date out of the RFC 822 headers.
                for header in headers:
                    name = header.get("name", "").lower()
                    value = header.get("value", "")
                    if name == "subject":
                        subject = value
                    elif name == "from":
                        sender = value
                    elif name == "date":
                        date_str = value

                # Format to markdown using the full message data.
                markdown_content = composio_connector.format_gmail_message_to_markdown(message)

                # Stable identity for upsert.
                unique_identifier_hash = generate_unique_identifier_hash(
                    DocumentType.COMPOSIO_CONNECTOR, f"gmail_{message_id}", search_space_id
                )

                content_hash = generate_content_hash(markdown_content, search_space_id)

                existing_document = await check_document_by_unique_identifier(
                    session, unique_identifier_hash
                )

                # Label IDs from the Composio response (e.g. INBOX, UNREAD).
                label_ids = message.get("labelIds", [])

                if existing_document:
                    if existing_document.content_hash == content_hash:
                        # Unchanged since last run — nothing to do.
                        documents_skipped += 1
                        continue

                    # Update existing document in place.
                    user_llm = await get_user_long_context_llm(
                        session, user_id, search_space_id
                    )

                    if user_llm:
                        document_metadata = {
                            "message_id": message_id,
                            "subject": subject,
                            "sender": sender,
                            "document_type": "Gmail Message (Composio)",
                        }
                        summary_content, summary_embedding = await generate_document_summary(
                            markdown_content, user_llm, document_metadata
                        )
                    else:
                        # No LLM configured: fall back to a plain metadata summary.
                        summary_content = f"Gmail: {subject}\n\nFrom: {sender}\nDate: {date_str}"
                        summary_embedding = config.embedding_model_instance.embed(summary_content)

                    chunks = await create_document_chunks(markdown_content)

                    existing_document.title = f"Gmail: {subject}"
                    existing_document.content = summary_content
                    existing_document.content_hash = content_hash
                    existing_document.embedding = summary_embedding
                    existing_document.document_metadata = {
                        "message_id": message_id,
                        "subject": subject,
                        "sender": sender,
                        "date": date_str,
                        "labels": label_ids,
                        "connector_id": connector_id,
                        "source": "composio",
                    }
                    existing_document.chunks = chunks
                    existing_document.updated_at = get_current_timestamp()

                    documents_indexed += 1
                    continue

                # Create a new document.
                user_llm = await get_user_long_context_llm(
                    session, user_id, search_space_id
                )

                if user_llm:
                    document_metadata = {
                        "message_id": message_id,
                        "subject": subject,
                        "sender": sender,
                        "document_type": "Gmail Message (Composio)",
                    }
                    summary_content, summary_embedding = await generate_document_summary(
                        markdown_content, user_llm, document_metadata
                    )
                else:
                    summary_content = f"Gmail: {subject}\n\nFrom: {sender}\nDate: {date_str}"
                    summary_embedding = config.embedding_model_instance.embed(summary_content)

                chunks = await create_document_chunks(markdown_content)

                document = Document(
                    search_space_id=search_space_id,
                    title=f"Gmail: {subject}",
                    document_type=DocumentType.COMPOSIO_CONNECTOR,
                    document_metadata={
                        "message_id": message_id,
                        "subject": subject,
                        "sender": sender,
                        "date": date_str,
                        "labels": label_ids,
                        "connector_id": connector_id,
                        "toolkit_id": "gmail",
                        "source": "composio",
                    },
                    content=summary_content,
                    content_hash=content_hash,
                    unique_identifier_hash=unique_identifier_hash,
                    embedding=summary_embedding,
                    chunks=chunks,
                    updated_at=get_current_timestamp(),
                )
                session.add(document)
                documents_indexed += 1

                # Periodic commit to bound transaction size on large syncs.
                if documents_indexed % 10 == 0:
                    await session.commit()

            except Exception as e:
                # Per-message failures are logged and skipped; the sync continues.
                logger.error(f"Error processing Gmail message: {e!s}", exc_info=True)
                documents_skipped += 1
                continue

        if documents_indexed > 0:
            await update_connector_last_indexed(session, connector, update_last_indexed)

        await session.commit()

        await task_logger.log_task_success(
            log_entry,
            f"Successfully completed Gmail indexing via Composio for connector {connector_id}",
            {
                "documents_indexed": documents_indexed,
                "documents_skipped": documents_skipped,
            },
        )

        return documents_indexed, None

    except Exception as e:
        logger.error(f"Failed to index Gmail via Composio: {e!s}", exc_info=True)
        return 0, f"Failed to index Gmail via Composio: {e!s}"
|
||||
|
||||
|
||||
async def _index_composio_google_calendar(
    session: AsyncSession,
    connector,
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str | None,
    end_date: str | None,
    task_logger: TaskLoggingService,
    log_entry,
    update_last_indexed: bool = True,
    max_items: int = 2500,
) -> tuple[int, str | None]:
    """Index Google Calendar events via Composio.

    Fetches events in the ``[start_date, end_date]`` window (defaults: the
    past 365 days through today), converts each to markdown, and upserts one
    Document per event keyed by a stable ``calendar_<event_id>`` hash.

    Args:
        session: Async SQLAlchemy session used for all reads and writes.
        connector: Connector ORM row; its last-indexed timestamp may be bumped.
        connector_id: ID of the Composio connector being indexed.
        search_space_id: Search space the documents belong to.
        user_id: Owner user ID (used to resolve the long-context LLM).
        start_date: Optional window start (``YYYY-MM-DD``); defaults to 365 days ago.
        end_date: Optional window end (``YYYY-MM-DD``); defaults to today.
        task_logger: Service that records task progress/success/failure.
        log_entry: Log row passed through to the task-logger calls.
        update_last_indexed: When True, update the connector's last-indexed time.
        max_items: Upper bound on the number of events fetched.

    Returns:
        ``(documents_indexed, message)`` — ``message`` is None after indexing
        documents, or a status/error string otherwise.
    """
    from datetime import datetime, timedelta

    try:
        composio_connector = ComposioConnector(session, connector_id)

        await task_logger.log_task_progress(
            log_entry,
            f"Fetching Google Calendar events via Composio for connector {connector_id}",
            {"stage": "fetching_events"},
        )

        # Build the RFC 3339 time range expected by the Calendar API.
        if start_date:
            time_min = f"{start_date}T00:00:00Z"
        else:
            # Default to 365 days ago.
            default_start = datetime.now() - timedelta(days=365)
            time_min = default_start.strftime("%Y-%m-%dT00:00:00Z")

        if end_date:
            time_max = f"{end_date}T23:59:59Z"
        else:
            time_max = datetime.now().strftime("%Y-%m-%dT23:59:59Z")

        events, error = await composio_connector.list_calendar_events(
            time_min=time_min,
            time_max=time_max,
            max_results=max_items,
        )

        if error:
            await task_logger.log_task_failure(
                log_entry, f"Failed to fetch Calendar events: {error}", {}
            )
            return 0, f"Failed to fetch Calendar events: {error}"

        if not events:
            success_msg = "No Google Calendar events found in the specified date range"
            await task_logger.log_task_success(
                log_entry, success_msg, {"events_count": 0}
            )
            return 0, success_msg

        logger.info(f"Found {len(events)} Google Calendar events to index via Composio")

        documents_indexed = 0
        documents_skipped = 0

        for event in events:
            try:
                # Handle both standard Google API and potential Composio variations.
                event_id = event.get("id", "") or event.get("eventId", "")
                summary = event.get("summary", "") or event.get("title", "") or "No Title"

                if not event_id:
                    documents_skipped += 1
                    continue

                # Format the event to markdown.
                markdown_content = composio_connector.format_calendar_event_to_markdown(event)

                # Stable identity for upsert.
                unique_identifier_hash = generate_unique_identifier_hash(
                    DocumentType.COMPOSIO_CONNECTOR, f"calendar_{event_id}", search_space_id
                )

                content_hash = generate_content_hash(markdown_content, search_space_id)

                existing_document = await check_document_by_unique_identifier(
                    session, unique_identifier_hash
                )

                # All-day events carry 'date'; timed events carry 'dateTime'.
                start = event.get("start", {})
                end = event.get("end", {})
                start_time = start.get("dateTime") or start.get("date", "")
                end_time = end.get("dateTime") or end.get("date", "")
                location = event.get("location", "")

                if existing_document:
                    if existing_document.content_hash == content_hash:
                        # Unchanged since last run — nothing to do.
                        documents_skipped += 1
                        continue

                    # Update existing document in place.
                    user_llm = await get_user_long_context_llm(
                        session, user_id, search_space_id
                    )

                    if user_llm:
                        document_metadata = {
                            "event_id": event_id,
                            "summary": summary,
                            "start_time": start_time,
                            "document_type": "Google Calendar Event (Composio)",
                        }
                        summary_content, summary_embedding = await generate_document_summary(
                            markdown_content, user_llm, document_metadata
                        )
                    else:
                        # No LLM configured: fall back to a plain metadata summary.
                        summary_content = f"Calendar: {summary}\n\nStart: {start_time}\nEnd: {end_time}"
                        if location:
                            summary_content += f"\nLocation: {location}"
                        summary_embedding = config.embedding_model_instance.embed(summary_content)

                    chunks = await create_document_chunks(markdown_content)

                    existing_document.title = f"Calendar: {summary}"
                    existing_document.content = summary_content
                    existing_document.content_hash = content_hash
                    existing_document.embedding = summary_embedding
                    existing_document.document_metadata = {
                        "event_id": event_id,
                        "summary": summary,
                        "start_time": start_time,
                        "end_time": end_time,
                        "location": location,
                        "connector_id": connector_id,
                        "source": "composio",
                    }
                    existing_document.chunks = chunks
                    existing_document.updated_at = get_current_timestamp()

                    documents_indexed += 1
                    continue

                # Create a new document.
                user_llm = await get_user_long_context_llm(
                    session, user_id, search_space_id
                )

                if user_llm:
                    document_metadata = {
                        "event_id": event_id,
                        "summary": summary,
                        "start_time": start_time,
                        "document_type": "Google Calendar Event (Composio)",
                    }
                    summary_content, summary_embedding = await generate_document_summary(
                        markdown_content, user_llm, document_metadata
                    )
                else:
                    summary_content = f"Calendar: {summary}\n\nStart: {start_time}\nEnd: {end_time}"
                    if location:
                        summary_content += f"\nLocation: {location}"
                    summary_embedding = config.embedding_model_instance.embed(summary_content)

                chunks = await create_document_chunks(markdown_content)

                document = Document(
                    search_space_id=search_space_id,
                    title=f"Calendar: {summary}",
                    document_type=DocumentType.COMPOSIO_CONNECTOR,
                    document_metadata={
                        "event_id": event_id,
                        "summary": summary,
                        "start_time": start_time,
                        "end_time": end_time,
                        "location": location,
                        "connector_id": connector_id,
                        "toolkit_id": "googlecalendar",
                        "source": "composio",
                    },
                    content=summary_content,
                    content_hash=content_hash,
                    unique_identifier_hash=unique_identifier_hash,
                    embedding=summary_embedding,
                    chunks=chunks,
                    updated_at=get_current_timestamp(),
                )
                session.add(document)
                documents_indexed += 1

                # Periodic commit to bound transaction size on large syncs.
                if documents_indexed % 10 == 0:
                    await session.commit()

            except Exception as e:
                # Per-event failures are logged and skipped; the sync continues.
                logger.error(f"Error processing Calendar event: {e!s}", exc_info=True)
                documents_skipped += 1
                continue

        if documents_indexed > 0:
            await update_connector_last_indexed(session, connector, update_last_indexed)

        await session.commit()

        await task_logger.log_task_success(
            log_entry,
            f"Successfully completed Google Calendar indexing via Composio for connector {connector_id}",
            {
                "documents_indexed": documents_indexed,
                "documents_skipped": documents_skipped,
            },
        )

        return documents_indexed, None

    except Exception as e:
        logger.error(f"Failed to index Google Calendar via Composio: {e!s}", exc_info=True)
        return 0, f"Failed to index Google Calendar via Composio: {e!s}"
|
||||
|
|
@ -26,6 +26,7 @@ Available indexers:
|
|||
# Calendar and scheduling
|
||||
from .airtable_indexer import index_airtable_records
|
||||
from .bookstack_indexer import index_bookstack_pages
|
||||
# Note: composio_indexer is imported directly in connector_tasks.py to avoid circular imports
|
||||
from .clickup_indexer import index_clickup_tasks
|
||||
from .confluence_indexer import index_confluence_pages
|
||||
from .discord_indexer import index_discord_messages
|
||||
|
|
@ -50,6 +51,7 @@ from .webcrawler_indexer import index_crawled_urls
|
|||
__all__ = [ # noqa: RUF022
|
||||
"index_airtable_records",
|
||||
"index_bookstack_pages",
|
||||
# "index_composio_connector", # Imported directly in connector_tasks.py to avoid circular imports
|
||||
"index_clickup_tasks",
|
||||
"index_confluence_pages",
|
||||
"index_discord_messages",
|
||||
|
|
|
|||
|
|
@ -61,6 +61,7 @@ dependencies = [
|
|||
"starlette>=0.40.0,<0.51.0",
|
||||
"sse-starlette>=3.1.1,<3.1.2",
|
||||
"gitingest>=0.3.1",
|
||||
"composio>=0.10.9",
|
||||
]
|
||||
|
||||
[dependency-groups]
|
||||
|
|
|
|||
6080
surfsense_backend/uv.lock
generated
6080
surfsense_backend/uv.lock
generated
File diff suppressed because it is too large
Load diff
|
|
@ -32,6 +32,7 @@ import { DisplayImageToolUI } from "@/components/tool-ui/display-image";
|
|||
import { GeneratePodcastToolUI } from "@/components/tool-ui/generate-podcast";
|
||||
import { LinkPreviewToolUI } from "@/components/tool-ui/link-preview";
|
||||
import { ScrapeWebpageToolUI } from "@/components/tool-ui/scrape-webpage";
|
||||
import { SaveMemoryToolUI, RecallMemoryToolUI } from "@/components/tool-ui/user-memory";
|
||||
// import { WriteTodosToolUI } from "@/components/tool-ui/write-todos";
|
||||
import { getBearerToken } from "@/lib/auth-utils";
|
||||
import { createAttachmentAdapter, extractAttachmentContent } from "@/lib/chat/attachment-adapter";
|
||||
|
|
@ -1056,6 +1057,8 @@ export default function NewChatPage() {
|
|||
<LinkPreviewToolUI />
|
||||
<DisplayImageToolUI />
|
||||
<ScrapeWebpageToolUI />
|
||||
<SaveMemoryToolUI />
|
||||
<RecallMemoryToolUI />
|
||||
{/* <WriteTodosToolUI /> Disabled for now */}
|
||||
<div className="flex flex-col h-[calc(100vh-64px)] overflow-hidden">
|
||||
<Thread
|
||||
|
|
|
|||
|
|
@ -21,6 +21,7 @@ import { useConnectorDialog } from "./connector-popup/hooks/use-connector-dialog
|
|||
import { useIndexingConnectors } from "./connector-popup/hooks/use-indexing-connectors";
|
||||
import { ActiveConnectorsTab } from "./connector-popup/tabs/active-connectors-tab";
|
||||
import { AllConnectorsTab } from "./connector-popup/tabs/all-connectors-tab";
|
||||
import { ComposioToolkitView } from "./connector-popup/views/composio-toolkit-view";
|
||||
import { ConnectorAccountsListView } from "./connector-popup/views/connector-accounts-list-view";
|
||||
import { YouTubeCrawlerView } from "./connector-popup/views/youtube-crawler-view";
|
||||
|
||||
|
|
@ -87,6 +88,12 @@ export const ConnectorIndicator: FC = () => {
|
|||
setConnectorConfig,
|
||||
setIndexingConnectorConfig,
|
||||
setConnectorName,
|
||||
// Composio
|
||||
viewingComposio,
|
||||
connectingComposioToolkit,
|
||||
handleOpenComposio,
|
||||
handleBackFromComposio,
|
||||
handleConnectComposioToolkit,
|
||||
} = useConnectorDialog();
|
||||
|
||||
// Fetch connectors using Electric SQL + PGlite for real-time updates
|
||||
|
|
@ -176,6 +183,20 @@ export const ConnectorIndicator: FC = () => {
|
|||
{/* YouTube Crawler View - shown when adding YouTube videos */}
|
||||
{isYouTubeView && searchSpaceId ? (
|
||||
<YouTubeCrawlerView searchSpaceId={searchSpaceId} onBack={handleBackFromYouTube} />
|
||||
) : viewingComposio && searchSpaceId ? (
|
||||
<ComposioToolkitView
|
||||
searchSpaceId={searchSpaceId}
|
||||
connectedToolkits={
|
||||
(connectors || [])
|
||||
.filter((c: SearchSourceConnector) => c.connector_type === "COMPOSIO_CONNECTOR")
|
||||
.map((c: SearchSourceConnector) => c.config?.toolkit_id as string)
|
||||
.filter(Boolean)
|
||||
}
|
||||
onBack={handleBackFromComposio}
|
||||
onConnectToolkit={handleConnectComposioToolkit}
|
||||
isConnecting={connectingComposioToolkit !== null}
|
||||
connectingToolkitId={connectingComposioToolkit}
|
||||
/>
|
||||
) : viewingMCPList ? (
|
||||
<ConnectorAccountsListView
|
||||
connectorType="MCP_CONNECTOR"
|
||||
|
|
@ -312,6 +333,7 @@ export const ConnectorIndicator: FC = () => {
|
|||
onCreateYouTubeCrawler={handleCreateYouTubeCrawler}
|
||||
onManage={handleStartEdit}
|
||||
onViewAccountsList={handleViewAccountsList}
|
||||
onOpenComposio={handleOpenComposio}
|
||||
/>
|
||||
</TabsContent>
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,78 @@
|
|||
"use client";
|
||||
|
||||
import { Zap } from "lucide-react";
|
||||
import Image from "next/image";
|
||||
import type { FC } from "react";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
interface ComposioConnectorCardProps {
|
||||
id: string;
|
||||
title: string;
|
||||
description: string;
|
||||
connectorCount?: number;
|
||||
onConnect: () => void;
|
||||
}
|
||||
|
||||
export const ComposioConnectorCard: FC<ComposioConnectorCardProps> = ({
|
||||
id,
|
||||
title,
|
||||
description,
|
||||
connectorCount = 0,
|
||||
onConnect,
|
||||
}) => {
|
||||
const hasConnections = connectorCount > 0;
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
"group relative flex items-center gap-4 p-4 rounded-xl text-left transition-all duration-200 w-full border",
|
||||
"border-violet-500/20 bg-gradient-to-br from-violet-500/5 to-purple-500/5",
|
||||
"hover:border-violet-500/40 hover:from-violet-500/10 hover:to-purple-500/10"
|
||||
)}
|
||||
>
|
||||
<div
|
||||
className={cn(
|
||||
"flex h-12 w-12 items-center justify-center rounded-lg transition-colors shrink-0 border",
|
||||
"bg-gradient-to-br from-violet-500/10 to-purple-500/10 border-violet-500/20"
|
||||
)}
|
||||
>
|
||||
<Image
|
||||
src="/connectors/composio.svg"
|
||||
alt="Composio"
|
||||
width={24}
|
||||
height={24}
|
||||
className="size-6"
|
||||
/>
|
||||
</div>
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="flex items-center gap-1.5">
|
||||
<span className="text-[14px] font-semibold leading-tight truncate">{title}</span>
|
||||
<Zap className="size-3.5 text-violet-500" />
|
||||
</div>
|
||||
{hasConnections ? (
|
||||
<p className="text-[10px] text-muted-foreground mt-1 flex items-center gap-1.5">
|
||||
<span>
|
||||
{connectorCount} {connectorCount === 1 ? "connection" : "connections"}
|
||||
</span>
|
||||
</p>
|
||||
) : (
|
||||
<p className="text-[10px] text-muted-foreground mt-1">{description}</p>
|
||||
)}
|
||||
</div>
|
||||
<Button
|
||||
size="sm"
|
||||
variant={hasConnections ? "secondary" : "default"}
|
||||
className={cn(
|
||||
"h-8 text-[11px] px-3 rounded-lg shrink-0 font-medium shadow-xs",
|
||||
!hasConnections && "bg-violet-600 hover:bg-violet-700 text-white",
|
||||
hasConnections &&
|
||||
"bg-white text-slate-700 hover:bg-slate-50 border-0 dark:bg-secondary dark:text-secondary-foreground dark:hover:bg-secondary/80"
|
||||
)}
|
||||
onClick={onConnect}
|
||||
>
|
||||
{hasConnections ? "Manage" : "Browse"}
|
||||
</Button>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
|
@ -0,0 +1,160 @@
|
|||
"use client";
|
||||
|
||||
import { ExternalLink, Info, Zap } from "lucide-react";
|
||||
import Image from "next/image";
|
||||
import type { FC } from "react";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import type { SearchSourceConnector } from "@/contracts/types/connector.types";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
interface ComposioConfigProps {
|
||||
connector: SearchSourceConnector;
|
||||
onConfigChange?: (config: Record<string, unknown>) => void;
|
||||
onNameChange?: (name: string) => void;
|
||||
}
|
||||
|
||||
// Get toolkit display info
|
||||
const getToolkitInfo = (toolkitId: string): { name: string; icon: string; description: string } => {
|
||||
switch (toolkitId) {
|
||||
case "googledrive":
|
||||
return {
|
||||
name: "Google Drive",
|
||||
icon: "/connectors/google-drive.svg",
|
||||
description: "Files and documents from Google Drive",
|
||||
};
|
||||
case "gmail":
|
||||
return {
|
||||
name: "Gmail",
|
||||
icon: "/connectors/google-gmail.svg",
|
||||
description: "Emails from Gmail",
|
||||
};
|
||||
case "googlecalendar":
|
||||
return {
|
||||
name: "Google Calendar",
|
||||
icon: "/connectors/google-calendar.svg",
|
||||
description: "Events from Google Calendar",
|
||||
};
|
||||
case "slack":
|
||||
return {
|
||||
name: "Slack",
|
||||
icon: "/connectors/slack.svg",
|
||||
description: "Messages from Slack",
|
||||
};
|
||||
case "notion":
|
||||
return {
|
||||
name: "Notion",
|
||||
icon: "/connectors/notion.svg",
|
||||
description: "Pages from Notion",
|
||||
};
|
||||
case "github":
|
||||
return {
|
||||
name: "GitHub",
|
||||
icon: "/connectors/github.svg",
|
||||
description: "Repositories from GitHub",
|
||||
};
|
||||
default:
|
||||
return {
|
||||
name: toolkitId,
|
||||
icon: "/connectors/composio.svg",
|
||||
description: "Connected via Composio",
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
export const ComposioConfig: FC<ComposioConfigProps> = ({ connector }) => {
|
||||
const toolkitId = connector.config?.toolkit_id as string;
|
||||
const toolkitName = connector.config?.toolkit_name as string;
|
||||
const isIndexable = connector.config?.is_indexable as boolean;
|
||||
const composioAccountId = connector.config?.composio_connected_account_id as string;
|
||||
|
||||
const toolkitInfo = getToolkitInfo(toolkitId);
|
||||
|
||||
return (
|
||||
<div className="space-y-6">
|
||||
{/* Toolkit Info Card */}
|
||||
<div className="rounded-xl border border-violet-500/20 bg-gradient-to-br from-violet-500/5 to-purple-500/5 p-4">
|
||||
<div className="flex items-start gap-4">
|
||||
<div className="flex h-12 w-12 items-center justify-center rounded-lg bg-gradient-to-br from-violet-500/10 to-purple-500/10 border border-violet-500/20 shrink-0">
|
||||
<Image
|
||||
src={toolkitInfo.icon}
|
||||
alt={toolkitInfo.name}
|
||||
width={24}
|
||||
height={24}
|
||||
className="size-6"
|
||||
/>
|
||||
</div>
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="flex items-center gap-2 mb-1">
|
||||
<h3 className="text-sm font-semibold">{toolkitName || toolkitInfo.name}</h3>
|
||||
<Badge
|
||||
variant="secondary"
|
||||
className="text-[10px] px-1.5 py-0 h-5 bg-violet-500/10 text-violet-600 dark:text-violet-400 border-violet-500/20"
|
||||
>
|
||||
<Zap className="size-3 mr-0.5" />
|
||||
Composio
|
||||
</Badge>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground">{toolkitInfo.description}</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Connection Details */}
|
||||
<div className="space-y-3">
|
||||
<h4 className="text-xs font-medium text-muted-foreground uppercase tracking-wider">
|
||||
Connection Details
|
||||
</h4>
|
||||
<div className="space-y-2">
|
||||
<div className="flex items-center justify-between py-2 px-3 rounded-lg bg-muted/50">
|
||||
<span className="text-xs text-muted-foreground">Toolkit</span>
|
||||
<span className="text-xs font-medium">{toolkitId}</span>
|
||||
</div>
|
||||
<div className="flex items-center justify-between py-2 px-3 rounded-lg bg-muted/50">
|
||||
<span className="text-xs text-muted-foreground">Indexing Supported</span>
|
||||
<Badge
|
||||
variant={isIndexable ? "default" : "secondary"}
|
||||
className={cn(
|
||||
"text-[10px] px-1.5 py-0 h-5",
|
||||
isIndexable
|
||||
? "bg-emerald-500/10 text-emerald-600 dark:text-emerald-400 border-emerald-500/20"
|
||||
: "bg-amber-500/10 text-amber-600 dark:text-amber-400 border-amber-500/20"
|
||||
)}
|
||||
>
|
||||
{isIndexable ? "Yes" : "Coming Soon"}
|
||||
</Badge>
|
||||
</div>
|
||||
{composioAccountId && (
|
||||
<div className="flex items-center justify-between py-2 px-3 rounded-lg bg-muted/50">
|
||||
<span className="text-xs text-muted-foreground">Account ID</span>
|
||||
<span className="text-xs font-mono text-muted-foreground truncate max-w-[150px]">
|
||||
{composioAccountId}
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Info Banner */}
|
||||
<div className="rounded-lg border border-border/50 bg-muted/30 p-3">
|
||||
<div className="flex items-start gap-2.5">
|
||||
<Info className="size-4 text-muted-foreground shrink-0 mt-0.5" />
|
||||
<div className="space-y-1">
|
||||
<p className="text-xs text-muted-foreground leading-relaxed">
|
||||
This connection uses Composio's managed OAuth, which means you don't need to
|
||||
wait for app verification. Your data is securely accessed through Composio.
|
||||
</p>
|
||||
<a
|
||||
href="https://composio.dev"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="inline-flex items-center gap-1 text-xs text-violet-600 dark:text-violet-400 hover:underline"
|
||||
>
|
||||
Learn more about Composio
|
||||
<ExternalLink className="size-3" />
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
|
@ -5,6 +5,7 @@ import type { SearchSourceConnector } from "@/contracts/types/connector.types";
|
|||
import { BaiduSearchApiConfig } from "./components/baidu-search-api-config";
|
||||
import { BookStackConfig } from "./components/bookstack-config";
|
||||
import { CirclebackConfig } from "./components/circleback-config";
|
||||
import { ComposioConfig } from "./components/composio-config";
|
||||
import { ClickUpConfig } from "./components/clickup-config";
|
||||
import { ConfluenceConfig } from "./components/confluence-config";
|
||||
import { DiscordConfig } from "./components/discord-config";
|
||||
|
|
@ -73,6 +74,8 @@ export function getConnectorConfigComponent(
|
|||
return CirclebackConfig;
|
||||
case "MCP_CONNECTOR":
|
||||
return MCPConfig;
|
||||
case "COMPOSIO_CONNECTOR":
|
||||
return ComposioConfig;
|
||||
// OAuth connectors (Gmail, Calendar, Airtable, Notion) and others don't need special config UI
|
||||
default:
|
||||
return null;
|
||||
|
|
|
|||
|
|
@ -168,5 +168,56 @@ export const OTHER_CONNECTORS = [
|
|||
},
|
||||
] as const;
|
||||
|
||||
// Composio Connector (Single entry that opens toolkit selector)
|
||||
export const COMPOSIO_CONNECTORS = [
|
||||
{
|
||||
id: "composio-connector",
|
||||
title: "Composio",
|
||||
description: "Connect 100+ apps via Composio (Google, Slack, Notion, etc.)",
|
||||
connectorType: EnumConnectorName.COMPOSIO_CONNECTOR,
|
||||
// No authEndpoint - handled via toolkit selector view
|
||||
},
|
||||
] as const;
|
||||
|
||||
// Composio Toolkits (available integrations via Composio)
|
||||
export const COMPOSIO_TOOLKITS = [
|
||||
{
|
||||
id: "googledrive",
|
||||
name: "Google Drive",
|
||||
description: "Search your Drive files",
|
||||
isIndexable: true,
|
||||
},
|
||||
{
|
||||
id: "gmail",
|
||||
name: "Gmail",
|
||||
description: "Search through your emails",
|
||||
isIndexable: true,
|
||||
},
|
||||
{
|
||||
id: "googlecalendar",
|
||||
name: "Google Calendar",
|
||||
description: "Search through your events",
|
||||
isIndexable: true,
|
||||
},
|
||||
{
|
||||
id: "slack",
|
||||
name: "Slack",
|
||||
description: "Search Slack messages",
|
||||
isIndexable: false,
|
||||
},
|
||||
{
|
||||
id: "notion",
|
||||
name: "Notion",
|
||||
description: "Search Notion pages",
|
||||
isIndexable: false,
|
||||
},
|
||||
{
|
||||
id: "github",
|
||||
name: "GitHub",
|
||||
description: "Search repositories",
|
||||
isIndexable: false,
|
||||
},
|
||||
] as const;
|
||||
|
||||
// Re-export IndexingConfigState from schemas for backward compatibility
|
||||
export type { IndexingConfigState } from "./connector-popup.schemas";
|
||||
|
|
|
|||
|
|
@ -83,6 +83,10 @@ export const useConnectorDialog = () => {
|
|||
// MCP list view state (for managing multiple MCP connectors)
|
||||
const [viewingMCPList, setViewingMCPList] = useState(false);
|
||||
|
||||
// Composio toolkit view state
|
||||
const [viewingComposio, setViewingComposio] = useState(false);
|
||||
const [connectingComposioToolkit, setConnectingComposioToolkit] = useState<string | null>(null);
|
||||
|
||||
// Track if we came from accounts list when entering edit mode
|
||||
const [cameFromAccountsList, setCameFromAccountsList] = useState<{
|
||||
connectorType: string;
|
||||
|
|
@ -155,6 +159,17 @@ export const useConnectorDialog = () => {
|
|||
setViewingMCPList(true);
|
||||
}
|
||||
|
||||
// Clear Composio view if view is not "composio" anymore
|
||||
if (params.view !== "composio" && viewingComposio) {
|
||||
setViewingComposio(false);
|
||||
setConnectingComposioToolkit(null);
|
||||
}
|
||||
|
||||
// Handle Composio view
|
||||
if (params.view === "composio" && !viewingComposio) {
|
||||
setViewingComposio(true);
|
||||
}
|
||||
|
||||
// Handle connect view
|
||||
if (params.view === "connect" && params.connectorType && !connectingConnectorType) {
|
||||
setConnectingConnectorType(params.connectorType);
|
||||
|
|
@ -846,6 +861,63 @@ export const useConnectorDialog = () => {
|
|||
router.replace(url.pathname + url.search, { scroll: false });
|
||||
}, [router]);
|
||||
|
||||
// Handle opening Composio toolkit view
|
||||
const handleOpenComposio = useCallback(() => {
|
||||
if (!searchSpaceId) return;
|
||||
|
||||
setViewingComposio(true);
|
||||
|
||||
// Update URL to show Composio view
|
||||
const url = new URL(window.location.href);
|
||||
url.searchParams.set("modal", "connectors");
|
||||
url.searchParams.set("view", "composio");
|
||||
window.history.pushState({ modal: true }, "", url.toString());
|
||||
}, [searchSpaceId]);
|
||||
|
||||
// Handle going back from Composio view
|
||||
const handleBackFromComposio = useCallback(() => {
|
||||
setViewingComposio(false);
|
||||
setConnectingComposioToolkit(null);
|
||||
const url = new URL(window.location.href);
|
||||
url.searchParams.set("modal", "connectors");
|
||||
url.searchParams.delete("view");
|
||||
router.replace(url.pathname + url.search, { scroll: false });
|
||||
}, [router]);
|
||||
|
||||
// Handle connecting a Composio toolkit
|
||||
const handleConnectComposioToolkit = useCallback(
|
||||
async (toolkitId: string) => {
|
||||
if (!searchSpaceId) return;
|
||||
|
||||
setConnectingComposioToolkit(toolkitId);
|
||||
|
||||
try {
|
||||
const response = await authenticatedFetch(
|
||||
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/auth/composio/connector/add?space_id=${searchSpaceId}&toolkit_id=${toolkitId}`,
|
||||
{ method: "GET" }
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to initiate Composio OAuth for ${toolkitId}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
if (data.auth_url) {
|
||||
// Redirect to Composio OAuth
|
||||
window.location.href = data.auth_url;
|
||||
} else {
|
||||
throw new Error("No authorization URL received from Composio");
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Error connecting Composio toolkit:", error);
|
||||
toast.error(`Failed to connect ${toolkitId}. Please try again.`);
|
||||
setConnectingComposioToolkit(null);
|
||||
}
|
||||
},
|
||||
[searchSpaceId]
|
||||
);
|
||||
|
||||
// Handle starting indexing
|
||||
const handleStartIndexing = useCallback(
|
||||
async (refreshConnectors: () => void) => {
|
||||
|
|
@ -1506,6 +1578,7 @@ export const useConnectorDialog = () => {
|
|||
allConnectors,
|
||||
viewingAccountsType,
|
||||
viewingMCPList,
|
||||
viewingComposio,
|
||||
|
||||
// Setters
|
||||
setSearchQuery,
|
||||
|
|
@ -1541,5 +1614,12 @@ export const useConnectorDialog = () => {
|
|||
connectorConfig,
|
||||
setConnectorConfig,
|
||||
setIndexingConnectorConfig,
|
||||
|
||||
// Composio
|
||||
viewingComposio,
|
||||
connectingComposioToolkit,
|
||||
handleOpenComposio,
|
||||
handleBackFromComposio,
|
||||
handleConnectComposioToolkit,
|
||||
};
|
||||
};
|
||||
|
|
|
|||
|
|
@ -4,7 +4,8 @@ import type { FC } from "react";
|
|||
import { EnumConnectorName } from "@/contracts/enums/connector";
|
||||
import type { SearchSourceConnector } from "@/contracts/types/connector.types";
|
||||
import { ConnectorCard } from "../components/connector-card";
|
||||
import { CRAWLERS, OAUTH_CONNECTORS, OTHER_CONNECTORS } from "../constants/connector-constants";
|
||||
import { ComposioConnectorCard } from "../components/composio-connector-card";
|
||||
import { CRAWLERS, OAUTH_CONNECTORS, OTHER_CONNECTORS, COMPOSIO_CONNECTORS } from "../constants/connector-constants";
|
||||
import { getDocumentCountForConnector } from "../utils/connector-document-mapping";
|
||||
|
||||
/**
|
||||
|
|
@ -34,6 +35,7 @@ interface AllConnectorsTabProps {
|
|||
onCreateYouTubeCrawler?: () => void;
|
||||
onManage?: (connector: SearchSourceConnector) => void;
|
||||
onViewAccountsList?: (connectorType: string, connectorTitle: string) => void;
|
||||
onOpenComposio?: () => void;
|
||||
}
|
||||
|
||||
export const AllConnectorsTab: FC<AllConnectorsTabProps> = ({
|
||||
|
|
@ -49,6 +51,7 @@ export const AllConnectorsTab: FC<AllConnectorsTabProps> = ({
|
|||
onCreateYouTubeCrawler,
|
||||
onManage,
|
||||
onViewAccountsList,
|
||||
onOpenComposio,
|
||||
}) => {
|
||||
// Filter connectors based on search
|
||||
const filteredOAuth = OAUTH_CONNECTORS.filter(
|
||||
|
|
@ -69,6 +72,20 @@ export const AllConnectorsTab: FC<AllConnectorsTabProps> = ({
|
|||
c.description.toLowerCase().includes(searchQuery.toLowerCase())
|
||||
);
|
||||
|
||||
// Filter Composio connectors
|
||||
const filteredComposio = COMPOSIO_CONNECTORS.filter(
|
||||
(c) =>
|
||||
c.title.toLowerCase().includes(searchQuery.toLowerCase()) ||
|
||||
c.description.toLowerCase().includes(searchQuery.toLowerCase())
|
||||
);
|
||||
|
||||
// Count Composio connectors
|
||||
const composioConnectorCount = allConnectors
|
||||
? allConnectors.filter(
|
||||
(c: SearchSourceConnector) => c.connector_type === EnumConnectorName.COMPOSIO_CONNECTOR
|
||||
).length
|
||||
: 0;
|
||||
|
||||
return (
|
||||
<div className="space-y-8">
|
||||
{/* Quick Connect */}
|
||||
|
|
@ -137,6 +154,30 @@ export const AllConnectorsTab: FC<AllConnectorsTabProps> = ({
|
|||
</section>
|
||||
)}
|
||||
|
||||
{/* Composio Integrations */}
|
||||
{filteredComposio.length > 0 && onOpenComposio && (
|
||||
<section>
|
||||
<div className="flex items-center gap-2 mb-4">
|
||||
<h3 className="text-sm font-semibold text-muted-foreground">Managed OAuth</h3>
|
||||
<span className="text-[10px] px-1.5 py-0.5 rounded-full bg-violet-500/10 text-violet-600 dark:text-violet-400 border border-violet-500/20 font-medium">
|
||||
No verification needed
|
||||
</span>
|
||||
</div>
|
||||
<div className="grid grid-cols-1 sm:grid-cols-2 gap-3">
|
||||
{filteredComposio.map((connector) => (
|
||||
<ComposioConnectorCard
|
||||
key={connector.id}
|
||||
id={connector.id}
|
||||
title={connector.title}
|
||||
description={connector.description}
|
||||
connectorCount={composioConnectorCount}
|
||||
onConnect={onOpenComposio}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</section>
|
||||
)}
|
||||
|
||||
{/* More Integrations */}
|
||||
{filteredOther.length > 0 && (
|
||||
<section>
|
||||
|
|
|
|||
|
|
@ -30,6 +30,7 @@ export const CONNECTOR_TO_DOCUMENT_TYPE: Record<string, string> = {
|
|||
// Special mappings (connector type differs from document type)
|
||||
GOOGLE_DRIVE_CONNECTOR: "GOOGLE_DRIVE_FILE",
|
||||
WEBCRAWLER_CONNECTOR: "CRAWLED_URL",
|
||||
COMPOSIO_CONNECTOR: "COMPOSIO_CONNECTOR",
|
||||
};
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -0,0 +1,301 @@
|
|||
"use client";
|
||||
|
||||
import {
|
||||
ArrowLeft,
|
||||
Calendar,
|
||||
Check,
|
||||
ExternalLink,
|
||||
Github,
|
||||
Loader2,
|
||||
Mail,
|
||||
HardDrive,
|
||||
MessageSquare,
|
||||
FileText,
|
||||
Zap,
|
||||
} from "lucide-react";
|
||||
import Image from "next/image";
|
||||
import type { FC } from "react";
|
||||
import { useState } from "react";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
interface ComposioToolkit {
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
isIndexable: boolean;
|
||||
}
|
||||
|
||||
interface ComposioToolkitViewProps {
|
||||
searchSpaceId: string;
|
||||
connectedToolkits: string[];
|
||||
onBack: () => void;
|
||||
onConnectToolkit: (toolkitId: string) => void;
|
||||
isConnecting: boolean;
|
||||
connectingToolkitId: string | null;
|
||||
}
|
||||
|
||||
// Available Composio toolkits
|
||||
const COMPOSIO_TOOLKITS: ComposioToolkit[] = [
|
||||
{
|
||||
id: "googledrive",
|
||||
name: "Google Drive",
|
||||
description: "Search your Drive files and documents",
|
||||
isIndexable: true,
|
||||
},
|
||||
{
|
||||
id: "gmail",
|
||||
name: "Gmail",
|
||||
description: "Search through your emails",
|
||||
isIndexable: true,
|
||||
},
|
||||
{
|
||||
id: "googlecalendar",
|
||||
name: "Google Calendar",
|
||||
description: "Search through your events",
|
||||
isIndexable: true,
|
||||
},
|
||||
{
|
||||
id: "slack",
|
||||
name: "Slack",
|
||||
description: "Search Slack messages",
|
||||
isIndexable: false,
|
||||
},
|
||||
{
|
||||
id: "notion",
|
||||
name: "Notion",
|
||||
description: "Search Notion pages",
|
||||
isIndexable: false,
|
||||
},
|
||||
{
|
||||
id: "github",
|
||||
name: "GitHub",
|
||||
description: "Search repositories and code",
|
||||
isIndexable: false,
|
||||
},
|
||||
];
|
||||
|
||||
// Get icon for toolkit
|
||||
const getToolkitIcon = (toolkitId: string, className?: string) => {
|
||||
const iconClass = className || "size-5";
|
||||
|
||||
switch (toolkitId) {
|
||||
case "googledrive":
|
||||
return <Image src="/connectors/google-drive.svg" alt="Google Drive" width={20} height={20} className={iconClass} />;
|
||||
case "gmail":
|
||||
return <Image src="/connectors/google-gmail.svg" alt="Gmail" width={20} height={20} className={iconClass} />;
|
||||
case "googlecalendar":
|
||||
return <Image src="/connectors/google-calendar.svg" alt="Google Calendar" width={20} height={20} className={iconClass} />;
|
||||
case "slack":
|
||||
return <Image src="/connectors/slack.svg" alt="Slack" width={20} height={20} className={iconClass} />;
|
||||
case "notion":
|
||||
return <Image src="/connectors/notion.svg" alt="Notion" width={20} height={20} className={iconClass} />;
|
||||
case "github":
|
||||
return <Image src="/connectors/github.svg" alt="GitHub" width={20} height={20} className={iconClass} />;
|
||||
default:
|
||||
return <Zap className={iconClass} />;
|
||||
}
|
||||
};
|
||||
|
||||
export const ComposioToolkitView: FC<ComposioToolkitViewProps> = ({
|
||||
searchSpaceId,
|
||||
connectedToolkits,
|
||||
onBack,
|
||||
onConnectToolkit,
|
||||
isConnecting,
|
||||
connectingToolkitId,
|
||||
}) => {
|
||||
const [hoveredToolkit, setHoveredToolkit] = useState<string | null>(null);
|
||||
|
||||
// Separate indexable and non-indexable toolkits
|
||||
const indexableToolkits = COMPOSIO_TOOLKITS.filter((t) => t.isIndexable);
|
||||
const nonIndexableToolkits = COMPOSIO_TOOLKITS.filter((t) => !t.isIndexable);
|
||||
|
||||
return (
|
||||
<div className="flex flex-col h-full">
|
||||
{/* Header */}
|
||||
<div className="px-6 sm:px-12 pt-8 sm:pt-10 pb-4 sm:pb-6 border-b border-border/50 bg-muted">
|
||||
{/* Back button */}
|
||||
<button
|
||||
type="button"
|
||||
onClick={onBack}
|
||||
className="flex items-center gap-2 text-xs sm:text-sm text-muted-foreground hover:text-foreground mb-6 w-fit transition-colors"
|
||||
>
|
||||
<ArrowLeft className="size-4" />
|
||||
Back to connectors
|
||||
</button>
|
||||
|
||||
{/* Header content */}
|
||||
<div className="flex flex-col sm:flex-row items-start sm:items-center gap-4">
|
||||
<div className="flex gap-4 flex-1 w-full sm:w-auto">
|
||||
<div className="flex h-14 w-14 items-center justify-center rounded-xl bg-gradient-to-br from-violet-500/20 to-purple-500/20 border border-violet-500/30 shrink-0">
|
||||
<Image
|
||||
src="/connectors/composio.svg"
|
||||
alt="Composio"
|
||||
width={28}
|
||||
height={28}
|
||||
className="size-7"
|
||||
/>
|
||||
</div>
|
||||
<div className="flex-1 min-w-0">
|
||||
<h2 className="text-xl sm:text-2xl font-semibold tracking-tight">
|
||||
Composio
|
||||
</h2>
|
||||
<p className="text-xs sm:text-sm text-muted-foreground mt-1">
|
||||
Connect 100+ apps with managed OAuth - no verification needed
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
<a
|
||||
href="https://composio.dev"
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="flex items-center gap-1.5 text-xs text-muted-foreground hover:text-foreground transition-colors"
|
||||
>
|
||||
<span>Powered by Composio</span>
|
||||
<ExternalLink className="size-3" />
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Content */}
|
||||
<div className="flex-1 overflow-y-auto px-6 sm:px-12 py-6 sm:py-8">
|
||||
{/* Indexable Toolkits (Google Services) */}
|
||||
<section className="mb-8">
|
||||
<div className="flex items-center gap-2 mb-4">
|
||||
<h3 className="text-sm font-semibold text-foreground">Google Services</h3>
|
||||
<Badge variant="secondary" className="text-[10px] px-1.5 py-0 h-5 bg-emerald-500/10 text-emerald-600 dark:text-emerald-400 border-emerald-500/20">
|
||||
Indexable
|
||||
</Badge>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground mb-4">
|
||||
Connect Google services via Composio's verified OAuth app. Your data will be indexed and searchable.
|
||||
</p>
|
||||
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 gap-3">
|
||||
{indexableToolkits.map((toolkit) => {
|
||||
const isConnected = connectedToolkits.includes(toolkit.id);
|
||||
const isThisConnecting = connectingToolkitId === toolkit.id;
|
||||
|
||||
return (
|
||||
<div
|
||||
key={toolkit.id}
|
||||
onMouseEnter={() => setHoveredToolkit(toolkit.id)}
|
||||
onMouseLeave={() => setHoveredToolkit(null)}
|
||||
className={cn(
|
||||
"group relative flex flex-col p-4 rounded-xl border transition-all duration-200",
|
||||
isConnected
|
||||
? "border-emerald-500/30 bg-emerald-500/5"
|
||||
: "border-border bg-card hover:border-violet-500/30 hover:bg-violet-500/5"
|
||||
)}
|
||||
>
|
||||
<div className="flex items-start justify-between mb-3">
|
||||
<div
|
||||
className={cn(
|
||||
"flex h-10 w-10 items-center justify-center rounded-lg border transition-colors",
|
||||
isConnected
|
||||
? "bg-emerald-500/10 border-emerald-500/20"
|
||||
: "bg-muted border-border group-hover:border-violet-500/20 group-hover:bg-violet-500/10"
|
||||
)}
|
||||
>
|
||||
{getToolkitIcon(toolkit.id, "size-5")}
|
||||
</div>
|
||||
{isConnected && (
|
||||
<Badge variant="secondary" className="text-[10px] px-1.5 py-0 h-5 bg-emerald-500/10 text-emerald-600 dark:text-emerald-400 border-emerald-500/20">
|
||||
<Check className="size-3 mr-0.5" />
|
||||
Connected
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
<h4 className="text-sm font-medium mb-1">{toolkit.name}</h4>
|
||||
<p className="text-xs text-muted-foreground mb-4 flex-1">
|
||||
{toolkit.description}
|
||||
</p>
|
||||
<Button
|
||||
size="sm"
|
||||
variant={isConnected ? "secondary" : "default"}
|
||||
className={cn(
|
||||
"w-full h-8 text-xs font-medium",
|
||||
!isConnected && "bg-violet-600 hover:bg-violet-700 text-white"
|
||||
)}
|
||||
onClick={() => onConnectToolkit(toolkit.id)}
|
||||
disabled={isConnecting || isConnected}
|
||||
>
|
||||
{isThisConnecting ? (
|
||||
<>
|
||||
<Loader2 className="size-3 mr-1.5 animate-spin" />
|
||||
Connecting...
|
||||
</>
|
||||
) : isConnected ? (
|
||||
"Connected"
|
||||
) : (
|
||||
"Connect"
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</section>
|
||||
|
||||
{/* Non-Indexable Toolkits (Coming Soon) */}
|
||||
<section>
|
||||
<div className="flex items-center gap-2 mb-4">
|
||||
<h3 className="text-sm font-semibold text-foreground">More Integrations</h3>
|
||||
<Badge variant="secondary" className="text-[10px] px-1.5 py-0 h-5 bg-amber-500/10 text-amber-600 dark:text-amber-400 border-amber-500/20">
|
||||
Coming Soon
|
||||
</Badge>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground mb-4">
|
||||
Connect these services for future indexing support. Currently available for connection only.
|
||||
</p>
|
||||
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 gap-3 opacity-60">
|
||||
{nonIndexableToolkits.map((toolkit) => (
|
||||
<div
|
||||
key={toolkit.id}
|
||||
className="group relative flex flex-col p-4 rounded-xl border border-border bg-card/50"
|
||||
>
|
||||
<div className="flex items-start justify-between mb-3">
|
||||
<div className="flex h-10 w-10 items-center justify-center rounded-lg border bg-muted border-border">
|
||||
{getToolkitIcon(toolkit.id, "size-5")}
|
||||
</div>
|
||||
<Badge variant="outline" className="text-[10px] px-1.5 py-0 h-5">
|
||||
Soon
|
||||
</Badge>
|
||||
</div>
|
||||
<h4 className="text-sm font-medium mb-1">{toolkit.name}</h4>
|
||||
<p className="text-xs text-muted-foreground mb-4 flex-1">
|
||||
{toolkit.description}
|
||||
</p>
|
||||
<Button
|
||||
size="sm"
|
||||
variant="outline"
|
||||
className="w-full h-8 text-xs font-medium"
|
||||
disabled
|
||||
>
|
||||
Coming Soon
|
||||
</Button>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</section>
|
||||
|
||||
{/* Info footer */}
|
||||
<div className="mt-8 p-4 rounded-xl bg-muted/50 border border-border/50">
|
||||
<div className="flex items-start gap-3">
|
||||
<div className="flex h-8 w-8 items-center justify-center rounded-lg bg-violet-500/10 border border-violet-500/20 shrink-0">
|
||||
<Zap className="size-4 text-violet-500" />
|
||||
</div>
|
||||
<div>
|
||||
<h4 className="text-sm font-medium mb-1">Why use Composio?</h4>
|
||||
<p className="text-xs text-muted-foreground leading-relaxed">
|
||||
Composio provides pre-verified OAuth apps, so you don't need to wait for Google app verification.
|
||||
Your data is securely processed through Composio's managed authentication.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
|
@ -5,6 +5,7 @@ import { useAtomValue } from "jotai";
|
|||
import {
|
||||
Bot,
|
||||
Check,
|
||||
ChevronDown,
|
||||
ChevronsUpDown,
|
||||
Key,
|
||||
Loader2,
|
||||
|
|
@ -19,6 +20,7 @@ import { z } from "zod";
|
|||
import { defaultSystemInstructionsAtom } from "@/atoms/new-llm-config/new-llm-config-query.atoms";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Collapsible, CollapsibleContent, CollapsibleTrigger } from "@/components/ui/collapsible";
|
||||
import {
|
||||
Command,
|
||||
CommandEmpty,
|
||||
|
|
@ -101,6 +103,8 @@ export function LLMConfigForm({
|
|||
defaultSystemInstructionsAtom
|
||||
);
|
||||
const [modelComboboxOpen, setModelComboboxOpen] = useState(false);
|
||||
const [advancedOpen, setAdvancedOpen] = useState(false);
|
||||
const [systemInstructionsOpen, setSystemInstructionsOpen] = useState(false);
|
||||
|
||||
const form = useForm<FormValues>({
|
||||
resolver: zodResolver(formSchema),
|
||||
|
|
@ -154,73 +158,6 @@ export function LLMConfigForm({
|
|||
return (
|
||||
<Form {...form}>
|
||||
<form onSubmit={form.handleSubmit(handleFormSubmit)} className="space-y-6">
|
||||
{/* System Instructions & Citations Section */}
|
||||
<div className="space-y-4">
|
||||
<div className="flex items-center gap-2 text-xs sm:text-sm font-medium text-muted-foreground">
|
||||
<MessageSquareQuote className="h-3.5 w-3.5 sm:h-4 sm:w-4" />
|
||||
System Instructions
|
||||
</div>
|
||||
|
||||
{/* System Instructions */}
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="system_instructions"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<div className="flex items-center justify-between">
|
||||
<FormLabel className="text-xs sm:text-sm">Instructions for the AI</FormLabel>
|
||||
{defaultInstructions && (
|
||||
<Button
|
||||
type="button"
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() =>
|
||||
field.onChange(defaultInstructions.default_system_instructions)
|
||||
}
|
||||
className="h-7 text-[10px] sm:text-xs text-muted-foreground hover:text-foreground"
|
||||
>
|
||||
Reset to Default
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
<FormControl>
|
||||
<Textarea
|
||||
placeholder="Enter system instructions for the AI..."
|
||||
rows={6}
|
||||
className="font-mono text-[11px] sm:text-xs resize-none"
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormDescription className="text-[10px] sm:text-xs">
|
||||
Use {"{resolved_today}"} to include today's date dynamically
|
||||
</FormDescription>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
{/* Citations Toggle */}
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="citations_enabled"
|
||||
render={({ field }) => (
|
||||
<FormItem className="flex items-center justify-between rounded-lg border p-3 bg-muted/30">
|
||||
<div className="space-y-0.5">
|
||||
<FormLabel className="text-xs sm:text-sm font-medium">Enable Citations</FormLabel>
|
||||
<FormDescription className="text-[10px] sm:text-xs">
|
||||
Include [citation:id] references to source documents
|
||||
</FormDescription>
|
||||
</div>
|
||||
<FormControl>
|
||||
<Switch checked={field.value} onCheckedChange={field.onChange} />
|
||||
</FormControl>
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<Separator />
|
||||
|
||||
{/* Model Configuration Section */}
|
||||
<div className="space-y-4">
|
||||
<div className="flex items-center gap-2 text-xs sm:text-sm font-medium text-muted-foreground">
|
||||
|
|
@ -509,31 +446,125 @@ export function LLMConfigForm({
|
|||
{showAdvanced && (
|
||||
<>
|
||||
<Separator />
|
||||
<div className="space-y-4">
|
||||
<div className="flex items-center gap-2 text-xs sm:text-sm font-medium text-muted-foreground">
|
||||
<Sparkles className="h-3.5 w-3.5 sm:h-4 sm:w-4" />
|
||||
Advanced Parameters
|
||||
</div>
|
||||
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="litellm_params"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormControl>
|
||||
<InferenceParamsEditor
|
||||
params={field.value || {}}
|
||||
setParams={field.onChange}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
<Collapsible open={advancedOpen} onOpenChange={setAdvancedOpen}>
|
||||
<CollapsibleTrigger asChild>
|
||||
<button
|
||||
type="button"
|
||||
className="flex w-full items-center justify-between py-2 text-xs sm:text-sm font-medium text-muted-foreground hover:text-foreground transition-colors"
|
||||
>
|
||||
<div className="flex items-center gap-2">
|
||||
<Sparkles className="h-3.5 w-3.5 sm:h-4 sm:w-4" />
|
||||
Advanced Parameters
|
||||
</div>
|
||||
<ChevronDown
|
||||
className={cn(
|
||||
"h-4 w-4 transition-transform duration-200",
|
||||
advancedOpen && "rotate-180"
|
||||
)}
|
||||
/>
|
||||
</button>
|
||||
</CollapsibleTrigger>
|
||||
<CollapsibleContent className="space-y-4 pt-2">
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="litellm_params"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormControl>
|
||||
<InferenceParamsEditor
|
||||
params={field.value || {}}
|
||||
setParams={field.onChange}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
</CollapsibleContent>
|
||||
</Collapsible>
|
||||
</>
|
||||
)}
|
||||
|
||||
{/* System Instructions & Citations Section */}
|
||||
<Separator />
|
||||
<Collapsible open={systemInstructionsOpen} onOpenChange={setSystemInstructionsOpen}>
|
||||
<CollapsibleTrigger asChild>
|
||||
<button
|
||||
type="button"
|
||||
className="flex w-full items-center justify-between py-2 text-xs sm:text-sm font-medium text-muted-foreground hover:text-foreground transition-colors"
|
||||
>
|
||||
<div className="flex items-center gap-2">
|
||||
<MessageSquareQuote className="h-3.5 w-3.5 sm:h-4 sm:w-4" />
|
||||
System Instructions
|
||||
</div>
|
||||
<ChevronDown
|
||||
className={cn(
|
||||
"h-4 w-4 transition-transform duration-200",
|
||||
systemInstructionsOpen && "rotate-180"
|
||||
)}
|
||||
/>
|
||||
</button>
|
||||
</CollapsibleTrigger>
|
||||
<CollapsibleContent className="space-y-4 pt-2">
|
||||
{/* System Instructions */}
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="system_instructions"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<div className="flex items-center justify-between">
|
||||
<FormLabel className="text-xs sm:text-sm">Instructions for the AI</FormLabel>
|
||||
{defaultInstructions && (
|
||||
<Button
|
||||
type="button"
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() =>
|
||||
field.onChange(defaultInstructions.default_system_instructions)
|
||||
}
|
||||
className="h-7 text-[10px] sm:text-xs text-muted-foreground hover:text-foreground"
|
||||
>
|
||||
Reset to Default
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
<FormControl>
|
||||
<Textarea
|
||||
placeholder="Enter system instructions for the AI..."
|
||||
rows={6}
|
||||
className="font-mono text-[11px] sm:text-xs resize-none"
|
||||
{...field}
|
||||
/>
|
||||
</FormControl>
|
||||
<FormDescription className="text-[10px] sm:text-xs">
|
||||
Use {"{resolved_today}"} to include today's date dynamically
|
||||
</FormDescription>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
|
||||
{/* Citations Toggle */}
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="citations_enabled"
|
||||
render={({ field }) => (
|
||||
<FormItem className="flex items-center justify-between rounded-lg border p-3 bg-muted/30">
|
||||
<div className="space-y-0.5">
|
||||
<FormLabel className="text-xs sm:text-sm font-medium">Enable Citations</FormLabel>
|
||||
<FormDescription className="text-[10px] sm:text-xs">
|
||||
Include [citation:id] references to source documents
|
||||
</FormDescription>
|
||||
</div>
|
||||
<FormControl>
|
||||
<Switch checked={field.value} onCheckedChange={field.onChange} />
|
||||
</FormControl>
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
</CollapsibleContent>
|
||||
</Collapsible>
|
||||
|
||||
{/* Action Buttons */}
|
||||
<div
|
||||
className={cn(
|
||||
|
|
|
|||
|
|
@ -77,4 +77,17 @@ export {
|
|||
ScrapeWebpageResultSchema,
|
||||
ScrapeWebpageToolUI,
|
||||
} from "./scrape-webpage";
|
||||
export {
|
||||
type MemoryItem,
|
||||
type RecallMemoryArgs,
|
||||
RecallMemoryArgsSchema,
|
||||
type RecallMemoryResult,
|
||||
RecallMemoryResultSchema,
|
||||
RecallMemoryToolUI,
|
||||
type SaveMemoryArgs,
|
||||
SaveMemoryArgsSchema,
|
||||
type SaveMemoryResult,
|
||||
SaveMemoryResultSchema,
|
||||
SaveMemoryToolUI,
|
||||
} from "./user-memory";
|
||||
export { type WriteTodosData, WriteTodosSchema, WriteTodosToolUI } from "./write-todos";
|
||||
|
|
|
|||
283
surfsense_web/components/tool-ui/user-memory.tsx
Normal file
283
surfsense_web/components/tool-ui/user-memory.tsx
Normal file
|
|
@ -0,0 +1,283 @@
|
|||
"use client";
|
||||
|
||||
import { makeAssistantToolUI } from "@assistant-ui/react";
|
||||
import { BrainIcon, CheckIcon, Loader2Icon, SearchIcon, XIcon } from "lucide-react";
|
||||
import { z } from "zod";
|
||||
|
||||
// ============================================================================
|
||||
// Zod Schemas for save_memory tool
|
||||
// ============================================================================
|
||||
|
||||
const SaveMemoryArgsSchema = z.object({
|
||||
content: z.string(),
|
||||
category: z.string().default("fact"),
|
||||
});
|
||||
|
||||
const SaveMemoryResultSchema = z.object({
|
||||
status: z.enum(["saved", "error"]),
|
||||
memory_id: z.number().nullish(),
|
||||
memory_text: z.string().nullish(),
|
||||
category: z.string().nullish(),
|
||||
message: z.string().nullish(),
|
||||
error: z.string().nullish(),
|
||||
});
|
||||
|
||||
type SaveMemoryArgs = z.infer<typeof SaveMemoryArgsSchema>;
|
||||
type SaveMemoryResult = z.infer<typeof SaveMemoryResultSchema>;
|
||||
|
||||
// ============================================================================
|
||||
// Zod Schemas for recall_memory tool
|
||||
// ============================================================================
|
||||
|
||||
const RecallMemoryArgsSchema = z.object({
|
||||
query: z.string().nullish(),
|
||||
category: z.string().nullish(),
|
||||
top_k: z.number().default(5),
|
||||
});
|
||||
|
||||
const MemoryItemSchema = z.object({
|
||||
id: z.number(),
|
||||
memory_text: z.string(),
|
||||
category: z.string(),
|
||||
updated_at: z.string().nullish(),
|
||||
});
|
||||
|
||||
const RecallMemoryResultSchema = z.object({
|
||||
status: z.enum(["success", "error"]),
|
||||
count: z.number().nullish(),
|
||||
memories: z.array(MemoryItemSchema).nullish(),
|
||||
formatted_context: z.string().nullish(),
|
||||
error: z.string().nullish(),
|
||||
});
|
||||
|
||||
type RecallMemoryArgs = z.infer<typeof RecallMemoryArgsSchema>;
|
||||
type RecallMemoryResult = z.infer<typeof RecallMemoryResultSchema>;
|
||||
type MemoryItem = z.infer<typeof MemoryItemSchema>;
|
||||
|
||||
// ============================================================================
|
||||
// Category badge colors
|
||||
// ============================================================================
|
||||
|
||||
const categoryColors: Record<string, string> = {
|
||||
preference: "bg-blue-500/10 text-blue-600 dark:text-blue-400",
|
||||
fact: "bg-green-500/10 text-green-600 dark:text-green-400",
|
||||
instruction: "bg-purple-500/10 text-purple-600 dark:text-purple-400",
|
||||
context: "bg-orange-500/10 text-orange-600 dark:text-orange-400",
|
||||
};
|
||||
|
||||
function CategoryBadge({ category }: { category: string }) {
|
||||
const colorClass = categoryColors[category] || "bg-gray-500/10 text-gray-600 dark:text-gray-400";
|
||||
return (
|
||||
<span
|
||||
className={`inline-flex items-center rounded-full px-2 py-0.5 text-xs font-medium ${colorClass}`}
|
||||
>
|
||||
{category}
|
||||
</span>
|
||||
);
|
||||
}
|
||||
|
||||
// ============================================================================
// Save Memory Tool UI
// ============================================================================

/**
 * Inline chat UI for the `save_memory` tool: a spinner while the call runs,
 * an error card on failure, and a confirmation card (with category badge)
 * once the memory has been persisted.
 */
export const SaveMemoryToolUI = makeAssistantToolUI<SaveMemoryArgs, SaveMemoryResult>({
	toolName: "save_memory",
	render: function SaveMemoryUI({ args, result, status }) {
		const isRunning = status.type === "running" || status.type === "requires-action";
		const isComplete = status.type === "complete";
		const isError = result?.status === "error";

		// Tool args stream in incrementally and may be partial mid-call, so
		// parse defensively instead of trusting the raw object.
		const parsedArgs = SaveMemoryArgsSchema.safeParse(args);
		const content = parsedArgs.success ? parsedArgs.data.content : "";
		const category = parsedArgs.success ? parsedArgs.data.category : "fact";

		// Loading state
		if (isRunning) {
			return (
				<div className="my-3 flex items-center gap-3 rounded-lg border bg-card/60 px-4 py-3">
					<div className="flex size-8 items-center justify-center rounded-full bg-primary/10">
						<Loader2Icon className="size-4 animate-spin text-primary" />
					</div>
					<div className="flex-1">
						<span className="text-sm text-muted-foreground">Saving to memory...</span>
					</div>
				</div>
			);
		}

		// Error state
		if (isError) {
			return (
				<div className="my-3 flex items-center gap-3 rounded-lg border border-destructive/20 bg-destructive/5 px-4 py-3">
					<div className="flex size-8 items-center justify-center rounded-full bg-destructive/10">
						<XIcon className="size-4 text-destructive" />
					</div>
					<div className="flex-1">
						<span className="text-sm text-destructive">Failed to save memory</span>
						{result?.error && <p className="mt-1 text-xs text-destructive/70">{result.error}</p>}
					</div>
				</div>
			);
		}

		// Success state — prefer the values echoed back by the backend (the
		// result schema carries memory_text/category, and the server may have
		// normalized or truncated them), falling back to the raw tool args.
		if (isComplete && result?.status === "saved") {
			const savedText = result.memory_text ?? content;
			const savedCategory = result.category ?? category;
			return (
				<div className="my-3 flex items-center gap-3 rounded-lg border border-primary/20 bg-primary/5 px-4 py-3">
					<div className="flex size-8 items-center justify-center rounded-full bg-primary/10">
						<BrainIcon className="size-4 text-primary" />
					</div>
					<div className="flex-1 min-w-0">
						<div className="flex items-center gap-2">
							<CheckIcon className="size-3 text-green-500 shrink-0" />
							<span className="text-sm font-medium text-foreground">Memory saved</span>
							<CategoryBadge category={savedCategory} />
						</div>
						<p className="mt-1 truncate text-sm text-muted-foreground">{savedText}</p>
					</div>
				</div>
			);
		}

		// Default/incomplete state - show what's being saved
		if (content) {
			return (
				<div className="my-3 flex items-center gap-3 rounded-lg border bg-card/60 px-4 py-3">
					<div className="flex size-8 items-center justify-center rounded-full bg-muted">
						<BrainIcon className="size-4 text-muted-foreground" />
					</div>
					<div className="flex-1 min-w-0">
						<div className="flex items-center gap-2">
							<span className="text-sm text-muted-foreground">Saving memory</span>
							<CategoryBadge category={category} />
						</div>
						<p className="mt-1 truncate text-sm text-muted-foreground">{content}</p>
					</div>
				</div>
			);
		}

		return null;
	},
});
|
||||
|
||||
// ============================================================================
|
||||
// Recall Memory Tool UI
|
||||
// ============================================================================
|
||||
|
||||
export const RecallMemoryToolUI = makeAssistantToolUI<RecallMemoryArgs, RecallMemoryResult>({
|
||||
toolName: "recall_memory",
|
||||
render: function RecallMemoryUI({ args, result, status }) {
|
||||
const isRunning = status.type === "running" || status.type === "requires-action";
|
||||
const isComplete = status.type === "complete";
|
||||
const isError = result?.status === "error";
|
||||
|
||||
// Parse args safely
|
||||
const parsedArgs = RecallMemoryArgsSchema.safeParse(args);
|
||||
const query = parsedArgs.success ? parsedArgs.data.query : null;
|
||||
|
||||
// Loading state
|
||||
if (isRunning) {
|
||||
return (
|
||||
<div className="my-3 flex items-center gap-3 rounded-lg border bg-card/60 px-4 py-3">
|
||||
<div className="flex size-8 items-center justify-center rounded-full bg-primary/10">
|
||||
<Loader2Icon className="size-4 animate-spin text-primary" />
|
||||
</div>
|
||||
<div className="flex-1">
|
||||
<span className="text-sm text-muted-foreground">
|
||||
{query ? `Searching memories for "${query}"...` : "Recalling memories..."}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Error state
|
||||
if (isError) {
|
||||
return (
|
||||
<div className="my-3 flex items-center gap-3 rounded-lg border border-destructive/20 bg-destructive/5 px-4 py-3">
|
||||
<div className="flex size-8 items-center justify-center rounded-full bg-destructive/10">
|
||||
<XIcon className="size-4 text-destructive" />
|
||||
</div>
|
||||
<div className="flex-1">
|
||||
<span className="text-sm text-destructive">Failed to recall memories</span>
|
||||
{result?.error && <p className="mt-1 text-xs text-destructive/70">{result.error}</p>}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Success state with memories
|
||||
if (isComplete && result?.status === "success") {
|
||||
const memories = result.memories || [];
|
||||
const count = result.count || 0;
|
||||
|
||||
if (count === 0) {
|
||||
return (
|
||||
<div className="my-3 flex items-center gap-3 rounded-lg border bg-card/60 px-4 py-3">
|
||||
<div className="flex size-8 items-center justify-center rounded-full bg-muted">
|
||||
<SearchIcon className="size-4 text-muted-foreground" />
|
||||
</div>
|
||||
<span className="text-sm text-muted-foreground">No memories found</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="my-3 rounded-lg border bg-card/60 px-4 py-3">
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<BrainIcon className="size-4 text-primary" />
|
||||
<span className="text-sm font-medium text-foreground">
|
||||
Recalled {count} {count === 1 ? "memory" : "memories"}
|
||||
</span>
|
||||
</div>
|
||||
<div className="space-y-2">
|
||||
{memories.slice(0, 5).map((memory: MemoryItem) => (
|
||||
<div
|
||||
key={memory.id}
|
||||
className="flex items-start gap-2 rounded-md bg-muted/50 px-3 py-2"
|
||||
>
|
||||
<CategoryBadge category={memory.category} />
|
||||
<span className="text-sm text-muted-foreground flex-1">{memory.memory_text}</span>
|
||||
</div>
|
||||
))}
|
||||
{memories.length > 5 && (
|
||||
<p className="text-xs text-muted-foreground">...and {memories.length - 5} more</p>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Default/incomplete state
|
||||
if (query) {
|
||||
return (
|
||||
<div className="my-3 flex items-center gap-3 rounded-lg border bg-card/60 px-4 py-3">
|
||||
<div className="flex size-8 items-center justify-center rounded-full bg-muted">
|
||||
<SearchIcon className="size-4 text-muted-foreground" />
|
||||
</div>
|
||||
<span className="text-sm text-muted-foreground">Searching memories for "{query}"</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Exports
|
||||
// ============================================================================
|
||||
|
||||
export {
|
||||
SaveMemoryArgsSchema,
|
||||
SaveMemoryResultSchema,
|
||||
RecallMemoryArgsSchema,
|
||||
RecallMemoryResultSchema,
|
||||
type SaveMemoryArgs,
|
||||
type SaveMemoryResult,
|
||||
type RecallMemoryArgs,
|
||||
type RecallMemoryResult,
|
||||
type MemoryItem,
|
||||
};
|
||||
|
|
@ -24,4 +24,5 @@ export enum EnumConnectorName {
|
|||
YOUTUBE_CONNECTOR = "YOUTUBE_CONNECTOR",
|
||||
CIRCLEBACK_CONNECTOR = "CIRCLEBACK_CONNECTOR",
|
||||
MCP_CONNECTOR = "MCP_CONNECTOR",
|
||||
COMPOSIO_CONNECTOR = "COMPOSIO_CONNECTOR",
|
||||
}
|
||||
|
|
|
|||
|
|
@ -66,6 +66,8 @@ export const getConnectorIcon = (connectorType: EnumConnectorName | string, clas
|
|||
return <IconUsersGroup {...iconProps} />;
|
||||
case EnumConnectorName.MCP_CONNECTOR:
|
||||
return <Image src="/connectors/modelcontextprotocol.svg" alt="MCP" {...imgProps} />;
|
||||
case EnumConnectorName.COMPOSIO_CONNECTOR:
|
||||
return <Image src="/connectors/composio.svg" alt="Composio" {...imgProps} />;
|
||||
// Additional cases for non-enum connector types
|
||||
case "YOUTUBE_CONNECTOR":
|
||||
return <Image src="/connectors/youtube.svg" alt="YouTube" {...imgProps} />;
|
||||
|
|
@ -85,6 +87,8 @@ export const getConnectorIcon = (connectorType: EnumConnectorName | string, clas
|
|||
return <File {...iconProps} />;
|
||||
case "GOOGLE_DRIVE_FILE":
|
||||
return <File {...iconProps} />;
|
||||
case "COMPOSIO_CONNECTOR":
|
||||
return <Image src="/connectors/composio.svg" alt="Composio" {...imgProps} />;
|
||||
case "NOTE":
|
||||
return <FileText {...iconProps} />;
|
||||
case "EXTENSION":
|
||||
|
|
|
|||
|
|
@ -27,6 +27,7 @@ export const searchSourceConnectorTypeEnum = z.enum([
|
|||
"BOOKSTACK_CONNECTOR",
|
||||
"CIRCLEBACK_CONNECTOR",
|
||||
"MCP_CONNECTOR",
|
||||
"COMPOSIO_CONNECTOR",
|
||||
]);
|
||||
|
||||
export const searchSourceConnector = z.object({
|
||||
|
|
|
|||
|
|
@ -25,6 +25,7 @@ export const documentTypeEnum = z.enum([
|
|||
"CIRCLEBACK",
|
||||
"SURFSENSE_DOCS",
|
||||
"NOTE",
|
||||
"COMPOSIO_CONNECTOR",
|
||||
]);
|
||||
|
||||
export const document = z.object({
|
||||
|
|
|
|||
12
surfsense_web/public/connectors/composio.svg
Normal file
12
surfsense_web/public/connectors/composio.svg
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none">
|
||||
<rect width="24" height="24" rx="6" fill="url(#composio-gradient)"/>
|
||||
<path d="M12 6L17 9V15L12 18L7 15V9L12 6Z" stroke="white" stroke-width="1.5" stroke-linejoin="round"/>
|
||||
<path d="M12 6V12M12 12L17 9M12 12L7 9M12 12V18" stroke="white" stroke-width="1.5" stroke-linecap="round"/>
|
||||
<circle cx="12" cy="12" r="2" fill="white"/>
|
||||
<defs>
|
||||
<linearGradient id="composio-gradient" x1="0" y1="0" x2="24" y2="24" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#8B5CF6"/>
|
||||
<stop offset="1" stop-color="#A855F7"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 640 B |
Loading…
Add table
Add a link
Reference in a new issue