Mirror of https://github.com/MODSetter/SurfSense.git (synced 2026-05-06 06:12:40 +02:00)
fix: resolve runtime crashes in tool-ui components and backend import errors
Backend:
- Remove dead imports (display_image, link_preview, knowledge_base) from the tools registry and __init__
- Delete the orphaned elif chain (lines 784-1046) in knowledge_base.py left after the asyncio.gather refactor
- Add the missing NotionAPIError class to notion_history.py

Frontend:
- Add display-image.tsx, link-preview.tsx, and scrape-webpage.tsx tool UI components
- Fix GeneratePodcastToolUI: add a null guard for the no-props render plus optional chaining on status/args
- Fix SaveMemoryToolUI/RecallMemoryToolUI: add a null guard when rendered without props in the provider
- Update launch.json to use pnpm on port 3999

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
parent 67429c287f
commit dc545f8028

10 changed files with 236 additions and 273 deletions
@@ -23,7 +23,6 @@ from .crypto_realtime import (
     create_get_live_token_data_tool,
     create_get_live_token_price_tool,
 )
-from .display_image import create_display_image_tool
 from .generate_image import create_generate_image_tool
 from .knowledge_base import (
     CONNECTOR_DESCRIPTIONS,
@@ -57,7 +56,6 @@ __all__ = [
     "create_generate_video_presentation_tool",
     "create_get_live_token_data_tool",
     "create_get_live_token_price_tool",
-    "create_link_preview_tool",
     "create_recall_memory_tool",
     "create_save_memory_tool",
     "create_scrape_webpage_tool",
@@ -781,269 +781,6 @@ async def search_knowledge_base_raw_async(
     for docs in connector_results:
         all_documents.extend(docs)
 
-        elif connector == "TEAMS_CONNECTOR":
-            _, chunks = await connector_service.search_teams(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "NOTION_CONNECTOR":
-            _, chunks = await connector_service.search_notion(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "GITHUB_CONNECTOR":
-            _, chunks = await connector_service.search_github(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "LINEAR_CONNECTOR":
-            _, chunks = await connector_service.search_linear(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "TAVILY_API":
-            _, chunks = await connector_service.search_tavily(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "SEARXNG_API":
-            _, chunks = await connector_service.search_searxng(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "LINKUP_API":
-            # Keep behavior aligned with researcher: default "standard"
-            _, chunks = await connector_service.search_linkup(
-                user_query=query,
-                search_space_id=search_space_id,
-                mode="standard",
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "BAIDU_SEARCH_API":
-            _, chunks = await connector_service.search_baidu(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "DISCORD_CONNECTOR":
-            _, chunks = await connector_service.search_discord(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "JIRA_CONNECTOR":
-            _, chunks = await connector_service.search_jira(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "GOOGLE_CALENDAR_CONNECTOR":
-            _, chunks = await connector_service.search_google_calendar(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "AIRTABLE_CONNECTOR":
-            _, chunks = await connector_service.search_airtable(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "GOOGLE_GMAIL_CONNECTOR":
-            _, chunks = await connector_service.search_google_gmail(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "GOOGLE_DRIVE_FILE":
-            _, chunks = await connector_service.search_google_drive(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "CONFLUENCE_CONNECTOR":
-            _, chunks = await connector_service.search_confluence(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "CLICKUP_CONNECTOR":
-            _, chunks = await connector_service.search_clickup(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "LUMA_CONNECTOR":
-            _, chunks = await connector_service.search_luma(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "ELASTICSEARCH_CONNECTOR":
-            _, chunks = await connector_service.search_elasticsearch(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "NOTE":
-            _, chunks = await connector_service.search_notes(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "BOOKSTACK_CONNECTOR":
-            _, chunks = await connector_service.search_bookstack(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "CIRCLEBACK":
-            _, chunks = await connector_service.search_circleback(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "OBSIDIAN_CONNECTOR":
-            _, chunks = await connector_service.search_obsidian(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "DEXSCREENER_CONNECTOR":
-            _, chunks = await connector_service.search_dexscreener(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            print(f"[DEBUG] DexScreener search returned {len(chunks)} chunks")
-            if chunks:
-                print(f"[DEBUG] First chunk metadata: {chunks[0].get('document', {}).get('metadata', {})}")
-            all_documents.extend(chunks)
-
-        # =========================================================
-        # Composio Connectors
-        # =========================================================
-        elif connector == "COMPOSIO_GOOGLE_DRIVE_CONNECTOR":
-            _, chunks = await connector_service.search_composio_google_drive(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "COMPOSIO_GMAIL_CONNECTOR":
-            _, chunks = await connector_service.search_composio_gmail(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        elif connector == "COMPOSIO_GOOGLE_CALENDAR_CONNECTOR":
-            _, chunks = await connector_service.search_composio_google_calendar(
-                user_query=query,
-                search_space_id=search_space_id,
-                top_k=top_k,
-                start_date=resolved_start_date,
-                end_date=resolved_end_date,
-            )
-            all_documents.extend(chunks)
-
-        except Exception as e:
-            print(f"Error searching connector {connector}: {e}")
-            continue
-
     # Deduplicate by content hash
     seen_doc_ids: set[Any] = set()
     seen_content_hashes: set[int] = set()
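Note on the refactor referenced in the commit message: the elif chain above became dead code after a move to asyncio.gather, and the kept context lines (for docs in connector_results: all_documents.extend(docs)) suggest the connector searches now run concurrently and return one chunk list per connector. The following is a minimal sketch of that pattern, not the actual SurfSense implementation; the names SearchHandler and search_connectors_concurrently are assumptions introduced only for illustration.

import asyncio
from collections.abc import Awaitable, Callable
from typing import Any

# Hypothetical alias: an async connector search returning (source_info, chunks).
SearchHandler = Callable[..., Awaitable[tuple[Any, list[dict]]]]


async def search_connectors_concurrently(
    connectors: list[str],
    handlers: dict[str, SearchHandler],
    **search_kwargs: Any,
) -> list[list[dict]]:
    """Run every enabled connector search concurrently, one chunk list per connector."""

    async def run_one(name: str) -> list[dict]:
        handler = handlers.get(name)
        if handler is None:
            return []
        try:
            _, chunks = await handler(**search_kwargs)
            return chunks
        except Exception as e:
            # Same spirit as the removed branches: log the failure, skip the connector.
            print(f"Error searching connector {name}: {e}")
            return []

    # One concurrent task per connector instead of one elif branch at a time.
    return await asyncio.gather(*(run_one(name) for name in connectors))

Under that assumption, the caller flattens the gathered lists exactly as the kept context lines do.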
@@ -54,7 +54,6 @@ from .crypto_realtime import (
     create_get_live_token_data_tool,
    create_get_live_token_price_tool,
 )
-from .display_image import create_display_image_tool
 from .dropbox import (
     create_create_dropbox_file_tool,
     create_delete_dropbox_file_tool,
@@ -85,8 +84,6 @@ from .linear import (
     create_delete_linear_issue_tool,
     create_update_linear_issue_tool,
 )
-from .knowledge_base import create_search_knowledge_base_tool
-from .link_preview import create_link_preview_tool
 from .mcp_tool import load_mcp_tools
 from .notion import (
     create_create_notion_page_tool,
@@ -8,6 +8,10 @@ from notion_client.errors import APIResponseError
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.future import select
 
+
+class NotionAPIError(Exception):
+    """Raised when Notion API returns an error response."""
+
 from app.config import config
 from app.db import SearchSourceConnector
 from app.schemas.notion_auth_credentials import NotionAuthCredentialsBase
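The hunk above only introduces the exception class; where it gets raised is not shown in this diff. A plausible usage sketch follows, assuming a hypothetical helper fetch_notion_pages: notion_client's AsyncClient and APIResponseError are real library names, but the helper and its wiring are illustrative and not taken from notion_history.py.

from notion_client import AsyncClient
from notion_client.errors import APIResponseError


class NotionAPIError(Exception):
    """Raised when Notion API returns an error response."""


async def fetch_notion_pages(client: AsyncClient, query: str) -> list[dict]:
    """Hypothetical helper: search Notion and normalize SDK errors."""
    try:
        response = await client.search(query=query)
    except APIResponseError as e:
        # Re-raise as the module-level error so callers catch a single type.
        raise NotionAPIError(f"Notion search failed: {e}") from e
    return response.get("results", [])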