diff --git a/surfsense_backend/alembic/versions/4_add_linkup_api_enum.py b/surfsense_backend/alembic/versions/4_add_linkup_api_enum.py
new file mode 100644
index 000000000..8ccfac2d2
--- /dev/null
+++ b/surfsense_backend/alembic/versions/4_add_linkup_api_enum.py
@@ -0,0 +1,45 @@
+"""Add LINKUP_API to SearchSourceConnectorType enum
+
+Revision ID: 4
+Revises: 3
+Create Date: 2025-04-18 10:00:00.000000
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = '4'
+down_revision: Union[str, None] = '3'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    """Add the LINKUP_API value to the searchsourceconnectortype enum."""
+    # IF NOT EXISTS keeps the migration idempotent if it is re-applied
+    # after a partial run.
+    # NOTE(review): ALTER TYPE ... ADD VALUE cannot run inside a transaction
+    # block on PostgreSQL < 12 — confirm the target PG version, or run this
+    # on an AUTOCOMMIT connection via op.get_bind().execution_options(...).
+    op.execute("ALTER TYPE searchsourceconnectortype ADD VALUE IF NOT EXISTS 'LINKUP_API'")
+
+
+def downgrade() -> None:
+    """Remove LINKUP_API from the enum by recreating the type.
+
+    PostgreSQL cannot drop a single value from an enum, so the type is
+    rebuilt without LINKUP_API and the column is cast over to the new type.
+    """
+    # Remove rows that still use LINKUP_API first; otherwise the
+    # connector_type::text::searchsourceconnectortype cast below fails with
+    # "invalid input value for enum" and the downgrade aborts mid-way.
+    op.execute("DELETE FROM search_source_connectors WHERE connector_type = 'LINKUP_API'")
+
+    op.execute("ALTER TYPE searchsourceconnectortype RENAME TO searchsourceconnectortype_old")
+    op.execute("CREATE TYPE searchsourceconnectortype AS ENUM('SERPER_API', 'TAVILY_API', 'SLACK_CONNECTOR', 'NOTION_CONNECTOR', 'GITHUB_CONNECTOR', 'LINEAR_CONNECTOR')")
+    op.execute((
+        "ALTER TABLE search_source_connectors ALTER COLUMN connector_type TYPE searchsourceconnectortype USING "
+        "connector_type::text::searchsourceconnectortype"
+    ))
+    op.execute("DROP TYPE searchsourceconnectortype_old")
\ No newline at end of file
diff --git a/surfsense_backend/app/agents/researcher/nodes.py b/surfsense_backend/app/agents/researcher/nodes.py
index 15935f2ea..1b42d7155 100644
--- a/surfsense_backend/app/agents/researcher/nodes.py
+++ b/surfsense_backend/app/agents/researcher/nodes.py
@@ -143,7 +143,7 @@ async def fetch_relevant_documents(
connectors_to_search: List[str],
writer: StreamWriter = None,
state: State = None,
- top_k: int = 20
+ top_k: int = 10
) -> List[Dict[str, Any]]:
"""
Fetch relevant documents for research questions using the provided connectors.
@@ -264,22 +264,6 @@ async def fetch_relevant_documents(
streaming_service.only_update_terminal(f"Found {len(files_chunks)} file chunks relevant to the query")
writer({"yeild_value": streaming_service._format_annotations()})
- elif connector == "TAVILY_API":
- source_object, tavily_chunks = await connector_service.search_tavily(
- user_query=reformulated_query,
- user_id=user_id,
- top_k=top_k
- )
-
- # Add to sources and raw documents
- if source_object:
- all_sources.append(source_object)
- all_raw_documents.extend(tavily_chunks)
-
- # Stream found document count
- if streaming_service and writer:
- streaming_service.only_update_terminal(f"Found {len(tavily_chunks)} web search results relevant to the query")
- writer({"yeild_value": streaming_service._format_annotations()})
elif connector == "SLACK_CONNECTOR":
source_object, slack_chunks = await connector_service.search_slack(
@@ -352,6 +336,47 @@ async def fetch_relevant_documents(
if streaming_service and writer:
streaming_service.only_update_terminal(f"Found {len(linear_chunks)} Linear issues relevant to the query")
writer({"yeild_value": streaming_service._format_annotations()})
+
+ elif connector == "TAVILY_API":
+ source_object, tavily_chunks = await connector_service.search_tavily(
+ user_query=reformulated_query,
+ user_id=user_id,
+ top_k=top_k
+ )
+
+ # Add to sources and raw documents
+ if source_object:
+ all_sources.append(source_object)
+ all_raw_documents.extend(tavily_chunks)
+
+ # Stream found document count
+ if streaming_service and writer:
+ streaming_service.only_update_terminal(f"Found {len(tavily_chunks)} web search results relevant to the query")
+ writer({"yeild_value": streaming_service._format_annotations()})
+
+ elif connector == "LINKUP_API":
+ if top_k > 10:
+ linkup_mode = "deep"
+ else:
+ linkup_mode = "standard"
+
+ source_object, linkup_chunks = await connector_service.search_linkup(
+ user_query=reformulated_query,
+ user_id=user_id,
+ mode=linkup_mode
+ )
+
+ # Add to sources and raw documents
+ if source_object:
+ all_sources.append(source_object)
+ all_raw_documents.extend(linkup_chunks)
+
+ # Stream found document count
+ if streaming_service and writer:
+ streaming_service.only_update_terminal(f"Found {len(linkup_chunks)} Linkup chunks relevant to the query")
+ writer({"yeild_value": streaming_service._format_annotations()})
+
+
except Exception as e:
error_message = f"Error searching connector {connector}: {str(e)}"
print(error_message)
@@ -462,6 +487,14 @@ async def process_sections(state: State, config: RunnableConfig, writer: StreamW
streaming_service.only_update_terminal("Searching for relevant information across all connectors...")
writer({"yeild_value": streaming_service._format_annotations()})
+    if configuration.num_sections == 1:
+        TOP_K = 10
+    elif configuration.num_sections == 3:
+        TOP_K = 20
+    elif configuration.num_sections == 6:
+        TOP_K = 30
+    else:
+        # Fallback so TOP_K is always bound: without this, any other
+        # num_sections value raises NameError at the fetch call below.
+        TOP_K = 10
+
+
relevant_documents = []
async with async_session_maker() as db_session:
try:
@@ -472,7 +505,8 @@ async def process_sections(state: State, config: RunnableConfig, writer: StreamW
db_session=db_session,
connectors_to_search=configuration.connectors_to_search,
writer=writer,
- state=state
+ state=state,
+ top_k=TOP_K
)
except Exception as e:
error_message = f"Error fetching relevant documents: {str(e)}"
diff --git a/surfsense_backend/app/db.py b/surfsense_backend/app/db.py
index 4426f4ffa..320f059dd 100644
--- a/surfsense_backend/app/db.py
+++ b/surfsense_backend/app/db.py
@@ -44,8 +44,9 @@ class DocumentType(str, Enum):
LINEAR_CONNECTOR = "LINEAR_CONNECTOR"
class SearchSourceConnectorType(str, Enum):
- SERPER_API = "SERPER_API"
+ SERPER_API = "SERPER_API" # NOT IMPLEMENTED YET : DON'T REMEMBER WHY : MOST PROBABLY BECAUSE WE NEED TO CRAWL THE RESULTS RETURNED BY IT
TAVILY_API = "TAVILY_API"
+ LINKUP_API = "LINKUP_API"
SLACK_CONNECTOR = "SLACK_CONNECTOR"
NOTION_CONNECTOR = "NOTION_CONNECTOR"
GITHUB_CONNECTOR = "GITHUB_CONNECTOR"
diff --git a/surfsense_backend/app/schemas/search_source_connector.py b/surfsense_backend/app/schemas/search_source_connector.py
index 6accc12af..cb7152e06 100644
--- a/surfsense_backend/app/schemas/search_source_connector.py
+++ b/surfsense_backend/app/schemas/search_source_connector.py
@@ -36,6 +36,16 @@ class SearchSourceConnectorBase(BaseModel):
# Ensure the API key is not empty
if not config.get("TAVILY_API_KEY"):
raise ValueError("TAVILY_API_KEY cannot be empty")
+
+ elif connector_type == SearchSourceConnectorType.LINKUP_API:
+ # For LINKUP_API, only allow LINKUP_API_KEY
+ allowed_keys = ["LINKUP_API_KEY"]
+ if set(config.keys()) != set(allowed_keys):
+ raise ValueError(f"For LINKUP_API connector type, config must only contain these keys: {allowed_keys}")
+
+ # Ensure the API key is not empty
+ if not config.get("LINKUP_API_KEY"):
+ raise ValueError("LINKUP_API_KEY cannot be empty")
elif connector_type == SearchSourceConnectorType.SLACK_CONNECTOR:
# For SLACK_CONNECTOR, only allow SLACK_BOT_TOKEN
diff --git a/surfsense_backend/app/temp_test.py b/surfsense_backend/app/temp_test.py
new file mode 100644
index 000000000..f8ff10fec
--- /dev/null
+++ b/surfsense_backend/app/temp_test.py
@@ -0,0 +1,17 @@
+import os
+
+from linkup import LinkupClient
+
+# NOTE(review): this is a throwaway smoke-test script — it should not be
+# committed to the repository (or at least belong under a tests/ or
+# scripts/ directory, excluded from packaging).
+#
+# SECURITY: never hardcode API keys in source control. The key previously
+# committed here is compromised and must be rotated. Read it from the
+# environment instead.
+client = LinkupClient(
+    api_key=os.environ["LINKUP_API_KEY"]
+)
+
+# Perform a search query
+search_response = client.search(
+    query="What is Surfsense?",
+    depth="standard",  # "standard" or "deep"
+    output_type="searchResults",  # "searchResults" or "sourcedAnswer" or "structured"
+    structured_output_schema=None,  # must be filled if output_type is "structured"
+)
+print(search_response)
+
+# results=[LinkupSearchTextResult(type='text', name='SurfSense - Future Tools', url='https://www.futuretools.io/tools/surfsense', content='SurfSense is an open-source AI research assistant that functions as a personal, private alternative to tools like NotebookLM or Perplexity. It enables users to save webpages (even those behind login walls), upload documents, and build a searchable knowledge base that can be queried through natural language. The tool integrates with various external sources including search engines, Slack ...'), LinkupSearchTextResult(type='text', name='r/selfhosted on Reddit: SurfSense - Personal AI Assistant for World Wide Web Surfers.', url='https://www.reddit.com/r/selfhosted/comments/1fl58vh/surfsense_personal_ai_assistant_for_world_wide/', content='14 votes, 22 comments. Hi Everyone, For the past few months I have been trying to build a Personal AI Assistant for World Wide Web Surfers. It…\nWhat it is and why I am making it: Well when I’m browsing the internet, I tend to save a ton of content—but remembering when and what you saved? Total brain freeze! That’s where SurfSense comes in. SurfSense is a Personal AI Assistant for anything you see (Social Media Chats, Calendar Invites, Important Mails, Tutorials, Recipes and anything ) on the World Wide Web.\nPlease test it out at https://github.com/MODSetter/SurfSense and let me know your feedback.\nPosted by u/Uiqueblhats - 14 votes and 22 comments'), LinkupSearchTextResult(type='text', name='SurfSense - GitHub', url='https://github.com/DLMJR/surfsense', content='While tools like NotebookLM and Perplexity are impressive and highly effective for conducting research on any topic/query, SurfSense elevates this capability by integrating with your personal knowledge base. 
It is a highly customizable AI research agent, connected to external sources such as search engines (Tavily), Slack, Notion, YouTube, GitHub and more to come.'), LinkupSearchTextResult(type='text', name='How to Set Up and Use SurfSense: Your Personal AI Assistant', url='https://fxis.ai/edu/how-to-set-up-and-use-surfsense-your-personal-ai-assistant/', content='SurfSense is the answer to the common struggle of remembering what content you’ve saved while browsing the internet. Imagine your favorite library, but instead of books, it’s filled with every useful webpage, chat message, recipe, and tutorial you’ve come across. With SurfSense, you can instantly recall any of these digital treasures. Let’s embark on a journey to set up and utilize ...'), LinkupSearchTextResult(type='text', name='Surf Sense | F6S', url='https://www.f6s.com/surfsense', content='Surf Sense - Government - Surf Sense is the modern infrastructure network of the ocean.\nsurfsense.com.au · Nathan Adler · Sydney, Australia · Product leader, ex-engineer, start-up founder & maker, with end-to-end product development background in software and hardware. Product · Employee @Airtasker · Product · Employee @SafetyCulture · B Engineering / B Commerce @UNSW See 3 more ·'), LinkupSearchTextResult(type='text', name='Surf Sense | Online Surf Coaching & Knowledge Platform', url='https://www.surf-sense.com/', content='Join Surf Sense, the ultimate online surf coaching platform designed for intermediate and advanced surfers. Access expert-guided courses, weekly live Q&A sessions, and a thriving global surf community. 
Start improving your surfing today!\nundefined'), LinkupSearchTextResult(type='text', name='SurfSense - The Open Source Alternative to NotebookLM / Perplexity ...', url='https://www.redditmedia.com/r/selfhosted/comments/1jzi67a/surfsense_the_open_source_alternative_to/', content="For those of you who aren't familiar with SurfSense, it aims to be the open-source alternative to NotebookLM, Perplexity, or Glean. In short, it's a Highly Customizable AI Research Agent but connected to your personal external sources like search engines (Tavily), Slack, Notion, YouTube, GitHub, and more coming soon."), LinkupSearchTextResult(type='text', name='GitHub - MODSetter/SurfSense: Open Source Alternative to NotebookLM ...', url='https://github.com/MODSetter/SurfSense', content='While tools like NotebookLM and Perplexity are impressive and highly effective for conducting research on any topic/query, SurfSense elevates this capability by integrating with your personal knowledge base. It is a highly customizable AI research agent, connected to external sources such as search engines (Tavily), Slack, Linear, Notion, YouTube, GitHub and more to come.'), LinkupSearchTextResult(type='text', name='GitHub - MODSetter/SurfSense: Open Source Alternative to NotebookLM / Perplexity / Glean, connected to external sources such as search engines (Tavily), Slack, Linear, Notion, YouTube, GitHub and more.', url='https://github.com/MODSetter/SurfSense', content='Open Source Alternative to NotebookLM / Perplexity / Glean, connected to external sources such as search engines (Tavily), Slack, Linear, Notion, YouTube, GitHub and more. 
- MODSetter/SurfSense\nWhile tools like NotebookLM and Perplexity are impressive and highly effective for conducting research on any topic/query, SurfSense elevates this capability by integrating with your personal knowledge base.\nThe SurfSense extension can be used to save any webpage you like.\nThe SurfSense Podcast feature is currently being reworked for better UI and stability.\nSurfSense is actively being developed.'), LinkupSearchTextResult(type='text', name='SurfSense - Chrome Web Store', url='https://chromewebstore.google.com/detail/surfsense/jihmihbdpfjhppdlifphccgefjhifblf', content='Extension to collect Browsing History for SurfSense.\nWell when I’m browsing the internet, I tend to save a ton of content—but remembering when and what you saved? Total brain freeze! ❄️ That’s where SurfSense comes in. SurfSense is like a Knowledge Graph 🧠 Brain 🧠 for anything you see on the World Wide Web.\nSurfSense has disclosed the following information regarding the collection and usage of your data.\nThen, ask your personal knowledge base anything about your saved content., and voilà—instant recall! 🧑\u200d💻🌐 Use this extension to capture & save your Web Content and chat with your personal Knowledge Graph 🧠 Brain 🧠 at https://www.surfsense.net')]
\ No newline at end of file
diff --git a/surfsense_backend/app/utils/connector_service.py b/surfsense_backend/app/utils/connector_service.py
index 9a6e13c43..7f88c1c0f 100644
--- a/surfsense_backend/app/utils/connector_service.py
+++ b/surfsense_backend/app/utils/connector_service.py
@@ -5,6 +5,7 @@ from sqlalchemy.future import select
from app.retriver.chunks_hybrid_search import ChucksHybridSearchRetriever
from app.db import SearchSourceConnector, SearchSourceConnectorType
from tavily import TavilyClient
+from linkup import LinkupClient
class ConnectorService:
@@ -643,3 +644,97 @@ class ConnectorService:
}
return result_object, linear_chunks
+
+    async def search_linkup(self, user_query: str, user_id: str, mode: str = "standard") -> tuple:
+        """
+        Search using Linkup API and return both the source information and documents
+
+        Args:
+            user_query: The user's query
+            user_id: The user's ID
+            mode: Search depth mode, can be "standard" or "deep"
+
+        Returns:
+            tuple: (sources_info, documents) — sources_info is the connector
+            result object consumed by the UI; documents are chunk dicts.
+        """
+        # Shared empty payload for the "not configured" and error paths.
+        empty_result = {
+            "id": 10,
+            "name": "Linkup Search",
+            "type": "LINKUP_API",
+            "sources": [],
+        }
+
+        # Get Linkup connector configuration
+        linkup_connector = await self.get_connector_by_type(user_id, SearchSourceConnectorType.LINKUP_API)
+
+        if not linkup_connector:
+            # Return empty results if no Linkup connector is configured
+            return empty_result, []
+
+        # Initialize Linkup client with API key from connector config
+        linkup_api_key = linkup_connector.config.get("LINKUP_API_KEY")
+        linkup_client = LinkupClient(api_key=linkup_api_key)
+
+        # Perform search with Linkup
+        try:
+            response = linkup_client.search(
+                query=user_query,
+                depth=mode,  # Use the provided mode ("standard" or "deep")
+                output_type="searchResults",  # Default to search results
+            )
+
+            # Results are pydantic-style objects (e.g. LinkupSearchTextResult),
+            # so use attribute access with getattr defaults, not dict access.
+            linkup_results = getattr(response, 'results', None) or []
+
+            # Process each result and create sources directly without deduplication.
+            # BUG FIX: the previous `linkup_results[i]['document']['id'] = ...`
+            # mutation raised TypeError on the result objects (they are not
+            # dicts), which the except below swallowed — every search silently
+            # returned empty. The id is set on the document dict instead.
+            sources_list = []
+            documents = []
+
+            for i, result in enumerate(linkup_results):
+                title = getattr(result, 'name', "Linkup Result")
+                content = getattr(result, 'content', "")
+                url = getattr(result, 'url', "")
+
+                # Create a source entry
+                sources_list.append({
+                    "id": self.source_id_counter,
+                    "title": title,
+                    "description": content[:100],
+                    "url": url
+                })
+
+                # Create a document entry; the document id mirrors the source
+                # id so the UI can link chunks back to their source.
+                documents.append({
+                    "chunk_id": f"linkup_chunk_{i}",
+                    "content": content,
+                    "score": 1.0,  # Default score since not provided by Linkup
+                    "document": {
+                        "id": self.source_id_counter,
+                        "title": title,
+                        "document_type": "LINKUP_API",
+                        "metadata": {
+                            "url": url,
+                            "type": getattr(result, 'type', ""),
+                            "source": "LINKUP_API"
+                        }
+                    }
+                })
+                self.source_id_counter += 1
+
+            # Create result object
+            result_object = {
+                "id": 10,
+                "name": "Linkup Search",
+                "type": "LINKUP_API",
+                "sources": sources_list,
+            }
+
+            return result_object, documents
+
+        except Exception as e:
+            # Log the error and return empty results
+            print(f"Error searching with Linkup: {str(e)}")
+            return empty_result, []
diff --git a/surfsense_backend/pyproject.toml b/surfsense_backend/pyproject.toml
index 7b7a6f900..8f8dc4c0e 100644
--- a/surfsense_backend/pyproject.toml
+++ b/surfsense_backend/pyproject.toml
@@ -15,6 +15,7 @@ dependencies = [
"langchain-community>=0.3.17",
"langchain-unstructured>=0.1.6",
"langgraph>=0.3.29",
+ "linkup-sdk>=0.2.4",
"litellm>=1.61.4",
"markdownify>=0.14.1",
"notion-client>=2.3.0",
diff --git a/surfsense_backend/uv.lock b/surfsense_backend/uv.lock
index 9b485b0df..9601bccb3 100644
--- a/surfsense_backend/uv.lock
+++ b/surfsense_backend/uv.lock
@@ -1413,6 +1413,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/8b/e4/5380e8229c442e406404977d2ec71a9db6a3e6a89fce7791c6ad7cd2bdbe/langsmith-0.3.8-py3-none-any.whl", hash = "sha256:fbb9dd97b0f090219447fca9362698d07abaeda1da85aa7cc6ec6517b36581b1", size = 332800 },
]
+[[package]]
+name = "linkup-sdk"
+version = "0.2.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "httpx" },
+ { name = "pydantic" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c2/c7/d9a85331bf2611ecac67f1ad92a6ced641b2e2e93eea26b17a9af701b3d1/linkup_sdk-0.2.4.tar.gz", hash = "sha256:2b8fd1894b9b4715bc14aabcbf53df6def9024f2cc426f234cc59e1807ec4c12", size = 9392 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/18/d8/bb9e01328fe5ad979e3e459c0f76321d295663906deef56eeaa5ce0cf269/linkup_sdk-0.2.4-py3-none-any.whl", hash = "sha256:8bc4c4f34de93529136a14e42441d803868d681c2bf3fd59be51923e44f1f1d4", size = 8325 },
+]
+
[[package]]
name = "litellm"
version = "1.61.4"
@@ -3078,6 +3091,7 @@ dependencies = [
{ name = "langchain-community" },
{ name = "langchain-unstructured" },
{ name = "langgraph" },
+ { name = "linkup-sdk" },
{ name = "litellm" },
{ name = "markdownify" },
{ name = "notion-client" },
@@ -3106,6 +3120,7 @@ requires-dist = [
{ name = "langchain-community", specifier = ">=0.3.17" },
{ name = "langchain-unstructured", specifier = ">=0.1.6" },
{ name = "langgraph", specifier = ">=0.3.29" },
+ { name = "linkup-sdk", specifier = ">=0.2.4" },
{ name = "litellm", specifier = ">=1.61.4" },
{ name = "markdownify", specifier = ">=0.14.1" },
{ name = "notion-client", specifier = ">=2.3.0" },
diff --git a/surfsense_web/app/dashboard/[search_space_id]/connectors/(manage)/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/connectors/(manage)/page.tsx
index 24fe6265d..af92a6ae5 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/connectors/(manage)/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/connectors/(manage)/page.tsx
@@ -46,6 +46,7 @@ const getConnectorTypeDisplay = (type: string): string => {
"NOTION_CONNECTOR": "Notion",
"GITHUB_CONNECTOR": "GitHub",
"LINEAR_CONNECTOR": "Linear",
+ "LINKUP_API": "Linkup",
// Add other connector types here as needed
};
return typeMap[type] || type;
diff --git a/surfsense_web/app/dashboard/[search_space_id]/connectors/[connector_id]/edit/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/connectors/[connector_id]/edit/page.tsx
index d41295faa..5afea12c9 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/connectors/[connector_id]/edit/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/connectors/[connector_id]/edit/page.tsx
@@ -160,6 +160,17 @@ export default function EditConnectorPage() {
/>
)}
+ {/* == Linkup == */}
+ {connector.connector_type === 'LINKUP_API' && (
+
+ )}
+