Merge pull request #1066 from MODSetter/dev_mod

feat: bumped version to 0.0.14
This commit is contained in:
Rohan Verma 2026-03-31 21:46:49 -07:00 committed by GitHub
commit 63ec4be46c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
28 changed files with 1081 additions and 323 deletions

View file

@ -447,6 +447,7 @@ async def create_surfsense_deep_agent(
deepagent_middleware = [
TodoListMiddleware(),
KnowledgeBaseSearchMiddleware(
llm=llm,
search_space_id=search_space_id,
available_connectors=available_connectors,
available_document_types=available_document_types,

View file

@ -15,14 +15,19 @@ import logging
import re
import uuid
from collections.abc import Sequence
from datetime import UTC, datetime
from typing import Any
from langchain.agents.middleware import AgentMiddleware, AgentState
from langchain_core.language_models import BaseChatModel
from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, ToolMessage
from langgraph.runtime import Runtime
from litellm import token_counter
from pydantic import BaseModel, Field, ValidationError
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.agents.new_chat.utils import parse_date_or_datetime, resolve_date_range
from app.db import NATIVE_TO_LEGACY_DOCTYPE, Document, Folder, shielded_async_session
from app.retriever.chunks_hybrid_search import ChucksHybridSearchRetriever
from app.utils.document_converters import embed_texts
@ -32,6 +37,23 @@ logger = logging.getLogger(__name__)
_perf_log = get_perf_logger()
class KBSearchPlan(BaseModel):
    """Structured internal plan for KB retrieval.

    Parsed from the planner LLM's JSON reply and validated with pydantic;
    an empty or missing ``optimized_query`` fails validation (min_length=1).
    """

    # Rewritten retrieval query; validation rejects an empty string.
    optimized_query: str = Field(
        min_length=1,
        description="Optimized retrieval query preserving the user's intent.",
    )
    # Date bounds stay raw strings here; parsing/normalization happens later
    # in _normalize_optional_date_range.
    start_date: str | None = Field(
        default=None,
        description="Optional ISO start date or datetime for KB search filtering.",
    )
    end_date: str | None = Field(
        default=None,
        description="Optional ISO end date or datetime for KB search filtering.",
    )
def _extract_text_from_message(message: BaseMessage) -> str:
"""Extract plain text from a message content."""
content = getattr(message, "content", "")
@ -61,6 +83,212 @@ def _safe_filename(value: str, *, fallback: str = "untitled.xml") -> str:
return name
def _render_recent_conversation(
    messages: Sequence[BaseMessage],
    *,
    llm: BaseChatModel | None = None,
    user_text: str = "",
    max_messages: int = 6,
) -> str:
    """Render recent dialogue for internal planning under a token budget.

    Prefers the latest messages and uses the project's existing model-aware
    token budgeting hooks when available on the LLM (`_count_tokens`,
    `_get_max_input_tokens`). Falls back to the prior fixed-message heuristic
    if token counting is unavailable.

    Args:
        messages: Full conversation history (only human/assistant text is kept;
            assistant messages carrying tool calls are skipped).
        llm: Optional model whose private budgeting hooks drive selection.
        user_text: The latest user message, excluded from the rendered history
            because the planner prompt passes it separately.
        max_messages: Message cap used only by the legacy fallback renderer.

    Returns:
        "role: text" lines joined by newlines, or "" when nothing qualifies.
    """
    # Collect (role, whitespace-collapsed text) pairs for plain chat turns.
    rendered: list[tuple[str, str]] = []
    for message in messages:
        role: str | None = None
        if isinstance(message, HumanMessage):
            role = "user"
        elif isinstance(message, AIMessage):
            # Tool-call messages carry no conversational content worth rendering.
            if getattr(message, "tool_calls", None):
                continue
            role = "assistant"
        else:
            continue
        text = _extract_text_from_message(message).strip()
        if not text:
            continue
        text = re.sub(r"\s+", " ", text)
        rendered.append((role, text))
    if not rendered:
        return ""
    # Exclude the latest user message from "recent conversation" because it is
    # already passed separately as "Latest user message" in the planner prompt.
    if rendered and rendered[-1][0] == "user" and rendered[-1][1] == user_text.strip():
        rendered = rendered[:-1]
    if not rendered:
        return ""

    def _legacy_render() -> str:
        # Fixed-size fallback: last `max_messages` turns, each clipped to 400 chars.
        legacy_lines: list[str] = []
        for role, text in rendered[-max_messages:]:
            clipped = text[:400].rstrip() + "..." if len(text) > 400 else text
            legacy_lines.append(f"{role}: {clipped}")
        return "\n".join(legacy_lines)

    def _count_prompt_tokens(conversation_text: str) -> int | None:
        # Count tokens for the FULL planner prompt built around this candidate
        # conversation, so the budget reflects what will actually be sent.
        prompt = _build_kb_planner_prompt(
            recent_conversation=conversation_text or "(none)",
            user_text=user_text,
        )
        message_payload = [{"role": "user", "content": prompt}]
        # Preferred path: the LLM's own counting hook.
        count_fn = getattr(llm, "_count_tokens", None) if llm is not None else None
        if callable(count_fn):
            try:
                return count_fn(message_payload)
            except Exception:
                pass
        # Fallback: derive a model name from the LLM profile and use litellm.
        profile = getattr(llm, "profile", None) if llm is not None else None
        model_names: list[str] = []
        if isinstance(profile, dict):
            tcms = profile.get("token_count_models")
            if isinstance(tcms, list):
                model_names.extend(
                    name for name in tcms if isinstance(name, str) and name
                )
            tcm = profile.get("token_count_model")
            if isinstance(tcm, str) and tcm and tcm not in model_names:
                model_names.append(tcm)
        model_name = model_names[0] if model_names else getattr(llm, "model", None)
        if not isinstance(model_name, str) or not model_name:
            return None
        try:
            return token_counter(messages=message_payload, model=model_name)
        except Exception:
            return None

    # Determine the model's input budget via hook or profile; without one,
    # budgeting is impossible and we fall back to the legacy renderer.
    get_max_input_tokens = getattr(llm, "_get_max_input_tokens", None) if llm else None
    if callable(get_max_input_tokens):
        try:
            max_input_tokens = int(get_max_input_tokens())
        except Exception:
            max_input_tokens = None
    else:
        profile = getattr(llm, "profile", None) if llm is not None else None
        max_input_tokens = (
            profile.get("max_input_tokens")
            if isinstance(profile, dict)
            and isinstance(profile.get("max_input_tokens"), int)
            else None
        )
    if not isinstance(max_input_tokens, int) or max_input_tokens <= 0:
        return _legacy_render()
    # Reserve ~2% of the window (clamped to [256, 1024]) for the model's output.
    output_reserve = min(max(int(max_input_tokens * 0.02), 256), 1024)
    budget = max_input_tokens - output_reserve
    if budget <= 0:
        return _legacy_render()
    # Greedily add messages newest-first while the whole prompt stays in budget.
    selected_lines: list[str] = []
    for role, text in reversed(rendered):
        candidate_line = f"{role}: {text}"
        candidate_lines = [candidate_line, *selected_lines]
        candidate_conversation = "\n".join(candidate_lines)
        token_count = _count_prompt_tokens(candidate_conversation)
        if token_count is None:
            # Token counting broke mid-way; abandon budgeting entirely.
            return _legacy_render()
        if token_count <= budget:
            selected_lines = candidate_lines
            continue
        # If the full message does not fit, keep as much of this most-recent
        # older message as possible via binary search.
        lo, hi = 1, len(text)
        best_line: str | None = None
        while lo <= hi:
            mid = (lo + hi) // 2
            clipped_text = text[:mid].rstrip() + "..."
            clipped_line = f"{role}: {clipped_text}"
            clipped_conversation = "\n".join([clipped_line, *selected_lines])
            clipped_tokens = _count_prompt_tokens(clipped_conversation)
            if clipped_tokens is None:
                break
            if clipped_tokens <= budget:
                best_line = clipped_line
                lo = mid + 1
            else:
                hi = mid - 1
        if best_line is not None:
            selected_lines = [best_line, *selected_lines]
        # Stop once a message no longer fits whole: anything older is skipped.
        break
    if not selected_lines:
        return _legacy_render()
    return "\n".join(selected_lines)
def _build_kb_planner_prompt(
    *,
    recent_conversation: str,
    user_text: str,
) -> str:
    """Build a compact internal prompt for KB query rewriting and date scoping.

    Args:
        recent_conversation: Pre-rendered "role: text" history lines; an empty
            string is shown to the model as "(none)".
        user_text: The latest user message, passed verbatim.

    Returns:
        A single prompt string instructing the model to reply with bare JSON
        matching the KBSearchPlan shape.
    """
    today = datetime.now(UTC).date().isoformat()
    # NOTE: the prompt text below is runtime behavior — keep it verbatim.
    return (
        "You optimize internal knowledge-base search inputs for document retrieval.\n"
        "Return JSON only with this exact shape:\n"
        '{"optimized_query":"string","start_date":"ISO string or null","end_date":"ISO string or null"}\n\n'
        "Rules:\n"
        "- Preserve the user's intent.\n"
        "- Rewrite the query to improve retrieval using concrete entities, acronyms, projects, tools, people, and document-specific terms when helpful.\n"
        "- Keep the query concise and retrieval-focused.\n"
        "- Only use date filters when the latest user request or recent dialogue clearly implies a time range.\n"
        "- If you use date filters, prefer returning both bounds.\n"
        "- If no date filter is useful, return null for both dates.\n"
        "- Do not include markdown, prose, or explanations.\n\n"
        f"Today's UTC date: {today}\n\n"
        f"Recent conversation:\n{recent_conversation or '(none)'}\n\n"
        f"Latest user message:\n{user_text}"
    )
def _extract_json_payload(text: str) -> str:
"""Extract a JSON object from a raw LLM response."""
stripped = text.strip()
fenced = re.search(r"```(?:json)?\s*(\{.*?\})\s*```", stripped, re.DOTALL)
if fenced:
return fenced.group(1)
start = stripped.find("{")
end = stripped.rfind("}")
if start != -1 and end != -1 and end > start:
return stripped[start : end + 1]
return stripped
def _parse_kb_search_plan_response(response_text: str) -> KBSearchPlan:
    """Parse the planner's raw reply into a validated :class:`KBSearchPlan`.

    Raises ``json.JSONDecodeError`` on malformed JSON and pydantic
    ``ValidationError`` when the payload does not match the schema.
    """
    raw_payload = _extract_json_payload(response_text)
    return KBSearchPlan.model_validate(json.loads(raw_payload))
def _normalize_optional_date_range(
    start_date: str | None,
    end_date: str | None,
) -> tuple[datetime | None, datetime | None]:
    """Normalize optional planner date strings into a UTC datetime range.

    Empty/None inputs yield ``(None, None)``; otherwise the parsed bounds
    are completed by ``resolve_date_range``.
    """
    lower = parse_date_or_datetime(start_date) if start_date else None
    upper = parse_date_or_datetime(end_date) if end_date else None
    if lower is None and upper is None:
        return None, None
    return resolve_date_range(lower, upper)
def _build_document_xml(
document: dict[str, Any],
matched_chunk_ids: set[int] | None = None,
@ -264,6 +492,8 @@ async def search_knowledge_base(
available_connectors: list[str] | None = None,
available_document_types: list[str] | None = None,
top_k: int = 10,
start_date: datetime | None = None,
end_date: datetime | None = None,
) -> list[dict[str, Any]]:
"""Run a single unified hybrid search against the knowledge base.
@ -286,6 +516,8 @@ async def search_knowledge_base(
top_k=retriever_top_k,
search_space_id=search_space_id,
document_type=doc_types,
start_date=start_date,
end_date=end_date,
query_embedding=embedding.tolist(),
)
@ -346,16 +578,71 @@ class KnowledgeBaseSearchMiddleware(AgentMiddleware): # type: ignore[type-arg]
    def __init__(
        self,
        *,
        llm: BaseChatModel | None = None,
        search_space_id: int,
        available_connectors: list[str] | None = None,
        available_document_types: list[str] | None = None,
        top_k: int = 10,
    ) -> None:
        """Configure the KB search middleware.

        Args:
            llm: Optional planner model used to rewrite the query and infer
                date filters; when ``None`` the raw user text is searched.
            search_space_id: Search space the hybrid search is scoped to.
            available_connectors: Connector names allowed for this run.
            available_document_types: Document type names allowed for this run.
            top_k: Maximum number of documents to retrieve per search.
        """
        self.llm = llm
        self.search_space_id = search_space_id
        self.available_connectors = available_connectors
        self.available_document_types = available_document_types
        self.top_k = top_k
    async def _plan_search_inputs(
        self,
        *,
        messages: Sequence[BaseMessage],
        user_text: str,
    ) -> tuple[str, datetime | None, datetime | None]:
        """Rewrite the KB query and infer optional date filters with the LLM.

        Returns ``(query, start_date, end_date)``. When no planner LLM is
        configured, or the planner's reply cannot be parsed/validated, falls
        back to ``(user_text, None, None)``.
        """
        if self.llm is None:
            return user_text, None, None
        recent_conversation = _render_recent_conversation(
            messages,
            llm=self.llm,
            user_text=user_text,
        )
        prompt = _build_kb_planner_prompt(
            recent_conversation=recent_conversation,
            user_text=user_text,
        )
        loop = asyncio.get_running_loop()
        t0 = loop.time()
        try:
            response = await self.llm.ainvoke(
                [HumanMessage(content=prompt)],
                # Tagged as internal — presumably so this planner call can be
                # filtered out of user-facing traces; confirm against consumers.
                config={"tags": ["surfsense:internal"]},
            )
            plan = _parse_kb_search_plan_response(_extract_text_from_message(response))
            # Collapse whitespace; an empty rewrite degrades to the raw query.
            optimized_query = (
                re.sub(r"\s+", " ", plan.optimized_query).strip() or user_text
            )
            start_date, end_date = _normalize_optional_date_range(
                plan.start_date,
                plan.end_date,
            )
            _perf_log.info(
                "[kb_fs_middleware] planner in %.3fs query=%r optimized=%r start=%s end=%s",
                loop.time() - t0,
                user_text[:80],
                optimized_query[:120],
                start_date.isoformat() if start_date else None,
                end_date.isoformat() if end_date else None,
            )
            return optimized_query, start_date, end_date
        except (json.JSONDecodeError, ValidationError, ValueError) as exc:
            # Malformed/invalid planner output is expected occasionally; log
            # and degrade rather than fail the whole agent turn.
            logger.warning(
                "KB planner returned invalid output, using raw query: %s", exc
            )
        except Exception as exc:  # pragma: no cover - defensive fallback
            logger.warning("KB planner failed, using raw query: %s", exc)
        return user_text, None, None
def before_agent( # type: ignore[override]
self,
state: AgentState,
@ -388,13 +675,19 @@ class KnowledgeBaseSearchMiddleware(AgentMiddleware): # type: ignore[type-arg]
t0 = _perf_log and asyncio.get_event_loop().time()
existing_files = state.get("files")
planned_query, start_date, end_date = await self._plan_search_inputs(
messages=messages,
user_text=user_text,
)
search_results = await search_knowledge_base(
query=user_text,
query=planned_query,
search_space_id=self.search_space_id,
available_connectors=self.available_connectors,
available_document_types=self.available_document_types,
top_k=self.top_k,
start_date=start_date,
end_date=end_date,
)
new_files = await build_scoped_filesystem(
documents=search_results,
@ -405,9 +698,10 @@ class KnowledgeBaseSearchMiddleware(AgentMiddleware): # type: ignore[type-arg]
if t0 is not None:
_perf_log.info(
"[kb_fs_middleware] completed in %.3fs query=%r new_files=%d total=%d",
"[kb_fs_middleware] completed in %.3fs query=%r optimized=%r new_files=%d total=%d",
asyncio.get_event_loop().time() - t0,
user_text[:80],
planned_query[:120],
len(new_files),
len(new_files) + len(existing_files or {}),
)

View file

@ -310,7 +310,7 @@ class GoogleGmailConnector:
Fetch recent messages from Gmail within specified date range.
Args:
max_results: Maximum number of messages to fetch (default: 50)
start_date: Start date in YYYY-MM-DD format (default: 30 days ago)
start_date: Start date in YYYY-MM-DD format (default: 3 days ago)
end_date: End date in YYYY-MM-DD format (default: today)
Returns:
Tuple containing (messages list with details, error message or None)
@ -334,8 +334,8 @@ class GoogleGmailConnector:
start_query = start_dt.strftime("%Y/%m/%d")
query_parts.append(f"after:{start_query}")
else:
# Default to 30 days ago
cutoff_date = datetime.now() - timedelta(days=30)
# Default to 3 days ago
cutoff_date = datetime.now() - timedelta(days=3)
date_query = cutoff_date.strftime("%Y/%m/%d")
query_parts.append(f"after:{date_query}")

View file

@ -1,6 +1,6 @@
[project]
name = "surf-new-backend"
version = "0.0.13"
version = "0.0.14"
description = "SurfSense Backend"
requires-python = ">=3.12"
dependencies = [

View file

@ -152,7 +152,9 @@ class _FakeReconciliationStripeClient:
class TestStripeCheckoutSessionCreation:
async def test_get_status_reflects_backend_toggle(self, client, headers, monkeypatch):
async def test_get_status_reflects_backend_toggle(
self, client, headers, monkeypatch
):
monkeypatch.setattr(stripe_routes.config, "STRIPE_PAGE_BUYING_ENABLED", False)
disabled_response = await client.get("/api/v1/stripe/status", headers=headers)
assert disabled_response.status_code == 200, disabled_response.text
@ -237,7 +239,9 @@ class TestStripeCheckoutSessionCreation:
)
assert response.status_code == 503, response.text
assert response.json()["detail"] == "Page purchases are temporarily unavailable."
assert (
response.json()["detail"] == "Page purchases are temporarily unavailable."
)
purchase_count = await _fetchrow("SELECT COUNT(*) AS count FROM page_purchases")
assert purchase_count is not None

View file

@ -3,7 +3,7 @@
from __future__ import annotations
import uuid
from datetime import UTC, datetime
from datetime import UTC, datetime, timedelta
import pytest_asyncio
from sqlalchemy.ext.asyncio import AsyncSession
@ -22,6 +22,7 @@ def _make_document(
content: str,
search_space_id: int,
created_by_id: str,
updated_at: datetime | None = None,
) -> Document:
uid = uuid.uuid4().hex[:12]
return Document(
@ -34,7 +35,7 @@ def _make_document(
search_space_id=search_space_id,
created_by_id=created_by_id,
embedding=DUMMY_EMBEDDING,
updated_at=datetime.now(UTC),
updated_at=updated_at or datetime.now(UTC),
status={"state": "ready"},
)
@ -104,3 +105,54 @@ async def seed_large_doc(
"search_space": db_search_space,
"user": db_user,
}
@pytest_asyncio.fixture
async def seed_date_filtered_docs(
    db_session: AsyncSession, db_user: User, db_search_space: SearchSpace
):
    """Insert matching docs with different timestamps for date-filter tests.

    Returns a dict exposing both documents plus the search space and user
    they belong to, so tests can assert on the concrete row ids.
    """
    user_id = str(db_user.id)
    space_id = db_search_space.id
    now = datetime.now(UTC)
    # Identical content in both docs so the same query matches each; only
    # the updated_at timestamps differ.
    recent_doc = _make_document(
        title="Recent OCV Notes",
        document_type=DocumentType.FILE,
        content="ocv meeting decisions and action items",
        search_space_id=space_id,
        created_by_id=user_id,
        updated_at=now,
    )
    old_doc = _make_document(
        title="Old OCV Notes",
        document_type=DocumentType.FILE,
        content="ocv meeting decisions and action items",
        search_space_id=space_id,
        created_by_id=user_id,
        updated_at=now - timedelta(days=730),  # ~2 years old
    )
    db_session.add_all([recent_doc, old_doc])
    # Flush to assign document ids before attaching chunks.
    await db_session.flush()
    db_session.add_all(
        [
            _make_chunk(
                content="ocv meeting decisions and action items recent",
                document_id=recent_doc.id,
            ),
            _make_chunk(
                content="ocv meeting decisions and action items old",
                document_id=old_doc.id,
            ),
        ]
    )
    await db_session.flush()
    return {
        "recent_doc": recent_doc,
        "old_doc": old_doc,
        "search_space": db_search_space,
        "user": db_user,
    }

View file

@ -0,0 +1,62 @@
"""Integration smoke tests for KB search query/date scoping."""
from __future__ import annotations
from contextlib import asynccontextmanager
from datetime import UTC, datetime, timedelta
import numpy as np
import pytest
from app.agents.new_chat.middleware.knowledge_search import search_knowledge_base
from .conftest import DUMMY_EMBEDDING
pytestmark = pytest.mark.integration
async def test_search_knowledge_base_applies_date_filters(
    db_session,
    seed_date_filtered_docs,
    monkeypatch,
):
    """Date filters should remove older matching documents from scoped KB results."""

    # Reuse the test's session instead of opening a real shielded session.
    @asynccontextmanager
    async def fake_shielded_async_session():
        yield db_session

    monkeypatch.setattr(
        "app.agents.new_chat.middleware.knowledge_search.shielded_async_session",
        fake_shielded_async_session,
    )
    # Deterministic embeddings so results do not depend on a live model.
    monkeypatch.setattr(
        "app.agents.new_chat.middleware.knowledge_search.embed_texts",
        lambda texts: [np.array(DUMMY_EMBEDDING) for _ in texts],
    )
    space_id = seed_date_filtered_docs["search_space"].id
    recent_cutoff = datetime.now(UTC) - timedelta(days=30)
    # Baseline: no date bounds — both seeded docs should match.
    unfiltered_results = await search_knowledge_base(
        query="ocv meeting decisions",
        search_space_id=space_id,
        available_document_types=["FILE"],
        top_k=10,
    )
    # Same query restricted to the last 30 days.
    filtered_results = await search_knowledge_base(
        query="ocv meeting decisions",
        search_space_id=space_id,
        available_document_types=["FILE"],
        top_k=10,
        start_date=recent_cutoff,
        end_date=datetime.now(UTC),
    )
    unfiltered_ids = {result["document"]["id"] for result in unfiltered_results}
    filtered_ids = {result["document"]["id"] for result in filtered_results}
    assert seed_date_filtered_docs["recent_doc"].id in unfiltered_ids
    assert seed_date_filtered_docs["old_doc"].id in unfiltered_ids
    assert seed_date_filtered_docs["recent_doc"].id in filtered_ids
    assert seed_date_filtered_docs["old_doc"].id not in filtered_ids

View file

@ -1,12 +1,16 @@
"""Unit tests for knowledge_search middleware helpers.
"""Unit tests for knowledge_search middleware helpers."""
These test pure functions that don't require a database.
"""
import json
import pytest
from langchain_core.messages import AIMessage, HumanMessage
from app.agents.new_chat.middleware.knowledge_search import (
KnowledgeBaseSearchMiddleware,
_build_document_xml,
_normalize_optional_date_range,
_parse_kb_search_plan_response,
_render_recent_conversation,
_resolve_search_types,
)
@ -131,3 +135,234 @@ class TestBuildDocumentXml:
line for line in lines if "<![CDATA[" in line and "<chunk" in line
]
assert len(chunk_lines) == 3
# ── planner parsing / date normalization ───────────────────────────────
class TestPlannerHelpers:
    """Unit tests for planner JSON parsing and optional date normalization."""

    def test_parse_kb_search_plan_response_accepts_plain_json(self):
        plan = _parse_kb_search_plan_response(
            json.dumps(
                {
                    "optimized_query": "ocv meeting decisions summary",
                    "start_date": "2026-03-01",
                    "end_date": "2026-03-31",
                }
            )
        )
        assert plan.optimized_query == "ocv meeting decisions summary"
        assert plan.start_date == "2026-03-01"
        assert plan.end_date == "2026-03-31"

    def test_parse_kb_search_plan_response_accepts_fenced_json(self):
        # Markdown-fenced output must be unwrapped before JSON parsing.
        plan = _parse_kb_search_plan_response(
            """```json
{"optimized_query":"deel founders guide","start_date":null,"end_date":null}
```"""
        )
        assert plan.optimized_query == "deel founders guide"
        assert plan.start_date is None
        assert plan.end_date is None

    def test_normalize_optional_date_range_returns_none_when_absent(self):
        start_date, end_date = _normalize_optional_date_range(None, None)
        assert start_date is None
        assert end_date is None

    def test_normalize_optional_date_range_resolves_single_bound(self):
        # A lone start bound should be resolved into a complete range.
        start_date, end_date = _normalize_optional_date_range("2026-03-01", None)
        assert start_date is not None
        assert end_date is not None
        assert start_date.date().isoformat() == "2026-03-01"
        assert end_date >= start_date
class FakeLLM:
    """Async-invocable stub that returns a canned AIMessage and records calls.

    ``calls`` collects each ``ainvoke`` invocation's messages and config so
    tests can assert on how the middleware drove the planner.
    """

    def __init__(self, response_text: str):
        self.response_text = response_text
        self.calls: list[dict] = []

    async def ainvoke(self, messages, config=None):
        self.calls.append({"messages": messages, "config": config})
        return AIMessage(content=self.response_text)
class FakeBudgetLLM:
    """Stub exposing the token-budgeting hooks the conversation renderer probes.

    Token counting is just a character count over message contents, keeping
    budget arithmetic fully deterministic in tests.
    """

    def __init__(self, *, max_input_tokens: int):
        self._max_input_tokens_value = max_input_tokens

    def _get_max_input_tokens(self) -> int:
        return self._max_input_tokens_value

    def _count_tokens(self, messages) -> int:
        # Deterministic, simple proxy for tests: count characters as tokens.
        total = 0
        for payload in messages:
            total += len(payload.get("content", ""))
        return total
class TestKnowledgeBaseSearchMiddlewarePlanner:
    """Planner-path tests: budgeted conversation rendering and middleware wiring."""

    def test_render_recent_conversation_prefers_latest_messages_under_budget(self):
        messages = [
            HumanMessage(content="old user context " * 40),
            AIMessage(content="old assistant answer " * 35),
            HumanMessage(content="recent user context " * 20),
            AIMessage(content="recent assistant answer " * 18),
            HumanMessage(content="latest question"),
        ]
        rendered = _render_recent_conversation(
            messages,
            llm=FakeBudgetLLM(max_input_tokens=900),
            user_text="latest question",
        )
        # Newest turns survive the budget; the latest user message is excluded
        # because the planner prompt passes it separately.
        assert "recent user context" in rendered
        assert "recent assistant answer" in rendered
        assert "latest question" not in rendered
        # Chronological order is preserved in the rendered text.
        assert rendered.index("recent user context") < rendered.index(
            "recent assistant answer"
        )

    def test_render_recent_conversation_falls_back_to_legacy_without_budgeting(self):
        messages = [
            HumanMessage(content="message one"),
            AIMessage(content="message two"),
            HumanMessage(content="latest question"),
        ]
        # llm=None disables token budgeting, triggering the legacy renderer.
        rendered = _render_recent_conversation(
            messages,
            llm=None,
            user_text="latest question",
        )
        assert "user: message one" in rendered
        assert "assistant: message two" in rendered
        assert "latest question" not in rendered

    async def test_middleware_uses_optimized_query_and_dates(self, monkeypatch):
        captured: dict = {}

        async def fake_search_knowledge_base(**kwargs):
            captured.update(kwargs)
            return []

        async def fake_build_scoped_filesystem(**kwargs):
            return {}

        monkeypatch.setattr(
            "app.agents.new_chat.middleware.knowledge_search.search_knowledge_base",
            fake_search_knowledge_base,
        )
        monkeypatch.setattr(
            "app.agents.new_chat.middleware.knowledge_search.build_scoped_filesystem",
            fake_build_scoped_filesystem,
        )
        llm = FakeLLM(
            json.dumps(
                {
                    "optimized_query": "ocv meeting decisions action items",
                    "start_date": "2026-03-01",
                    "end_date": "2026-03-31",
                }
            )
        )
        middleware = KnowledgeBaseSearchMiddleware(llm=llm, search_space_id=37)
        result = await middleware.abefore_agent(
            {
                "messages": [
                    HumanMessage(content="what happened in our OCV meeting last month?")
                ]
            },
            runtime=None,
        )
        assert result is not None
        # The planner's rewritten query and normalized date bounds must reach
        # search_knowledge_base unchanged.
        assert captured["query"] == "ocv meeting decisions action items"
        assert captured["start_date"] is not None
        assert captured["end_date"] is not None
        assert captured["start_date"].date().isoformat() == "2026-03-01"
        assert captured["end_date"].date().isoformat() == "2026-03-31"
        # The internal planner call carries the surfsense:internal tag.
        assert llm.calls[0]["config"] == {"tags": ["surfsense:internal"]}

    async def test_middleware_falls_back_when_planner_returns_invalid_json(
        self,
        monkeypatch,
    ):
        captured: dict = {}

        async def fake_search_knowledge_base(**kwargs):
            captured.update(kwargs)
            return []

        async def fake_build_scoped_filesystem(**kwargs):
            return {}

        monkeypatch.setattr(
            "app.agents.new_chat.middleware.knowledge_search.search_knowledge_base",
            fake_search_knowledge_base,
        )
        monkeypatch.setattr(
            "app.agents.new_chat.middleware.knowledge_search.build_scoped_filesystem",
            fake_build_scoped_filesystem,
        )
        middleware = KnowledgeBaseSearchMiddleware(
            llm=FakeLLM("not json"),
            search_space_id=37,
        )
        await middleware.abefore_agent(
            {"messages": [HumanMessage(content="summarize founders guide by deel")]},
            runtime=None,
        )
        # Unparseable planner output degrades to the raw query with no dates.
        assert captured["query"] == "summarize founders guide by deel"
        assert captured["start_date"] is None
        assert captured["end_date"] is None

    async def test_middleware_passes_none_dates_when_planner_returns_nulls(
        self,
        monkeypatch,
    ):
        captured: dict = {}

        async def fake_search_knowledge_base(**kwargs):
            captured.update(kwargs)
            return []

        async def fake_build_scoped_filesystem(**kwargs):
            return {}

        monkeypatch.setattr(
            "app.agents.new_chat.middleware.knowledge_search.search_knowledge_base",
            fake_search_knowledge_base,
        )
        monkeypatch.setattr(
            "app.agents.new_chat.middleware.knowledge_search.build_scoped_filesystem",
            fake_build_scoped_filesystem,
        )
        middleware = KnowledgeBaseSearchMiddleware(
            llm=FakeLLM(
                json.dumps(
                    {
                        "optimized_query": "deel founders guide summary",
                        "start_date": None,
                        "end_date": None,
                    }
                )
            ),
            search_space_id=37,
        )
        await middleware.abefore_agent(
            {"messages": [HumanMessage(content="summarize founders guide by deel")]},
            runtime=None,
        )
        # Explicit nulls from the planner mean "no date filtering".
        assert captured["query"] == "deel founders guide summary"
        assert captured["start_date"] is None
        assert captured["end_date"] is None

View file

@ -7933,7 +7933,7 @@ wheels = [
[[package]]
name = "surf-new-backend"
version = "0.0.13"
version = "0.0.14"
source = { editable = "." }
dependencies = [
{ name = "alembic" },

View file

@ -1,7 +1,7 @@
{
"name": "surfsense_browser_extension",
"displayName": "Surfsense Browser Extension",
"version": "0.0.13",
"version": "0.0.14",
"description": "Extension to collect Browsing History for SurfSense.",
"author": "https://github.com/MODSetter",
"engines": {

View file

@ -4,7 +4,14 @@ import { CircleSlash2 } from "lucide-react";
import Link from "next/link";
import { useParams } from "next/navigation";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardFooter, CardHeader, CardTitle } from "@/components/ui/card";
import {
Card,
CardContent,
CardDescription,
CardFooter,
CardHeader,
CardTitle,
} from "@/components/ui/card";
export default function PurchaseCancelPage() {
const params = useParams();
@ -16,7 +23,9 @@ export default function PurchaseCancelPage() {
<CardHeader className="text-center">
<CircleSlash2 className="mx-auto h-10 w-10 text-muted-foreground" />
<CardTitle className="text-2xl">Checkout canceled</CardTitle>
<CardDescription>No charge was made and your current pages are unchanged.</CardDescription>
<CardDescription>
No charge was made and your current pages are unchanged.
</CardDescription>
</CardHeader>
<CardContent className="text-center text-sm text-muted-foreground">
You can return to the pricing options and try again whenever you&apos;re ready.

View file

@ -7,7 +7,14 @@ import { useParams } from "next/navigation";
import { useEffect } from "react";
import { USER_QUERY_KEY } from "@/atoms/user/user-query.atoms";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardFooter, CardHeader, CardTitle } from "@/components/ui/card";
import {
Card,
CardContent,
CardDescription,
CardFooter,
CardHeader,
CardTitle,
} from "@/components/ui/card";
export default function PurchaseSuccessPage() {
const params = useParams();

View file

@ -17,9 +17,18 @@ import { stripeApiService } from "@/lib/apis/stripe-api.service";
import { cn } from "@/lib/utils";
const STATUS_STYLES: Record<PagePurchaseStatus, { label: string; className: string }> = {
completed: { label: "Completed", className: "bg-emerald-600 text-white border-transparent hover:bg-emerald-600" },
pending: { label: "Pending", className: "bg-yellow-600 text-white border-transparent hover:bg-yellow-600" },
failed: { label: "Failed", className: "bg-destructive text-white border-transparent hover:bg-destructive" },
completed: {
label: "Completed",
className: "bg-emerald-600 text-white border-transparent hover:bg-emerald-600",
},
pending: {
label: "Pending",
className: "bg-yellow-600 text-white border-transparent hover:bg-yellow-600",
},
failed: {
label: "Failed",
className: "bg-destructive text-white border-transparent hover:bg-destructive",
},
};
function formatDate(iso: string): string {
@ -82,9 +91,7 @@ export function PurchaseHistoryContent() {
const style = STATUS_STYLES[p.status];
return (
<TableRow key={p.id}>
<TableCell className="text-sm">
{formatDate(p.created_at)}
</TableCell>
<TableCell className="text-sm">{formatDate(p.created_at)}</TableCell>
<TableCell className="text-right tabular-nums text-sm">
{p.pages_granted.toLocaleString()}
</TableCell>
@ -92,9 +99,7 @@ export function PurchaseHistoryContent() {
{formatAmount(p)}
</TableCell>
<TableCell className="text-center">
<Badge className={cn("text-[10px]", style.className)}>
{style.label}
</Badge>
<Badge className={cn("text-[10px]", style.className)}>{style.label}</Badge>
</TableCell>
</TableRow>
);

View file

@ -13,6 +13,24 @@ import {
} from "../constants/connector-constants";
import { getDocumentCountForConnector } from "../utils/connector-document-mapping";
type OAuthConnector = (typeof OAUTH_CONNECTORS)[number];
type ComposioConnector = (typeof COMPOSIO_CONNECTORS)[number];
type OtherConnector = (typeof OTHER_CONNECTORS)[number];
type CrawlerConnector = (typeof CRAWLERS)[number];
const DOCUMENT_FILE_CONNECTOR_TYPES = new Set<string>([
EnumConnectorName.GOOGLE_DRIVE_CONNECTOR,
EnumConnectorName.COMPOSIO_GOOGLE_DRIVE_CONNECTOR,
EnumConnectorName.ONEDRIVE_CONNECTOR,
EnumConnectorName.DROPBOX_CONNECTOR,
]);
const OTHER_DOCUMENT_CONNECTOR_TYPES = new Set<string>([
EnumConnectorName.YOUTUBE_CONNECTOR,
EnumConnectorName.NOTION_CONNECTOR,
EnumConnectorName.AIRTABLE_CONNECTOR,
]);
/**
* Extract the display name from a full connector name.
* Full names are in format "Base Name - identifier" (e.g., "Gmail - john@example.com").
@ -34,9 +52,7 @@ interface AllConnectorsTabProps {
allConnectors: SearchSourceConnector[] | undefined;
documentTypeCounts?: Record<string, number>;
indexingConnectorIds?: Set<number>;
onConnectOAuth: (
connector: (typeof OAUTH_CONNECTORS)[number] | (typeof COMPOSIO_CONNECTORS)[number]
) => void;
onConnectOAuth: (connector: OAuthConnector | ComposioConnector) => void;
onConnectNonOAuth?: (connectorType: string) => void;
onCreateWebcrawler?: () => void;
onCreateYouTubeCrawler?: () => void;
@ -92,241 +108,220 @@ export const AllConnectorsTab: FC<AllConnectorsTabProps> = ({
c.description.toLowerCase().includes(searchQuery.toLowerCase())
);
const nativeGoogleDriveConnectors = filteredOAuth.filter(
(c) => c.connectorType === EnumConnectorName.GOOGLE_DRIVE_CONNECTOR
);
const composioGoogleDriveConnectors = filteredComposio.filter(
(c) => c.connectorType === EnumConnectorName.COMPOSIO_GOOGLE_DRIVE_CONNECTOR
);
const fileStorageConnectors = filteredOAuth.filter(
(c) =>
c.connectorType === EnumConnectorName.ONEDRIVE_CONNECTOR ||
c.connectorType === EnumConnectorName.DROPBOX_CONNECTOR
);
const otherDocumentYouTubeConnectors = filteredCrawlers.filter(
(c) => c.connectorType === EnumConnectorName.YOUTUBE_CONNECTOR
);
const otherDocumentNotionConnectors = filteredOAuth.filter(
(c) => c.connectorType === EnumConnectorName.NOTION_CONNECTOR
);
const otherDocumentAirtableConnectors = filteredOAuth.filter(
(c) => c.connectorType === EnumConnectorName.AIRTABLE_CONNECTOR
);
const moreIntegrationsComposio = filteredComposio.filter(
(c) =>
!DOCUMENT_FILE_CONNECTOR_TYPES.has(c.connectorType) &&
!OTHER_DOCUMENT_CONNECTOR_TYPES.has(c.connectorType)
);
const moreIntegrationsOAuth = filteredOAuth.filter(
(c) =>
!DOCUMENT_FILE_CONNECTOR_TYPES.has(c.connectorType) &&
!OTHER_DOCUMENT_CONNECTOR_TYPES.has(c.connectorType)
);
const moreIntegrationsOther = filteredOther;
const moreIntegrationsCrawlers = filteredCrawlers.filter(
(c) =>
!c.connectorType ||
(!DOCUMENT_FILE_CONNECTOR_TYPES.has(c.connectorType) &&
!OTHER_DOCUMENT_CONNECTOR_TYPES.has(c.connectorType))
);
const renderOAuthCard = (connector: OAuthConnector | ComposioConnector) => {
const isConnected = connectedTypes.has(connector.connectorType);
const isConnecting = connectingId === connector.id;
const typeConnectors =
isConnected && allConnectors
? allConnectors.filter(
(c: SearchSourceConnector) => c.connector_type === connector.connectorType
)
: [];
const accountCount = typeConnectors.length;
const documentCount = getDocumentCountForConnector(connector.connectorType, documentTypeCounts);
const isIndexing = typeConnectors.some((c) => indexingConnectorIds?.has(c.id));
return (
<ConnectorCard
key={connector.id}
id={connector.id}
title={connector.title}
description={connector.description}
connectorType={connector.connectorType}
isConnected={isConnected}
isConnecting={isConnecting}
documentCount={documentCount}
accountCount={accountCount}
isIndexing={isIndexing}
onConnect={() => onConnectOAuth(connector)}
onManage={
isConnected && onViewAccountsList
? () => onViewAccountsList(connector.connectorType, connector.title)
: undefined
}
/>
);
};
const renderOtherCard = (connector: OtherConnector) => {
	const connected = connectedTypes.has(connector.connectorType);
	const connecting = connectingId === connector.id;
	// The persisted connector record backing this card, when one exists.
	const record =
		connected && allConnectors
			? allConnectors.find(
					(c: SearchSourceConnector) => c.connector_type === connector.connectorType
				)
			: undefined;
	const docCount = getDocumentCountForConnector(connector.connectorType, documentTypeCounts);
	const indexing = record && indexingConnectorIds?.has(record.id);
	// MCP cards display the number of MCP connectors rather than a document count.
	let mcpCount: number | undefined;
	if (connector.connectorType === EnumConnectorName.MCP_CONNECTOR && allConnectors) {
		mcpCount = allConnectors.filter(
			(c: SearchSourceConnector) => c.connector_type === EnumConnectorName.MCP_CONNECTOR
		).length;
	}
	// Fallback no-op keeps the card clickable even without a handler.
	const connectHandler = onConnectNonOAuth
		? () => onConnectNonOAuth(connector.connectorType)
		: () => {};
	return (
		<ConnectorCard
			key={connector.id}
			id={connector.id}
			title={connector.title}
			description={connector.description}
			connectorType={connector.connectorType}
			isConnected={connected}
			isConnecting={connecting}
			documentCount={docCount}
			connectorCount={mcpCount}
			isIndexing={indexing}
			onConnect={connectHandler}
			onManage={record && onManage ? () => onManage(record) : undefined}
		/>
	);
};
const renderCrawlerCard = (crawler: CrawlerConnector) => {
	// Crawlers backed by a real connector type report connection status; others never do.
	const connected = crawler.connectorType ? connectedTypes.has(crawler.connectorType) : false;
	const connecting = connectingId === crawler.id;
	// The persisted connector record backing this crawler, when one exists.
	const record =
		connected && crawler.connectorType && allConnectors
			? allConnectors.find(
					(c: SearchSourceConnector) => c.connector_type === crawler.connectorType
				)
			: undefined;
	const docCount = crawler.connectorType
		? getDocumentCountForConnector(crawler.connectorType, documentTypeCounts)
		: undefined;
	const indexing = record && indexingConnectorIds?.has(record.id);
	// YouTube and the web crawler get their dedicated creation flows; any other
	// connector-backed crawler falls through to the generic non-OAuth handler.
	let connectHandler: () => void = () => {};
	if (crawler.id === "youtube-crawler" && onCreateYouTubeCrawler) {
		connectHandler = onCreateYouTubeCrawler;
	} else if (crawler.id === "webcrawler-connector" && onCreateWebcrawler) {
		connectHandler = onCreateWebcrawler;
	} else if (crawler.connectorType && onConnectNonOAuth) {
		connectHandler = () => {
			if (crawler.connectorType) {
				onConnectNonOAuth(crawler.connectorType);
			}
		};
	}
	return (
		<ConnectorCard
			key={crawler.id}
			id={crawler.id}
			title={crawler.title}
			description={crawler.description}
			connectorType={crawler.connectorType || undefined}
			isConnected={connected}
			isConnecting={connecting}
			documentCount={docCount}
			isIndexing={indexing}
			onConnect={connectHandler}
			onManage={record && onManage ? () => onManage(record) : undefined}
		/>
	);
};
const hasDocumentFileConnectors =
nativeGoogleDriveConnectors.length > 0 ||
composioGoogleDriveConnectors.length > 0 ||
fileStorageConnectors.length > 0;
const hasMoreIntegrations =
otherDocumentYouTubeConnectors.length > 0 ||
otherDocumentNotionConnectors.length > 0 ||
otherDocumentAirtableConnectors.length > 0 ||
moreIntegrationsComposio.length > 0 ||
moreIntegrationsOAuth.length > 0 ||
moreIntegrationsOther.length > 0 ||
moreIntegrationsCrawlers.length > 0;
return (
<div className="space-y-8">
{/* Managed OAuth (Composio Integrations) */}
{filteredComposio.length > 0 && (
{/* Document/Files Connectors */}
{hasDocumentFileConnectors && (
<section>
<div className="flex items-center gap-2 mb-4">
<h3 className="text-sm font-semibold text-muted-foreground">
Managed OAuth (Composio)
Document/Files Connectors
</h3>
</div>
<div className="grid grid-cols-1 sm:grid-cols-2 gap-3">
{filteredComposio.map((connector) => {
const isConnected = connectedTypes.has(connector.connectorType);
const isConnecting = connectingId === connector.id;
// Find all connectors of this type
const typeConnectors =
isConnected && allConnectors
? allConnectors.filter(
(c: SearchSourceConnector) => c.connector_type === connector.connectorType
)
: [];
const accountCount = typeConnectors.length;
const documentCount = getDocumentCountForConnector(
connector.connectorType,
documentTypeCounts
);
// Check if any account is currently indexing
const isIndexing = typeConnectors.some((c) => indexingConnectorIds?.has(c.id));
return (
<ConnectorCard
key={connector.id}
id={connector.id}
title={connector.title}
description={connector.description}
connectorType={connector.connectorType}
isConnected={isConnected}
isConnecting={isConnecting}
documentCount={documentCount}
accountCount={accountCount}
isIndexing={isIndexing}
onConnect={() => onConnectOAuth(connector)}
onManage={
isConnected && onViewAccountsList
? () => onViewAccountsList(connector.connectorType, connector.title)
: undefined
}
/>
);
})}
</div>
</section>
)}
{/* Quick Connect */}
{filteredOAuth.length > 0 && (
<section>
<div className="flex items-center gap-2 mb-4">
<h3 className="text-sm font-semibold text-muted-foreground">Quick Connect</h3>
</div>
<div className="grid grid-cols-1 sm:grid-cols-2 gap-3">
{filteredOAuth.map((connector) => {
const isConnected = connectedTypes.has(connector.connectorType);
const isConnecting = connectingId === connector.id;
// Find all connectors of this type
const typeConnectors =
isConnected && allConnectors
? allConnectors.filter(
(c: SearchSourceConnector) => c.connector_type === connector.connectorType
)
: [];
const accountCount = typeConnectors.length;
const documentCount = getDocumentCountForConnector(
connector.connectorType,
documentTypeCounts
);
// Check if any account is currently indexing
const isIndexing = typeConnectors.some((c) => indexingConnectorIds?.has(c.id));
return (
<ConnectorCard
key={connector.id}
id={connector.id}
title={connector.title}
description={connector.description}
connectorType={connector.connectorType}
isConnected={isConnected}
isConnecting={isConnecting}
documentCount={documentCount}
accountCount={accountCount}
isIndexing={isIndexing}
onConnect={() => onConnectOAuth(connector)}
onManage={
isConnected && onViewAccountsList
? () => onViewAccountsList(connector.connectorType, connector.title)
: undefined
}
/>
);
})}
{nativeGoogleDriveConnectors.map(renderOAuthCard)}
{composioGoogleDriveConnectors.map(renderOAuthCard)}
{fileStorageConnectors.map(renderOAuthCard)}
</div>
</section>
)}
{/* More Integrations */}
{filteredOther.length > 0 && (
{hasMoreIntegrations && (
<section>
<div className="flex items-center gap-2 mb-4">
<h3 className="text-sm font-semibold text-muted-foreground">More Integrations</h3>
</div>
<div className="grid grid-cols-1 sm:grid-cols-2 gap-3">
{filteredOther.map((connector) => {
const isConnected = connectedTypes.has(connector.connectorType);
const isConnecting = connectingId === connector.id;
// Find the actual connector object if connected
const actualConnector =
isConnected && allConnectors
? allConnectors.find(
(c: SearchSourceConnector) => c.connector_type === connector.connectorType
)
: undefined;
const documentCount = getDocumentCountForConnector(
connector.connectorType,
documentTypeCounts
);
const isIndexing = actualConnector && indexingConnectorIds?.has(actualConnector.id);
// For MCP connectors, count total MCP connectors instead of document count
const isMCP = connector.connectorType === EnumConnectorName.MCP_CONNECTOR;
const mcpConnectorCount =
isMCP && allConnectors
? allConnectors.filter(
(c: SearchSourceConnector) =>
c.connector_type === EnumConnectorName.MCP_CONNECTOR
).length
: undefined;
const handleConnect = onConnectNonOAuth
? () => onConnectNonOAuth(connector.connectorType)
: () => {}; // Fallback - connector popup should handle all connector types
return (
<ConnectorCard
key={connector.id}
id={connector.id}
title={connector.title}
description={connector.description}
connectorType={connector.connectorType}
isConnected={isConnected}
isConnecting={isConnecting}
documentCount={documentCount}
connectorCount={mcpConnectorCount}
isIndexing={isIndexing}
onConnect={handleConnect}
onManage={
actualConnector && onManage ? () => onManage(actualConnector) : undefined
}
/>
);
})}
</div>
</section>
)}
{/* Content Sources */}
{filteredCrawlers.length > 0 && (
<section>
<div className="flex items-center gap-2 mb-4">
<h3 className="text-sm font-semibold text-muted-foreground">Content Sources</h3>
</div>
<div className="grid grid-cols-1 sm:grid-cols-2 gap-3">
{filteredCrawlers.map((crawler) => {
const isYouTube = crawler.id === "youtube-crawler";
const isWebcrawler = crawler.id === "webcrawler-connector";
// For crawlers that are actual connectors, check connection status
const isConnected = crawler.connectorType
? connectedTypes.has(crawler.connectorType)
: false;
const isConnecting = connectingId === crawler.id;
// Find the actual connector object if connected
const actualConnector =
isConnected && crawler.connectorType && allConnectors
? allConnectors.find(
(c: SearchSourceConnector) => c.connector_type === crawler.connectorType
)
: undefined;
const documentCount = crawler.connectorType
? getDocumentCountForConnector(crawler.connectorType, documentTypeCounts)
: undefined;
const isIndexing = actualConnector && indexingConnectorIds?.has(actualConnector.id);
const handleConnect =
isYouTube && onCreateYouTubeCrawler
? onCreateYouTubeCrawler
: isWebcrawler && onCreateWebcrawler
? onCreateWebcrawler
: crawler.connectorType && onConnectNonOAuth
? () => {
if (crawler.connectorType) {
onConnectNonOAuth(crawler.connectorType);
}
}
: () => {}; // Fallback for non-connector crawlers
return (
<ConnectorCard
key={crawler.id}
id={crawler.id}
title={crawler.title}
description={crawler.description}
connectorType={crawler.connectorType || undefined}
isConnected={isConnected}
isConnecting={isConnecting}
documentCount={documentCount}
isIndexing={isIndexing}
onConnect={handleConnect}
onManage={
actualConnector && onManage ? () => onManage(actualConnector) : undefined
}
/>
);
})}
{otherDocumentYouTubeConnectors.map(renderCrawlerCard)}
{otherDocumentNotionConnectors.map(renderOAuthCard)}
{otherDocumentAirtableConnectors.map(renderOAuthCard)}
{moreIntegrationsComposio.map(renderOAuthCard)}
{moreIntegrationsOAuth.map(renderOAuthCard)}
{moreIntegrationsOther.map(renderOtherCard)}
{moreIntegrationsCrawlers.map(renderCrawlerCard)}
</div>
</section>
)}

View file

@ -153,7 +153,7 @@ export const DocumentNode = React.memo(function DocumentNode({
<Clock className="h-3.5 w-3.5 text-muted-foreground/60" />
</span>
</TooltipTrigger>
<TooltipContent side="top">Pending - waiting to be synced</TooltipContent>
<TooltipContent side="top">Pending waiting to be synced</TooltipContent>
</Tooltip>
);
}

View file

@ -51,7 +51,6 @@ import {
import { Input } from "@/components/ui/input";
import { Spinner } from "@/components/ui/spinner";
import { useAnnouncements } from "@/hooks/use-announcements";
import { useDocumentsProcessing } from "@/hooks/use-documents-processing";
import { useInbox } from "@/hooks/use-inbox";
import { useIsMobile } from "@/hooks/use-mobile";
import { notificationsApiService } from "@/lib/apis/notifications-api.service";
@ -194,9 +193,6 @@ export function LayoutDataProvider({ searchSpaceId, children }: LayoutDataProvid
setStatusInboxItems(statusInbox.inboxItems);
}, [statusInbox.inboxItems, setStatusInboxItems]);
// Document processing status — drives sidebar status indicator (spinner / check / error)
const documentsProcessingStatus = useDocumentsProcessing(numericSpaceId);
// Track seen notification IDs to detect new page_limit_exceeded notifications
const seenPageLimitNotifications = useRef<Set<number>>(new Set());
const isInitialLoad = useRef(true);
@ -366,7 +362,6 @@ export function LayoutDataProvider({ searchSpaceId, children }: LayoutDataProvid
isActive: isMobile
? isDocumentsSidebarOpen
: isDocumentsSidebarOpen && !isRightPanelCollapsed,
statusIndicator: documentsProcessingStatus,
},
{
title: "Announcements",
@ -384,7 +379,6 @@ export function LayoutDataProvider({ searchSpaceId, children }: LayoutDataProvid
totalUnreadCount,
isAnnouncementsSidebarOpen,
announcementUnreadCount,
documentsProcessingStatus,
]
);

View file

@ -3,18 +3,13 @@
import { useAtomValue } from "jotai";
import { usePathname } from "next/navigation";
import { currentThreadAtom } from "@/atoms/chat/current-thread.atom";
import { hitlEditPanelAtom } from "@/atoms/chat/hitl-edit-panel.atom";
import { reportPanelAtom } from "@/atoms/chat/report-panel.atom";
import { documentsSidebarOpenAtom } from "@/atoms/documents/ui.atoms";
import { editorPanelAtom } from "@/atoms/editor/editor-panel.atom";
import { rightPanelCollapsedAtom } from "@/atoms/layout/right-panel.atom";
import { activeSearchSpaceIdAtom } from "@/atoms/search-spaces/search-space-query.atoms";
import { activeTabAtom, tabsAtom } from "@/atoms/tabs/tabs.atom";
import { ChatHeader } from "@/components/new-chat/chat-header";
import { ChatShareButton } from "@/components/new-chat/chat-share-button";
import { useIsMobile } from "@/hooks/use-mobile";
import type { ChatVisibility, ThreadRecord } from "@/lib/chat/thread-persistence";
import { cn } from "@/lib/utils";
import { RightPanelExpandButton } from "../right-panel/RightPanel";
interface HeaderProps {
mobileMenuTrigger?: React.ReactNode;
@ -26,19 +21,9 @@ export function Header({ mobileMenuTrigger }: HeaderProps) {
const isMobile = useIsMobile();
const activeTab = useAtomValue(activeTabAtom);
const tabs = useAtomValue(tabsAtom);
const collapsed = useAtomValue(rightPanelCollapsedAtom);
const documentsOpen = useAtomValue(documentsSidebarOpenAtom);
const reportState = useAtomValue(reportPanelAtom);
const editorState = useAtomValue(editorPanelAtom);
const hitlEditState = useAtomValue(hitlEditPanelAtom);
const isChatPage = pathname?.includes("/new-chat") ?? false;
const isDocumentTab = activeTab?.type === "document";
const reportOpen = reportState.isOpen && !!reportState.reportId;
const editorOpen = editorState.isOpen && !!editorState.documentId;
const hitlEditOpen = hitlEditState.isOpen && !!hitlEditState.onSave;
const showExpandButton =
!isMobile && collapsed && (documentsOpen || reportOpen || editorOpen || hitlEditOpen);
const hasTabBar = tabs.length > 1;
const currentThreadState = useAtomValue(currentThreadAtom);
@ -72,12 +57,11 @@ export function Header({ mobileMenuTrigger }: HeaderProps) {
</div>
{/* Right side - Actions */}
<div
className={cn("ml-auto flex items-center gap-2", showExpandButton && !hasTabBar && "mr-10")}
>
<div className="ml-auto flex items-center gap-2">
{hasThread && (
<ChatShareButton thread={threadForButton} onVisibilityChange={handleVisibilityChange} />
)}
{!isMobile && !hasTabBar && <RightPanelExpandButton />}
</div>
</header>
);

View file

@ -21,7 +21,6 @@ import type { DocumentNodeDoc } from "@/components/documents/DocumentNode";
import type { FolderDisplay } from "@/components/documents/FolderNode";
import { FolderPickerDialog } from "@/components/documents/FolderPickerDialog";
import { FolderTreeView } from "@/components/documents/FolderTreeView";
import { MarkdownViewer } from "@/components/markdown-viewer";
import { EXPORT_FILE_EXTENSIONS } from "@/components/shared/ExportMenuItems";
import {
AlertDialog,

View file

@ -1,6 +1,6 @@
"use client";
import { CheckCircle2, CircleAlert } from "lucide-react";
import { CheckCircle2, CircleAlert, RefreshCw } from "lucide-react";
import { Spinner } from "@/components/ui/spinner";
import { cn } from "@/lib/utils";
import type { NavItem } from "../../types/layout.types";
@ -12,6 +12,46 @@ interface NavSectionProps {
isCollapsed?: boolean;
}
/**
 * Maps a nav-item status to its optional pill label and tooltip copy.
 * Statuses with no dedicated copy (idle/undefined) yield an empty object.
 */
function getStatusInfo(status: NavItem["statusIndicator"]): {
	pillLabel?: string;
	tooltip?: string;
} {
	if (status === "processing") {
		return {
			tooltip: "New or updated documents are still being prepared for search.",
		};
	}
	if (status === "background_sync") {
		return {
			pillLabel: "Background sync",
			tooltip:
				"Periodic sync is checking for updates in the background. Existing documents stay searchable while this runs.",
		};
	}
	if (status === "success") {
		return {
			tooltip: "All document updates are fully synced.",
		};
	}
	if (status === "error") {
		return {
			pillLabel: "Needs attention",
			tooltip: "Some documents failed to sync. Open Documents or Inbox for details.",
		};
	}
	return {};
}
function StatusPill({ status }: { status: NavItem["statusIndicator"] }) {
const { pillLabel } = getStatusInfo(status);
if (!pillLabel) {
return null;
}
return (
<span className="inline-flex items-center rounded-full border border-border/60 bg-background/60 px-2 py-0.5 text-[10px] font-medium text-muted-foreground">
{pillLabel}
</span>
);
}
function StatusBadge({ status }: { status: NavItem["statusIndicator"] }) {
if (status === "processing") {
return (
@ -20,6 +60,13 @@ function StatusBadge({ status }: { status: NavItem["statusIndicator"] }) {
</span>
);
}
if (status === "background_sync") {
return (
<span className="absolute top-0.5 right-0.5 inline-flex items-center justify-center h-[14px] w-[14px] rounded-full bg-primary/15">
<RefreshCw className="h-[9px] w-[9px] text-primary animate-[spin_3s_linear_infinite]" />
</span>
);
}
if (status === "success") {
return (
<span className="absolute top-0.5 right-0.5 inline-flex items-center justify-center h-[14px] w-[14px] rounded-full bg-emerald-500/15 animate-in fade-in duration-300">
@ -49,6 +96,13 @@ function StatusIcon({
if (status === "processing") {
return <Spinner size="sm" className={cn("shrink-0 text-primary", className)} />;
}
if (status === "background_sync") {
return (
<RefreshCw
className={cn("shrink-0 text-primary animate-[spin_3s_linear_infinite]", className)}
/>
);
}
if (status === "success") {
return (
<CheckCircle2
@ -89,6 +143,7 @@ export function NavSection({ items, onItemClick, isCollapsed = false }: NavSecti
item.title === "Inbox" || item.title.toLowerCase().includes("inbox")
? { "data-joyride": "inbox-sidebar" as const }
: {};
const { tooltip } = getStatusInfo(item.statusIndicator);
return (
<SidebarButton
@ -107,6 +162,8 @@ export function NavSection({ items, onItemClick, isCollapsed = false }: NavSecti
className="h-4 w-4"
/>
}
trailingContent={<StatusPill status={item.statusIndicator} />}
tooltipContent={tooltip}
buttonProps={joyrideAttr}
/>
);

View file

@ -54,9 +54,7 @@ export function PageUsageDisplay({ pagesUsed, pagesLimit }: PageUsageDisplayProp
<CreditCard className="h-3 w-3 shrink-0" />
Buy Pages
</span>
<span className="text-[10px] font-medium text-muted-foreground">
$1/1k
</span>
<span className="text-[10px] font-medium text-muted-foreground">$1/1k</span>
</Link>
)}
</div>

View file

@ -16,6 +16,10 @@ interface SidebarButtonProps {
collapsedOverlay?: React.ReactNode;
/** Custom icon node for expanded mode — overrides the default <Icon> rendering */
expandedIconNode?: React.ReactNode;
/** Optional inline trailing content shown in expanded mode */
trailingContent?: React.ReactNode;
/** Optional tooltip content that replaces the default label tooltip */
tooltipContent?: React.ReactNode;
className?: string;
/** Extra attributes spread onto the inner <button> (e.g. data-joyride) */
buttonProps?: React.ButtonHTMLAttributes<HTMLButtonElement>;
@ -42,6 +46,8 @@ export function SidebarButton({
badge,
collapsedOverlay,
expandedIconNode,
trailingContent,
tooltipContent,
className,
buttonProps,
}: SidebarButtonProps) {
@ -62,15 +68,19 @@ export function SidebarButton({
<span className="sr-only">{label}</span>
</button>
</TooltipTrigger>
<TooltipContent side="right">
{label}
{typeof badge === "string" && ` (${badge})`}
<TooltipContent side="right" className="max-w-xs">
{tooltipContent ?? (
<>
{label}
{typeof badge === "string" && ` (${badge})`}
</>
)}
</TooltipContent>
</Tooltip>
);
}
return (
const button = (
<button
type="button"
onClick={onClick}
@ -79,6 +89,7 @@ export function SidebarButton({
>
{expandedIconNode ?? <Icon className="h-4 w-4 shrink-0" />}
<span className="flex-1 truncate">{label}</span>
{trailingContent}
{badge && typeof badge !== "string" ? badge : null}
{badge && typeof badge === "string" ? (
<span className="inline-flex items-center justify-center min-w-4 h-4 px-1 rounded-full bg-red-500 text-white text-[10px] font-medium">
@ -87,4 +98,17 @@ export function SidebarButton({
) : null}
</button>
);
if (!tooltipContent) {
return button;
}
return (
<Tooltip>
<TooltipTrigger asChild>{button}</TooltipTrigger>
<TooltipContent side="right" className="max-w-xs">
{tooltipContent}
</TooltipContent>
</Tooltip>
);
}

View file

@ -46,9 +46,7 @@ export function BuyPagesContent() {
return (
<div className="w-full space-y-3 text-center">
<h2 className="text-xl font-bold tracking-tight">Buy Pages</h2>
<p className="text-sm text-muted-foreground">
Page purchases are temporarily unavailable.
</p>
<p className="text-sm text-muted-foreground">Page purchases are temporarily unavailable.</p>
</div>
);
}
@ -68,9 +66,7 @@ export function BuyPagesContent() {
<div className="w-full space-y-5">
<div className="text-center">
<h2 className="text-xl font-bold tracking-tight">Buy Pages</h2>
<p className="mt-1 text-sm text-muted-foreground">
$1 per 1,000 pages, pay as you go
</p>
<p className="mt-1 text-sm text-muted-foreground">$1 per 1,000 pages, pay as you go</p>
</div>
<div className="space-y-3">
@ -118,7 +114,9 @@ export function BuyPagesContent() {
</div>
<div className="flex items-center justify-between rounded-lg border bg-muted/30 px-3 py-2">
<span className="text-sm font-medium tabular-nums">{totalPages.toLocaleString()} pages</span>
<span className="text-sm font-medium tabular-nums">
{totalPages.toLocaleString()} pages
</span>
<span className="text-sm font-semibold tabular-nums">${totalPrice}</span>
</div>
@ -133,12 +131,12 @@ export function BuyPagesContent() {
Redirecting
</>
) : (
<>Buy {totalPages.toLocaleString()} Pages for ${totalPrice}</>
<>
Buy {totalPages.toLocaleString()} Pages for ${totalPrice}
</>
)}
</Button>
<p className="text-center text-[11px] text-muted-foreground">
Secure checkout via Stripe
</p>
<p className="text-center text-[11px] text-muted-foreground">Secure checkout via Stripe</p>
</div>
</div>
);

View file

@ -8,10 +8,7 @@ import { useEffect, useState } from "react";
import { toast } from "sonner";
import { USER_QUERY_KEY } from "@/atoms/user/user-query.atoms";
import { Button } from "@/components/ui/button";
import {
Card,
CardContent,
} from "@/components/ui/card";
import { Card, CardContent } from "@/components/ui/card";
import {
Dialog,
DialogContent,
@ -98,7 +95,11 @@ export function MorePagesContent() {
Limited offer. Schedule a meeting or email us to claim.
</p>
</div>
<Button size="sm" className="bg-emerald-600 text-white hover:bg-emerald-700" onClick={() => setClaimOpen(true)}>
<Button
size="sm"
className="bg-emerald-600 text-white hover:bg-emerald-700"
onClick={() => setClaimOpen(true)}
>
Claim
</Button>
</CardContent>
@ -133,7 +134,11 @@ export function MorePagesContent() {
task.completed ? "bg-primary text-primary-foreground" : "bg-muted"
)}
>
{task.completed ? <Check className="h-3.5 w-3.5" /> : <span className="text-xs font-semibold">+{task.pages_reward}</span>}
{task.completed ? (
<Check className="h-3.5 w-3.5" />
) : (
<span className="text-xs font-semibold">+{task.pages_reward}</span>
)}
</div>
<p
className={cn(
@ -199,7 +204,8 @@ export function MorePagesContent() {
<DialogHeader>
<DialogTitle>Claim 6,000 Free Pages</DialogTitle>
<DialogDescription>
Send us an email to claim your free 6,000 pages. Include your account email and primary usecase for free pages.
Send us an email to claim your free 6,000 pages. Include your account email and
primary usecase for free pages.
</DialogDescription>
</DialogHeader>
<Button asChild className="w-full gap-2">

View file

@ -11,7 +11,7 @@ export function sanitizeHref(href?: string): string | undefined {
candidate.startsWith("#")
) {
if (candidate.startsWith("//")) return undefined;
// eslint-disable-next-line no-control-regex -- intentionally matching control characters
// biome-ignore lint/suspicious/noControlCharactersInRegex: intentionally matching control characters
if (/[\u0000-\u001F\u007F]/.test(candidate)) return undefined;
return candidate;
}

View file

@ -4,18 +4,31 @@ import { useQuery } from "@rocicorp/zero/react";
import { useEffect, useRef, useState } from "react";
import { queries } from "@/zero/queries";
export type DocumentsProcessingStatus = "idle" | "processing" | "success" | "error";
export type DocumentsProcessingStatus =
| "idle"
| "processing"
| "background_sync"
| "success"
| "error";
const SUCCESS_LINGER_MS = 5000;
interface UseDocumentsProcessingOptions {
hasPeriodicSyncEnabled?: boolean;
}
/**
* Returns the processing status of documents in the search space:
* - "processing" at least one doc is pending/processing (show spinner)
* - "processing" docs are queued or actively being prepared for search
* - "background_sync" existing docs are being refreshed in the background
* - "error" nothing processing, but failed docs exist (show red icon)
* - "success" just transitioned from processing all clear (green check, auto-dismisses)
* - "idle" nothing noteworthy (show normal icon)
*/
export function useDocumentsProcessing(searchSpaceId: number | null): DocumentsProcessingStatus {
export function useDocumentsProcessing(
searchSpaceId: number | null,
{ hasPeriodicSyncEnabled = false }: UseDocumentsProcessingOptions = {}
): DocumentsProcessingStatus {
const [status, setStatus] = useState<DocumentsProcessingStatus>("idle");
const wasProcessingRef = useRef(false);
const successTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
@ -25,38 +38,56 @@ export function useDocumentsProcessing(searchSpaceId: number | null): DocumentsP
useEffect(() => {
if (!searchSpaceId || !documents) return;
const clearSuccessTimer = () => {
if (successTimerRef.current) {
clearTimeout(successTimerRef.current);
successTimerRef.current = null;
}
};
let pendingCount = 0;
let processingCount = 0;
let failedCount = 0;
let readyCount = 0;
for (const doc of documents) {
// Keep the nav indicator aligned with what the Documents sidebar actually renders.
// Some connectors can create temporary untitled placeholder rows that remain hidden
// from the sidebar, and those should not keep the whole section looking "stuck".
if (!doc.title || doc.title.trim() === "") {
continue;
}
const state = (doc.status as { state?: string } | null)?.state;
if (state === "pending" || state === "processing") {
if (state === "pending") {
pendingCount++;
} else if (state === "processing") {
processingCount++;
} else if (state === "failed") {
failedCount++;
} else {
readyCount++;
}
}
if (processingCount > 0) {
if (pendingCount > 0) {
wasProcessingRef.current = true;
if (successTimerRef.current) {
clearTimeout(successTimerRef.current);
successTimerRef.current = null;
}
clearSuccessTimer();
setStatus("processing");
} else if (processingCount > 0) {
wasProcessingRef.current = true;
clearSuccessTimer();
const isBackgroundSync = hasPeriodicSyncEnabled && readyCount > 0;
setStatus(isBackgroundSync ? "background_sync" : "processing");
} else if (failedCount > 0) {
wasProcessingRef.current = false;
if (successTimerRef.current) {
clearTimeout(successTimerRef.current);
successTimerRef.current = null;
}
clearSuccessTimer();
setStatus("error");
} else if (wasProcessingRef.current) {
wasProcessingRef.current = false;
setStatus("success");
if (successTimerRef.current) {
clearTimeout(successTimerRef.current);
}
clearSuccessTimer();
successTimerRef.current = setTimeout(() => {
setStatus("idle");
successTimerRef.current = null;
@ -64,7 +95,7 @@ export function useDocumentsProcessing(searchSpaceId: number | null): DocumentsP
} else {
setStatus("idle");
}
}, [searchSpaceId, documents]);
}, [searchSpaceId, documents, hasPeriodicSyncEnabled]);
useEffect(() => {
return () => {

View file

@ -13,9 +13,13 @@ class StripeApiService {
createCheckoutSession = async (
request: CreateCheckoutSessionRequest
): Promise<CreateCheckoutSessionResponse> => {
return baseApiService.post("/api/v1/stripe/create-checkout-session", createCheckoutSessionResponse, {
body: request,
});
return baseApiService.post(
"/api/v1/stripe/create-checkout-session",
createCheckoutSessionResponse,
{
body: request,
}
);
};
getPurchases = async (): Promise<GetPagePurchasesResponse> => {

View file

@ -45,4 +45,3 @@ export const isSelfHosted = () => DEPLOYMENT_MODE === "self-hosted";
// Helper to check if running in cloud mode
export const isCloud = () => DEPLOYMENT_MODE === "cloud";

View file

@ -1,6 +1,6 @@
{
"name": "surfsense_web",
"version": "0.0.13",
"version": "0.0.14",
"private": true,
"description": "SurfSense Frontend",
"scripts": {