feat: no-login experience and premium tokens
Some checks are pending
Build and Push Docker Images / tag_release (push) Waiting to run
Build and Push Docker Images / build (./surfsense_backend, ./surfsense_backend/Dockerfile, backend, surfsense-backend, ubuntu-24.04-arm, linux/arm64, arm64) (push) Blocked by required conditions
Build and Push Docker Images / build (./surfsense_backend, ./surfsense_backend/Dockerfile, backend, surfsense-backend, ubuntu-latest, linux/amd64, amd64) (push) Blocked by required conditions
Build and Push Docker Images / build (./surfsense_web, ./surfsense_web/Dockerfile, web, surfsense-web, ubuntu-24.04-arm, linux/arm64, arm64) (push) Blocked by required conditions
Build and Push Docker Images / build (./surfsense_web, ./surfsense_web/Dockerfile, web, surfsense-web, ubuntu-latest, linux/amd64, amd64) (push) Blocked by required conditions
Build and Push Docker Images / create_manifest (backend, surfsense-backend) (push) Blocked by required conditions
Build and Push Docker Images / create_manifest (web, surfsense-web) (push) Blocked by required conditions

This commit is contained in:
DESKTOP-RTLN3BA\$punk 2026-04-15 17:02:00 -07:00
parent 87452bb315
commit ff4e0f9b62
68 changed files with 5914 additions and 121 deletions

3
.gitignore vendored
View file

@ -6,4 +6,5 @@ node_modules/
.venv .venv
.pnpm-store .pnpm-store
.DS_Store .DS_Store
deepagents/ deepagents/
debug.log

View file

@ -158,6 +158,11 @@ STRIPE_PAGE_BUYING_ENABLED=FALSE
# STRIPE_RECONCILIATION_LOOKBACK_MINUTES=10 # STRIPE_RECONCILIATION_LOOKBACK_MINUTES=10
# STRIPE_RECONCILIATION_BATCH_SIZE=100 # STRIPE_RECONCILIATION_BATCH_SIZE=100
# Premium token purchases ($1 per 1M tokens for premium-tier models)
# STRIPE_TOKEN_BUYING_ENABLED=FALSE
# STRIPE_PREMIUM_TOKEN_PRICE_ID=price_...
# STRIPE_TOKENS_PER_UNIT=1000000
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
# TTS & STT (Text-to-Speech / Speech-to-Text) # TTS & STT (Text-to-Speech / Speech-to-Text)
# ------------------------------------------------------------------------------ # ------------------------------------------------------------------------------
@ -309,6 +314,26 @@ STT_SERVICE=local/base
# Pages limit per user for ETL (default: unlimited) # Pages limit per user for ETL (default: unlimited)
# PAGES_LIMIT=500 # PAGES_LIMIT=500
# Premium token quota per registered user (default: 5M)
# Only applies to models with billing_tier=premium in global_llm_config.yaml
# PREMIUM_TOKEN_LIMIT=5000000
# No-login (anonymous) mode — public users can chat without an account
# Set TRUE to enable /free pages and anonymous chat API
NOLOGIN_MODE_ENABLED=FALSE
# ANON_TOKEN_LIMIT=1000000
# ANON_TOKEN_WARNING_THRESHOLD=800000
# ANON_TOKEN_QUOTA_TTL_DAYS=30
# ANON_MAX_UPLOAD_SIZE_MB=5
# QUOTA_MAX_RESERVE_PER_CALL=8000
# Abuse prevention: max concurrent anonymous streams per IP
# ANON_MAX_CONCURRENT_STREAMS=2
# Number of chat requests per IP before Turnstile CAPTCHA is required
# ANON_CAPTCHA_REQUEST_THRESHOLD=5
# Cloudflare Turnstile CAPTCHA (https://dash.cloudflare.com/ -> Turnstile)
# TURNSTILE_ENABLED=FALSE
# TURNSTILE_SECRET_KEY=
# Connector indexing lock TTL in seconds (default: 28800 = 8 hours) # Connector indexing lock TTL in seconds (default: 28800 = 8 hours)
# CONNECTOR_INDEXING_LOCK_TTL_SECONDS=28800 # CONNECTOR_INDEXING_LOCK_TTL_SECONDS=28800

View file

@ -53,6 +53,13 @@ STRIPE_PRICE_ID=price_...
STRIPE_PAGES_PER_UNIT=1000 STRIPE_PAGES_PER_UNIT=1000
# Set FALSE to disable new checkout session creation temporarily # Set FALSE to disable new checkout session creation temporarily
STRIPE_PAGE_BUYING_ENABLED=TRUE STRIPE_PAGE_BUYING_ENABLED=TRUE
# Premium token purchases via Stripe (for premium-tier model usage)
# Set TRUE to allow users to buy premium token packs ($1 per 1M tokens)
STRIPE_TOKEN_BUYING_ENABLED=FALSE
STRIPE_PREMIUM_TOKEN_PRICE_ID=price_...
STRIPE_TOKENS_PER_UNIT=1000000
# Periodic Stripe safety net for purchases left in PENDING (minutes old) # Periodic Stripe safety net for purchases left in PENDING (minutes old)
STRIPE_RECONCILIATION_LOOKBACK_MINUTES=10 STRIPE_RECONCILIATION_LOOKBACK_MINUTES=10
# Max pending purchases to check per reconciliation run # Max pending purchases to check per reconciliation run
@ -177,6 +184,34 @@ VIDEO_PRESENTATION_DEFAULT_DURATION_IN_FRAMES=300
# (Optional) Maximum pages limit per user for ETL services (default: `999999999` for unlimited in OSS version) # (Optional) Maximum pages limit per user for ETL services (default: `999999999` for unlimited in OSS version)
PAGES_LIMIT=500 PAGES_LIMIT=500
# Premium token quota per registered user (default: 5,000,000)
# Applies only to models with billing_tier=premium in global_llm_config.yaml
PREMIUM_TOKEN_LIMIT=5000000
# No-login (anonymous) mode — allows public users to chat without an account
# Set TRUE to enable /free pages and anonymous chat API
NOLOGIN_MODE_ENABLED=FALSE
# Total tokens allowed per anonymous session before requiring account creation
ANON_TOKEN_LIMIT=1000000
# Token count at which the UI shows a soft warning
ANON_TOKEN_WARNING_THRESHOLD=800000
# Days before anonymous quota tracking expires in Redis
ANON_TOKEN_QUOTA_TTL_DAYS=30
# Max document upload size for anonymous users (MB)
ANON_MAX_UPLOAD_SIZE_MB=5
# Maximum tokens to reserve per LLM call for quota enforcement (safety cap)
QUOTA_MAX_RESERVE_PER_CALL=8000
# Abuse prevention: max concurrent anonymous streams per IP (default: 2)
ANON_MAX_CONCURRENT_STREAMS=2
# Number of chat requests per IP before Turnstile CAPTCHA is required (default: 5)
ANON_CAPTCHA_REQUEST_THRESHOLD=5
# Cloudflare Turnstile CAPTCHA (https://dash.cloudflare.com/ -> Turnstile)
# Set TURNSTILE_ENABLED=TRUE and provide keys to activate CAPTCHA for anonymous chat
TURNSTILE_ENABLED=FALSE
TURNSTILE_SECRET_KEY=
# Residential Proxy Configuration (anonymous-proxies.net) # Residential Proxy Configuration (anonymous-proxies.net)
# Used for web crawling, link previews, and YouTube transcript fetching to avoid IP bans. # Used for web crawling, link previews, and YouTube transcript fetching to avoid IP bans.

View file

@ -0,0 +1,166 @@
"""add premium token quota columns and purchase table
Revision ID: 126
Revises: 125
Create Date: 2026-04-15
Adds premium_tokens_limit, premium_tokens_used, premium_tokens_reserved
to the user table and creates the premium_token_purchases table.
"""
from __future__ import annotations
import os
from collections.abc import Sequence
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from alembic import op
revision: str = "126"
down_revision: str | None = "125"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
PREMIUM_TOKEN_LIMIT_DEFAULT = os.getenv("PREMIUM_TOKEN_LIMIT", "5000000")
def upgrade() -> None:
    """Apply the premium-token schema changes.

    All steps are written to be idempotent so the migration can be re-run
    safely against a partially-migrated database:
      1. Add ``premium_tokens_limit`` / ``premium_tokens_used`` /
         ``premium_tokens_reserved`` to the ``user`` table, skipping any
         column that already exists.
      2. Create the ``premiumtokenpurchasestatus`` Postgres enum if missing.
      3. Create the ``premium_token_purchases`` table if missing.
      4. Create supporting indexes with ``IF NOT EXISTS``.
    """
    conn = op.get_bind()
    # --- User table: add premium token columns if missing ---
    # Inspect the live connection (not the model metadata) so that columns
    # already present in the database are detected and skipped.
    inspector = sa.inspect(conn)
    user_columns = {c["name"] for c in inspector.get_columns("user")}
    if "premium_tokens_limit" not in user_columns:
        # NOTE(review): server_default is read from the PREMIUM_TOKEN_LIMIT
        # env var at migration time (see module constant), so different
        # deployments can bake different defaults into the schema — confirm
        # this is intended rather than a fixed "5000000".
        op.add_column(
            "user",
            sa.Column(
                "premium_tokens_limit",
                sa.BigInteger(),
                nullable=False,
                server_default=PREMIUM_TOKEN_LIMIT_DEFAULT,
            ),
        )
    if "premium_tokens_used" not in user_columns:
        op.add_column(
            "user",
            sa.Column(
                "premium_tokens_used",
                sa.BigInteger(),
                nullable=False,
                server_default="0",
            ),
        )
    if "premium_tokens_reserved" not in user_columns:
        op.add_column(
            "user",
            sa.Column(
                "premium_tokens_reserved",
                sa.BigInteger(),
                nullable=False,
                server_default="0",
            ),
        )
    # --- PremiumTokenPurchase enum + table ---
    # Check pg_type directly: postgresql.ENUM(create_type=False) never creates
    # the type implicitly, so creation must be explicit and guarded.
    enum_exists = conn.execute(
        sa.text("SELECT 1 FROM pg_type WHERE typname = 'premiumtokenpurchasestatus'")
    ).fetchone()
    if not enum_exists:
        purchase_status_enum = postgresql.ENUM(
            "PENDING",
            "COMPLETED",
            "FAILED",
            name="premiumtokenpurchasestatus",
            create_type=False,
        )
        # checkfirst=True makes this a no-op if the type appeared between the
        # pg_type probe above and this call.
        purchase_status_enum.create(conn, checkfirst=True)
    if not inspector.has_table("premium_token_purchases"):
        op.create_table(
            "premium_token_purchases",
            sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
            sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False),
            # Stripe checkout session id doubles as the idempotency key for
            # webhook processing (unique constraint below).
            sa.Column(
                "stripe_checkout_session_id",
                sa.String(length=255),
                nullable=False,
            ),
            sa.Column(
                "stripe_payment_intent_id",
                sa.String(length=255),
                nullable=True,
            ),
            sa.Column("quantity", sa.Integer(), nullable=False),
            sa.Column("tokens_granted", sa.BigInteger(), nullable=False),
            # amount_total / currency mirror Stripe's session fields and may be
            # absent until the purchase completes.
            sa.Column("amount_total", sa.Integer(), nullable=True),
            sa.Column("currency", sa.String(length=10), nullable=True),
            sa.Column(
                "status",
                postgresql.ENUM(
                    "PENDING",
                    "COMPLETED",
                    "FAILED",
                    name="premiumtokenpurchasestatus",
                    create_type=False,
                ),
                nullable=False,
                server_default=sa.text("'PENDING'::premiumtokenpurchasestatus"),
            ),
            sa.Column("completed_at", sa.TIMESTAMP(timezone=True), nullable=True),
            sa.Column(
                "created_at",
                sa.TIMESTAMP(timezone=True),
                server_default=sa.text("now()"),
                nullable=False,
            ),
            sa.Column(
                "updated_at",
                sa.TIMESTAMP(timezone=True),
                server_default=sa.text("now()"),
                nullable=False,
            ),
            sa.ForeignKeyConstraint(
                ["user_id"],
                ["user.id"],
                ondelete="CASCADE",
            ),
            sa.PrimaryKeyConstraint("id"),
            sa.UniqueConstraint(
                "stripe_checkout_session_id",
                name="uq_premium_token_purchases_stripe_checkout_session_id",
            ),
        )
    # Indexes are created outside the has_table guard with IF NOT EXISTS so
    # they are added even when the table pre-existed without them.
    op.execute(
        "CREATE INDEX IF NOT EXISTS ix_premium_token_purchases_user_id "
        "ON premium_token_purchases (user_id)"
    )
    # NOTE(review): this unique index covers the same column as the
    # uq_premium_token_purchases_stripe_checkout_session_id constraint above,
    # so it is likely redundant — confirm before keeping both.
    op.execute(
        "CREATE UNIQUE INDEX IF NOT EXISTS ix_premium_token_purchases_stripe_session "
        "ON premium_token_purchases (stripe_checkout_session_id)"
    )
    op.execute(
        "CREATE INDEX IF NOT EXISTS ix_premium_token_purchases_payment_intent "
        "ON premium_token_purchases (stripe_payment_intent_id)"
    )
    op.execute(
        "CREATE INDEX IF NOT EXISTS ix_premium_token_purchases_status "
        "ON premium_token_purchases (status)"
    )
def downgrade() -> None:
    """Reverse the premium-token schema changes.

    Order matters: indexes and the table must go before the enum type they
    reference; the user-table columns are independent and are dropped last.
    """
    # Drop the supporting indexes, then the purchase table itself.
    for statement in (
        "DROP INDEX IF EXISTS ix_premium_token_purchases_status",
        "DROP INDEX IF EXISTS ix_premium_token_purchases_payment_intent",
        "DROP INDEX IF EXISTS ix_premium_token_purchases_stripe_session",
        "DROP INDEX IF EXISTS ix_premium_token_purchases_user_id",
        "DROP TABLE IF EXISTS premium_token_purchases",
    ):
        op.execute(statement)
    # The enum can only be removed once no column references it.
    postgresql.ENUM(name="premiumtokenpurchasestatus").drop(
        op.get_bind(), checkfirst=True
    )
    # Remove the quota columns added to the user table by upgrade().
    for column_name in (
        "premium_tokens_reserved",
        "premium_tokens_used",
        "premium_tokens_limit",
    ):
        op.drop_column("user", column_name)

View file

@ -161,6 +161,7 @@ async def create_surfsense_deep_agent(
firecrawl_api_key: str | None = None, firecrawl_api_key: str | None = None,
thread_visibility: ChatVisibility | None = None, thread_visibility: ChatVisibility | None = None,
mentioned_document_ids: list[int] | None = None, mentioned_document_ids: list[int] | None = None,
anon_session_id: str | None = None,
): ):
""" """
Create a SurfSense deep agent with configurable tools and prompts. Create a SurfSense deep agent with configurable tools and prompts.
@ -463,6 +464,7 @@ async def create_surfsense_deep_agent(
available_connectors=available_connectors, available_connectors=available_connectors,
available_document_types=available_document_types, available_document_types=available_document_types,
mentioned_document_ids=mentioned_document_ids, mentioned_document_ids=mentioned_document_ids,
anon_session_id=anon_session_id,
), ),
SurfSenseFilesystemMiddleware( SurfSenseFilesystemMiddleware(
search_space_id=search_space_id, search_space_id=search_space_id,

View file

@ -109,6 +109,12 @@ class AgentConfig:
# Auto mode flag # Auto mode flag
is_auto_mode: bool = False is_auto_mode: bool = False
# Token quota and policy
billing_tier: str = "free"
is_premium: bool = False
anonymous_enabled: bool = False
quota_reserve_tokens: int | None = None
@classmethod @classmethod
def from_auto_mode(cls) -> "AgentConfig": def from_auto_mode(cls) -> "AgentConfig":
""" """
@ -130,6 +136,10 @@ class AgentConfig:
config_id=AUTO_MODE_ID, config_id=AUTO_MODE_ID,
config_name="Auto (Fastest)", config_name="Auto (Fastest)",
is_auto_mode=True, is_auto_mode=True,
billing_tier="free",
is_premium=False,
anonymous_enabled=False,
quota_reserve_tokens=None,
) )
@classmethod @classmethod
@ -158,6 +168,10 @@ class AgentConfig:
config_id=config.id, config_id=config.id,
config_name=config.name, config_name=config.name,
is_auto_mode=False, is_auto_mode=False,
billing_tier="free",
is_premium=False,
anonymous_enabled=False,
quota_reserve_tokens=None,
) )
@classmethod @classmethod
@ -195,6 +209,10 @@ class AgentConfig:
config_id=yaml_config.get("id"), config_id=yaml_config.get("id"),
config_name=yaml_config.get("name"), config_name=yaml_config.get("name"),
is_auto_mode=False, is_auto_mode=False,
billing_tier=yaml_config.get("billing_tier", "free"),
is_premium=yaml_config.get("billing_tier", "free") == "premium",
anonymous_enabled=yaml_config.get("anonymous_enabled", False),
quota_reserve_tokens=yaml_config.get("quota_reserve_tokens"),
) )

View file

@ -819,6 +819,34 @@ async def build_scoped_filesystem(
return files, doc_id_to_path return files, doc_id_to_path
def _build_anon_scoped_filesystem(
    documents: Sequence[dict[str, Any]],
) -> dict[str, dict[str, Any]]:
    """Build a scoped filesystem for anonymous documents without DB queries.

    Anonymous uploads have no folders, so all files go under /documents.

    Args:
        documents: Search-result-shaped dicts; each may carry a nested
            ``"document"`` dict (``title``, ``id``) and ``"matched_chunk_ids"``.

    Returns:
        Mapping of file path -> file entry. Note the ``"content"`` value is a
        list of lines (from ``str.split``), while the remaining entry fields
        are strings — hence ``dict[str, Any]`` for the entry type.
    """
    files: dict[str, dict[str, Any]] = {}
    for document in documents:
        doc_meta = document.get("document") or {}
        title = str(doc_meta.get("title") or "untitled")
        file_name = _safe_filename(title)
        path = f"/documents/{file_name}"
        if path in files:
            # Disambiguate duplicate titles with the document id.
            # Assumes _safe_filename() yields a name ending in .xml — TODO confirm.
            doc_id = doc_meta.get("id", "dup")
            stem = file_name.removesuffix(".xml")
            path = f"/documents/{stem} ({doc_id}).xml"
            # Guard against a second collision (same title AND same id),
            # which previously overwrote the earlier entry silently.
            suffix = 2
            while path in files:
                path = f"/documents/{stem} ({doc_id}-{suffix}).xml"
                suffix += 1
        matched_ids = set(document.get("matched_chunk_ids") or [])
        xml_content = _build_document_xml(document, matched_chunk_ids=matched_ids)
        files[path] = {
            "content": xml_content.split("\n"),
            "encoding": "utf-8",
            "created_at": "",
            "modified_at": "",
        }
    return files
class KnowledgeBaseSearchMiddleware(AgentMiddleware): # type: ignore[type-arg] class KnowledgeBaseSearchMiddleware(AgentMiddleware): # type: ignore[type-arg]
"""Pre-agent middleware that always searches the KB and seeds a scoped filesystem.""" """Pre-agent middleware that always searches the KB and seeds a scoped filesystem."""
@ -833,6 +861,7 @@ class KnowledgeBaseSearchMiddleware(AgentMiddleware): # type: ignore[type-arg]
available_document_types: list[str] | None = None, available_document_types: list[str] | None = None,
top_k: int = 10, top_k: int = 10,
mentioned_document_ids: list[int] | None = None, mentioned_document_ids: list[int] | None = None,
anon_session_id: str | None = None,
) -> None: ) -> None:
self.llm = llm self.llm = llm
self.search_space_id = search_space_id self.search_space_id = search_space_id
@ -840,6 +869,7 @@ class KnowledgeBaseSearchMiddleware(AgentMiddleware): # type: ignore[type-arg]
self.available_document_types = available_document_types self.available_document_types = available_document_types
self.top_k = top_k self.top_k = top_k
self.mentioned_document_ids = mentioned_document_ids or [] self.mentioned_document_ids = mentioned_document_ids or []
self.anon_session_id = anon_session_id
async def _plan_search_inputs( async def _plan_search_inputs(
self, self,
@ -913,6 +943,50 @@ class KnowledgeBaseSearchMiddleware(AgentMiddleware): # type: ignore[type-arg]
pass pass
return asyncio.run(self.abefore_agent(state, runtime)) return asyncio.run(self.abefore_agent(state, runtime))
async def _load_anon_document(self) -> dict[str, Any] | None:
    """Load the anonymous user's uploaded document from Redis.

    Returns a search-result-shaped dict (sentinel ids of ``-1``) built from
    the cached upload, or ``None`` when there is no session id, no cached
    document, or any Redis/JSON failure (logged as a warning, best-effort).
    """
    if not self.anon_session_id:
        return None
    try:
        # Imported lazily so the module loads even where redis is absent.
        import redis.asyncio as aioredis

        from app.config import config

        client = aioredis.from_url(
            config.REDIS_APP_URL, decode_responses=True
        )
        try:
            raw = await client.get(f"anon:doc:{self.anon_session_id}")
            if not raw:
                return None
            payload = json.loads(raw)
            text = payload.get("content", "")
            # Wrap the single upload in the same shape KB search results use.
            return {
                "document_id": -1,
                "content": text,
                "score": 1.0,
                "chunks": [
                    {
                        "chunk_id": -1,
                        "content": text,
                    }
                ],
                "matched_chunk_ids": [-1],
                "document": {
                    "id": -1,
                    "title": payload.get("filename", "uploaded_document"),
                    "document_type": "FILE",
                    "metadata": {"source": "anonymous_upload"},
                },
                "source": "FILE",
                "_user_mentioned": True,
            }
        finally:
            await client.aclose()
    except Exception as exc:
        logger.warning("Failed to load anonymous document from Redis: %s", exc)
    return None
async def abefore_agent( # type: ignore[override] async def abefore_agent( # type: ignore[override]
self, self,
state: AgentState, state: AgentState,
@ -937,6 +1011,35 @@ class KnowledgeBaseSearchMiddleware(AgentMiddleware): # type: ignore[type-arg]
t0 = _perf_log and asyncio.get_event_loop().time() t0 = _perf_log and asyncio.get_event_loop().time()
existing_files = state.get("files") existing_files = state.get("files")
# --- Anonymous session: load Redis doc and skip DB queries ---
if self.anon_session_id:
merged: list[dict[str, Any]] = []
anon_doc = await self._load_anon_document()
if anon_doc:
merged.append(anon_doc)
if merged:
new_files = _build_anon_scoped_filesystem(merged)
mentioned_paths = set(new_files.keys())
else:
new_files = {}
mentioned_paths = set()
ai_msg, tool_msg = _build_synthetic_ls(
existing_files,
new_files,
mentioned_paths=mentioned_paths,
)
if t0 is not None:
_perf_log.info(
"[kb_fs_middleware] anon completed in %.3fs new_files=%d",
asyncio.get_event_loop().time() - t0,
len(new_files),
)
return {"files": new_files, "messages": [ai_msg, tool_msg]}
# --- Authenticated session: full KB search ---
( (
planned_query, planned_query,
start_date, start_date,
@ -954,8 +1057,6 @@ class KnowledgeBaseSearchMiddleware(AgentMiddleware): # type: ignore[type-arg]
document_ids=self.mentioned_document_ids, document_ids=self.mentioned_document_ids,
search_space_id=self.search_space_id, search_space_id=self.search_space_id,
) )
# Clear after first turn so they are not re-fetched on subsequent
# messages within the same agent instance.
self.mentioned_document_ids = [] self.mentioned_document_ids = []
# --- 2. Run KB search (recency browse or hybrid) --- # --- 2. Run KB search (recency browse or hybrid) ---
@ -983,26 +1084,24 @@ class KnowledgeBaseSearchMiddleware(AgentMiddleware): # type: ignore[type-arg]
# --- 3. Merge: mentioned first, then search (dedup by doc id) --- # --- 3. Merge: mentioned first, then search (dedup by doc id) ---
seen_doc_ids: set[int] = set() seen_doc_ids: set[int] = set()
merged: list[dict[str, Any]] = [] merged_auth: list[dict[str, Any]] = []
for doc in mentioned_results: for doc in mentioned_results:
doc_id = (doc.get("document") or {}).get("id") doc_id = (doc.get("document") or {}).get("id")
if doc_id is not None: if doc_id is not None:
seen_doc_ids.add(doc_id) seen_doc_ids.add(doc_id)
merged.append(doc) merged_auth.append(doc)
for doc in search_results: for doc in search_results:
doc_id = (doc.get("document") or {}).get("id") doc_id = (doc.get("document") or {}).get("id")
if doc_id is not None and doc_id in seen_doc_ids: if doc_id is not None and doc_id in seen_doc_ids:
continue continue
merged.append(doc) merged_auth.append(doc)
# --- 4. Build scoped filesystem --- # --- 4. Build scoped filesystem ---
new_files, doc_id_to_path = await build_scoped_filesystem( new_files, doc_id_to_path = await build_scoped_filesystem(
documents=merged, documents=merged_auth,
search_space_id=self.search_space_id, search_space_id=self.search_space_id,
) )
# Identify which paths belong to user-mentioned documents using
# the authoritative doc_id -> path mapping (no title guessing).
mentioned_doc_ids = { mentioned_doc_ids = {
(d.get("document") or {}).get("id") for d in mentioned_results (d.get("document") or {}).get("id") for d in mentioned_results
} }

View file

@ -13,8 +13,6 @@ from fastapi import Depends, FastAPI, HTTPException, Request, status
from fastapi.exceptions import RequestValidationError from fastapi.exceptions import RequestValidationError
from fastapi.middleware.cors import CORSMiddleware from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse from fastapi.responses import JSONResponse
from limits.storage import MemoryStorage
from slowapi import Limiter
from slowapi.errors import RateLimitExceeded from slowapi.errors import RateLimitExceeded
from slowapi.middleware import SlowAPIMiddleware from slowapi.middleware import SlowAPIMiddleware
from slowapi.util import get_remote_address from slowapi.util import get_remote_address
@ -36,6 +34,7 @@ from app.config import (
) )
from app.db import User, create_db_and_tables, get_async_session from app.db import User, create_db_and_tables, get_async_session
from app.exceptions import GENERIC_5XX_MESSAGE, ISSUES_URL, SurfSenseError from app.exceptions import GENERIC_5XX_MESSAGE, ISSUES_URL, SurfSenseError
from app.rate_limiter import limiter
from app.routes import router as crud_router from app.routes import router as crud_router
from app.routes.auth_routes import router as auth_router from app.routes.auth_routes import router as auth_router
from app.schemas import UserCreate, UserRead, UserUpdate from app.schemas import UserCreate, UserRead, UserUpdate
@ -54,17 +53,7 @@ rate_limit_logger = logging.getLogger("surfsense.rate_limit")
# Uses the same Redis instance as Celery for zero additional infrastructure. # Uses the same Redis instance as Celery for zero additional infrastructure.
# Protects auth endpoints from brute force and user enumeration attacks. # Protects auth endpoints from brute force and user enumeration attacks.
# SlowAPI limiter — provides default rate limits (1024/min) for ALL routes # limiter is imported from app.rate_limiter (shared module to avoid circular imports)
# via the ASGI middleware. This is the general safety net.
# in_memory_fallback ensures requests are still served (with per-worker
# in-memory limiting) when Redis is unreachable, instead of hanging.
limiter = Limiter(
key_func=get_remote_address,
storage_uri=config.REDIS_APP_URL,
default_limits=["1024/minute"],
in_memory_fallback_enabled=True,
in_memory_fallback=[MemoryStorage()],
)
def _get_request_id(request: Request) -> str: def _get_request_id(request: Request) -> str:
@ -126,6 +115,39 @@ def _surfsense_error_handler(request: Request, exc: SurfSenseError) -> JSONRespo
def _http_exception_handler(request: Request, exc: HTTPException) -> JSONResponse: def _http_exception_handler(request: Request, exc: HTTPException) -> JSONResponse:
"""Wrap FastAPI/Starlette HTTPExceptions into the standard envelope.""" """Wrap FastAPI/Starlette HTTPExceptions into the standard envelope."""
rid = _get_request_id(request) rid = _get_request_id(request)
# Structured dict details (e.g. {"code": "CAPTCHA_REQUIRED", "message": "..."})
# are preserved so the frontend can parse them.
if isinstance(exc.detail, dict):
err_code = exc.detail.get("code", _status_to_code(exc.status_code))
message = exc.detail.get("message", str(exc.detail))
if exc.status_code >= 500:
_error_logger.error(
"[%s] %s - HTTPException %d: %s",
rid,
request.url.path,
exc.status_code,
message,
)
message = GENERIC_5XX_MESSAGE
err_code = "INTERNAL_ERROR"
body = {
"error": {
"code": err_code,
"message": message,
"status": exc.status_code,
"request_id": rid,
"timestamp": datetime.now(UTC).isoformat(),
"report_url": ISSUES_URL,
},
"detail": exc.detail,
}
return JSONResponse(
status_code=exc.status_code,
content=body,
headers={"X-Request-ID": rid},
)
detail = exc.detail if isinstance(exc.detail, str) else str(exc.detail) detail = exc.detail if isinstance(exc.detail, str) else str(exc.detail)
if exc.status_code >= 500: if exc.status_code >= 500:
_error_logger.error( _error_logger.error(
@ -663,6 +685,13 @@ if config.AUTH_TYPE == "GOOGLE":
return response return response
# Anonymous (no-login) chat routes — mounted at /api/v1/public/anon-chat
from app.routes.anonymous_chat_routes import ( # noqa: E402
router as anonymous_chat_router,
)
app.include_router(anonymous_chat_router)
app.include_router(crud_router, prefix="/api/v1", tags=["crud"]) app.include_router(crud_router, prefix="/api/v1", tags=["crud"])

View file

@ -187,4 +187,11 @@ celery_app.conf.beat_schedule = {
"expires": 60, "expires": 60,
}, },
}, },
"reconcile-pending-stripe-token-purchases": {
"task": "reconcile_pending_stripe_token_purchases",
"schedule": crontab(**stripe_reconciliation_schedule_params),
"options": {
"expires": 60,
},
},
} }

View file

@ -42,7 +42,25 @@ def load_global_llm_configs():
try: try:
with open(global_config_file, encoding="utf-8") as f: with open(global_config_file, encoding="utf-8") as f:
data = yaml.safe_load(f) data = yaml.safe_load(f)
return data.get("global_llm_configs", []) configs = data.get("global_llm_configs", [])
seen_slugs: dict[str, int] = {}
for cfg in configs:
cfg.setdefault("billing_tier", "free")
cfg.setdefault("anonymous_enabled", False)
cfg.setdefault("seo_enabled", False)
if cfg.get("seo_enabled") and cfg.get("seo_slug"):
slug = cfg["seo_slug"]
if slug in seen_slugs:
print(
f"Warning: Duplicate seo_slug '{slug}' in global LLM configs "
f"(ids {seen_slugs[slug]} and {cfg.get('id')})"
)
else:
seen_slugs[slug] = cfg.get("id", 0)
return configs
except Exception as e: except Exception as e:
print(f"Warning: Failed to load global LLM configs: {e}") print(f"Warning: Failed to load global LLM configs: {e}")
return [] return []
@ -307,6 +325,36 @@ class Config:
os.getenv("STRIPE_RECONCILIATION_BATCH_SIZE", "100") os.getenv("STRIPE_RECONCILIATION_BATCH_SIZE", "100")
) )
# Premium token quota settings
PREMIUM_TOKEN_LIMIT = int(os.getenv("PREMIUM_TOKEN_LIMIT", "5000000"))
STRIPE_PREMIUM_TOKEN_PRICE_ID = os.getenv("STRIPE_PREMIUM_TOKEN_PRICE_ID")
STRIPE_TOKENS_PER_UNIT = int(os.getenv("STRIPE_TOKENS_PER_UNIT", "1000000"))
STRIPE_TOKEN_BUYING_ENABLED = (
os.getenv("STRIPE_TOKEN_BUYING_ENABLED", "FALSE").upper() == "TRUE"
)
# Anonymous / no-login mode settings
NOLOGIN_MODE_ENABLED = os.getenv("NOLOGIN_MODE_ENABLED", "FALSE").upper() == "TRUE"
ANON_TOKEN_LIMIT = int(os.getenv("ANON_TOKEN_LIMIT", "1000000"))
ANON_TOKEN_WARNING_THRESHOLD = int(
os.getenv("ANON_TOKEN_WARNING_THRESHOLD", "800000")
)
ANON_TOKEN_QUOTA_TTL_DAYS = int(os.getenv("ANON_TOKEN_QUOTA_TTL_DAYS", "30"))
ANON_MAX_UPLOAD_SIZE_MB = int(os.getenv("ANON_MAX_UPLOAD_SIZE_MB", "5"))
# Default quota reserve tokens when not specified per-model
QUOTA_MAX_RESERVE_PER_CALL = int(os.getenv("QUOTA_MAX_RESERVE_PER_CALL", "8000"))
# Abuse prevention: concurrent stream cap and CAPTCHA
ANON_MAX_CONCURRENT_STREAMS = int(os.getenv("ANON_MAX_CONCURRENT_STREAMS", "2"))
ANON_CAPTCHA_REQUEST_THRESHOLD = int(
os.getenv("ANON_CAPTCHA_REQUEST_THRESHOLD", "5")
)
# Cloudflare Turnstile CAPTCHA
TURNSTILE_ENABLED = os.getenv("TURNSTILE_ENABLED", "FALSE").upper() == "TRUE"
TURNSTILE_SECRET_KEY = os.getenv("TURNSTILE_SECRET_KEY", "")
# Auth # Auth
AUTH_TYPE = os.getenv("AUTH_TYPE") AUTH_TYPE = os.getenv("AUTH_TYPE")
REGISTRATION_ENABLED = os.getenv("REGISTRATION_ENABLED", "TRUE").upper() == "TRUE" REGISTRATION_ENABLED = os.getenv("REGISTRATION_ENABLED", "TRUE").upper() == "TRUE"

View file

@ -48,6 +48,11 @@ global_llm_configs:
- id: -1 - id: -1
name: "Global GPT-4 Turbo" name: "Global GPT-4 Turbo"
description: "OpenAI's GPT-4 Turbo with default prompts and citations" description: "OpenAI's GPT-4 Turbo with default prompts and citations"
billing_tier: "free"
anonymous_enabled: true
seo_enabled: true
seo_slug: "gpt-4-turbo"
quota_reserve_tokens: 4000
provider: "OPENAI" provider: "OPENAI"
model_name: "gpt-4-turbo-preview" model_name: "gpt-4-turbo-preview"
api_key: "sk-your-openai-api-key-here" api_key: "sk-your-openai-api-key-here"
@ -67,6 +72,11 @@ global_llm_configs:
- id: -2 - id: -2
name: "Global Claude 3 Opus" name: "Global Claude 3 Opus"
description: "Anthropic's most capable model with citations" description: "Anthropic's most capable model with citations"
billing_tier: "free"
anonymous_enabled: true
seo_enabled: true
seo_slug: "claude-3-opus"
quota_reserve_tokens: 4000
provider: "ANTHROPIC" provider: "ANTHROPIC"
model_name: "claude-3-opus-20240229" model_name: "claude-3-opus-20240229"
api_key: "sk-ant-your-anthropic-api-key-here" api_key: "sk-ant-your-anthropic-api-key-here"
@ -84,6 +94,11 @@ global_llm_configs:
- id: -3 - id: -3
name: "Global GPT-3.5 Turbo (Fast)" name: "Global GPT-3.5 Turbo (Fast)"
description: "Fast responses without citations for quick queries" description: "Fast responses without citations for quick queries"
billing_tier: "free"
anonymous_enabled: true
seo_enabled: true
seo_slug: "gpt-3.5-turbo-fast"
quota_reserve_tokens: 2000
provider: "OPENAI" provider: "OPENAI"
model_name: "gpt-3.5-turbo" model_name: "gpt-3.5-turbo"
api_key: "sk-your-openai-api-key-here" api_key: "sk-your-openai-api-key-here"
@ -101,6 +116,11 @@ global_llm_configs:
- id: -4 - id: -4
name: "Global DeepSeek Chat (Chinese)" name: "Global DeepSeek Chat (Chinese)"
description: "DeepSeek optimized for Chinese language responses" description: "DeepSeek optimized for Chinese language responses"
billing_tier: "free"
anonymous_enabled: true
seo_enabled: true
seo_slug: "deepseek-chat-chinese"
quota_reserve_tokens: 4000
provider: "DEEPSEEK" provider: "DEEPSEEK"
model_name: "deepseek-chat" model_name: "deepseek-chat"
api_key: "your-deepseek-api-key-here" api_key: "your-deepseek-api-key-here"
@ -128,6 +148,11 @@ global_llm_configs:
- id: -5 - id: -5
name: "Global Azure GPT-4o" name: "Global Azure GPT-4o"
description: "Azure OpenAI GPT-4o deployment" description: "Azure OpenAI GPT-4o deployment"
billing_tier: "free"
anonymous_enabled: true
seo_enabled: true
seo_slug: "azure-gpt-4o"
quota_reserve_tokens: 4000
provider: "AZURE" provider: "AZURE"
# model_name format for Azure: azure/<your-deployment-name> # model_name format for Azure: azure/<your-deployment-name>
model_name: "azure/gpt-4o-deployment" model_name: "azure/gpt-4o-deployment"
@ -151,6 +176,11 @@ global_llm_configs:
- id: -6 - id: -6
name: "Global Azure GPT-4 Turbo" name: "Global Azure GPT-4 Turbo"
description: "Azure OpenAI GPT-4 Turbo deployment" description: "Azure OpenAI GPT-4 Turbo deployment"
billing_tier: "free"
anonymous_enabled: true
seo_enabled: true
seo_slug: "azure-gpt-4-turbo"
quota_reserve_tokens: 4000
provider: "AZURE" provider: "AZURE"
model_name: "azure/gpt-4-turbo-deployment" model_name: "azure/gpt-4-turbo-deployment"
api_key: "your-azure-api-key-here" api_key: "your-azure-api-key-here"
@ -170,6 +200,11 @@ global_llm_configs:
- id: -7 - id: -7
name: "Global Groq Llama 3" name: "Global Groq Llama 3"
description: "Ultra-fast Llama 3 70B via Groq" description: "Ultra-fast Llama 3 70B via Groq"
billing_tier: "free"
anonymous_enabled: true
seo_enabled: true
seo_slug: "groq-llama-3"
quota_reserve_tokens: 8000
provider: "GROQ" provider: "GROQ"
model_name: "llama3-70b-8192" model_name: "llama3-70b-8192"
api_key: "your-groq-api-key-here" api_key: "your-groq-api-key-here"
@ -187,6 +222,11 @@ global_llm_configs:
- id: -8 - id: -8
name: "Global MiniMax M2.5" name: "Global MiniMax M2.5"
description: "MiniMax M2.5 with 204K context window and competitive pricing" description: "MiniMax M2.5 with 204K context window and competitive pricing"
billing_tier: "free"
anonymous_enabled: true
seo_enabled: true
seo_slug: "minimax-m2.5"
quota_reserve_tokens: 4000
provider: "MINIMAX" provider: "MINIMAX"
model_name: "MiniMax-M2.5" model_name: "MiniMax-M2.5"
api_key: "your-minimax-api-key-here" api_key: "your-minimax-api-key-here"
@ -365,3 +405,13 @@ global_vision_llm_configs:
# - Only use vision-capable models (GPT-4o, Gemini, Claude 3, etc.) # - Only use vision-capable models (GPT-4o, Gemini, Claude 3, etc.)
# - Lower temperature (0.3) is recommended for accurate screenshot analysis # - Lower temperature (0.3) is recommended for accurate screenshot analysis
# - Lower max_tokens (1000) is sufficient since autocomplete produces short suggestions # - Lower max_tokens (1000) is sufficient since autocomplete produces short suggestions
#
# TOKEN QUOTA & ANONYMOUS ACCESS NOTES:
# - billing_tier: "free" or "premium". Controls whether registered users need premium token quota.
# - anonymous_enabled: true/false. Whether the model appears in the public no-login catalog.
# - seo_enabled: true/false. Whether a /free/<seo_slug> landing page is generated.
# - seo_slug: Stable URL slug for SEO pages. Must be unique. Do NOT change once public.
# - seo_title: Optional HTML title tag override for the model's /free/<slug> page.
# - seo_description: Optional meta description override for the model's /free/<slug> page.
# - quota_reserve_tokens: Tokens reserved before each LLM call for quota enforcement.
# Independent of litellm_params.max_tokens. Used by the token quota service.

View file

@ -12,6 +12,7 @@ from sqlalchemy import (
ARRAY, ARRAY,
JSON, JSON,
TIMESTAMP, TIMESTAMP,
BigInteger,
Boolean, Boolean,
Column, Column,
Enum as SQLAlchemyEnum, Enum as SQLAlchemyEnum,
@ -318,6 +319,12 @@ class PagePurchaseStatus(StrEnum):
FAILED = "failed" FAILED = "failed"
class PremiumTokenPurchaseStatus(StrEnum):
    """Lifecycle of a premium-token Stripe checkout: PENDING until the webhook
    confirms payment (COMPLETED) or the session fails/expires (FAILED)."""

    PENDING = "pending"
    COMPLETED = "completed"
    FAILED = "failed"
# Centralized configuration for incentive tasks # Centralized configuration for incentive tasks
# This makes it easy to add new tasks without changing code in multiple places # This makes it easy to add new tasks without changing code in multiple places
INCENTIVE_TASKS_CONFIG = { INCENTIVE_TASKS_CONFIG = {
@ -1739,6 +1746,38 @@ class PagePurchase(Base, TimestampMixin):
user = relationship("User", back_populates="page_purchases") user = relationship("User", back_populates="page_purchases")
class PremiumTokenPurchase(Base, TimestampMixin):
    """Tracks Stripe checkout sessions used to grant additional premium token credits."""

    __tablename__ = "premium_token_purchases"
    __allow_unmapped__ = True

    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    # Purchasing user; rows are removed together with the user (CASCADE).
    user_id = Column(
        UUID(as_uuid=True),
        ForeignKey("user.id", ondelete="CASCADE"),
        nullable=False,
        index=True,
    )
    # Unique per Stripe Checkout Session — the unique constraint is what makes
    # webhook fulfillment idempotent (a replayed event finds the same row).
    stripe_checkout_session_id = Column(
        String(255), nullable=False, unique=True, index=True
    )
    stripe_payment_intent_id = Column(String(255), nullable=True, index=True)
    # Number of price units bought in the checkout.
    quantity = Column(Integer, nullable=False)
    # Total tokens credited for this purchase (quantity * tokens-per-unit);
    # BigInteger because grants are in the millions of tokens.
    tokens_granted = Column(BigInteger, nullable=False)
    # Amount/currency as reported by Stripe; nullable until the webhook fills
    # them in. NOTE(review): presumably Stripe's smallest-currency-unit integer
    # (e.g. cents) — confirm against the webhook payload.
    amount_total = Column(Integer, nullable=True)
    currency = Column(String(10), nullable=True)
    status = Column(
        SQLAlchemyEnum(PremiumTokenPurchaseStatus),
        nullable=False,
        default=PremiumTokenPurchaseStatus.PENDING,
        index=True,
    )
    # Set when the purchase transitions to COMPLETED.
    completed_at = Column(TIMESTAMP(timezone=True), nullable=True)

    user = relationship("User", back_populates="premium_token_purchases")
class SearchSpaceRole(BaseModel, TimestampMixin): class SearchSpaceRole(BaseModel, TimestampMixin):
""" """
Custom roles that can be defined per search space. Custom roles that can be defined per search space.
@ -2009,6 +2048,11 @@ if config.AUTH_TYPE == "GOOGLE":
back_populates="user", back_populates="user",
cascade="all, delete-orphan", cascade="all, delete-orphan",
) )
premium_token_purchases = relationship(
"PremiumTokenPurchase",
back_populates="user",
cascade="all, delete-orphan",
)
# Page usage tracking for ETL services # Page usage tracking for ETL services
pages_limit = Column( pages_limit = Column(
@ -2019,6 +2063,19 @@ if config.AUTH_TYPE == "GOOGLE":
) )
pages_used = Column(Integer, nullable=False, default=0, server_default="0") pages_used = Column(Integer, nullable=False, default=0, server_default="0")
premium_tokens_limit = Column(
BigInteger,
nullable=False,
default=config.PREMIUM_TOKEN_LIMIT,
server_default=str(config.PREMIUM_TOKEN_LIMIT),
)
premium_tokens_used = Column(
BigInteger, nullable=False, default=0, server_default="0"
)
premium_tokens_reserved = Column(
BigInteger, nullable=False, default=0, server_default="0"
)
# User profile from OAuth # User profile from OAuth
display_name = Column(String, nullable=True) display_name = Column(String, nullable=True)
avatar_url = Column(String, nullable=True) avatar_url = Column(String, nullable=True)
@ -2123,6 +2180,11 @@ else:
back_populates="user", back_populates="user",
cascade="all, delete-orphan", cascade="all, delete-orphan",
) )
premium_token_purchases = relationship(
"PremiumTokenPurchase",
back_populates="user",
cascade="all, delete-orphan",
)
# Page usage tracking for ETL services # Page usage tracking for ETL services
pages_limit = Column( pages_limit = Column(
@ -2133,6 +2195,19 @@ else:
) )
pages_used = Column(Integer, nullable=False, default=0, server_default="0") pages_used = Column(Integer, nullable=False, default=0, server_default="0")
premium_tokens_limit = Column(
BigInteger,
nullable=False,
default=config.PREMIUM_TOKEN_LIMIT,
server_default=str(config.PREMIUM_TOKEN_LIMIT),
)
premium_tokens_used = Column(
BigInteger, nullable=False, default=0, server_default="0"
)
premium_tokens_reserved = Column(
BigInteger, nullable=False, default=0, server_default="0"
)
# User profile (can be set manually for non-OAuth users) # User profile (can be set manually for non-OAuth users)
display_name = Column(String, nullable=True) display_name = Column(String, nullable=True)
avatar_url = Column(String, nullable=True) avatar_url = Column(String, nullable=True)

View file

@ -0,0 +1,15 @@
"""Shared SlowAPI limiter instance used by app.py and route modules."""
from limits.storage import MemoryStorage
from slowapi import Limiter
from slowapi.util import get_remote_address
from app.config import config
limiter = Limiter(
key_func=get_remote_address,
storage_uri=config.REDIS_APP_URL,
default_limits=["1024/minute"],
in_memory_fallback_enabled=True,
in_memory_fallback=[MemoryStorage()],
)

View file

@ -0,0 +1,610 @@
"""Public API endpoints for anonymous (no-login) chat."""
from __future__ import annotations
import logging
import secrets
import uuid
from pathlib import PurePosixPath
from typing import Any
from fastapi import APIRouter, HTTPException, Request, Response, UploadFile, status
from fastapi.responses import StreamingResponse
from pydantic import BaseModel, Field
from app.config import config
from app.etl_pipeline.file_classifier import (
DIRECT_CONVERT_EXTENSIONS,
PLAINTEXT_EXTENSIONS,
)
from app.rate_limiter import limiter
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/v1/public/anon-chat", tags=["anonymous-chat"])
ANON_COOKIE_NAME = "surfsense_anon_session"
ANON_COOKIE_MAX_AGE = config.ANON_TOKEN_QUOTA_TTL_DAYS * 86400
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
def _get_or_create_session_id(request: Request, response: Response) -> str:
    """Return the anonymous session id from the cookie, minting one if absent.

    The value is a random ``secrets.token_urlsafe(32)`` string (always 43
    characters). It is NOT cryptographically signed; the length check below is
    only a cheap sanity filter that rejects tampered or legacy values and
    replaces them with a fresh id.
    """
    session_id = request.cookies.get(ANON_COOKIE_NAME)
    # token_urlsafe(32) always yields exactly 43 URL-safe characters.
    if session_id and len(session_id) == 43:
        return session_id

    session_id = secrets.token_urlsafe(32)
    # Cookie lifetime mirrors the anonymous quota TTL; `secure` only over HTTPS.
    response.set_cookie(
        key=ANON_COOKIE_NAME,
        value=session_id,
        max_age=ANON_COOKIE_MAX_AGE,
        httponly=True,
        samesite="lax",
        secure=request.url.scheme == "https",
        path="/",
    )
    return session_id
def _get_client_ip(request: Request) -> str:
forwarded = request.headers.get("x-forwarded-for")
return (
forwarded.split(",")[0].strip()
if forwarded
else (request.client.host if request.client else "unknown")
)
# ---------------------------------------------------------------------------
# Schemas
# ---------------------------------------------------------------------------
class AnonChatRequest(BaseModel):
    """Request body for an anonymous chat stream."""

    # SEO slug identifying the model (matched against seo_slug in the config).
    model_slug: str = Field(..., max_length=100)
    # Chat history as role/content dicts; at least one message required.
    messages: list[dict[str, Any]] = Field(..., min_length=1)
    # Tool names the client wants disabled (server still enforces its allow-list).
    disabled_tools: list[str] | None = None
    # Cloudflare Turnstile token; required once the CAPTCHA threshold is hit.
    turnstile_token: str | None = None
class AnonQuotaResponse(BaseModel):
    """Token-quota snapshot for the stricter of the session/IP buckets."""

    used: int
    limit: int
    remaining: int
    # String value of the quota service's status enum.
    status: str
    # Client should warn the user once `used` crosses this threshold.
    warning_threshold: int
    # True when the next chat request must include a Turnstile token.
    captcha_required: bool = False
class AnonModelResponse(BaseModel):
    """Public, secret-free view of one GLOBAL_LLM_CONFIGS entry."""

    id: int
    name: str
    description: str | None = None
    provider: str
    model_name: str
    # "free" or "premium"; is_premium mirrors billing_tier == "premium".
    billing_tier: str = "free"
    is_premium: bool = False
    # SEO landing-page fields for /free/<seo_slug>.
    seo_slug: str | None = None
    seo_enabled: bool = False
    seo_title: str | None = None
    seo_description: str | None = None
    # Tokens reserved up-front per call by the quota service.
    quota_reserve_tokens: int | None = None
# ---------------------------------------------------------------------------
# Routes
# ---------------------------------------------------------------------------
@router.get("/models", response_model=list[AnonModelResponse])
async def list_anonymous_models():
"""Return all models enabled for anonymous access."""
if not config.NOLOGIN_MODE_ENABLED:
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
detail="No-login mode is not enabled.",
)
models = []
for cfg in config.GLOBAL_LLM_CONFIGS:
if cfg.get("anonymous_enabled", False):
models.append(
AnonModelResponse(
id=cfg.get("id", 0),
name=cfg.get("name", ""),
description=cfg.get("description"),
provider=cfg.get("provider", ""),
model_name=cfg.get("model_name", ""),
billing_tier=cfg.get("billing_tier", "free"),
is_premium=cfg.get("billing_tier", "free") == "premium",
seo_slug=cfg.get("seo_slug"),
seo_enabled=cfg.get("seo_enabled", False),
seo_title=cfg.get("seo_title"),
seo_description=cfg.get("seo_description"),
quota_reserve_tokens=cfg.get("quota_reserve_tokens"),
)
)
return models
@router.get("/models/{slug}", response_model=AnonModelResponse)
async def get_anonymous_model(slug: str):
"""Return a single model by its SEO slug."""
if not config.NOLOGIN_MODE_ENABLED:
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
detail="No-login mode is not enabled.",
)
for cfg in config.GLOBAL_LLM_CONFIGS:
if cfg.get("anonymous_enabled", False) and cfg.get("seo_slug") == slug:
return AnonModelResponse(
id=cfg.get("id", 0),
name=cfg.get("name", ""),
description=cfg.get("description"),
provider=cfg.get("provider", ""),
model_name=cfg.get("model_name", ""),
billing_tier=cfg.get("billing_tier", "free"),
is_premium=cfg.get("billing_tier", "free") == "premium",
seo_slug=cfg.get("seo_slug"),
seo_enabled=cfg.get("seo_enabled", False),
seo_title=cfg.get("seo_title"),
seo_description=cfg.get("seo_description"),
quota_reserve_tokens=cfg.get("quota_reserve_tokens"),
)
raise HTTPException(status_code=404, detail="Model not found")
@router.get("/quota", response_model=AnonQuotaResponse)
@limiter.limit("30/minute")
async def get_anonymous_quota(request: Request, response: Response):
"""Return current token usage for the anonymous session.
Reports the *stricter* of session and IP buckets so that opening a
new browser on the same IP doesn't show a misleadingly fresh quota.
"""
if not config.NOLOGIN_MODE_ENABLED:
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
detail="No-login mode is not enabled.",
)
from app.services.token_quota_service import (
TokenQuotaService,
compute_anon_identity_key,
compute_ip_quota_key,
)
client_ip = _get_client_ip(request)
session_id = _get_or_create_session_id(request, response)
session_key = compute_anon_identity_key(session_id)
session_result = await TokenQuotaService.anon_get_usage(session_key)
ip_key = compute_ip_quota_key(client_ip)
ip_result = await TokenQuotaService.anon_get_usage(ip_key)
# Use whichever bucket has higher usage — that's the real constraint
result = ip_result if ip_result.used > session_result.used else session_result
captcha_needed = False
if config.TURNSTILE_ENABLED:
req_count = await TokenQuotaService.anon_get_request_count(client_ip)
captcha_needed = req_count >= config.ANON_CAPTCHA_REQUEST_THRESHOLD
return AnonQuotaResponse(
used=result.used,
limit=result.limit,
remaining=result.remaining,
status=result.status.value,
warning_threshold=config.ANON_TOKEN_WARNING_THRESHOLD,
captcha_required=captcha_needed,
)
@router.post("/stream")
@limiter.limit("15/minute")
async def stream_anonymous_chat(
body: AnonChatRequest,
request: Request,
response: Response,
):
"""Stream a chat response for an anonymous user with quota enforcement."""
if not config.NOLOGIN_MODE_ENABLED:
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
detail="No-login mode is not enabled.",
)
from app.agents.new_chat.llm_config import (
AgentConfig,
create_chat_litellm_from_agent_config,
)
from app.services.token_quota_service import (
TokenQuotaService,
compute_anon_identity_key,
compute_ip_quota_key,
)
from app.services.turnstile_service import verify_turnstile_token
# Find the model config by slug
model_cfg = None
for cfg in config.GLOBAL_LLM_CONFIGS:
if (
cfg.get("anonymous_enabled", False)
and cfg.get("seo_slug") == body.model_slug
):
model_cfg = cfg
break
if model_cfg is None:
raise HTTPException(
status_code=404, detail="Model not found or not available for anonymous use"
)
client_ip = _get_client_ip(request)
# --- Concurrent stream limit ---
slot_acquired = await TokenQuotaService.anon_acquire_stream_slot(
client_ip, max_concurrent=config.ANON_MAX_CONCURRENT_STREAMS
)
if not slot_acquired:
raise HTTPException(
status_code=status.HTTP_429_TOO_MANY_REQUESTS,
detail={
"code": "ANON_TOO_MANY_STREAMS",
"message": f"Max {config.ANON_MAX_CONCURRENT_STREAMS} concurrent chats allowed. Please wait for a response to finish.",
},
)
try:
# --- CAPTCHA enforcement (check count without incrementing; count
# is bumped only after a successful response in _generate) ---
if config.TURNSTILE_ENABLED:
req_count = await TokenQuotaService.anon_get_request_count(client_ip)
if req_count >= config.ANON_CAPTCHA_REQUEST_THRESHOLD:
if not body.turnstile_token:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail={
"code": "CAPTCHA_REQUIRED",
"message": "Please complete the CAPTCHA to continue chatting.",
},
)
valid = await verify_turnstile_token(body.turnstile_token, client_ip)
if not valid:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail={
"code": "CAPTCHA_INVALID",
"message": "CAPTCHA verification failed. Please try again.",
},
)
await TokenQuotaService.anon_reset_request_count(client_ip)
# Build identity keys
session_id = _get_or_create_session_id(request, response)
session_key = compute_anon_identity_key(session_id)
ip_key = compute_ip_quota_key(client_ip)
# Reserve tokens
reserve_amount = min(
model_cfg.get("quota_reserve_tokens", config.QUOTA_MAX_RESERVE_PER_CALL),
config.QUOTA_MAX_RESERVE_PER_CALL,
)
request_id = uuid.uuid4().hex[:16]
quota_result = await TokenQuotaService.anon_reserve(
session_key=session_key,
ip_key=ip_key,
request_id=request_id,
reserve_tokens=reserve_amount,
)
if not quota_result.allowed:
raise HTTPException(
status_code=status.HTTP_429_TOO_MANY_REQUESTS,
detail={
"code": "ANON_QUOTA_EXCEEDED",
"message": "You've used all your free tokens. Create an account for 5M more!",
"used": quota_result.used,
"limit": quota_result.limit,
},
)
# Create agent config from YAML
agent_config = AgentConfig.from_yaml_config(model_cfg)
llm = create_chat_litellm_from_agent_config(agent_config)
if not llm:
await TokenQuotaService.anon_release(session_key, ip_key, request_id)
raise HTTPException(status_code=500, detail="Failed to create LLM instance")
# Server-side tool allow-list enforcement
anon_allowed_tools = {"web_search"}
client_disabled = set(body.disabled_tools) if body.disabled_tools else set()
enabled_for_agent = anon_allowed_tools - client_disabled
except HTTPException:
await TokenQuotaService.anon_release_stream_slot(client_ip)
raise
async def _generate():
from langchain_core.messages import HumanMessage
from app.agents.new_chat.chat_deepagent import create_surfsense_deep_agent
from app.agents.new_chat.checkpointer import get_checkpointer
from app.db import shielded_async_session
from app.services.connector_service import ConnectorService
from app.services.new_streaming_service import VercelStreamingService
from app.services.token_tracking_service import start_turn
from app.tasks.chat.stream_new_chat import StreamResult, _stream_agent_events
accumulator = start_turn()
streaming_service = VercelStreamingService()
try:
async with shielded_async_session() as session:
connector_service = ConnectorService(session, search_space_id=None)
checkpointer = await get_checkpointer()
anon_thread_id = f"anon-{session_id}-{request_id}"
agent = await create_surfsense_deep_agent(
llm=llm,
search_space_id=0,
db_session=session,
connector_service=connector_service,
checkpointer=checkpointer,
user_id=None,
thread_id=None,
agent_config=agent_config,
enabled_tools=list(enabled_for_agent),
disabled_tools=None,
anon_session_id=session_id,
)
user_query = ""
for msg in reversed(body.messages):
if msg.get("role") == "user":
user_query = msg.get("content", "")
break
langchain_messages = [HumanMessage(content=user_query)]
input_state = {
"messages": langchain_messages,
"search_space_id": 0,
}
langgraph_config = {
"configurable": {"thread_id": anon_thread_id},
"recursion_limit": 40,
}
yield streaming_service.format_message_start()
yield streaming_service.format_start_step()
initial_step_id = "thinking-1"
query_preview = user_query[:80] + (
"..." if len(user_query) > 80 else ""
)
initial_items = [f"Processing: {query_preview}"]
yield streaming_service.format_thinking_step(
step_id=initial_step_id,
title="Understanding your request",
status="in_progress",
items=initial_items,
)
stream_result = StreamResult()
async for sse in _stream_agent_events(
agent=agent,
config=langgraph_config,
input_data=input_state,
streaming_service=streaming_service,
result=stream_result,
step_prefix="thinking",
initial_step_id=initial_step_id,
initial_step_title="Understanding your request",
initial_step_items=initial_items,
):
yield sse
# Finalize quota with actual tokens
actual_tokens = accumulator.grand_total
finalize_result = await TokenQuotaService.anon_finalize(
session_key=session_key,
ip_key=ip_key,
request_id=request_id,
actual_tokens=actual_tokens,
)
# Count this as 1 completed response for CAPTCHA threshold
if config.TURNSTILE_ENABLED:
await TokenQuotaService.anon_increment_request_count(client_ip)
yield streaming_service.format_data(
"anon-quota",
{
"used": finalize_result.used,
"limit": finalize_result.limit,
"remaining": finalize_result.remaining,
"status": finalize_result.status.value,
},
)
if accumulator.per_message_summary():
yield streaming_service.format_data(
"token-usage",
{
"usage": accumulator.per_message_summary(),
"prompt_tokens": accumulator.total_prompt_tokens,
"completion_tokens": accumulator.total_completion_tokens,
"total_tokens": accumulator.grand_total,
},
)
yield streaming_service.format_finish_step()
yield streaming_service.format_finish()
yield streaming_service.format_done()
except Exception as e:
logger.exception("Anonymous chat stream error")
await TokenQuotaService.anon_release(session_key, ip_key, request_id)
yield streaming_service.format_error(f"Error during chat: {e!s}")
yield streaming_service.format_done()
finally:
await TokenQuotaService.anon_release_stream_slot(client_ip)
return StreamingResponse(
_generate(),
media_type="text/event-stream",
headers={
"Cache-Control": "no-cache",
"Connection": "keep-alive",
"X-Accel-Buffering": "no",
},
)
# ---------------------------------------------------------------------------
# Anonymous Document Upload (1-doc limit, plaintext/direct-convert only)
# ---------------------------------------------------------------------------
ANON_ALLOWED_EXTENSIONS = PLAINTEXT_EXTENSIONS | DIRECT_CONVERT_EXTENSIONS
ANON_DOC_REDIS_PREFIX = "anon:doc:"
class AnonDocResponse(BaseModel):
    """Metadata echoed back after a successful anonymous document upload."""

    filename: str
    size_bytes: int
    status: str = "uploaded"
@router.post("/upload", response_model=AnonDocResponse)
@limiter.limit("5/minute")
async def upload_anonymous_document(
file: UploadFile,
request: Request,
response: Response,
):
"""Upload a single document for anonymous chat (1-doc limit per session)."""
if not config.NOLOGIN_MODE_ENABLED:
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
detail="No-login mode is not enabled.",
)
session_id = _get_or_create_session_id(request, response)
if not file.filename:
raise HTTPException(status_code=400, detail="No filename provided")
ext = PurePosixPath(file.filename).suffix.lower()
if ext not in ANON_ALLOWED_EXTENSIONS:
raise HTTPException(
status_code=415,
detail=(
"File type not supported for anonymous upload. "
"Create an account to upload PDFs, Word documents, images, audio, and 20+ more file types. "
"Allowed extensions: text, code, CSV, HTML files."
),
)
max_size = config.ANON_MAX_UPLOAD_SIZE_MB * 1024 * 1024
content = await file.read()
if len(content) > max_size:
raise HTTPException(
status_code=413,
detail=f"File too large. Max size is {config.ANON_MAX_UPLOAD_SIZE_MB} MB.",
)
import json as _json
import redis.asyncio as aioredis
redis_client = aioredis.from_url(config.REDIS_APP_URL, decode_responses=True)
redis_key = f"{ANON_DOC_REDIS_PREFIX}{session_id}"
try:
existing = await redis_client.exists(redis_key)
if existing:
raise HTTPException(
status_code=409,
detail="Document limit reached. Create an account to upload more.",
)
text_content: str
if ext in PLAINTEXT_EXTENSIONS:
text_content = content.decode("utf-8", errors="replace")
elif ext in DIRECT_CONVERT_EXTENSIONS:
if ext in {".csv", ".tsv"}:
text_content = content.decode("utf-8", errors="replace")
else:
try:
from markdownify import markdownify
text_content = markdownify(
content.decode("utf-8", errors="replace")
)
except ImportError:
text_content = content.decode("utf-8", errors="replace")
else:
text_content = content.decode("utf-8", errors="replace")
doc_data = _json.dumps(
{
"filename": file.filename,
"size_bytes": len(content),
"content": text_content,
}
)
ttl_seconds = config.ANON_TOKEN_QUOTA_TTL_DAYS * 86400
await redis_client.set(redis_key, doc_data, ex=ttl_seconds)
finally:
await redis_client.aclose()
return AnonDocResponse(
filename=file.filename,
size_bytes=len(content),
)
@router.get("/document")
async def get_anonymous_document(request: Request, response: Response):
"""Get metadata of the uploaded document for the anonymous session."""
if not config.NOLOGIN_MODE_ENABLED:
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
detail="No-login mode is not enabled.",
)
session_id = _get_or_create_session_id(request, response)
import json as _json
import redis.asyncio as aioredis
redis_client = aioredis.from_url(config.REDIS_APP_URL, decode_responses=True)
redis_key = f"{ANON_DOC_REDIS_PREFIX}{session_id}"
try:
data = await redis_client.get(redis_key)
if not data:
raise HTTPException(status_code=404, detail="No document uploaded")
doc = _json.loads(data)
return {
"filename": doc["filename"],
"size_bytes": doc["size_bytes"],
}
finally:
await redis_client.aclose()

View file

@ -76,6 +76,14 @@ async def get_global_new_llm_configs(
"citations_enabled": True, "citations_enabled": True,
"is_global": True, "is_global": True,
"is_auto_mode": True, "is_auto_mode": True,
"billing_tier": "free",
"is_premium": False,
"anonymous_enabled": False,
"seo_enabled": False,
"seo_slug": None,
"seo_title": None,
"seo_description": None,
"quota_reserve_tokens": None,
} }
) )
@ -97,6 +105,14 @@ async def get_global_new_llm_configs(
), ),
"citations_enabled": cfg.get("citations_enabled", True), "citations_enabled": cfg.get("citations_enabled", True),
"is_global": True, "is_global": True,
"billing_tier": cfg.get("billing_tier", "free"),
"is_premium": cfg.get("billing_tier", "free") == "premium",
"anonymous_enabled": cfg.get("anonymous_enabled", False),
"seo_enabled": cfg.get("seo_enabled", False),
"seo_slug": cfg.get("seo_slug"),
"seo_title": cfg.get("seo_title"),
"seo_description": cfg.get("seo_description"),
"quota_reserve_tokens": cfg.get("quota_reserve_tokens"),
} }
safe_configs.append(safe_config) safe_configs.append(safe_config)

View file

@ -13,13 +13,24 @@ from sqlalchemy.ext.asyncio import AsyncSession
from stripe import SignatureVerificationError, StripeClient, StripeError from stripe import SignatureVerificationError, StripeClient, StripeError
from app.config import config from app.config import config
from app.db import PagePurchase, PagePurchaseStatus, User, get_async_session from app.db import (
PagePurchase,
PagePurchaseStatus,
PremiumTokenPurchase,
PremiumTokenPurchaseStatus,
User,
get_async_session,
)
from app.schemas.stripe import ( from app.schemas.stripe import (
CreateCheckoutSessionRequest, CreateCheckoutSessionRequest,
CreateCheckoutSessionResponse, CreateCheckoutSessionResponse,
CreateTokenCheckoutSessionRequest,
CreateTokenCheckoutSessionResponse,
PagePurchaseHistoryResponse, PagePurchaseHistoryResponse,
StripeStatusResponse, StripeStatusResponse,
StripeWebhookResponse, StripeWebhookResponse,
TokenPurchaseHistoryResponse,
TokenStripeStatusResponse,
) )
from app.users import current_active_user from app.users import current_active_user
@ -151,6 +162,26 @@ async def _mark_purchase_failed(
return StripeWebhookResponse() return StripeWebhookResponse()
async def _mark_token_purchase_failed(
    db_session: AsyncSession, checkout_session_id: str
) -> StripeWebhookResponse:
    """Mark a pending premium-token purchase as FAILED after an async-payment
    failure or checkout expiry.

    The row is locked FOR UPDATE so a concurrently-processing fulfillment
    webhook cannot race the status transition. Unknown session ids and
    already-completed purchases are left untouched.
    """
    purchase = (
        await db_session.execute(
            select(PremiumTokenPurchase)
            .where(
                PremiumTokenPurchase.stripe_checkout_session_id == checkout_session_id
            )
            .with_for_update()
        )
    ).scalar_one_or_none()
    # Only PENDING may transition to FAILED; COMPLETED must stay completed.
    if purchase is not None and purchase.status == PremiumTokenPurchaseStatus.PENDING:
        purchase.status = PremiumTokenPurchaseStatus.FAILED
        await db_session.commit()
    return StripeWebhookResponse()
async def _fulfill_completed_purchase( async def _fulfill_completed_purchase(
db_session: AsyncSession, checkout_session: Any db_session: AsyncSession, checkout_session: Any
) -> StripeWebhookResponse: ) -> StripeWebhookResponse:
@ -201,6 +232,86 @@ async def _fulfill_completed_purchase(
return StripeWebhookResponse() return StripeWebhookResponse()
async def _fulfill_completed_token_purchase(
    db_session: AsyncSession, checkout_session: Any
) -> StripeWebhookResponse:
    """Grant premium tokens to the user after a confirmed Stripe payment.

    Idempotent: the purchase row is locked FOR UPDATE and fulfillment is
    skipped when it is already COMPLETED, so replayed webhook events are
    harmless. If the local PENDING row is missing (webhook arrived before the
    checkout endpoint committed, or was created elsewhere), it is
    reconstructed from the session metadata.
    """
    checkout_session_id = str(checkout_session.id)
    purchase = (
        await db_session.execute(
            select(PremiumTokenPurchase)
            .where(
                PremiumTokenPurchase.stripe_checkout_session_id == checkout_session_id
            )
            .with_for_update()
        )
    ).scalar_one_or_none()

    if purchase is None:
        # Rebuild the purchase from Stripe metadata; refuse to fulfill when
        # the metadata is incomplete rather than guessing amounts.
        metadata = _get_metadata(checkout_session)
        user_id = metadata.get("user_id")
        quantity = int(metadata.get("quantity", "0"))
        tokens_per_unit = int(metadata.get("tokens_per_unit", "0"))
        if not user_id or quantity <= 0 or tokens_per_unit <= 0:
            logger.error(
                "Skipping token fulfillment for session %s: incomplete metadata %s",
                checkout_session_id,
                metadata,
            )
            return StripeWebhookResponse()
        purchase = PremiumTokenPurchase(
            user_id=uuid.UUID(user_id),
            stripe_checkout_session_id=checkout_session_id,
            stripe_payment_intent_id=_normalize_optional_string(
                getattr(checkout_session, "payment_intent", None)
            ),
            quantity=quantity,
            tokens_granted=quantity * tokens_per_unit,
            amount_total=getattr(checkout_session, "amount_total", None),
            currency=getattr(checkout_session, "currency", None),
            status=PremiumTokenPurchaseStatus.PENDING,
        )
        db_session.add(purchase)
        await db_session.flush()

    # Idempotency guard: a replayed event finds the COMPLETED row and stops.
    if purchase.status == PremiumTokenPurchaseStatus.COMPLETED:
        return StripeWebhookResponse()

    user = (
        (
            await db_session.execute(
                select(User).where(User.id == purchase.user_id).with_for_update(of=User)
            )
        )
        .unique()
        .scalar_one_or_none()
    )
    if user is None:
        logger.error(
            "Skipping token fulfillment for session %s: user %s not found",
            purchase.stripe_checkout_session_id,
            purchase.user_id,
        )
        return StripeWebhookResponse()

    purchase.status = PremiumTokenPurchaseStatus.COMPLETED
    purchase.completed_at = datetime.now(UTC)
    purchase.amount_total = getattr(checkout_session, "amount_total", None)
    purchase.currency = getattr(checkout_session, "currency", None)
    purchase.stripe_payment_intent_id = _normalize_optional_string(
        getattr(checkout_session, "payment_intent", None)
    )
    # Raise the cap from max(used, limit) so a user already over their limit
    # still nets the full grant. NOTE(review): presumed intent — confirm
    # against the quota service's limit semantics.
    user.premium_tokens_limit = (
        max(user.premium_tokens_used, user.premium_tokens_limit)
        + purchase.tokens_granted
    )
    await db_session.commit()
    return StripeWebhookResponse()
@router.post("/create-checkout-session", response_model=CreateCheckoutSessionResponse) @router.post("/create-checkout-session", response_model=CreateCheckoutSessionResponse)
async def create_checkout_session( async def create_checkout_session(
body: CreateCheckoutSessionRequest, body: CreateCheckoutSessionRequest,
@ -333,6 +444,10 @@ async def stripe_webhook(
) )
return StripeWebhookResponse() return StripeWebhookResponse()
metadata = _get_metadata(checkout_session)
purchase_type = metadata.get("purchase_type", "page_packs")
if purchase_type == "premium_tokens":
return await _fulfill_completed_token_purchase(db_session, checkout_session)
return await _fulfill_completed_purchase(db_session, checkout_session) return await _fulfill_completed_purchase(db_session, checkout_session)
if event.type in { if event.type in {
@ -340,6 +455,12 @@ async def stripe_webhook(
"checkout.session.expired", "checkout.session.expired",
}: }:
checkout_session = event.data.object checkout_session = event.data.object
metadata = _get_metadata(checkout_session)
purchase_type = metadata.get("purchase_type", "page_packs")
if purchase_type == "premium_tokens":
return await _mark_token_purchase_failed(
db_session, str(checkout_session.id)
)
return await _mark_purchase_failed(db_session, str(checkout_session.id)) return await _mark_purchase_failed(db_session, str(checkout_session.id))
return StripeWebhookResponse() return StripeWebhookResponse()
@ -369,3 +490,146 @@ async def get_page_purchases(
) )
return PagePurchaseHistoryResponse(purchases=purchases) return PagePurchaseHistoryResponse(purchases=purchases)
# =============================================================================
# Premium Token Purchase Routes
# =============================================================================
def _ensure_token_buying_enabled() -> None:
    """Raise 503 unless the premium-token purchase flow is switched on."""
    if config.STRIPE_TOKEN_BUYING_ENABLED:
        return
    raise HTTPException(
        status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
        detail="Premium token purchases are temporarily unavailable.",
    )
def _get_token_checkout_urls(search_space_id: int) -> tuple[str, str]:
    """Build the (success_url, cancel_url) redirect pair for a token checkout."""
    frontend = config.NEXT_FRONTEND_URL
    if not frontend:
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail="NEXT_FRONTEND_URL is not configured.",
        )
    # Normalize a trailing slash before composing dashboard routes.
    root = frontend.rstrip("/")
    return (
        f"{root}/dashboard/{search_space_id}/purchase-success",
        f"{root}/dashboard/{search_space_id}/purchase-cancel",
    )
def _get_required_token_price_id() -> str:
    """Return the configured Stripe price id, or fail with 503 when unset."""
    price_id = config.STRIPE_PREMIUM_TOKEN_PRICE_ID
    if price_id:
        return price_id
    raise HTTPException(
        status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
        detail="STRIPE_PREMIUM_TOKEN_PRICE_ID is not configured.",
    )
@router.post("/create-token-checkout-session")
async def create_token_checkout_session(
    body: CreateTokenCheckoutSessionRequest,
    user: User = Depends(current_active_user),
    db_session: AsyncSession = Depends(get_async_session),
):
    """Create a Stripe Checkout Session for buying premium token packs.

    Flow:
      1. Verify the feature flag, price ID, and frontend URL are configured.
      2. Create the Stripe-hosted checkout session; ``purchase_type`` in the
         metadata lets the shared webhook route fulfillment to the token path.
      3. Persist a PENDING PremiumTokenPurchase row keyed by the checkout
         session id; the webhook (or the reconciliation task) later marks it
         completed or failed.

    Raises:
        HTTPException: 503 when purchases are disabled or misconfigured,
            502 when Stripe errors or returns a session without a URL.
    """
    _ensure_token_buying_enabled()
    stripe_client = get_stripe_client()
    price_id = _get_required_token_price_id()
    success_url, cancel_url = _get_token_checkout_urls(body.search_space_id)
    # Total tokens this purchase will grant once paid.
    tokens_granted = body.quantity * config.STRIPE_TOKENS_PER_UNIT
    try:
        checkout_session = stripe_client.v1.checkout.sessions.create(
            params={
                "mode": "payment",
                "success_url": success_url,
                "cancel_url": cancel_url,
                "line_items": [
                    {
                        "price": price_id,
                        "quantity": body.quantity,
                    }
                ],
                "client_reference_id": str(user.id),
                "customer_email": user.email,
                # Metadata is echoed back on webhook events; purchase_type
                # distinguishes token purchases from page-pack purchases.
                "metadata": {
                    "user_id": str(user.id),
                    "quantity": str(body.quantity),
                    "tokens_per_unit": str(config.STRIPE_TOKENS_PER_UNIT),
                    "purchase_type": "premium_tokens",
                },
            }
        )
    except StripeError as exc:
        logger.exception("Failed to create token checkout session for user %s", user.id)
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail="Unable to create Stripe checkout session.",
        ) from exc

    checkout_url = getattr(checkout_session, "url", None)
    if not checkout_url:
        raise HTTPException(
            status_code=status.HTTP_502_BAD_GATEWAY,
            detail="Stripe checkout session did not return a URL.",
        )

    # Record the purchase as PENDING after the Stripe session exists;
    # fulfillment happens out-of-band (webhook / reconciliation).
    db_session.add(
        PremiumTokenPurchase(
            user_id=user.id,
            stripe_checkout_session_id=str(checkout_session.id),
            stripe_payment_intent_id=_normalize_optional_string(
                getattr(checkout_session, "payment_intent", None)
            ),
            quantity=body.quantity,
            tokens_granted=tokens_granted,
            amount_total=getattr(checkout_session, "amount_total", None),
            currency=getattr(checkout_session, "currency", None),
            status=PremiumTokenPurchaseStatus.PENDING,
        )
    )
    await db_session.commit()

    return CreateTokenCheckoutSessionResponse(checkout_url=checkout_url)
@router.get("/token-status")
async def get_token_status(
    user: User = Depends(current_active_user),
):
    """Return token-buying availability and current premium quota for frontend."""
    tokens_used = user.premium_tokens_used
    tokens_limit = user.premium_tokens_limit
    tokens_remaining = tokens_limit - tokens_used
    if tokens_remaining < 0:
        # Never report a negative remaining balance.
        tokens_remaining = 0
    return TokenStripeStatusResponse(
        token_buying_enabled=config.STRIPE_TOKEN_BUYING_ENABLED,
        premium_tokens_used=tokens_used,
        premium_tokens_limit=tokens_limit,
        premium_tokens_remaining=tokens_remaining,
    )
@router.get("/token-purchases")
async def get_token_purchases(
    user: User = Depends(current_active_user),
    db_session: AsyncSession = Depends(get_async_session),
    offset: int = 0,
    limit: int = 50,
):
    """Return the authenticated user's premium token purchase history.

    Args:
        offset: Number of records to skip; clamped to >= 0.
        limit: Maximum records to return; clamped to 1..100.

    Returns:
        TokenPurchaseHistoryResponse with purchases ordered newest-first.
    """
    # Clamp pagination inputs so negative query parameters cannot reach the
    # database (PostgreSQL rejects negative LIMIT/OFFSET with an error).
    offset = max(offset, 0)
    limit = max(1, min(limit, 100))
    result = await db_session.execute(
        select(PremiumTokenPurchase)
        .where(PremiumTokenPurchase.user_id == user.id)
        .order_by(PremiumTokenPurchase.created_at.desc())
        .offset(offset)
        .limit(limit)
    )
    purchases = result.scalars().all()
    return TokenPurchaseHistoryResponse(purchases=purchases)

View file

@ -164,6 +164,15 @@ class GlobalNewLLMConfigRead(BaseModel):
is_global: bool = True # Always true for global configs is_global: bool = True # Always true for global configs
is_auto_mode: bool = False # True only for Auto mode (ID 0) is_auto_mode: bool = False # True only for Auto mode (ID 0)
billing_tier: str = "free"
is_premium: bool = False
anonymous_enabled: bool = False
seo_enabled: bool = False
seo_slug: str | None = None
seo_title: str | None = None
seo_description: str | None = None
quota_reserve_tokens: int | None = None
# ============================================================================= # =============================================================================
# LLM Preferences Schemas (for role assignments) # LLM Preferences Schemas (for role assignments)

View file

@ -54,3 +54,48 @@ class StripeWebhookResponse(BaseModel):
"""Generic acknowledgement for Stripe webhook delivery.""" """Generic acknowledgement for Stripe webhook delivery."""
received: bool = True received: bool = True
class CreateTokenCheckoutSessionRequest(BaseModel):
    """Request body for creating a premium token purchase checkout session."""

    # Number of token packs to buy (1-100); each pack grants
    # STRIPE_TOKENS_PER_UNIT tokens.
    quantity: int = Field(ge=1, le=100)
    # Dashboard search space used to build the success/cancel redirect URLs.
    search_space_id: int = Field(ge=1)
class CreateTokenCheckoutSessionResponse(BaseModel):
    """Response containing the Stripe-hosted checkout URL."""

    # URL the frontend redirects the user to in order to complete payment.
    checkout_url: str
class TokenPurchaseRead(BaseModel):
    """Serialized premium token purchase record."""

    id: uuid.UUID
    # Stripe checkout session that initiated the purchase.
    stripe_checkout_session_id: str
    # Payment intent id, when Stripe has attached one.
    stripe_payment_intent_id: str | None = None
    # Number of token packs bought.
    quantity: int
    # Total tokens granted by this purchase (quantity * tokens-per-unit).
    tokens_granted: int
    # Amount charged; presumably in the currency's smallest unit per Stripe
    # convention — TODO confirm against the fulfillment code.
    amount_total: int | None = None
    currency: str | None = None
    # Purchase lifecycle state (e.g. pending / completed / failed).
    status: str
    completed_at: datetime | None = None
    created_at: datetime

    # Allow construction directly from the ORM model instance.
    model_config = ConfigDict(from_attributes=True)
class TokenPurchaseHistoryResponse(BaseModel):
    """Response containing the user's premium token purchases."""

    # Purchase records; the /token-purchases endpoint orders them newest-first.
    purchases: list[TokenPurchaseRead]
class TokenStripeStatusResponse(BaseModel):
    """Response describing token-buying availability and current quota."""

    # Mirrors STRIPE_TOKEN_BUYING_ENABLED; drives the purchase UI.
    token_buying_enabled: bool
    premium_tokens_used: int = 0
    premium_tokens_limit: int = 0
    # Computed as max(0, limit - used); never negative.
    premium_tokens_remaining: int = 0

View file

@ -135,9 +135,12 @@ class LLMRouterService:
logger.debug("LLM Router already initialized, skipping") logger.debug("LLM Router already initialized, skipping")
return return
# Build model list from global configs auto_configs = [
c for c in global_configs if c.get("billing_tier", "free") != "premium"
]
model_list = [] model_list = []
for config in global_configs: for config in auto_configs:
deployment = cls._config_to_deployment(config) deployment = cls._config_to_deployment(config)
if deployment: if deployment:
model_list.append(deployment) model_list.append(deployment)

View file

@ -0,0 +1,621 @@
"""
Atomic token quota service for anonymous and registered users.
Provides reserve/finalize/release/get_usage operations with race-safe
implementation using Redis Lua scripts (anonymous) and Postgres row locks
(registered premium).
"""
from __future__ import annotations
import hashlib
import logging
from enum import StrEnum
from typing import Any
import redis.asyncio as aioredis
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config
logger = logging.getLogger(__name__)
class QuotaScope(StrEnum):
    """Which quota backend a request is accounted against.

    Anonymous usage is tracked in Redis; registered premium usage in Postgres.
    """

    ANONYMOUS = "anonymous"
    PREMIUM = "premium"
class QuotaStatus(StrEnum):
    """Traffic-light quota state returned by reserve/finalize/get_usage."""

    OK = "ok"  # usage below the warning threshold
    WARNING = "warning"  # usage at/over the warning threshold, under the limit
    BLOCKED = "blocked"  # reservation denied or usage at/over the limit
class QuotaResult:
    """Outcome of a quota operation: admission decision plus usage counters."""

    __slots__ = ("allowed", "limit", "remaining", "reserved", "status", "used")

    def __init__(
        self,
        allowed: bool,
        status: QuotaStatus,
        used: int,
        limit: int,
        reserved: int = 0,
        remaining: int = 0,
    ):
        self.allowed = allowed
        self.status = status
        self.used = used
        self.limit = limit
        self.reserved = reserved
        self.remaining = remaining

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a JSON-friendly dict (status collapsed to its string value)."""
        field_order = ("allowed", "status", "used", "limit", "reserved", "remaining")
        payload: dict[str, Any] = {name: getattr(self, name) for name in field_order}
        payload["status"] = self.status.value
        return payload
# ---------------------------------------------------------------------------
# Redis Lua scripts for atomic anonymous quota operations
# ---------------------------------------------------------------------------
# KEYS[1] = quota key (e.g. "anon_quota:session:<session_id>")
# ARGV[1] = reserve_tokens
# ARGV[2] = limit
# ARGV[3] = warning_threshold
# ARGV[4] = request_id
# ARGV[5] = ttl_seconds
# Returns: [allowed(0/1), status("ok"/"warning"/"blocked"), used, reserved]
_RESERVE_LUA = """
local key = KEYS[1]
local reserve = tonumber(ARGV[1])
local limit = tonumber(ARGV[2])
local warning = tonumber(ARGV[3])
local req_id = ARGV[4]
local ttl = tonumber(ARGV[5])
local used = tonumber(redis.call('HGET', key, 'used') or '0')
local reserved = tonumber(redis.call('HGET', key, 'reserved') or '0')
local effective = used + reserved + reserve
if effective > limit then
return {0, 'blocked', used, reserved}
end
redis.call('HINCRBY', key, 'reserved', reserve)
redis.call('HSET', key, 'req:' .. req_id, reserve)
redis.call('EXPIRE', key, ttl)
local new_reserved = reserved + reserve
local status = 'ok'
if (used + new_reserved) >= warning then
status = 'warning'
end
return {1, status, used, new_reserved}
"""
# KEYS[1] = quota key
# ARGV[1] = request_id
# ARGV[2] = actual_tokens
# ARGV[3] = warning_threshold
# Returns: [used, reserved, status]
_FINALIZE_LUA = """
local key = KEYS[1]
local req_id = ARGV[1]
local actual = tonumber(ARGV[2])
local warning = tonumber(ARGV[3])
local orig_reserve = tonumber(redis.call('HGET', key, 'req:' .. req_id) or '0')
if orig_reserve == 0 then
return {tonumber(redis.call('HGET', key, 'used') or '0'), tonumber(redis.call('HGET', key, 'reserved') or '0'), 'ok'}
end
redis.call('HDEL', key, 'req:' .. req_id)
redis.call('HINCRBY', key, 'reserved', -orig_reserve)
redis.call('HINCRBY', key, 'used', actual)
local used = tonumber(redis.call('HGET', key, 'used') or '0')
local reserved = tonumber(redis.call('HGET', key, 'reserved') or '0')
local status = 'ok'
if used >= warning then
status = 'warning'
end
return {used, reserved, status}
"""
# KEYS[1] = quota key
# ARGV[1] = request_id
# Returns: 1 if released, 0 if not found
_RELEASE_LUA = """
local key = KEYS[1]
local req_id = ARGV[1]
local orig_reserve = tonumber(redis.call('HGET', key, 'req:' .. req_id) or '0')
if orig_reserve == 0 then
return 0
end
redis.call('HDEL', key, 'req:' .. req_id)
redis.call('HINCRBY', key, 'reserved', -orig_reserve)
return 1
"""
def _get_anon_redis() -> aioredis.Redis:
    # Fresh connection per call; decode_responses=True yields str results.
    # Callers are responsible for awaiting aclose() (see try/finally at call sites).
    return aioredis.from_url(config.REDIS_APP_URL, decode_responses=True)
def compute_anon_identity_key(
    session_id: str,
    ip_hash: str | None = None,
) -> str:
    """Build the Redis hash key for anonymous quota tracking.

    The signed session cookie is the primary identity. ``ip_hash`` is accepted
    for interface compatibility but not used here — IP usage lives under a
    separate key so cookie-reset evasion is caught.
    """
    return "anon_quota:session:" + session_id
def compute_ip_quota_key(ip_address: str) -> str:
    """Build IP-only quota key. UA is excluded so rotating User-Agent cannot bypass limits."""
    # Truncated SHA-256 keeps keys short while avoiding storing raw IPs.
    digest = hashlib.sha256(ip_address.encode()).hexdigest()
    return f"anon_quota:ip:{digest[:16]}"
# ---------------------------------------------------------------------------
# Concurrent stream limiter (per-IP)
# ---------------------------------------------------------------------------
# Atomic acquire: returns 1 if slot acquired, 0 if at capacity.
# KEYS[1] = stream counter key ARGV[1] = max_concurrent ARGV[2] = safety_ttl
_ACQUIRE_STREAM_LUA = """
local key = KEYS[1]
local max_c = tonumber(ARGV[1])
local ttl = tonumber(ARGV[2])
local cur = tonumber(redis.call('GET', key) or '0')
if cur >= max_c then
return 0
end
redis.call('INCR', key)
redis.call('EXPIRE', key, ttl)
return 1
"""
# Atomic release: DECR with floor at 0
_RELEASE_STREAM_LUA = """
local key = KEYS[1]
local cur = tonumber(redis.call('GET', key) or '0')
if cur <= 0 then
return 0
end
redis.call('DECR', key)
return 1
"""
def compute_stream_slot_key(ip_address: str) -> str:
    """Redis key for the per-IP concurrent stream counter."""
    digest = hashlib.sha256(ip_address.encode()).hexdigest()[:16]
    return f"anon:streams:{digest}"
def compute_request_count_key(ip_address: str) -> str:
    """Redis key for the per-IP daily request counter (CAPTCHA triggering)."""
    digest = hashlib.sha256(ip_address.encode()).hexdigest()[:16]
    return f"anon:reqcount:{digest}"
class TokenQuotaService:
    """Unified quota service for anonymous (Redis) and premium (Postgres) scopes.

    Anonymous operations run as Redis Lua scripts so the check-and-update is
    atomic; premium operations take a SELECT ... FOR UPDATE row lock on the
    user row for the same race safety in Postgres. Every Redis-backed method
    opens a fresh connection and closes it in a finally block.
    """

    # ------------------------------------------------------------------
    # Concurrent stream limiter
    # ------------------------------------------------------------------
    @staticmethod
    async def anon_acquire_stream_slot(
        ip_address: str,
        max_concurrent: int = 2,
        safety_ttl: int = 300,
    ) -> bool:
        """Atomically claim one concurrent-stream slot for this IP.

        Returns True when a slot was acquired, False when the IP already has
        ``max_concurrent`` active streams. ``safety_ttl`` (seconds) bounds how
        long a leaked slot lingers if release is never called.
        """
        key = compute_stream_slot_key(ip_address)
        r = _get_anon_redis()
        try:
            result = await r.eval(
                _ACQUIRE_STREAM_LUA, 1, key, str(max_concurrent), str(safety_ttl)
            )
            return bool(result)
        finally:
            await r.aclose()

    @staticmethod
    async def anon_release_stream_slot(ip_address: str) -> None:
        """Release one stream slot for this IP (no-op when the counter is 0)."""
        key = compute_stream_slot_key(ip_address)
        r = _get_anon_redis()
        try:
            await r.eval(_RELEASE_STREAM_LUA, 1, key)
        finally:
            await r.aclose()

    # ------------------------------------------------------------------
    # Per-IP request counter (for CAPTCHA triggering)
    # ------------------------------------------------------------------
    @staticmethod
    async def anon_increment_request_count(ip_address: str, ttl: int = 86400) -> int:
        """Increment and return current request count for this IP. TTL resets daily."""
        key = compute_request_count_key(ip_address)
        r = _get_anon_redis()
        try:
            # INCR + EXPIRE pipelined; EXPIRE refreshes the window on every hit.
            pipe = r.pipeline()
            pipe.incr(key)
            pipe.expire(key, ttl)
            results = await pipe.execute()
            return int(results[0])
        finally:
            await r.aclose()

    @staticmethod
    async def anon_get_request_count(ip_address: str) -> int:
        """Return the current request count for this IP (0 when unset or expired)."""
        key = compute_request_count_key(ip_address)
        r = _get_anon_redis()
        try:
            val = await r.get(key)
            return int(val) if val else 0
        finally:
            await r.aclose()

    @staticmethod
    async def anon_reset_request_count(ip_address: str) -> None:
        """Delete the request counter for this IP, restarting the count at 0."""
        key = compute_request_count_key(ip_address)
        r = _get_anon_redis()
        try:
            await r.delete(key)
        finally:
            await r.aclose()

    # ------------------------------------------------------------------
    # Anonymous (Redis-backed)
    # ------------------------------------------------------------------
    @staticmethod
    async def anon_reserve(
        session_key: str,
        ip_key: str | None,
        request_id: str,
        reserve_tokens: int,
    ) -> QuotaResult:
        """Atomically reserve ``reserve_tokens`` against the session quota.

        When ``ip_key`` is given the same reservation is also applied to the
        IP-level quota; if only the IP check blocks, the session reservation
        is rolled back so neither key leaks reserved tokens.
        """
        limit = config.ANON_TOKEN_LIMIT
        warning = config.ANON_TOKEN_WARNING_THRESHOLD
        ttl = config.ANON_TOKEN_QUOTA_TTL_DAYS * 86400
        r = _get_anon_redis()
        try:
            result = await r.eval(
                _RESERVE_LUA,
                1,
                session_key,
                str(reserve_tokens),
                str(limit),
                str(warning),
                request_id,
                str(ttl),
            )
            allowed = bool(result[0])
            # decode_responses=True normally yields str; guard bytes anyway.
            status_str = result[1] if isinstance(result[1], str) else result[1].decode()
            used = int(result[2])
            reserved = int(result[3])
            if ip_key:
                ip_result = await r.eval(
                    _RESERVE_LUA,
                    1,
                    ip_key,
                    str(reserve_tokens),
                    str(limit),
                    str(warning),
                    request_id,
                    str(ttl),
                )
                ip_allowed = bool(ip_result[0])
                ip_used = int(ip_result[2])
                if not ip_allowed and allowed:
                    # IP quota blocked after the session reservation succeeded:
                    # undo the session reservation and report the block.
                    await r.eval(_RELEASE_LUA, 1, session_key, request_id)
                    allowed = False
                    status_str = "blocked"
                    # Surface the higher of the two usage counters.
                    used = max(used, ip_used)
            status = QuotaStatus(status_str)
            remaining = max(0, limit - used - reserved)
            return QuotaResult(
                allowed=allowed,
                status=status,
                used=used,
                limit=limit,
                reserved=reserved,
                remaining=remaining,
            )
        finally:
            await r.aclose()

    @staticmethod
    async def anon_finalize(
        session_key: str,
        ip_key: str | None,
        request_id: str,
        actual_tokens: int,
    ) -> QuotaResult:
        """Convert the reservation for ``request_id`` into actual usage.

        The Lua script drops the per-request reservation and adds
        ``actual_tokens`` to the used counter. When ``ip_key`` is given the
        IP-level quota is finalized the same way (its result is discarded;
        only the session counters are returned).
        """
        warning = config.ANON_TOKEN_WARNING_THRESHOLD
        limit = config.ANON_TOKEN_LIMIT
        r = _get_anon_redis()
        try:
            result = await r.eval(
                _FINALIZE_LUA,
                1,
                session_key,
                request_id,
                str(actual_tokens),
                str(warning),
            )
            used = int(result[0])
            reserved = int(result[1])
            status_str = result[2] if isinstance(result[2], str) else result[2].decode()
            if ip_key:
                await r.eval(
                    _FINALIZE_LUA,
                    1,
                    ip_key,
                    request_id,
                    str(actual_tokens),
                    str(warning),
                )
            status = QuotaStatus(status_str)
            remaining = max(0, limit - used - reserved)
            return QuotaResult(
                allowed=True,
                status=status,
                used=used,
                limit=limit,
                reserved=reserved,
                remaining=remaining,
            )
        finally:
            await r.aclose()

    @staticmethod
    async def anon_release(
        session_key: str,
        ip_key: str | None,
        request_id: str,
    ) -> None:
        """Drop the reservation for ``request_id`` without recording any usage.

        Intended for requests that fail or are cancelled after reserving.
        """
        r = _get_anon_redis()
        try:
            await r.eval(_RELEASE_LUA, 1, session_key, request_id)
            if ip_key:
                await r.eval(_RELEASE_LUA, 1, ip_key, request_id)
        finally:
            await r.aclose()

    @staticmethod
    async def anon_get_usage(session_key: str) -> QuotaResult:
        """Read-only snapshot of the session quota (no reservation changes)."""
        limit = config.ANON_TOKEN_LIMIT
        warning = config.ANON_TOKEN_WARNING_THRESHOLD
        r = _get_anon_redis()
        try:
            data = await r.hgetall(session_key)
            used = int(data.get("used", 0))
            reserved = int(data.get("reserved", 0))
            remaining = max(0, limit - used - reserved)
            # Status is derived from `used` alone; in-flight reservations do
            # not flip the status here (unlike the reserve path).
            if used >= limit:
                status = QuotaStatus.BLOCKED
            elif used >= warning:
                status = QuotaStatus.WARNING
            else:
                status = QuotaStatus.OK
            return QuotaResult(
                allowed=used < limit,
                status=status,
                used=used,
                limit=limit,
                reserved=reserved,
                remaining=remaining,
            )
        finally:
            await r.aclose()

    # ------------------------------------------------------------------
    # Premium (Postgres-backed)
    # ------------------------------------------------------------------
    @staticmethod
    async def premium_reserve(
        db_session: AsyncSession,
        user_id: Any,
        request_id: str,
        reserve_tokens: int,
    ) -> QuotaResult:
        """Reserve ``reserve_tokens`` against the user's premium quota.

        Takes a row lock (SELECT ... FOR UPDATE) on the user so concurrent
        reservations serialize; commits on success, rolls back (releasing the
        lock) when the quota would be exceeded.

        NOTE(review): unlike the Redis path, ``request_id`` is not persisted —
        premium release/finalize are amount-based; confirm that is intended.
        """
        from app.db import User

        user = (
            (
                await db_session.execute(
                    select(User).where(User.id == user_id).with_for_update(of=User)
                )
            )
            .unique()
            .scalar_one_or_none()
        )
        if user is None:
            # Unknown user: deny with a zero limit.
            return QuotaResult(
                allowed=False,
                status=QuotaStatus.BLOCKED,
                used=0,
                limit=0,
            )
        limit = user.premium_tokens_limit
        used = user.premium_tokens_used
        reserved = user.premium_tokens_reserved
        effective = used + reserved + reserve_tokens
        if effective > limit:
            remaining = max(0, limit - used - reserved)
            # Rollback releases the row lock without persisting anything.
            await db_session.rollback()
            return QuotaResult(
                allowed=False,
                status=QuotaStatus.BLOCKED,
                used=used,
                limit=limit,
                reserved=reserved,
                remaining=remaining,
            )
        user.premium_tokens_reserved = reserved + reserve_tokens
        await db_session.commit()
        new_reserved = reserved + reserve_tokens
        remaining = max(0, limit - used - new_reserved)
        # Premium warning threshold is fixed at 80% of the limit.
        warning_threshold = int(limit * 0.8)
        if (used + new_reserved) >= limit:
            status = QuotaStatus.BLOCKED
        elif (used + new_reserved) >= warning_threshold:
            status = QuotaStatus.WARNING
        else:
            status = QuotaStatus.OK
        return QuotaResult(
            allowed=True,
            status=status,
            used=used,
            limit=limit,
            reserved=new_reserved,
            remaining=remaining,
        )

    @staticmethod
    async def premium_finalize(
        db_session: AsyncSession,
        user_id: Any,
        request_id: str,
        actual_tokens: int,
        reserved_tokens: int,
    ) -> QuotaResult:
        """Replace a prior reservation with actual usage for a premium user.

        Under a row lock, subtracts ``reserved_tokens`` from the reserved
        counter (floored at 0) and adds ``actual_tokens`` to the used counter,
        then commits. ``request_id`` is accepted for interface symmetry with
        the anonymous path but is not used here.
        """
        from app.db import User

        user = (
            (
                await db_session.execute(
                    select(User).where(User.id == user_id).with_for_update(of=User)
                )
            )
            .unique()
            .scalar_one_or_none()
        )
        if user is None:
            return QuotaResult(
                allowed=False, status=QuotaStatus.BLOCKED, used=0, limit=0
            )
        user.premium_tokens_reserved = max(
            0, user.premium_tokens_reserved - reserved_tokens
        )
        user.premium_tokens_used = user.premium_tokens_used + actual_tokens
        await db_session.commit()
        limit = user.premium_tokens_limit
        used = user.premium_tokens_used
        reserved = user.premium_tokens_reserved
        remaining = max(0, limit - used - reserved)
        warning_threshold = int(limit * 0.8)
        if used >= limit:
            status = QuotaStatus.BLOCKED
        elif used >= warning_threshold:
            status = QuotaStatus.WARNING
        else:
            status = QuotaStatus.OK
        return QuotaResult(
            allowed=True,
            status=status,
            used=used,
            limit=limit,
            reserved=reserved,
            remaining=remaining,
        )

    @staticmethod
    async def premium_release(
        db_session: AsyncSession,
        user_id: Any,
        reserved_tokens: int,
    ) -> None:
        """Give back ``reserved_tokens`` without recording usage.

        Floors the reserved counter at 0; the commit runs even when the user
        row is missing so the transaction always ends cleanly.
        """
        from app.db import User

        user = (
            (
                await db_session.execute(
                    select(User).where(User.id == user_id).with_for_update(of=User)
                )
            )
            .unique()
            .scalar_one_or_none()
        )
        if user is not None:
            user.premium_tokens_reserved = max(
                0, user.premium_tokens_reserved - reserved_tokens
            )
        await db_session.commit()

    @staticmethod
    async def premium_get_usage(
        db_session: AsyncSession,
        user_id: Any,
    ) -> QuotaResult:
        """Read-only snapshot of the premium quota (no row lock, no writes)."""
        from app.db import User

        user = (
            (await db_session.execute(select(User).where(User.id == user_id)))
            .unique()
            .scalar_one_or_none()
        )
        if user is None:
            return QuotaResult(
                allowed=False, status=QuotaStatus.BLOCKED, used=0, limit=0
            )
        limit = user.premium_tokens_limit
        used = user.premium_tokens_used
        reserved = user.premium_tokens_reserved
        remaining = max(0, limit - used - reserved)
        warning_threshold = int(limit * 0.8)
        if used >= limit:
            status = QuotaStatus.BLOCKED
        elif used >= warning_threshold:
            status = QuotaStatus.WARNING
        else:
            status = QuotaStatus.OK
        return QuotaResult(
            allowed=used < limit,
            status=status,
            used=used,
            limit=limit,
            reserved=reserved,
            remaining=remaining,
        )

View file

@ -0,0 +1,52 @@
"""Cloudflare Turnstile CAPTCHA verification service."""
from __future__ import annotations
import logging
import httpx
from app.config import config
logger = logging.getLogger(__name__)
TURNSTILE_VERIFY_URL = "https://challenges.cloudflare.com/turnstile/v0/siteverify"
async def verify_turnstile_token(token: str, remote_ip: str | None = None) -> bool:
    """Verify a Turnstile response token with Cloudflare.

    Returns True when the token is valid and the challenge was solved by a
    real user. Returns False (never raises) on network errors or invalid
    tokens so callers can treat it as a simple boolean gate.
    """
    # Feature disabled or secret missing: treat as a pass-through gate.
    if not config.TURNSTILE_ENABLED:
        return True
    secret = config.TURNSTILE_SECRET_KEY
    if not secret:
        logger.warning("TURNSTILE_SECRET_KEY is not set; skipping verification")
        return True

    form_data: dict[str, str] = {"secret": secret, "response": token}
    if remote_ip:
        form_data["remoteip"] = remote_ip

    try:
        async with httpx.AsyncClient(timeout=10) as client:
            response = await client.post(TURNSTILE_VERIFY_URL, data=form_data)
            response.raise_for_status()
            result = response.json()
    except Exception:
        logger.exception("Turnstile verification request failed")
        return False

    if not result.get("success", False):
        logger.info(
            "Turnstile verification failed: %s",
            result.get("error-codes", []),
        )
        return False
    return True

View file

@ -1,4 +1,4 @@
"""Reconcile pending Stripe page purchases that might miss webhook fulfillment.""" """Reconcile pending Stripe purchases that might miss webhook fulfillment."""
from __future__ import annotations from __future__ import annotations
@ -11,7 +11,12 @@ from stripe import StripeClient, StripeError
from app.celery_app import celery_app from app.celery_app import celery_app
from app.config import config from app.config import config
from app.db import PagePurchase, PagePurchaseStatus from app.db import (
PagePurchase,
PagePurchaseStatus,
PremiumTokenPurchase,
PremiumTokenPurchaseStatus,
)
from app.routes import stripe_routes from app.routes import stripe_routes
from app.tasks.celery_tasks import get_celery_session_maker from app.tasks.celery_tasks import get_celery_session_maker
@ -126,7 +131,108 @@ async def _reconcile_pending_page_purchases() -> None:
await db_session.rollback() await db_session.rollback()
logger.info( logger.info(
"Stripe reconciliation completed. fulfilled=%s failed=%s checked=%s", "Stripe page reconciliation completed. fulfilled=%s failed=%s checked=%s",
fulfilled_count,
failed_count,
len(pending_purchases),
)
@celery_app.task(name="reconcile_pending_stripe_token_purchases")
def reconcile_pending_stripe_token_purchases_task():
    """Recover paid token purchases that were left pending due to missed webhook handling.

    Runs the async reconciliation coroutine via asyncio.run(), which creates a
    fresh event loop, shuts down async generators, and unsets the loop when
    done. The previous new_event_loop()/set_event_loop()/close() pattern left
    a *closed* loop registered for the thread, which could break later asyncio
    usage in the same worker process, and skipped async-generator shutdown.
    """
    asyncio.run(_reconcile_pending_token_purchases())
async def _reconcile_pending_token_purchases() -> None:
"""Reconcile stale pending token purchases against Stripe source of truth."""
stripe_client = get_stripe_client()
if stripe_client is None:
return
lookback_minutes = max(config.STRIPE_RECONCILIATION_LOOKBACK_MINUTES, 0)
batch_size = max(config.STRIPE_RECONCILIATION_BATCH_SIZE, 1)
cutoff = datetime.now(UTC) - timedelta(minutes=lookback_minutes)
async with get_celery_session_maker()() as db_session:
pending_purchases = (
(
await db_session.execute(
select(PremiumTokenPurchase)
.where(
PremiumTokenPurchase.status
== PremiumTokenPurchaseStatus.PENDING,
PremiumTokenPurchase.created_at <= cutoff,
)
.order_by(PremiumTokenPurchase.created_at.asc())
.limit(batch_size)
)
)
.scalars()
.all()
)
if not pending_purchases:
logger.debug(
"Stripe token reconciliation found no pending purchases older than %s minutes.",
lookback_minutes,
)
return
logger.info(
"Stripe token reconciliation checking %s pending purchases (cutoff=%s, batch=%s).",
len(pending_purchases),
lookback_minutes,
batch_size,
)
fulfilled_count = 0
failed_count = 0
for purchase in pending_purchases:
checkout_session_id = purchase.stripe_checkout_session_id
try:
checkout_session = stripe_client.v1.checkout.sessions.retrieve(
checkout_session_id
)
except StripeError:
logger.exception(
"Stripe token reconciliation failed to retrieve checkout session %s",
checkout_session_id,
)
await db_session.rollback()
continue
payment_status = getattr(checkout_session, "payment_status", None)
session_status = getattr(checkout_session, "status", None)
try:
if payment_status in {"paid", "no_payment_required"}:
await stripe_routes._fulfill_completed_token_purchase(
db_session, checkout_session
)
fulfilled_count += 1
elif session_status == "expired":
await stripe_routes._mark_token_purchase_failed(
db_session, str(checkout_session.id)
)
failed_count += 1
except Exception:
logger.exception(
"Stripe token reconciliation failed while processing checkout session %s",
checkout_session_id,
)
await db_session.rollback()
logger.info(
"Stripe token reconciliation completed. fulfilled=%s failed=%s checked=%s",
fulfilled_count, fulfilled_count,
failed_count, failed_count,
len(pending_purchases), len(pending_purchases),

View file

@ -1175,6 +1175,10 @@ async def stream_new_chat(
accumulator = start_turn() accumulator = start_turn()
# Premium quota tracking state
_premium_reserved = 0
_premium_request_id: str | None = None
session = async_session_maker() session = async_session_maker()
try: try:
# Mark AI as responding to this user for live collaboration # Mark AI as responding to this user for live collaboration
@ -1220,6 +1224,34 @@ async def stream_new_chat(
llm_config_id, llm_config_id,
) )
# Premium quota reservation
if agent_config and agent_config.is_premium and user_id:
import uuid as _uuid
from app.config import config as _app_config
from app.services.token_quota_service import TokenQuotaService
_premium_request_id = _uuid.uuid4().hex[:16]
reserve_amount = min(
agent_config.quota_reserve_tokens
or _app_config.QUOTA_MAX_RESERVE_PER_CALL,
_app_config.QUOTA_MAX_RESERVE_PER_CALL,
)
async with shielded_async_session() as quota_session:
quota_result = await TokenQuotaService.premium_reserve(
db_session=quota_session,
user_id=UUID(user_id),
request_id=_premium_request_id,
reserve_tokens=reserve_amount,
)
_premium_reserved = reserve_amount
if not quota_result.allowed:
yield streaming_service.format_error(
"Premium token quota exceeded. Please purchase more tokens to continue using premium models."
)
yield streaming_service.format_done()
return
if not llm: if not llm:
yield streaming_service.format_error("Failed to create LLM instance") yield streaming_service.format_error("Failed to create LLM instance")
yield streaming_service.format_done() yield streaming_service.format_done()
@ -1626,6 +1658,26 @@ async def stream_new_chat(
chat_id, generated_title chat_id, generated_title
) )
# Finalize premium quota with actual tokens
if _premium_request_id and user_id:
try:
from app.services.token_quota_service import TokenQuotaService
async with shielded_async_session() as quota_session:
await TokenQuotaService.premium_finalize(
db_session=quota_session,
user_id=UUID(user_id),
request_id=_premium_request_id,
actual_tokens=accumulator.grand_total,
reserved_tokens=_premium_reserved,
)
except Exception:
logging.getLogger(__name__).warning(
"Failed to finalize premium quota for user %s",
user_id,
exc_info=True,
)
usage_summary = accumulator.per_message_summary() usage_summary = accumulator.per_message_summary()
_perf_log.info( _perf_log.info(
"[token_usage] normal new_chat: calls=%d total=%d summary=%s", "[token_usage] normal new_chat: calls=%d total=%d summary=%s",
@ -1700,6 +1752,23 @@ async def stream_new_chat(
# (CancelledError is a BaseException), and the rest of the # (CancelledError is a BaseException), and the rest of the
# finally block — including session.close() — would never run. # finally block — including session.close() — would never run.
with anyio.CancelScope(shield=True): with anyio.CancelScope(shield=True):
# Release premium reservation if not finalized
if _premium_request_id and _premium_reserved > 0 and user_id:
try:
from app.services.token_quota_service import TokenQuotaService
async with shielded_async_session() as quota_session:
await TokenQuotaService.premium_release(
db_session=quota_session,
user_id=UUID(user_id),
reserved_tokens=_premium_reserved,
)
_premium_reserved = 0
except Exception:
logging.getLogger(__name__).warning(
"Failed to release premium quota for user %s", user_id
)
try: try:
await session.rollback() await session.rollback()
await clear_ai_responding(session, chat_id) await clear_ai_responding(session, chat_id)

View file

@ -10,4 +10,8 @@ DATABASE_URL=postgresql://postgres:[YOUR-PASSWORD]@db.sdsf.supabase.co:5432/post
NEXT_PUBLIC_DEPLOYMENT_MODE="self-hosted" or "cloud" NEXT_PUBLIC_DEPLOYMENT_MODE="self-hosted" or "cloud"
# PostHog analytics (optional, leave empty to disable) # PostHog analytics (optional, leave empty to disable)
NEXT_PUBLIC_POSTHOG_KEY= NEXT_PUBLIC_POSTHOG_KEY=
# Cloudflare Turnstile CAPTCHA for anonymous chat abuse prevention
# Get your site key from https://dash.cloudflare.com/ -> Turnstile
NEXT_PUBLIC_TURNSTILE_SITE_KEY=

View file

@ -0,0 +1,24 @@
"use client";
import type { ReactNode } from "react";
import { use } from "react";
import { FreeLayoutDataProvider } from "@/components/layout/providers/FreeLayoutDataProvider";
import { AnonymousModeProvider } from "@/contexts/anonymous-mode";
import { LoginGateProvider } from "@/contexts/login-gate";
interface FreeModelLayoutProps {
children: ReactNode;
params: Promise<{ model_slug: string }>;
}
/**
 * Layout for a public free-model chat route.
 *
 * Unwraps the async route params with React's `use` hook (client component)
 * and wires up the providers the page tree needs: anonymous-mode state seeded
 * with the model slug from the URL, the login gate, and free-layout data.
 */
export default function FreeModelLayout({ children, params }: FreeModelLayoutProps) {
	const { model_slug } = use(params);
	return (
		<AnonymousModeProvider initialModelSlug={model_slug}>
			<LoginGateProvider>
				<FreeLayoutDataProvider>{children}</FreeLayoutDataProvider>
			</LoginGateProvider>
		</AnonymousModeProvider>
	);
}

View file

@ -0,0 +1,254 @@
import { SquareArrowOutUpRight } from "lucide-react";
import type { Metadata } from "next";
import Link from "next/link";
import { notFound } from "next/navigation";
import { FreeChatPage } from "@/components/free-chat/free-chat-page";
import { BreadcrumbNav } from "@/components/seo/breadcrumb-nav";
import { FAQJsonLd, JsonLd } from "@/components/seo/json-ld";
import { Button } from "@/components/ui/button";
import { Separator } from "@/components/ui/separator";
import type { AnonModel } from "@/contracts/types/anonymous-chat.types";
import { BACKEND_URL } from "@/lib/env-config";
interface PageProps {
params: Promise<{ model_slug: string }>;
}
/**
 * Fetch a single public anonymous-chat model by slug (5-minute ISR cache).
 * Any network or HTTP failure resolves to null so the page can 404 gracefully.
 */
async function getModel(slug: string): Promise<AnonModel | null> {
	const url = `${BACKEND_URL}/api/v1/public/anon-chat/models/${encodeURIComponent(slug)}`;
	try {
		const response = await fetch(url, { next: { revalidate: 300 } });
		return response.ok ? response.json() : null;
	} catch {
		return null;
	}
}
/**
 * Fetch the full public model catalog (5-minute ISR cache).
 * Failures degrade to an empty list instead of throwing.
 */
async function getAllModels(): Promise<AnonModel[]> {
	try {
		const response = await fetch(`${BACKEND_URL}/api/v1/public/anon-chat/models`, {
			next: { revalidate: 300 },
		});
		return response.ok ? response.json() : [];
	} catch {
		return [];
	}
}
/** SEO page title: backend-curated `seo_title` when set, else the template. */
function buildSeoTitle(model: AnonModel): string {
	// `||` (not `??`) preserves the original falsy check: an empty string
	// also falls through to the templated title.
	return (
		model.seo_title ||
		`${model.name} Free Online Without Login | No Sign-Up AI Chat | SurfSense`
	);
}
/** SEO meta description: backend-curated `seo_description` when set, else the template. */
function buildSeoDescription(model: AnonModel): string {
	// `||` preserves the original falsy check (empty string → template).
	return (
		model.seo_description ||
		`Use ${model.name} free online without login. No sign-up required. Chat with ${model.name} by ${model.provider} instantly on SurfSense, the open source ChatGPT alternative with no login.`
	);
}
/**
 * Build the FAQ entries for a model's free-chat landing page.
 * These are rendered on-page and emitted as FAQ JSON-LD structured data;
 * every answer is templated on the model's display name.
 */
function buildModelFaq(model: AnonModel) {
	return [
		{
			question: `Can I use ${model.name} without login?`,
			answer: `Yes. ${model.name} is available on SurfSense without login. No account creation, no email, no password. Just open the page and start chatting with ${model.name} for free.`,
		},
		{
			question: `Is ${model.name} really free on SurfSense?`,
			answer: `Yes! You can use ${model.name} completely free without login or sign-up. SurfSense gives you 1 million free tokens to use across any model, including ${model.name}.`,
		},
		{
			question: `How do I use ${model.name} with no login?`,
			answer: `Just start typing in the chat box above. ${model.name} will respond instantly. No login wall, no sign-up form, no verification. Your conversations are not stored in any database.`,
		},
		{
			question: `What can I do with ${model.name} on SurfSense?`,
			answer: `You can ask questions, get explanations, write content, brainstorm ideas, debug code, and more. ${model.name} is a powerful AI assistant available for free without login on SurfSense.`,
		},
		{
			question: `How is SurfSense different from using ${model.name} directly?`,
			answer: `SurfSense gives you free access without login to ${model.name} and many other AI models in one place. Create a free account to unlock document Q&A, team collaboration, and integrations with Slack, Google Drive, Notion, and more.`,
		},
	];
}
/**
 * Per-model <head> metadata for /free/[model_slug].
 *
 * Resolves the model by slug and builds title, description, canonical URL,
 * a long-tail keyword list, and Open Graph / Twitter cards. Returns a plain
 * not-found title when the slug resolves to no model.
 */
export async function generateMetadata({ params }: PageProps): Promise<Metadata> {
	const { model_slug } = await params;
	const model = await getModel(model_slug);
	if (!model) return { title: "Model Not Found | SurfSense" };

	const title = buildSeoTitle(model);
	const description = buildSeoDescription(model);
	const canonical = `https://surfsense.com/free/${model.seo_slug}`;
	const lower = model.name.toLowerCase();

	// Single card image shared by the Open Graph block.
	const ogImage = {
		url: "/og-image.png",
		width: 1200,
		height: 630,
		alt: `${model.name} Free Chat on SurfSense`,
	};

	return {
		title,
		description,
		alternates: { canonical },
		keywords: [
			`${lower} free`,
			`free ${lower}`,
			`${lower} online`,
			`${lower} online free`,
			`${lower} without login`,
			`${lower} no login`,
			`${lower} no sign up`,
			`${lower} login free`,
			`${lower} free without login`,
			`${lower} free no login`,
			`${lower} chat free`,
			`${lower} free online`,
			`use ${lower} for free`,
			`use ${lower} without login`,
			`${lower} alternative`,
			`${lower} alternative free`,
			"chatgpt no login",
			"chatgpt without login",
			"free ai chat no login",
			"ai chat without login",
			"free ai apps",
		],
		openGraph: {
			title,
			description,
			url: canonical,
			siteName: "SurfSense",
			type: "website",
			images: [ogImage],
		},
		twitter: {
			card: "summary_large_image",
			title,
			description,
			images: ["/og-image.png"],
		},
	};
}
/** Pre-render a static route segment for every anonymous-chat model that has an SEO slug. */
export async function generateStaticParams() {
	const models = await getAllModels();
	const slugged = models.filter((m) => Boolean(m.seo_slug));
	return slugged.map((m) => ({ model_slug: m.seo_slug as string }));
}
/**
 * Server component for /free/[model_slug].
 *
 * Renders the anonymous chat UI full-height, followed by crawler-facing SEO
 * content (breadcrumbs, per-model FAQ, related-model links) and structured
 * data: a WebApplication JSON-LD block plus FAQPage JSON-LD. Calls
 * `notFound()` when the slug does not resolve to a model.
 */
export default async function FreeModelPage({ params }: PageProps) {
	const { model_slug } = await params;
	// Fetch the target model and the full list (used for related links) in parallel.
	const [model, allModels] = await Promise.all([getModel(model_slug), getAllModels()]);
	if (!model) notFound();
	const description = buildSeoDescription(model);
	const faqItems = buildModelFaq(model);
	// Up to four other slugged models, excluding the current one.
	const relatedModels = allModels
		.filter((m) => m.seo_slug && m.seo_slug !== model.seo_slug)
		.slice(0, 4);
	return (
		<>
			{/* Invisible SEO metadata */}
			<JsonLd
				data={{
					"@context": "https://schema.org",
					"@type": "WebApplication",
					name: `${model.name} Free Chat Without Login - SurfSense`,
					description,
					url: `https://surfsense.com/free/${model.seo_slug}`,
					applicationCategory: "ChatApplication",
					operatingSystem: "Web",
					offers: {
						"@type": "Offer",
						price: "0",
						priceCurrency: "USD",
						description: `Free access to ${model.name} AI chat without login`,
					},
					provider: {
						"@type": "Organization",
						name: "SurfSense",
						url: "https://surfsense.com",
					},
					isPartOf: {
						"@type": "WebSite",
						name: "SurfSense",
						url: "https://surfsense.com",
					},
				}}
			/>
			<FAQJsonLd questions={faqItems} />
			{/* Chat fills the entire viewport area inside LayoutShell */}
			<div className="h-full">
				<FreeChatPage />
			</div>
			{/* SEO content: in DOM for crawlers, clipped by parent overflow-hidden */}
			<div className="border-t bg-background">
				<article className="container mx-auto px-4 py-10 max-w-3xl">
					<BreadcrumbNav
						items={[
							{ name: "Home", href: "/" },
							{ name: "Free AI Chat", href: "/free" },
							{ name: model.name, href: `/free/${model.seo_slug}` },
						]}
					/>
					<header className="mt-6 mb-6">
						<h1 className="text-2xl font-bold mb-2">Chat with {model.name} Free, No Login</h1>
						<p className="text-sm text-muted-foreground leading-relaxed">
							Use <strong>{model.name}</strong> free online without login or sign-up. No account, no
							email, no password needed. Powered by SurfSense.
						</p>
					</header>
					<Separator className="my-8" />
					<section>
						<h2 className="text-xl font-bold mb-4">
							{model.name} Free: Frequently Asked Questions
						</h2>
						<dl className="flex flex-col gap-3">
							{faqItems.map((item) => (
								<div key={item.question} className="rounded-lg border bg-card p-4">
									<dt className="font-medium text-sm">{item.question}</dt>
									<dd className="mt-1.5 text-sm text-muted-foreground leading-relaxed">
										{item.answer}
									</dd>
								</div>
							))}
						</dl>
					</section>
					{/* Related-model nav is omitted entirely when no other slugged models exist. */}
					{relatedModels.length > 0 && (
						<>
							<Separator className="my-8" />
							<nav aria-label="Other free AI models">
								<h2 className="text-xl font-bold mb-4">Try Other Free AI Models</h2>
								<div className="flex flex-wrap gap-2">
									{relatedModels.map((m) => (
										<Button key={m.id} variant="outline" size="sm" asChild>
											<Link href={`/free/${m.seo_slug}`}>
												{m.name}
												<SquareArrowOutUpRight className="size-3" />
											</Link>
										</Button>
									))}
									<Button variant="outline" size="sm" asChild>
										<Link href="/free">View All Models</Link>
									</Button>
								</div>
							</nav>
						</>
					)}
				</article>
			</div>
		</>
	);
}

View file

@ -0,0 +1,387 @@
import { SquareArrowOutUpRight } from "lucide-react";
import type { Metadata } from "next";
import Link from "next/link";
import { BreadcrumbNav } from "@/components/seo/breadcrumb-nav";
import { FAQJsonLd, JsonLd } from "@/components/seo/json-ld";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import { Separator } from "@/components/ui/separator";
import {
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from "@/components/ui/table";
import type { AnonModel } from "@/contracts/types/anonymous-chat.types";
import { BACKEND_URL } from "@/lib/env-config";
// Static SEO metadata for the /free hub page. The long keyword list targets
// "ChatGPT free / no login" style search queries; canonical plus OG/Twitter
// cards all point at https://surfsense.com/free.
export const metadata: Metadata = {
	title: "ChatGPT Free Online Without Login | Chat GPT No Login, Claude AI Free | SurfSense",
	description:
		"Use ChatGPT free online without login. Chat with GPT-4, Claude AI, Gemini and more for free. No sign-up required. Open source NotebookLM alternative with free AI chat and document Q&A.",
	keywords: [
		"chatgpt free",
		"chat gpt free",
		"free chatgpt",
		"free chat gpt",
		"chatgpt online",
		"chat gpt online",
		"online chatgpt",
		"chatgpt free online",
		"chatgpt online free",
		"chat gpt free online",
		"chatgpt no login",
		"chatgpt without login",
		"chat gpt login free",
		"chat gpt login",
		"free chatgpt without login",
		"free chatgpt no login",
		"ai chat no login",
		"ai chat without login",
		"claude ai without login",
		"claude no login",
		"chatgpt for free",
		"gpt chat free",
		"claude ai free",
		"claude free",
		"free claude ai",
		"free claude",
		"chatgpt alternative free",
		"free chatgpt alternative",
		"chatgpt free alternative",
		"free alternative to chatgpt",
		"alternative to chatgpt free",
		"ai like chatgpt",
		"sites like chatgpt",
		"free ai chatbot like chatgpt",
		"free ai chatbots like chatgpt",
		"apps like chatgpt for free",
		"best free alternative to chatgpt",
		"free ai apps",
		"ai with no restrictions",
		"notebooklm alternative",
	],
	alternates: {
		canonical: "https://surfsense.com/free",
	},
	openGraph: {
		title: "ChatGPT Free Online Without Login | Claude AI Free No Login | SurfSense",
		description:
			"Use ChatGPT free online without login. Chat with GPT-4, Claude AI, Gemini and 100+ AI models. Open source NotebookLM alternative.",
		url: "https://surfsense.com/free",
		siteName: "SurfSense",
		type: "website",
		images: [
			{
				url: "/og-image.png",
				width: 1200,
				height: 630,
				alt: "SurfSense - ChatGPT Free Online, Claude AI Free, No Login Required",
			},
		],
	},
	twitter: {
		card: "summary_large_image",
		title: "ChatGPT Free Online Without Login | Claude AI Free No Login | SurfSense",
		description:
			"Use ChatGPT free online without login. Chat with GPT-4, Claude AI, Gemini and more. No sign-up needed.",
		images: ["/og-image.png"],
	},
};
/**
 * Fetch the public anonymous-chat model list from the backend.
 *
 * Cached by Next.js for 5 minutes via `revalidate`. Any failure — non-2xx
 * response or a network/parse error — degrades to an empty list so the page
 * still renders.
 */
async function getModels(): Promise<AnonModel[]> {
	const endpoint = `${BACKEND_URL}/api/v1/public/anon-chat/models`;
	try {
		const response = await fetch(endpoint, { next: { revalidate: 300 } });
		return response.ok ? response.json() : [];
	} catch {
		return [];
	}
}
// Hub-page FAQ copy. Rendered in the visible FAQ section and mirrored into
// FAQPage JSON-LD via <FAQJsonLd>, so edits here change both the page and the
// structured data search engines index.
const FAQ_ITEMS = [
	{
		question: "Can I use ChatGPT without login?",
		answer:
			"Yes. SurfSense lets you use ChatGPT without login or any sign-up. Just pick a model and start chatting. No email, no password, no account needed. You get 1 million free tokens to use across ChatGPT, Claude AI, Gemini, and other models.",
	},
	{
		question: "Is ChatGPT really free on SurfSense?",
		answer:
			"Yes. SurfSense gives you free access to ChatGPT (GPT-4), Claude AI, Gemini, and other models without login. You get 1 million free tokens across any model with no sign-up required.",
	},
	{
		question: "How do I use ChatGPT no login?",
		answer:
			"Go to any model page on SurfSense and start typing your message. There is no login wall, no account creation, and no verification step. ChatGPT no login works instantly in your browser.",
	},
	{
		question: "What AI models can I use for free without login?",
		answer:
			"SurfSense offers free access without login to models from OpenAI (GPT-4, GPT-4 Turbo), Anthropic (Claude 3, Claude free), Google (Gemini), DeepSeek, Mistral, Llama, and more. All available as a free ChatGPT alternative online with no login required.",
	},
	{
		question: "What happens after I use 1 million free tokens?",
		answer:
			"After your free tokens, create a free SurfSense account to unlock 5 million more. Premium model tokens can be purchased at $1 per million tokens. Non-premium models remain unlimited for registered users.",
	},
	{
		question: "Is Claude AI available without login?",
		answer:
			"Yes. You can use Claude AI free without login on SurfSense. Both Claude 3 and other Anthropic models are available with no sign-up, alongside ChatGPT and Gemini.",
	},
	{
		question: "How is SurfSense different from ChatGPT?",
		answer:
			"SurfSense is an open source NotebookLM alternative that gives you access to multiple AI models in one place without login. Unlike ChatGPT alone, SurfSense includes document Q&A with citations, integrations with Slack, Google Drive, Notion, and Confluence, plus team collaboration features.",
	},
	{
		question: "Is SurfSense a free ChatGPT alternative?",
		answer:
			"Yes. SurfSense is a free, open source alternative to ChatGPT that works without login. It gives you access to Claude AI free, Gemini, and other AI models alongside document Q&A with citations, team collaboration, and 30+ integrations.",
	},
	{
		question: "Is my data private when using free AI chat without login?",
		answer:
			"Anonymous chat sessions are not stored in any database. No account means no personal data collected. SurfSense is open source, so you can self-host for complete data control and privacy.",
	},
];
/**
 * Server component for the /free hub page.
 *
 * Fetches the anonymous-chat model list, renders a hero, a table of all
 * models that have an SEO slug (each linking to its /free/[slug] page),
 * feature highlights, a sign-up CTA, the FAQ, and internal links. Emits
 * CollectionPage JSON-LD (with an ItemList of the model pages) plus FAQPage
 * JSON-LD for crawlers.
 */
export default async function FreeHubPage() {
	const models = await getModels();
	// Only models with an SEO slug have a dedicated /free/[slug] page to link to.
	const seoModels = models.filter((m) => m.seo_slug);
	return (
		<main className="min-h-screen pt-20">
			<JsonLd
				data={{
					"@context": "https://schema.org",
					"@type": "CollectionPage",
					name: "ChatGPT Free Online Without Login - SurfSense",
					description:
						"Use ChatGPT, Claude AI, Gemini and more AI models free online without login or sign-up. Open source NotebookLM alternative with no login required.",
					url: "https://surfsense.com/free",
					isPartOf: { "@type": "WebSite", name: "SurfSense", url: "https://surfsense.com" },
					mainEntity: {
						"@type": "ItemList",
						numberOfItems: seoModels.length,
						itemListElement: seoModels.map((m, i) => ({
							"@type": "ListItem",
							position: i + 1,
							name: m.name,
							url: `https://surfsense.com/free/${m.seo_slug}`,
						})),
					},
				}}
			/>
			<FAQJsonLd questions={FAQ_ITEMS} />
			<article className="container mx-auto px-4 pb-20">
				<BreadcrumbNav
					items={[
						{ name: "Home", href: "/" },
						{ name: "Free AI Chat", href: "/free" },
					]}
				/>
				{/* Hero */}
				<section className="mt-8 text-center max-w-3xl mx-auto">
					<h1 className="text-4xl md:text-5xl font-bold tracking-tight">
						ChatGPT Free Online Without Login
					</h1>
					<p className="mt-4 text-lg text-muted-foreground max-w-2xl mx-auto leading-relaxed">
						Use <strong>ChatGPT</strong>, <strong>Claude AI</strong>, <strong>Gemini</strong>, and
						other AI models free online without login. No sign-up, no email, no password. Pick a
						model and start chatting instantly.
					</p>
					<div className="flex flex-wrap items-center justify-center gap-3 mt-6">
						<Badge variant="secondary" className="px-3 py-1.5 text-sm">
							No login required
						</Badge>
						<Badge variant="secondary" className="px-3 py-1.5 text-sm">
							1M free tokens
						</Badge>
						<Badge variant="secondary" className="px-3 py-1.5 text-sm">
							{seoModels.length} AI models
						</Badge>
						<Badge variant="secondary" className="px-3 py-1.5 text-sm">
							Open source
						</Badge>
					</div>
				</section>
				<Separator className="my-12 max-w-4xl mx-auto" />
				{/* Model Table */}
				{seoModels.length > 0 ? (
					<section
						className="max-w-4xl mx-auto"
						aria-label="Free AI models available without login"
					>
						<h2 className="text-2xl font-bold mb-2">Free AI Models Available Without Login</h2>
						<p className="text-sm text-muted-foreground mb-6">
							All models below work without login or sign-up. Click any model to start a free AI
							chat instantly.
						</p>
						<div className="overflow-hidden rounded-lg border">
							<Table>
								<TableHeader>
									<TableRow>
										<TableHead className="w-[45%]">Model</TableHead>
										<TableHead>Provider</TableHead>
										<TableHead>Tier</TableHead>
										<TableHead className="text-right w-[100px]" />
									</TableRow>
								</TableHeader>
								<TableBody>
									{seoModels.map((model) => (
										<TableRow key={model.id}>
											<TableCell>
												<Link
													href={`/free/${model.seo_slug}`}
													className="group flex flex-col gap-0.5"
												>
													<span className="font-medium group-hover:underline">{model.name}</span>
													{model.description && (
														<span className="text-xs text-muted-foreground line-clamp-1">
															{model.description}
														</span>
													)}
												</Link>
											</TableCell>
											<TableCell>
												<Badge variant="outline">{model.provider}</Badge>
											</TableCell>
											<TableCell>
												{/* Premium models are token-metered; others are labeled Free. */}
												{model.is_premium ? (
													<Badge className="bg-purple-100 text-purple-700 dark:bg-purple-900/50 dark:text-purple-300 border-0">
														Premium
													</Badge>
												) : (
													<Badge variant="secondary">Free</Badge>
												)}
											</TableCell>
											<TableCell className="text-right">
												<Button variant="ghost" size="sm" asChild>
													<Link href={`/free/${model.seo_slug}`}>
														Chat
														<SquareArrowOutUpRight className="size-3" />
													</Link>
												</Button>
											</TableCell>
										</TableRow>
									))}
								</TableBody>
							</Table>
						</div>
					</section>
				) : (
					<section className="mt-12 text-center max-w-4xl mx-auto">
						<p className="text-muted-foreground">
							No models are currently available. Please check back later.
						</p>
					</section>
				)}
				<Separator className="my-12 max-w-4xl mx-auto" />
				{/* Why SurfSense */}
				<section className="max-w-4xl mx-auto">
					<h2 className="text-2xl font-bold mb-6">
						Why Use SurfSense as Your Free ChatGPT Alternative
					</h2>
					<div className="grid grid-cols-1 md:grid-cols-3 gap-6">
						<div className="rounded-lg border bg-card p-5">
							<h3 className="font-semibold mb-1.5">Multiple AI Models in One Place</h3>
							<p className="text-sm text-muted-foreground leading-relaxed">
								Access ChatGPT, Claude AI free, Gemini, DeepSeek, and more. Works like sites like
								ChatGPT but with all AI models available, not just GPT. A true free AI chatbot like
								ChatGPT and beyond.
							</p>
						</div>
						<div className="rounded-lg border bg-card p-5">
							<h3 className="font-semibold mb-1.5">No Login, No Sign-Up Required</h3>
							<p className="text-sm text-muted-foreground leading-relaxed">
								Start using ChatGPT free online immediately. No email, no password, no verification.
								Get ChatGPT no login access and Claude AI free access from one platform. AI with no
								restrictions on which model you can use.
							</p>
						</div>
						<div className="rounded-lg border bg-card p-5">
							<h3 className="font-semibold mb-1.5">Open Source NotebookLM Alternative</h3>
							<p className="text-sm text-muted-foreground leading-relaxed">
								SurfSense is a free, open source NotebookLM alternative with document Q&A and
								citations, integrations with Slack, Google Drive, Notion, and Confluence, plus team
								collaboration and self-hosting support.
							</p>
						</div>
					</div>
				</section>
				<Separator className="my-12 max-w-4xl mx-auto" />
				{/* CTA */}
				<section className="max-w-3xl mx-auto text-center">
					<h2 className="text-2xl font-bold mb-3">Want More Features?</h2>
					<p className="text-muted-foreground mb-6 leading-relaxed">
						Create a free SurfSense account to unlock 5 million tokens, document uploads with
						citations, team collaboration, and integrations with Slack, Google Drive, Notion, and
						30+ more tools.
					</p>
					<Button size="lg" asChild>
						<Link href="/register">Create Free Account</Link>
					</Button>
				</section>
				<Separator className="my-12 max-w-4xl mx-auto" />
				{/* FAQ */}
				<section className="max-w-3xl mx-auto">
					<h2 className="text-2xl font-bold text-center mb-8">Frequently Asked Questions</h2>
					<dl className="flex flex-col gap-4">
						{FAQ_ITEMS.map((item) => (
							<div key={item.question} className="rounded-lg border bg-card p-5">
								<dt className="font-medium text-sm">{item.question}</dt>
								<dd className="mt-2 text-sm text-muted-foreground leading-relaxed">
									{item.answer}
								</dd>
							</div>
						))}
					</dl>
				</section>
				{/* Internal links */}
				<nav aria-label="Related pages" className="mt-16 max-w-3xl mx-auto">
					<h2 className="text-lg font-semibold mb-3">Explore SurfSense</h2>
					<ul className="flex flex-wrap gap-2">
						<li>
							<Button variant="outline" size="sm" asChild>
								<Link href="/pricing">Pricing</Link>
							</Button>
						</li>
						<li>
							<Button variant="outline" size="sm" asChild>
								<Link href="/docs">Documentation</Link>
							</Button>
						</li>
						<li>
							<Button variant="outline" size="sm" asChild>
								<Link href="/blog">Blog</Link>
							</Button>
						</li>
						<li>
							<Button variant="outline" size="sm" asChild>
								<Link href="/register">Sign Up Free</Link>
							</Button>
						</li>
					</ul>
				</nav>
			</article>
		</main>
	);
}

View file

@ -7,6 +7,11 @@ import { Navbar } from "@/components/homepage/navbar";
export default function HomePageLayout({ children }: { children: React.ReactNode }) { export default function HomePageLayout({ children }: { children: React.ReactNode }) {
const pathname = usePathname(); const pathname = usePathname();
const isAuthPage = pathname === "/login" || pathname === "/register"; const isAuthPage = pathname === "/login" || pathname === "/register";
const isFreeModelChat = /^\/free\/[^/]+$/.test(pathname);
if (isFreeModelChat) {
return <>{children}</>;
}
return ( return (
<main className="min-h-screen bg-linear-to-b from-gray-50 to-gray-100 text-gray-900 dark:from-black dark:to-gray-900 dark:text-white overflow-x-hidden"> <main className="min-h-screen bg-linear-to-b from-gray-50 to-gray-100 text-gray-900 dark:from-black dark:to-gray-900 dark:text-white overflow-x-hidden">

View file

@ -0,0 +1,49 @@
"use client";
import { motion } from "motion/react";
import { useState } from "react";
import { BuyPagesContent } from "@/components/settings/buy-pages-content";
import { BuyTokensContent } from "@/components/settings/buy-tokens-content";
import { cn } from "@/lib/utils";
/** Segmented-control definitions for the combined purchase page. */
const TABS = [
	{ id: "pages", label: "Pages" },
	{ id: "tokens", label: "Premium Tokens" },
] as const;

type TabId = (typeof TABS)[number]["id"];

/**
 * Combined "buy more" page: a tab switcher that toggles between the
 * page-pack purchase flow and the premium-token purchase flow.
 */
export default function BuyMorePage() {
	const [selected, setSelected] = useState<TabId>("pages");
	const panel = selected === "pages" ? <BuyPagesContent /> : <BuyTokensContent />;

	return (
		<div className="flex min-h-[calc(100vh-64px)] select-none items-center justify-center px-4 py-8">
			<motion.div
				initial={{ opacity: 0, y: 20 }}
				animate={{ opacity: 1, y: 0 }}
				transition={{ duration: 0.3 }}
				className="w-full max-w-md space-y-6"
			>
				<div className="flex items-center justify-center rounded-lg border bg-muted/30 p-1">
					{TABS.map((tab) => {
						const isActive = selected === tab.id;
						return (
							<button
								key={tab.id}
								type="button"
								onClick={() => setSelected(tab.id)}
								className={cn(
									"flex-1 rounded-md px-3 py-1.5 text-sm font-medium transition-colors",
									isActive
										? "bg-background text-foreground shadow-sm"
										: "text-muted-foreground hover:text-foreground"
								)}
							>
								{tab.label}
							</button>
						);
					})}
				</div>
				{panel}
			</motion.div>
		</div>
	);
}

View file

@ -1,19 +1,16 @@
"use client"; "use client";
import { motion } from "motion/react"; import { useParams, useRouter } from "next/navigation";
import { BuyPagesContent } from "@/components/settings/buy-pages-content"; import { useEffect } from "react";
export default function BuyPagesPage() { export default function BuyPagesPage() {
return ( const router = useRouter();
<div className="flex min-h-[calc(100vh-64px)] select-none items-center justify-center px-4 py-8"> const params = useParams();
<motion.div const searchSpaceId = params?.search_space_id ?? "";
initial={{ opacity: 0, y: 20 }}
animate={{ opacity: 1, y: 0 }} useEffect(() => {
transition={{ duration: 0.3 }} router.replace(`/dashboard/${searchSpaceId}/buy-more`);
className="w-full max-w-md space-y-6" }, [router, searchSpaceId]);
>
<BuyPagesContent /> return null;
</motion.div>
</div>
);
} }

View file

@ -0,0 +1,16 @@
"use client";
import { useParams, useRouter } from "next/navigation";
import { useEffect } from "react";
/**
 * Legacy route kept for old links: client-side redirects
 * /dashboard/[search_space_id]/buy-tokens to the combined buy-more page.
 *
 * Guards against a missing route param so we never navigate to the
 * malformed URL `/dashboard//buy-more`, and normalizes the `string[]`
 * shape `useParams` can return for catch-all segments.
 */
export default function BuyTokensPage() {
	const router = useRouter();
	const params = useParams();
	const raw = params?.search_space_id;
	const searchSpaceId = Array.isArray(raw) ? raw[0] : (raw ?? "");

	useEffect(() => {
		// Without a search space id the redirect target would be malformed; skip it.
		if (!searchSpaceId) return;
		router.replace(`/dashboard/${searchSpaceId}/buy-more`);
	}, [router, searchSpaceId]);

	return null;
}

View file

@ -23,16 +23,14 @@ export default function PurchaseCancelPage() {
<CardHeader className="text-center"> <CardHeader className="text-center">
<CircleSlash2 className="mx-auto h-10 w-10 text-muted-foreground" /> <CircleSlash2 className="mx-auto h-10 w-10 text-muted-foreground" />
<CardTitle className="text-2xl">Checkout canceled</CardTitle> <CardTitle className="text-2xl">Checkout canceled</CardTitle>
<CardDescription> <CardDescription>No charge was made and your account is unchanged.</CardDescription>
No charge was made and your current pages are unchanged.
</CardDescription>
</CardHeader> </CardHeader>
<CardContent className="text-center text-sm text-muted-foreground"> <CardContent className="text-center text-sm text-muted-foreground">
You can return to the pricing options and try again whenever you&apos;re ready. You can return to the pricing options and try again whenever you&apos;re ready.
</CardContent> </CardContent>
<CardFooter className="flex flex-col gap-2 sm:flex-row"> <CardFooter className="flex flex-col gap-2 sm:flex-row">
<Button asChild className="w-full"> <Button asChild className="w-full">
<Link href={`/dashboard/${searchSpaceId}/more-pages`}>Back to Buy Pages</Link> <Link href={`/dashboard/${searchSpaceId}/buy-more`}>Back to Pricing</Link>
</Button> </Button>
<Button asChild variant="outline" className="w-full"> <Button asChild variant="outline" className="w-full">
<Link href={`/dashboard/${searchSpaceId}/new-chat`}>Back to Dashboard</Link> <Link href={`/dashboard/${searchSpaceId}/new-chat`}>Back to Dashboard</Link>

View file

@ -23,6 +23,7 @@ export default function PurchaseSuccessPage() {
useEffect(() => { useEffect(() => {
void queryClient.invalidateQueries({ queryKey: USER_QUERY_KEY }); void queryClient.invalidateQueries({ queryKey: USER_QUERY_KEY });
void queryClient.invalidateQueries({ queryKey: ["token-status"] });
}, [queryClient]); }, [queryClient]);
return ( return (
@ -31,13 +32,11 @@ export default function PurchaseSuccessPage() {
<CardHeader className="text-center"> <CardHeader className="text-center">
<CheckCircle2 className="mx-auto h-10 w-10 text-emerald-500" /> <CheckCircle2 className="mx-auto h-10 w-10 text-emerald-500" />
<CardTitle className="text-2xl">Purchase complete</CardTitle> <CardTitle className="text-2xl">Purchase complete</CardTitle>
<CardDescription> <CardDescription>Your purchase is being applied to your account now.</CardDescription>
Your additional pages are being applied to your account now.
</CardDescription>
</CardHeader> </CardHeader>
<CardContent className="space-y-3 text-center"> <CardContent className="space-y-3 text-center">
<p className="text-sm text-muted-foreground"> <p className="text-sm text-muted-foreground">
Your sidebar usage meter should refresh automatically in a moment. Your usage meters should refresh automatically in a moment.
</p> </p>
</CardContent> </CardContent>
<CardFooter className="flex flex-col gap-2"> <CardFooter className="flex flex-col gap-2">
@ -45,7 +44,7 @@ export default function PurchaseSuccessPage() {
<Link href={`/dashboard/${searchSpaceId}/new-chat`}>Back to Dashboard</Link> <Link href={`/dashboard/${searchSpaceId}/new-chat`}>Back to Dashboard</Link>
</Button> </Button>
<Button asChild variant="outline" className="w-full"> <Button asChild variant="outline" className="w-full">
<Link href={`/dashboard/${searchSpaceId}/more-pages`}>Buy More Pages</Link> <Link href={`/dashboard/${searchSpaceId}/buy-more`}>Buy More</Link>
</Button> </Button>
</CardFooter> </CardFooter>
</Card> </Card>

View file

@ -14,14 +14,31 @@ const changelogSource = loader({
}); });
const BASE_URL = "https://surfsense.com"; const BASE_URL = "https://surfsense.com";
const BACKEND_URL = process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL || "http://localhost:8000";
export default function sitemap(): MetadataRoute.Sitemap { async function getFreeModelSlugs(): Promise<string[]> {
try {
const res = await fetch(`${BACKEND_URL}/api/v1/public/anon-chat/models`, {
next: { revalidate: 3600 },
});
if (!res.ok) return [];
const models = await res.json();
return models
.filter((m: { seo_slug?: string }) => m.seo_slug)
.map((m: { seo_slug: string }) => m.seo_slug);
} catch {
return [];
}
}
export default async function sitemap(): Promise<MetadataRoute.Sitemap> {
const now = new Date(); const now = new Date();
now.setMinutes(0, 0, 0); now.setMinutes(0, 0, 0);
const lastModified = now; const lastModified = now;
const staticPages: MetadataRoute.Sitemap = [ const staticPages: MetadataRoute.Sitemap = [
{ url: `${BASE_URL}/`, lastModified, changeFrequency: "daily", priority: 1 }, { url: `${BASE_URL}/`, lastModified, changeFrequency: "daily", priority: 1 },
{ url: `${BASE_URL}/free`, lastModified, changeFrequency: "daily", priority: 0.95 },
{ url: `${BASE_URL}/pricing`, lastModified, changeFrequency: "weekly", priority: 0.9 }, { url: `${BASE_URL}/pricing`, lastModified, changeFrequency: "weekly", priority: 0.9 },
{ url: `${BASE_URL}/contact`, lastModified, changeFrequency: "monthly", priority: 0.7 }, { url: `${BASE_URL}/contact`, lastModified, changeFrequency: "monthly", priority: 0.7 },
{ url: `${BASE_URL}/blog`, lastModified, changeFrequency: "daily", priority: 0.9 }, { url: `${BASE_URL}/blog`, lastModified, changeFrequency: "daily", priority: 0.9 },
@ -34,6 +51,14 @@ export default function sitemap(): MetadataRoute.Sitemap {
{ url: `${BASE_URL}/register`, lastModified, changeFrequency: "monthly", priority: 0.5 }, { url: `${BASE_URL}/register`, lastModified, changeFrequency: "monthly", priority: 0.5 },
]; ];
const slugs = await getFreeModelSlugs();
const freeModelPages: MetadataRoute.Sitemap = slugs.map((slug) => ({
url: `${BASE_URL}/free/${slug}`,
lastModified,
changeFrequency: "daily" as const,
priority: 0.9,
}));
const docsPages: MetadataRoute.Sitemap = docsSource.getPages().map((page) => ({ const docsPages: MetadataRoute.Sitemap = docsSource.getPages().map((page) => ({
url: `${BASE_URL}${page.url}`, url: `${BASE_URL}${page.url}`,
lastModified, lastModified,
@ -55,5 +80,5 @@ export default function sitemap(): MetadataRoute.Sitemap {
priority: 0.5, priority: 0.5,
})); }));
return [...staticPages, ...docsPages, ...blogPages, ...changelogPages]; return [...staticPages, ...freeModelPages, ...docsPages, ...blogPages, ...changelogPages];
} }

View file

@ -2,6 +2,7 @@ import { atomWithQuery } from "jotai-tanstack-query";
import type { LLMModel } from "@/contracts/enums/llm-models"; import type { LLMModel } from "@/contracts/enums/llm-models";
import { LLM_MODELS } from "@/contracts/enums/llm-models"; import { LLM_MODELS } from "@/contracts/enums/llm-models";
import { newLLMConfigApiService } from "@/lib/apis/new-llm-config-api.service"; import { newLLMConfigApiService } from "@/lib/apis/new-llm-config-api.service";
import { getBearerToken } from "@/lib/auth-utils";
import { cacheKeys } from "@/lib/query-client/cache-keys"; import { cacheKeys } from "@/lib/query-client/cache-keys";
import { activeSearchSpaceIdAtom } from "../search-spaces/search-space-query.atoms"; import { activeSearchSpaceIdAtom } from "../search-spaces/search-space-query.atoms";
@ -30,6 +31,7 @@ export const globalNewLLMConfigsAtom = atomWithQuery(() => {
return { return {
queryKey: cacheKeys.newLLMConfigs.global(), queryKey: cacheKeys.newLLMConfigs.global(),
staleTime: 10 * 60 * 1000, // 10 minutes - global configs rarely change staleTime: 10 * 60 * 1000, // 10 minutes - global configs rarely change
enabled: !!getBearerToken(),
queryFn: async () => { queryFn: async () => {
return newLLMConfigApiService.getGlobalConfigs(); return newLLMConfigApiService.getGlobalConfigs();
}, },

View file

@ -76,9 +76,7 @@ export function AnnouncementToastProvider() {
for (let i = 0; i < importantUntoasted.length; i++) { for (let i = 0; i < importantUntoasted.length; i++) {
const announcement = importantUntoasted[i]; const announcement = importantUntoasted[i];
staggerTimers.push( staggerTimers.push(setTimeout(() => showAnnouncementToast(announcement), i * 800));
setTimeout(() => showAnnouncementToast(announcement), i * 800)
);
} }
}, 1500); }, 1500);

View file

@ -36,12 +36,14 @@ interface DocumentUploadDialogContextType {
const DocumentUploadDialogContext = createContext<DocumentUploadDialogContextType | null>(null); const DocumentUploadDialogContext = createContext<DocumentUploadDialogContextType | null>(null);
export const useDocumentUploadDialog = () => { const NOOP_DIALOG: DocumentUploadDialogContextType = {
openDialog: () => {},
closeDialog: () => {},
};
export const useDocumentUploadDialog = (): DocumentUploadDialogContextType => {
const context = useContext(DocumentUploadDialogContext); const context = useContext(DocumentUploadDialogContext);
if (!context) { return context ?? NOOP_DIALOG;
throw new Error("useDocumentUploadDialog must be used within DocumentUploadDialogProvider");
}
return context;
}; };
// Provider component // Provider component

View file

@ -1,10 +1,12 @@
"use client"; "use client";
import { FileText } from "lucide-react";
import type { FC } from "react"; import type { FC } from "react";
import { useState } from "react"; import { useState } from "react";
import { useCitationMetadata } from "@/components/assistant-ui/citation-metadata-context"; import { useCitationMetadata } from "@/components/assistant-ui/citation-metadata-context";
import { SourceDetailPanel } from "@/components/new-chat/source-detail-panel"; import { SourceDetailPanel } from "@/components/new-chat/source-detail-panel";
import { Citation } from "@/components/tool-ui/citation"; import { Citation } from "@/components/tool-ui/citation";
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
interface InlineCitationProps { interface InlineCitationProps {
chunkId: number; chunkId: number;
@ -14,10 +16,28 @@ interface InlineCitationProps {
/** /**
* Inline citation for knowledge-base chunks (numeric chunk IDs). * Inline citation for knowledge-base chunks (numeric chunk IDs).
* Renders a clickable badge showing the actual chunk ID that opens the SourceDetailPanel. * Renders a clickable badge showing the actual chunk ID that opens the SourceDetailPanel.
* Negative chunk IDs indicate anonymous/synthetic uploads and render as a static badge.
*/ */
export const InlineCitation: FC<InlineCitationProps> = ({ chunkId, isDocsChunk = false }) => { export const InlineCitation: FC<InlineCitationProps> = ({ chunkId, isDocsChunk = false }) => {
const [isOpen, setIsOpen] = useState(false); const [isOpen, setIsOpen] = useState(false);
if (chunkId < 0) {
return (
<Tooltip>
<TooltipTrigger asChild>
<span
className="ml-0.5 inline-flex h-5 min-w-5 items-center justify-center gap-0.5 rounded-md bg-primary/10 px-1.5 text-[11px] font-medium text-primary align-baseline shadow-sm"
role="note"
>
<FileText className="size-3" />
doc
</span>
</TooltipTrigger>
<TooltipContent>Uploaded document</TooltipContent>
</Tooltip>
);
}
return ( return (
<SourceDetailPanel <SourceDetailPanel
open={isOpen} open={isOpen}

View file

@ -106,11 +106,11 @@ function preprocessMarkdown(content: string): string {
return content; return content;
} }
// Matches [citation:...] with numeric IDs (incl. doc- prefix, comma-separated), // Matches [citation:...] with numeric IDs (incl. negative, doc- prefix, comma-separated),
// URL-based IDs from live web search, or urlciteN placeholders from preprocess. // URL-based IDs from live web search, or urlciteN placeholders from preprocess.
// Also matches Chinese brackets 【】 and handles zero-width spaces that LLM sometimes inserts. // Also matches Chinese brackets 【】 and handles zero-width spaces that LLM sometimes inserts.
const CITATION_REGEX = const CITATION_REGEX =
/[[【]\u200B?citation:\s*(https?:\/\/[^\]】\u200B]+|urlcite\d+|(?:doc-)?\d+(?:\s*,\s*(?:doc-)?\d+)*)\s*\u200B?[\]】]/g; /[[【]\u200B?citation:\s*(https?:\/\/[^\]】\u200B]+|urlcite\d+|(?:doc-)?-?\d+(?:\s*,\s*(?:doc-)?-?\d+)*)\s*\u200B?[\]】]/g;
/** /**
* Parses text and replaces [citation:XXX] patterns with citation components. * Parses text and replaces [citation:XXX] patterns with citation components.

View file

@ -27,6 +27,7 @@ export function DocumentsFilters({
aiSortEnabled = false, aiSortEnabled = false,
aiSortBusy = false, aiSortBusy = false,
onToggleAiSort, onToggleAiSort,
onUploadClick,
}: { }: {
typeCounts: Partial<Record<DocumentTypeEnum, number>>; typeCounts: Partial<Record<DocumentTypeEnum, number>>;
onSearch: (v: string) => void; onSearch: (v: string) => void;
@ -37,12 +38,14 @@ export function DocumentsFilters({
aiSortEnabled?: boolean; aiSortEnabled?: boolean;
aiSortBusy?: boolean; aiSortBusy?: boolean;
onToggleAiSort?: () => void; onToggleAiSort?: () => void;
onUploadClick?: () => void;
}) { }) {
const t = useTranslations("documents"); const t = useTranslations("documents");
const id = React.useId(); const id = React.useId();
const inputRef = useRef<HTMLInputElement>(null); const inputRef = useRef<HTMLInputElement>(null);
const { openDialog: openUploadDialog } = useDocumentUploadDialog(); const { openDialog: openUploadDialog } = useDocumentUploadDialog();
const handleUpload = onUploadClick ?? openUploadDialog;
const [typeSearchQuery, setTypeSearchQuery] = useState(""); const [typeSearchQuery, setTypeSearchQuery] = useState("");
const [scrollPos, setScrollPos] = useState<"top" | "middle" | "bottom">("top"); const [scrollPos, setScrollPos] = useState<"top" | "middle" | "bottom">("top");
@ -254,7 +257,7 @@ export function DocumentsFilters({
{/* Upload Button */} {/* Upload Button */}
<Button <Button
data-joyride="upload-button" data-joyride="upload-button"
onClick={openUploadDialog} onClick={handleUpload}
variant="outline" variant="outline"
size="sm" size="sm"
className="h-9 shrink-0 gap-1.5 bg-white text-gray-700 border-white hover:bg-gray-50 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100" className="h-9 shrink-0 gap-1.5 bg-white text-gray-700 border-white hover:bg-gray-50 dark:bg-white dark:text-gray-800 dark:hover:bg-gray-100"

View file

@ -0,0 +1,271 @@
"use client";
import { ArrowUp, Loader2, Square } from "lucide-react";
import { useCallback, useEffect, useRef, useState } from "react";
import type { AnonModel, AnonQuotaResponse } from "@/contracts/types/anonymous-chat.types";
import { anonymousChatApiService } from "@/lib/apis/anonymous-chat-api.service";
import { readSSEStream } from "@/lib/chat/streaming-state";
import { cn } from "@/lib/utils";
import { QuotaBar } from "./quota-bar";
import { QuotaWarningBanner } from "./quota-warning-banner";
/** A single turn in the local, in-memory anonymous chat transcript. */
interface Message {
	// Client-generated UUID; used as the React key and to target streamed updates.
	id: string;
	role: "user" | "assistant";
	content: string;
}
/** Props for the AnonymousChat component. */
interface AnonymousChatProps {
	// Public metadata of the free model being chatted with (name, slug, description).
	model: AnonModel;
}
/**
 * Chat surface for logged-out visitors against a single free model.
 *
 * Keeps the transcript in local state, streams assistant replies over SSE
 * from the public anon-chat endpoint, and tracks the visitor's token quota.
 * Requests send cookies (`credentials: "include"`) so the backend can
 * attribute quota usage to the anonymous session.
 */
export function AnonymousChat({ model }: AnonymousChatProps) {
	const [messages, setMessages] = useState<Message[]>([]);
	const [input, setInput] = useState("");
	const [isStreaming, setIsStreaming] = useState(false);
	const [quota, setQuota] = useState<AnonQuotaResponse | null>(null);
	// Controller for the in-flight stream; lets the Stop button abort it.
	const abortRef = useRef<AbortController | null>(null);
	const messagesEndRef = useRef<HTMLDivElement>(null);
	const textareaRef = useRef<HTMLTextAreaElement>(null);

	// Load the visitor's current quota once on mount.
	useEffect(() => {
		anonymousChatApiService.getQuota().then(setQuota).catch(console.error);
	}, []);

	// Keep the newest message in view as the transcript grows.
	useEffect(() => {
		messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
	}, [messages]);

	// Grow the textarea with its content, capped at 200px.
	const autoResizeTextarea = useCallback(() => {
		const textarea = textareaRef.current;
		if (textarea) {
			textarea.style.height = "auto";
			textarea.style.height = `${Math.min(textarea.scrollHeight, 200)}px`;
		}
	}, []);

	// Sends the current input and streams the assistant reply into a placeholder message.
	const handleSubmit = useCallback(async () => {
		const trimmed = input.trim();
		if (!trimmed || isStreaming) return;
		// Hard-stop when the visitor has exhausted their free-token quota.
		if (quota && quota.used >= quota.limit) return;

		const userMsg: Message = { id: crypto.randomUUID(), role: "user", content: trimmed };
		const assistantId = crypto.randomUUID();
		// Empty assistant placeholder; streamed text deltas are appended to it.
		const assistantMsg: Message = { id: assistantId, role: "assistant", content: "" };
		setMessages((prev) => [...prev, userMsg, assistantMsg]);
		setInput("");
		setIsStreaming(true);
		if (textareaRef.current) {
			textareaRef.current.style.height = "auto";
		}

		const controller = new AbortController();
		abortRef.current = controller;

		try {
			// Send the full conversation (including the new user turn) each time;
			// the anonymous backend holds no server-side chat state.
			const chatHistory = [...messages, userMsg].map((m) => ({
				role: m.role,
				content: m.content,
			}));

			const response = await fetch(
				`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL || "http://localhost:8000"}/api/v1/public/anon-chat/stream`,
				{
					method: "POST",
					headers: { "Content-Type": "application/json" },
					credentials: "include",
					body: JSON.stringify({
						model_slug: model.seo_slug,
						messages: chatHistory,
					}),
					signal: controller.signal,
				}
			);

			if (!response.ok) {
				if (response.status === 429) {
					// Quota exceeded: sync local quota state from the error payload
					// and drop the empty assistant placeholder (keep the user turn).
					// Fallback figures mirror a 1M-token default — TODO confirm against backend.
					const errorData = await response.json();
					setQuota({
						used: errorData.detail?.used ?? quota?.limit ?? 1000000,
						limit: errorData.detail?.limit ?? quota?.limit ?? 1000000,
						remaining: 0,
						status: "exceeded",
						warning_threshold: quota?.warning_threshold ?? 800000,
					});
					setMessages((prev) => prev.filter((m) => m.id !== assistantId));
					return;
				}
				throw new Error(`Stream error: ${response.status}`);
			}

			for await (const event of readSSEStream(response)) {
				if (controller.signal.aborted) break;
				if (event.type === "text-delta") {
					// Append each streamed chunk to the assistant placeholder.
					setMessages((prev) =>
						prev.map((m) => (m.id === assistantId ? { ...m, content: m.content + event.delta } : m))
					);
				} else if (event.type === "error") {
					// Keep any partial content; only substitute the error text if nothing arrived.
					setMessages((prev) =>
						prev.map((m) =>
							m.id === assistantId ? { ...m, content: m.content || event.errorText } : m
						)
					);
				} else if ("type" in event && event.type === "data-token-usage") {
					// After streaming completes, refresh quota
					anonymousChatApiService.getQuota().then(setQuota).catch(console.error);
				}
			}
		} catch (err) {
			// User-initiated cancellation is not an error.
			if (err instanceof DOMException && err.name === "AbortError") return;
			console.error("Chat stream error:", err);
			setMessages((prev) =>
				prev.map((m) =>
					m.id === assistantId && !m.content
						? { ...m, content: "An error occurred. Please try again." }
						: m
				)
			);
		} finally {
			setIsStreaming(false);
			abortRef.current = null;
			// Refresh quota unconditionally; the server is the source of truth.
			anonymousChatApiService.getQuota().then(setQuota).catch(console.error);
		}
	}, [input, isStreaming, messages, model.seo_slug, quota]);

	// Aborts the in-flight stream (wired to the Stop button).
	const handleCancel = useCallback(() => {
		abortRef.current?.abort();
	}, []);

	// Enter sends; Shift+Enter inserts a newline.
	const handleKeyDown = (e: React.KeyboardEvent) => {
		if (e.key === "Enter" && !e.shiftKey) {
			e.preventDefault();
			handleSubmit();
		}
	};

	const isExceeded = quota ? quota.used >= quota.limit : false;

	return (
		<div className="flex flex-col h-[calc(100vh-8rem)] max-w-3xl mx-auto">
			{quota && (
				<QuotaWarningBanner
					used={quota.used}
					limit={quota.limit}
					warningThreshold={quota.warning_threshold}
					className="mb-3"
				/>
			)}
			{/* Transcript (empty state shows the model card) */}
			<div className="flex-1 overflow-y-auto space-y-4 pb-4 min-h-0">
				{messages.length === 0 && (
					<div className="flex flex-col items-center justify-center h-full text-center px-4">
						<div className="rounded-full bg-linear-to-r from-purple-500/10 to-blue-500/10 p-4 mb-4">
							<div className="h-10 w-10 rounded-full bg-linear-to-r from-purple-500 to-blue-500 flex items-center justify-center">
								<span className="text-white text-lg font-bold">
									{model.name.charAt(0).toUpperCase()}
								</span>
							</div>
						</div>
						<h2 className="text-xl font-semibold mb-2">{model.name}</h2>
						{model.description && (
							<p className="text-sm text-muted-foreground max-w-md">{model.description}</p>
						)}
						<p className="text-xs text-muted-foreground mt-4">
							Free to use &middot; No login required &middot; Start typing below
						</p>
					</div>
				)}
				{messages.map((msg) => (
					<div
						key={msg.id}
						className={cn("flex gap-3 px-4", msg.role === "user" ? "justify-end" : "justify-start")}
					>
						{msg.role === "assistant" && (
							<div className="h-7 w-7 rounded-full bg-linear-to-r from-purple-500 to-blue-500 flex items-center justify-center shrink-0 mt-0.5">
								<span className="text-white text-xs font-bold">
									{model.name.charAt(0).toUpperCase()}
								</span>
							</div>
						)}
						<div
							className={cn(
								"rounded-2xl px-4 py-2.5 max-w-[80%] text-sm leading-relaxed",
								msg.role === "user"
									? "bg-primary text-primary-foreground"
									: "bg-muted text-foreground"
							)}
						>
							{msg.role === "assistant" && !msg.content && isStreaming ? (
								<Loader2 className="h-4 w-4 animate-spin text-muted-foreground" />
							) : (
								<div className="whitespace-pre-wrap wrap-break-word">{msg.content}</div>
							)}
						</div>
					</div>
				))}
				<div ref={messagesEndRef} />
			</div>
			{/* Composer with quota bar and send/stop controls */}
			<div className="border-t pt-3 pb-2 space-y-2">
				{quota && (
					<QuotaBar
						used={quota.used}
						limit={quota.limit}
						warningThreshold={quota.warning_threshold}
					/>
				)}
				<div className="relative">
					<textarea
						ref={textareaRef}
						value={input}
						onChange={(e) => {
							setInput(e.target.value);
							autoResizeTextarea();
						}}
						onKeyDown={handleKeyDown}
						placeholder={
							isExceeded
								? "Token limit reached. Create a free account to continue."
								: `Message ${model.name}...`
						}
						disabled={isExceeded}
						rows={1}
						className={cn(
							"w-full resize-none rounded-xl border bg-background px-4 py-3 pr-12 text-sm",
							"placeholder:text-muted-foreground focus:outline-none focus:ring-2 focus:ring-ring",
							"disabled:cursor-not-allowed disabled:opacity-50",
							"min-h-[44px] max-h-[200px]"
						)}
					/>
					{isStreaming ? (
						<button
							type="button"
							onClick={handleCancel}
							className="absolute right-2 bottom-2 flex h-8 w-8 items-center justify-center rounded-lg bg-foreground text-background transition-colors hover:opacity-80"
						>
							<Square className="h-3.5 w-3.5" fill="currentColor" />
						</button>
					) : (
						<button
							type="button"
							onClick={handleSubmit}
							disabled={!input.trim() || isExceeded}
							className="absolute right-2 bottom-2 flex h-8 w-8 items-center justify-center rounded-lg bg-foreground text-background transition-colors hover:opacity-80 disabled:opacity-40 disabled:cursor-not-allowed"
						>
							<ArrowUp className="h-4 w-4" />
						</button>
					)}
				</div>
				<p className="text-center text-[10px] text-muted-foreground">
					{model.name} via SurfSense &middot; Responses may be inaccurate
				</p>
			</div>
		</div>
	);
}

View file

@ -0,0 +1,395 @@
"use client";
import {
type AppendMessage,
AssistantRuntimeProvider,
type ThreadMessageLike,
useExternalStoreRuntime,
} from "@assistant-ui/react";
import { Turnstile, type TurnstileInstance } from "@marsidev/react-turnstile";
import { ShieldCheck } from "lucide-react";
import { useCallback, useEffect, useRef, useState } from "react";
import { ThinkingStepsDataUI } from "@/components/assistant-ui/thinking-steps";
import {
createTokenUsageStore,
type TokenUsageData,
TokenUsageProvider,
} from "@/components/assistant-ui/token-usage-context";
import { useAnonymousMode } from "@/contexts/anonymous-mode";
import {
addToolCall,
appendText,
buildContentForUI,
type ContentPartsState,
FrameBatchedUpdater,
readSSEStream,
type ThinkingStepData,
updateThinkingSteps,
updateToolCall,
} from "@/lib/chat/streaming-state";
import { BACKEND_URL } from "@/lib/env-config";
import { FreeModelSelector } from "./free-model-selector";
import { FreeThread } from "./free-thread";
// Tool calls that get a dedicated UI renderer in the thread (passed to buildContentForUI).
const TOOLS_WITH_UI = new Set(["web_search", "document_qna"]);
// Cloudflare Turnstile site key; when empty, the CAPTCHA UI is never shown.
const TURNSTILE_SITE_KEY = process.env.NEXT_PUBLIC_TURNSTILE_SITE_KEY ?? "";
/**
 * Extracts a Turnstile CAPTCHA error code from a failed response.
 *
 * Only 403 responses are considered. Returns "CAPTCHA_REQUIRED" or
 * "CAPTCHA_INVALID" when the JSON body carries one of those codes under
 * `detail.code` or `error.code`; otherwise returns null (including for
 * non-JSON bodies and any other status).
 */
function parseCaptchaError(status: number, body: string): string | null {
	if (status === 403) {
		let payload: unknown;
		try {
			payload = JSON.parse(body);
		} catch {
			return null; // body was not JSON
		}
		const data = payload as { detail?: { code?: string }; error?: { code?: string } } | null;
		const code = data?.detail?.code ?? data?.error?.code;
		switch (code) {
			case "CAPTCHA_REQUIRED":
			case "CAPTCHA_INVALID":
				return code;
			default:
				return null;
		}
	}
	return null;
}
/**
 * Full chat page for logged-out visitors ("free" mode).
 *
 * Bridges the assistant-ui external-store runtime to the public
 * /anon-chat/stream endpoint: keeps the transcript in local state, folds SSE
 * events into assistant message content parts (text, tool calls, thinking
 * steps, token usage), and handles Cloudflare Turnstile CAPTCHA challenges by
 * parking the pending send and auto-retrying once the visitor solves it.
 */
export function FreeChatPage() {
	const anonMode = useAnonymousMode();
	// Model slug / reset key come from the anonymous-mode context; empty/zero
	// when the context reports the visitor is not anonymous.
	const modelSlug = anonMode.isAnonymous ? anonMode.modelSlug : "";
	const resetKey = anonMode.isAnonymous ? anonMode.resetKey : 0;

	const [messages, setMessages] = useState<ThreadMessageLike[]>([]);
	const [isRunning, setIsRunning] = useState(false);
	// Lazily created store mapping assistant message ids to token-usage data.
	const [tokenUsageStore] = useState(() => createTokenUsageStore());
	const abortControllerRef = useRef<AbortController | null>(null);

	// Turnstile CAPTCHA state
	const [captchaRequired, setCaptchaRequired] = useState(false);
	const turnstileRef = useRef<TurnstileInstance | null>(null);
	// Single-use CAPTCHA token; cleared after it has been sent once.
	const turnstileTokenRef = useRef<string | null>(null);
	// Message history parked while waiting for the visitor to solve the CAPTCHA.
	const pendingRetryRef = useRef<{
		messageHistory: { role: string; content: string }[];
		userMsgId: string;
	} | null>(null);

	// Reset the whole conversation whenever the model changes or the context
	// bumps its reset key (e.g. "new chat").
	useEffect(() => {
		setMessages([]);
		tokenUsageStore.clear();
		if (abortControllerRef.current) {
			abortControllerRef.current.abort();
			abortControllerRef.current = null;
		}
		setIsRunning(false);
		setCaptchaRequired(false);
		turnstileTokenRef.current = null;
		pendingRetryRef.current = null;
	}, [resetKey, modelSlug, tokenUsageStore]);

	// Aborts the in-flight stream (wired to the runtime's onCancel).
	const cancelRun = useCallback(() => {
		if (abortControllerRef.current) {
			abortControllerRef.current.abort();
			abortControllerRef.current = null;
		}
		setIsRunning(false);
	}, []);

	/**
	 * Core streaming logic shared by initial sends and CAPTCHA retries.
	 * Returns "captcha" if the server demands a CAPTCHA, otherwise void.
	 */
	const doStream = useCallback(
		async (
			messageHistory: { role: string; content: string }[],
			assistantMsgId: string,
			signal: AbortSignal,
			turnstileToken: string | null
		): Promise<"captcha" | void> => {
			const reqBody: Record<string, unknown> = {
				model_slug: modelSlug,
				messages: messageHistory,
			};
			if (turnstileToken) reqBody.turnstile_token = turnstileToken;

			const response = await fetch(`${BACKEND_URL}/api/v1/public/anon-chat/stream`, {
				method: "POST",
				headers: { "Content-Type": "application/json" },
				// Cookies identify the anonymous session on the backend.
				credentials: "include",
				body: JSON.stringify(reqBody),
				signal,
			});

			if (!response.ok) {
				const body = await response.text().catch(() => "");
				const captchaCode = parseCaptchaError(response.status, body);
				// Signal the caller to show the CAPTCHA instead of surfacing an error.
				if (captchaCode) return "captcha";
				throw new Error(body || `Server error: ${response.status}`);
			}

			const currentThinkingSteps = new Map<string, ThinkingStepData>();
			// Coalesces rapid text deltas into at most one React update per frame.
			const batcher = new FrameBatchedUpdater();
			const contentPartsState: ContentPartsState = {
				contentParts: [],
				currentTextPartIndex: -1,
				toolCallIndices: new Map(),
			};
			const { toolCallIndices } = contentPartsState;

			// Re-renders the target assistant message from the accumulated parts.
			const flushMessages = () => {
				setMessages((prev) =>
					prev.map((m) =>
						m.id === assistantMsgId
							? { ...m, content: buildContentForUI(contentPartsState, TOOLS_WITH_UI) }
							: m
					)
				);
			};
			const scheduleFlush = () => batcher.schedule(flushMessages);

			try {
				for await (const parsed of readSSEStream(response)) {
					switch (parsed.type) {
						case "text-delta":
							appendText(contentPartsState, parsed.delta);
							scheduleFlush();
							break;
						case "tool-input-start":
							addToolCall(contentPartsState, TOOLS_WITH_UI, parsed.toolCallId, parsed.toolName, {});
							batcher.flush();
							break;
						case "tool-input-available":
							// May arrive with or without a preceding tool-input-start.
							if (toolCallIndices.has(parsed.toolCallId)) {
								updateToolCall(contentPartsState, parsed.toolCallId, { args: parsed.input || {} });
							} else {
								addToolCall(
									contentPartsState,
									TOOLS_WITH_UI,
									parsed.toolCallId,
									parsed.toolName,
									parsed.input || {}
								);
							}
							batcher.flush();
							break;
						case "tool-output-available":
							updateToolCall(contentPartsState, parsed.toolCallId, { result: parsed.output });
							batcher.flush();
							break;
						case "data-thinking-step": {
							const stepData = parsed.data as ThinkingStepData;
							if (stepData?.id) {
								currentThinkingSteps.set(stepData.id, stepData);
								if (updateThinkingSteps(contentPartsState, currentThinkingSteps)) scheduleFlush();
							}
							break;
						}
						case "data-token-usage":
							tokenUsageStore.set(assistantMsgId, parsed.data as TokenUsageData);
							break;
						case "error":
							throw new Error(parsed.errorText || "Server error");
					}
				}
				// Render whatever the batcher still holds after the stream ends.
				batcher.flush();
			} catch (err) {
				batcher.dispose();
				throw err;
			}
		},
		[modelSlug, tokenUsageStore]
	);

	// assistant-ui callback: a new user message was submitted from the composer.
	const onNew = useCallback(
		async (message: AppendMessage) => {
			// Collapse the structured content parts into a plain text query.
			let userQuery = "";
			for (const part of message.content) {
				if (part.type === "text") userQuery += part.text;
			}
			if (!userQuery.trim()) return;

			const userMsgId = `msg-user-${Date.now()}`;
			setMessages((prev) => [
				...prev,
				{
					id: userMsgId,
					role: "user" as const,
					content: [{ type: "text" as const, text: userQuery }],
					createdAt: new Date(),
				},
			]);
			setIsRunning(true);

			const controller = new AbortController();
			abortControllerRef.current = controller;

			// Empty assistant placeholder that doStream fills as events arrive.
			const assistantMsgId = `msg-assistant-${Date.now()}`;
			setMessages((prev) => [
				...prev,
				{
					id: assistantMsgId,
					role: "assistant" as const,
					content: [{ type: "text" as const, text: "" }],
					createdAt: new Date(),
				},
			]);

			// Flatten prior turns to {role, content} pairs; backend holds no chat state.
			const messageHistory = messages
				.filter((m) => m.role === "user" || m.role === "assistant")
				.map((m) => {
					let text = "";
					for (const part of m.content) {
						if (typeof part === "object" && part.type === "text" && "text" in part) {
							text += (part as { type: "text"; text: string }).text;
						}
					}
					return { role: m.role as string, content: text };
				})
				.filter((m) => m.content.length > 0);
			messageHistory.push({ role: "user", content: userQuery.trim() });

			try {
				const result = await doStream(
					messageHistory,
					assistantMsgId,
					controller.signal,
					turnstileTokenRef.current
				);
				// Consume the token after use regardless of outcome
				turnstileTokenRef.current = null;

				if (result === "captcha" && TURNSTILE_SITE_KEY) {
					// Remove the empty assistant placeholder; keep the user message
					setMessages((prev) => prev.filter((m) => m.id !== assistantMsgId));
					pendingRetryRef.current = { messageHistory, userMsgId };
					setCaptchaRequired(true);
					setIsRunning(false);
					abortControllerRef.current = null;
					return;
				}
			} catch (error) {
				if (error instanceof Error && error.name === "AbortError") return;
				console.error("[FreeChatPage] Chat error:", error);
				const errorText = error instanceof Error ? error.message : "An unexpected error occurred";
				setMessages((prev) =>
					prev.map((m) =>
						m.id === assistantMsgId
							? { ...m, content: [{ type: "text" as const, text: `Error: ${errorText}` }] }
							: m
					)
				);
			} finally {
				setIsRunning(false);
				abortControllerRef.current = null;
			}
		},
		[messages, doStream]
	);

	/** Called when Turnstile resolves successfully. Stores the token and auto-retries. */
	const handleTurnstileSuccess = useCallback(
		async (token: string) => {
			turnstileTokenRef.current = token;
			setCaptchaRequired(false);

			const pending = pendingRetryRef.current;
			if (!pending) return;
			pendingRetryRef.current = null;

			setIsRunning(true);
			const controller = new AbortController();
			abortControllerRef.current = controller;

			// Fresh assistant placeholder for the retried request.
			const assistantMsgId = `msg-assistant-${Date.now()}`;
			setMessages((prev) => [
				...prev,
				{
					id: assistantMsgId,
					role: "assistant" as const,
					content: [{ type: "text" as const, text: "" }],
					createdAt: new Date(),
				},
			]);

			try {
				const result = await doStream(
					pending.messageHistory,
					assistantMsgId,
					controller.signal,
					token
				);
				turnstileTokenRef.current = null;
				if (result === "captcha") {
					// Server rejected the token: re-park the retry and re-arm the widget.
					setMessages((prev) => prev.filter((m) => m.id !== assistantMsgId));
					pendingRetryRef.current = pending;
					setCaptchaRequired(true);
					turnstileRef.current?.reset();
				}
			} catch (error) {
				if (error instanceof Error && error.name === "AbortError") return;
				console.error("[FreeChatPage] Retry error:", error);
				const errorText = error instanceof Error ? error.message : "An unexpected error occurred";
				setMessages((prev) =>
					prev.map((m) =>
						m.id === assistantMsgId
							? { ...m, content: [{ type: "text" as const, text: `Error: ${errorText}` }] }
							: m
					)
				);
			} finally {
				setIsRunning(false);
				abortControllerRef.current = null;
			}
		},
		[doStream]
	);

	// Messages are already stored in ThreadMessageLike shape; identity conversion.
	const convertMessage = useCallback(
		(message: ThreadMessageLike): ThreadMessageLike => message,
		[]
	);

	const runtime = useExternalStoreRuntime({
		messages,
		isRunning,
		onNew,
		convertMessage,
		onCancel: cancelRun,
	});

	return (
		<TokenUsageProvider store={tokenUsageStore}>
			<AssistantRuntimeProvider runtime={runtime}>
				<ThinkingStepsDataUI />
				<div className="flex h-full flex-col overflow-hidden">
					<div className="flex h-14 shrink-0 items-center justify-between border-b border-border/40 px-4">
						<FreeModelSelector />
					</div>
					{captchaRequired && TURNSTILE_SITE_KEY && (
						<div className="flex flex-col items-center gap-3 border-b border-border/40 bg-muted/30 py-4">
							<div className="flex items-center gap-2 text-sm text-muted-foreground">
								<ShieldCheck className="h-4 w-4" />
								<span>Quick verification to continue chatting</span>
							</div>
							<Turnstile
								ref={turnstileRef}
								siteKey={TURNSTILE_SITE_KEY}
								onSuccess={handleTurnstileSuccess}
								onError={() => turnstileRef.current?.reset()}
								onExpire={() => turnstileRef.current?.reset()}
								options={{ theme: "auto", size: "normal" }}
							/>
						</div>
					)}
					<div className="flex flex-1 min-h-0 overflow-hidden">
						<div className="flex-1 flex flex-col min-w-0">
							<FreeThread />
						</div>
					</div>
				</div>
			</AssistantRuntimeProvider>
		</TokenUsageProvider>
	);
}

View file

@ -0,0 +1,262 @@
"use client";
import { ComposerPrimitive, useAui, useAuiState } from "@assistant-ui/react";
import { ArrowUpIcon, Globe, Paperclip, SquareIcon } from "lucide-react";
import { type FC, useCallback, useRef, useState } from "react";
import { toast } from "sonner";
import { TooltipIconButton } from "@/components/assistant-ui/tooltip-icon-button";
import { Switch } from "@/components/ui/switch";
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
import { useAnonymousMode } from "@/contexts/anonymous-mode";
import { useLoginGate } from "@/contexts/login-gate";
import { BACKEND_URL } from "@/lib/env-config";
import { cn } from "@/lib/utils";
// File extensions anonymous visitors may upload: plain-text formats only
// (source code, markup, config, logs, tabular text). Anything outside this
// set is login-gated in the composer's file handler.
const ANON_ALLOWED_EXTENSIONS = new Set([
	".md",
	".markdown",
	".txt",
	".text",
	".json",
	".jsonl",
	".yaml",
	".yml",
	".toml",
	".ini",
	".cfg",
	".conf",
	".xml",
	".css",
	".scss",
	".py",
	".js",
	".jsx",
	".ts",
	".tsx",
	".java",
	".kt",
	".go",
	".rs",
	".rb",
	".php",
	".c",
	".h",
	".cpp",
	".hpp",
	".cs",
	".swift",
	".sh",
	".sql",
	".log",
	".rst",
	".tex",
	".vue",
	".svelte",
	".astro",
	".tf",
	".proto",
	".csv",
	".tsv",
	".html",
	".htm",
	".xhtml",
]);

// Comma-separated list fed to the hidden file input's `accept` attribute.
const ACCEPT_EXTENSIONS = Array.from(ANON_ALLOWED_EXTENSIONS).join(",");
/**
 * Composer for the anonymous (no-login) chat experience.
 *
 * Mirrors its textarea into the assistant-ui composer and sends on Enter.
 * Login-gated interactions ("/" saved prompts, "@" mentions, binary uploads,
 * a second upload) call `gate(...)` instead of performing the action.
 * Anonymous visitors may upload exactly one plain-text document via the
 * public anon-chat upload endpoint.
 */
export const FreeComposer: FC = () => {
	const aui = useAui();
	// Note: only isRunning is subscribed; an unused isEmpty subscription was
	// removed to avoid re-rendering on thread-emptiness changes.
	const isRunning = useAuiState(({ thread }) => thread.isRunning);
	const { gate } = useLoginGate();
	const anonMode = useAnonymousMode();
	const [text, setText] = useState("");
	// Local-only toggle state. NOTE(review): not included in any request built
	// here — confirm the send path actually consumes it.
	const [webSearchEnabled, setWebSearchEnabled] = useState(true);
	const fileInputRef = useRef<HTMLInputElement>(null);
	// Anonymous visitors get exactly one uploaded document.
	const hasUploadedDoc = anonMode.isAnonymous && anonMode.uploadedDoc !== null;

	// Mirror the textarea into both local state and the assistant-ui composer.
	const handleTextChange = useCallback(
		(e: React.ChangeEvent<HTMLTextAreaElement>) => {
			setText(e.target.value);
			aui.composer().setText(e.target.value);
		},
		[aui]
	);

	// Keyboard handling: "/" (on empty input) and "@" are login-gated; Enter sends.
	const handleKeyDown = useCallback(
		(e: React.KeyboardEvent<HTMLTextAreaElement>) => {
			if (e.key === "/" && text === "") {
				e.preventDefault();
				gate("use saved prompts");
				return;
			}
			if (e.key === "@") {
				e.preventDefault();
				gate("mention documents");
				return;
			}
			if (e.key === "Enter" && !e.shiftKey) {
				e.preventDefault();
				if (text.trim()) {
					aui.composer().send();
					setText("");
				}
			}
		},
		[text, aui, gate]
	);

	// Opens the hidden file input, unless the single-document quota is used up.
	const handleUploadClick = useCallback(() => {
		if (hasUploadedDoc) {
			gate("upload more documents");
			return;
		}
		fileInputRef.current?.click();
	}, [hasUploadedDoc, gate]);

	// Uploads the chosen file to the anonymous upload endpoint and records it
	// in the anonymous-mode context on success.
	const handleFileChange = useCallback(
		async (e: React.ChangeEvent<HTMLInputElement>) => {
			const file = e.target.files?.[0];
			if (!file) return;
			// Clear the input so picking the same file again re-triggers onChange.
			e.target.value = "";

			const ext = `.${file.name.split(".").pop()?.toLowerCase()}`;
			if (!ANON_ALLOWED_EXTENSIONS.has(ext)) {
				// Binary formats (PDF, DOCX, images, ...) require an account.
				gate("upload PDFs, Word documents, images, and more");
				return;
			}

			try {
				const formData = new FormData();
				formData.append("file", file);
				const res = await fetch(`${BACKEND_URL}/api/v1/public/anon-chat/upload`, {
					method: "POST",
					credentials: "include",
					body: formData,
				});
				// 409: this anonymous session already has its one allowed document.
				if (res.status === 409) {
					gate("upload more documents");
					return;
				}
				if (!res.ok) {
					const body = await res.json().catch(() => ({}));
					throw new Error(body.detail || `Upload failed: ${res.status}`);
				}
				const data = await res.json();
				if (anonMode.isAnonymous) {
					anonMode.setUploadedDoc({
						filename: data.filename,
						sizeBytes: data.size_bytes,
					});
				}
				toast.success(`Uploaded "${data.filename}"`);
			} catch (err) {
				console.error("Upload failed:", err);
				toast.error(err instanceof Error ? err.message : "Upload failed");
			}
		},
		[gate, anonMode]
	);

	return (
		<ComposerPrimitive.Root className="aui-composer-root relative mx-auto flex w-full max-w-(--thread-max-width) flex-col rounded-2xl border border-border/40 bg-background shadow-xs transition-shadow focus-within:shadow-md dark:bg-neutral-900">
			{hasUploadedDoc && anonMode.isAnonymous && (
				<div className="flex items-center gap-2 px-3 pt-2">
					<Paperclip className="size-3.5 text-muted-foreground" />
					<span className="text-xs text-muted-foreground truncate">
						{anonMode.uploadedDoc?.filename}
					</span>
					<span className="text-xs text-muted-foreground/60">(1/1)</span>
				</div>
			)}
			<textarea
				placeholder="Ask anything..."
				value={text}
				onChange={handleTextChange}
				onKeyDown={handleKeyDown}
				rows={1}
				className={cn(
					"w-full resize-none bg-transparent px-4 pt-3 pb-0 text-sm",
					"placeholder:text-muted-foreground focus:outline-none",
					"min-h-[44px] max-h-[200px]"
				)}
				style={{ fieldSizing: "content" } as React.CSSProperties}
			/>
			<div className="flex items-center justify-between gap-2 px-3 pb-2 pt-1">
				<div className="flex items-center gap-2">
					<input
						ref={fileInputRef}
						type="file"
						accept={ACCEPT_EXTENSIONS}
						className="hidden"
						onChange={handleFileChange}
					/>
					<Tooltip>
						<TooltipTrigger asChild>
							<button
								type="button"
								onClick={handleUploadClick}
								className={cn(
									"flex items-center gap-1.5 rounded-md px-2 py-1 text-xs transition-colors",
									"text-muted-foreground hover:text-foreground hover:bg-accent/50",
									hasUploadedDoc && "text-primary"
								)}
							>
								<Paperclip className="size-3.5" />
								{hasUploadedDoc ? "1/1" : "Upload"}
							</button>
						</TooltipTrigger>
						<TooltipContent>
							{hasUploadedDoc
								? "Document limit reached. Create an account for more."
								: "Upload a document (text files only)"}
						</TooltipContent>
					</Tooltip>
					<div className="h-4 w-px bg-border/60" />
					<Tooltip>
						<TooltipTrigger asChild>
							<label htmlFor="free-web-search-toggle" className="flex items-center gap-1.5 cursor-pointer select-none rounded-md px-2 py-1 text-xs text-muted-foreground hover:text-foreground hover:bg-accent/50 transition-colors">
								<Globe className="size-3.5" />
								<span className="hidden sm:inline">Web</span>
								<Switch
									id="free-web-search-toggle"
									checked={webSearchEnabled}
									onCheckedChange={setWebSearchEnabled}
									className="scale-75"
								/>
							</label>
						</TooltipTrigger>
						<TooltipContent>Toggle web search</TooltipContent>
					</Tooltip>
				</div>
				<div className="flex items-center gap-1">
					{!isRunning ? (
						<ComposerPrimitive.Send asChild>
							<TooltipIconButton tooltip="Send" variant="default" className="size-8 rounded-full">
								<ArrowUpIcon />
							</TooltipIconButton>
						</ComposerPrimitive.Send>
					) : (
						<ComposerPrimitive.Cancel asChild>
							<TooltipIconButton
								tooltip="Cancel"
								variant="destructive"
								className="size-8 rounded-full"
							>
								<SquareIcon className="size-3.5" />
							</TooltipIconButton>
						</ComposerPrimitive.Cancel>
					)}
				</div>
			</div>
		</ComposerPrimitive.Root>
	);
};

View file

@ -0,0 +1,195 @@
"use client";
import { Bot, Check, ChevronDown, Search } from "lucide-react";
import { useRouter } from "next/navigation";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import { Popover, PopoverContent, PopoverTrigger } from "@/components/ui/popover";
import { useAnonymousMode } from "@/contexts/anonymous-mode";
import type { AnonModel } from "@/contracts/types/anonymous-chat.types";
import { anonymousChatApiService } from "@/lib/apis/anonymous-chat-api.service";
import { getProviderIcon } from "@/lib/provider-icons";
import { cn } from "@/lib/utils";
/**
 * Searchable model picker shown in the free (anonymous) chat header.
 *
 * Loads the public model list once, filters it client-side, supports
 * ArrowUp/ArrowDown/Enter navigation from the search box, and on selection
 * resets the anonymous chat and navigates to the chosen model's
 * /free/<slug> route.
 */
export function FreeModelSelector({ className }: { className?: string }) {
	const router = useRouter();
	const anonMode = useAnonymousMode();
	// Slug of the active model ("" when the visitor is not in anonymous mode).
	const currentSlug = anonMode.isAnonymous ? anonMode.modelSlug : "";
	const [open, setOpen] = useState(false);
	const [models, setModels] = useState<AnonModel[]>([]);
	const [searchQuery, setSearchQuery] = useState("");
	// Index of the keyboard-focused row in the filtered list (-1 = none).
	const [focusedIndex, setFocusedIndex] = useState(-1);
	const searchInputRef = useRef<HTMLInputElement>(null);

	// Fetch the public model catalog once on mount.
	useEffect(() => {
		anonymousChatApiService.getModels().then(setModels).catch(console.error);
	}, []);

	// Reset search state and focus the search box each time the popover opens.
	useEffect(() => {
		if (open) {
			setSearchQuery("");
			setFocusedIndex(-1);
			// rAF: wait for the popover content to mount before focusing.
			requestAnimationFrame(() => searchInputRef.current?.focus());
		}
	}, [open]);

	const currentModel = useMemo(
		() => models.find((m) => m.seo_slug === currentSlug) ?? null,
		[models, currentSlug]
	);

	// Case-insensitive match against display name, underlying model name, or provider.
	const filteredModels = useMemo(() => {
		if (!searchQuery.trim()) return models;
		const q = searchQuery.toLowerCase();
		return models.filter(
			(m) =>
				m.name.toLowerCase().includes(q) ||
				m.model_name.toLowerCase().includes(q) ||
				m.provider.toLowerCase().includes(q)
		);
	}, [models, searchQuery]);

	// Switches to the chosen model: resets the anonymous chat and updates the URL.
	const handleSelect = useCallback(
		(model: AnonModel) => {
			setOpen(false);
			// Selecting the already-active model is a no-op.
			if (model.seo_slug === currentSlug) return;
			if (anonMode.isAnonymous) {
				anonMode.setModelSlug(model.seo_slug ?? "");
				anonMode.resetChat();
			}
			// replace (not push) so model hops don't pile up in browser history.
			router.replace(`/free/${model.seo_slug}`);
		},
		[currentSlug, anonMode, router]
	);

	// Arrow keys cycle through the filtered list (wrapping); Enter selects.
	const handleKeyDown = useCallback(
		(e: React.KeyboardEvent<HTMLInputElement>) => {
			const count = filteredModels.length;
			if (count === 0) return;
			switch (e.key) {
				case "ArrowDown":
					e.preventDefault();
					setFocusedIndex((p) => (p < count - 1 ? p + 1 : 0));
					break;
				case "ArrowUp":
					e.preventDefault();
					setFocusedIndex((p) => (p > 0 ? p - 1 : count - 1));
					break;
				case "Enter":
					e.preventDefault();
					if (focusedIndex >= 0 && focusedIndex < count) {
						handleSelect(filteredModels[focusedIndex]);
					}
					break;
			}
		},
		[filteredModels, focusedIndex, handleSelect]
	);

	return (
		<Popover open={open} onOpenChange={setOpen}>
			<PopoverTrigger asChild>
				<Button
					variant="ghost"
					size="sm"
					role="combobox"
					aria-expanded={open}
					className={cn(
						"h-8 gap-2 px-3 text-sm bg-main-panel hover:bg-accent/50 dark:hover:bg-white/6 border border-border/40 select-none",
						className
					)}
				>
					{currentModel ? (
						<>
							{getProviderIcon(currentModel.provider, { className: "size-4" })}
							<span className="max-w-[160px] truncate">{currentModel.name}</span>
						</>
					) : (
						<>
							<Bot className="size-4 text-muted-foreground" />
							<span className="text-muted-foreground">Select Model</span>
						</>
					)}
					<ChevronDown className="h-3.5 w-3.5 text-muted-foreground ml-1 shrink-0" />
				</Button>
			</PopoverTrigger>
			<PopoverContent
				className="w-[320px] p-0 rounded-lg shadow-lg overflow-hidden bg-white border-border/60 dark:bg-neutral-900 dark:border dark:border-white/5 select-none"
				align="start"
				sideOffset={8}
				onCloseAutoFocus={(e) => e.preventDefault()}
			>
				<div className="relative">
					<Search className="absolute left-3 top-1/2 -translate-y-1/2 size-3.5 text-muted-foreground pointer-events-none" />
					<input
						ref={searchInputRef}
						placeholder="Search models"
						value={searchQuery}
						onChange={(e) => setSearchQuery(e.target.value)}
						onKeyDown={handleKeyDown}
						className="w-full pl-8 pr-3 py-2.5 text-sm bg-transparent focus:outline-none placeholder:text-muted-foreground"
					/>
				</div>
				<div className="overflow-y-auto max-h-[320px] py-1 space-y-0.5">
					{filteredModels.length === 0 ? (
						<div className="flex flex-col items-center justify-center gap-2 py-8 px-4">
							<Search className="size-6 text-muted-foreground" />
							<p className="text-sm text-muted-foreground">No models found</p>
						</div>
					) : (
						filteredModels.map((model, index) => {
							const isSelected = model.seo_slug === currentSlug;
							const isFocused = focusedIndex === index;
							return (
								<div
									key={model.id}
									role="option"
									tabIndex={0}
									aria-selected={isSelected}
									onClick={() => handleSelect(model)}
									onKeyDown={(e) => {
										if (e.key === "Enter" || e.key === " ") {
											e.preventDefault();
											handleSelect(model);
										}
									}}
									onMouseEnter={() => setFocusedIndex(index)}
									className={cn(
										"group flex items-center gap-2.5 px-3 py-2 rounded-xl cursor-pointer",
										"transition-all duration-150 mx-2",
										"hover:bg-accent/40",
										isSelected && "bg-primary/6 dark:bg-primary/8",
										isFocused && "bg-accent/50"
									)}
								>
									<div className="shrink-0">
										{getProviderIcon(model.provider, { className: "size-5" })}
									</div>
									<div className="flex-1 min-w-0">
										<div className="flex items-center gap-1.5">
											<span className="font-medium text-sm truncate">{model.name}</span>
											{model.is_premium && (
												<Badge
													variant="secondary"
													className="text-[9px] px-1 py-0 h-3.5 bg-purple-100 text-purple-700 dark:bg-purple-900/50 dark:text-purple-300 border-0"
												>
													Premium
												</Badge>
											)}
										</div>
										<span className="text-xs text-muted-foreground truncate block">
											{model.model_name}
										</span>
									</div>
									{isSelected && <Check className="size-4 text-primary shrink-0" />}
								</div>
							);
						})
					)}
				</div>
			</PopoverContent>
		</Popover>
	);
}

View file

@ -0,0 +1,45 @@
"use client";
import { Lock } from "lucide-react";
import Link from "next/link";
import type { FC } from "react";
import { Button } from "@/components/ui/button";
/** Props for the generic locked-feature placeholder. */
interface GatedTabProps {
	/** Short feature name rendered as the heading. */
	title: string;
	/** One-sentence pitch for why the visitor should create an account. */
	description: string;
}

// Generic "create an account to unlock" card shown in place of a gated tab
// for anonymous (not-logged-in) visitors. Purely presentational: lock icon,
// heading, pitch text, and a sign-up CTA linking to /register.
const GatedTab: FC<GatedTabProps> = ({ title, description }) => (
	<div className="flex flex-col items-center justify-center gap-3 p-8 text-center">
		<div className="rounded-full bg-muted p-3">
			<Lock className="size-5 text-muted-foreground" />
		</div>
		<h3 className="text-sm font-medium">{title}</h3>
		<p className="text-xs text-muted-foreground max-w-[200px]">{description}</p>
		<Button size="sm" asChild>
			<Link href="/register">Create Free Account</Link>
		</Button>
	</div>
);
// Placeholder shown on the Reports tab for anonymous visitors.
export const ReportsGatedPlaceholder: FC = () => (
	<GatedTab
		title="Generate Reports"
		description="Create a free account to generate structured reports from your conversations."
	/>
);

// Placeholder shown on the document Editor tab for anonymous visitors.
export const EditorGatedPlaceholder: FC = () => (
	<GatedTab
		title="Document Editor"
		description="Create a free account to use the AI-powered document editor."
	/>
);

// Placeholder shown on the human-in-the-loop editing tab for anonymous visitors.
export const HitlGatedPlaceholder: FC = () => (
	<GatedTab
		title="Human-in-the-Loop Editing"
		description="Create a free account to collaborate with AI on document edits."
	/>
);

View file

@ -0,0 +1,82 @@
"use client";
import { AuiIf, ThreadPrimitive } from "@assistant-ui/react";
import { ArrowDownIcon } from "lucide-react";
import type { FC } from "react";
import { AssistantMessage } from "@/components/assistant-ui/assistant-message";
import { EditComposer } from "@/components/assistant-ui/edit-composer";
import { TooltipIconButton } from "@/components/assistant-ui/tooltip-icon-button";
import { UserMessage } from "@/components/assistant-ui/user-message";
import { FreeComposer } from "./free-composer";
// Empty-state hero for the free (no-login) chat thread: a centered headline
// with the composer anchored just below the vertical midpoint.
const FreeThreadWelcome: FC = () => {
	return (
		<div className="aui-thread-welcome-root mx-auto flex w-full max-w-(--thread-max-width) grow flex-col items-center px-4 relative">
			<div className="aui-thread-welcome-message absolute bottom-[calc(50%+5rem)] left-0 right-0 flex flex-col items-center text-center">
				<h1 className="aui-thread-welcome-message-inner text-3xl md:text-5xl select-none">
					What can I help with?
				</h1>
			</div>
			{/* Composer sits slightly above center so the headline + input read as one unit */}
			<div className="w-full flex items-start justify-center absolute top-[calc(50%-3.5rem)] left-0 right-0">
				<FreeComposer />
			</div>
		</div>
	);
};
// Floating "scroll to bottom" button; ThreadPrimitive.ScrollToBottom disables
// it (and the `disabled:invisible` class hides it) when already at the bottom.
const ThreadScrollToBottom: FC = () => {
	return (
		<ThreadPrimitive.ScrollToBottom asChild>
			<TooltipIconButton
				tooltip="Scroll to bottom"
				variant="outline"
				className="aui-thread-scroll-to-bottom -top-12 absolute z-10 self-center rounded-full p-4 disabled:invisible dark:bg-main-panel dark:hover:bg-accent"
			>
				<ArrowDownIcon />
			</TooltipIconButton>
		</ThreadPrimitive.ScrollToBottom>
	);
};
/**
 * Chat thread layout for the anonymous/free experience.
 *
 * Renders the welcome hero (with embedded composer) while the thread is empty,
 * then switches to the message list with a sticky footer composer and a
 * scroll-to-bottom affordance once messages exist.
 */
export const FreeThread: FC = () => {
	return (
		<ThreadPrimitive.Root
			className="aui-root aui-thread-root @container flex h-full min-h-0 flex-col bg-main-panel"
			style={{
				// Shared max-width consumed by welcome hero and footer via CSS var
				["--thread-max-width" as string]: "44rem",
			}}
		>
			<ThreadPrimitive.Viewport
				turnAnchor="top"
				className="aui-thread-viewport relative flex flex-1 min-h-0 flex-col overflow-y-auto px-4 pt-4"
				style={{ scrollbarGutter: "stable" }}
			>
				{/* Empty thread: show the centered welcome state (includes its own composer) */}
				<AuiIf condition={({ thread }) => thread.isEmpty}>
					<FreeThreadWelcome />
				</AuiIf>
				<ThreadPrimitive.Messages
					components={{
						UserMessage,
						EditComposer,
						AssistantMessage,
					}}
				/>
				{/* Spacer pushes the footer to the bottom once messages exist */}
				<AuiIf condition={({ thread }) => !thread.isEmpty}>
					<div className="grow" />
				</AuiIf>
				<ThreadPrimitive.ViewportFooter
					className="aui-thread-viewport-footer sticky bottom-0 z-10 mx-auto flex w-full max-w-(--thread-max-width) flex-col gap-4 overflow-visible rounded-t-3xl bg-main-panel pb-4 md:pb-6"
					style={{ paddingBottom: "max(1rem, env(safe-area-inset-bottom))" }}
				>
					<ThreadScrollToBottom />
					{/* Footer composer only when the thread has content; empty state uses the hero's */}
					<AuiIf condition={({ thread }) => !thread.isEmpty}>
						<FreeComposer />
					</AuiIf>
				</ThreadPrimitive.ViewportFooter>
			</ThreadPrimitive.Viewport>
		</ThreadPrimitive.Root>
	);
};

View file

@ -0,0 +1,57 @@
"use client";
import { OctagonAlert, Orbit } from "lucide-react";
import Link from "next/link";
import { Progress } from "@/components/ui/progress";
import { cn } from "@/lib/utils";
/** Props for the free-tier token quota meter. */
interface QuotaBarProps {
	/** Tokens consumed so far in the anonymous session. */
	used: number;
	/** Total free-token allowance. */
	limit: number;
	/** Usage level (in tokens) at which the amber warning state appears. */
	warningThreshold: number;
	className?: string;
}

/**
 * Compact token-quota meter for the no-login experience.
 *
 * Shows used/limit counts, a progress bar that turns amber past
 * `warningThreshold` and red at the limit, and a sign-up CTA once the
 * allowance is exhausted.
 */
export function QuotaBar({ used, limit, warningThreshold, className }: QuotaBarProps) {
	// Guard limit <= 0: the naive division would yield Infinity/NaN, which
	// would then feed an invalid value to <Progress> and percentage.toFixed().
	// A non-positive limit is treated as fully consumed.
	const percentage = limit > 0 ? Math.min((used / limit) * 100, 100) : 100;
	const remaining = Math.max(limit - used, 0);
	const isWarning = used >= warningThreshold;
	const isExceeded = used >= limit;

	return (
		<div className={cn("space-y-1.5", className)}>
			<div className="flex justify-between items-center text-xs">
				<span className="text-muted-foreground">
					{used.toLocaleString()} / {limit.toLocaleString()} tokens
				</span>
				{isExceeded ? (
					<span className="font-medium text-red-500">Limit reached</span>
				) : isWarning ? (
					<span className="font-medium text-amber-500 flex items-center gap-1">
						<OctagonAlert className="h-3 w-3" />
						{remaining.toLocaleString()} remaining
					</span>
				) : (
					<span className="font-medium">{percentage.toFixed(0)}%</span>
				)}
			</div>
			<Progress
				value={percentage}
				className={cn(
					"h-1.5",
					isExceeded && "[&>div]:bg-red-500",
					isWarning && !isExceeded && "[&>div]:bg-amber-500"
				)}
			/>
			{isExceeded && (
				<Link
					href="/register"
					className="flex items-center justify-center gap-1.5 rounded-md bg-linear-to-r from-purple-600 to-blue-600 px-3 py-1.5 text-xs font-medium text-white transition-opacity hover:opacity-90"
				>
					<Orbit className="h-3 w-3" />
					Create free account for 5M more tokens
				</Link>
			)}
		</div>
	);
}

View file

@ -0,0 +1,84 @@
"use client";
import { OctagonAlert, Orbit, X } from "lucide-react";
import Link from "next/link";
import { useState } from "react";
import { cn } from "@/lib/utils";
/** Props for the dismissible free-tier quota banner. */
interface QuotaWarningBannerProps {
	/** Tokens consumed so far. */
	used: number;
	/** Total free-token allowance. */
	limit: number;
	/** Usage level (in tokens) at which the amber warning variant appears. */
	warningThreshold: number;
	className?: string;
}

/**
 * Banner surfaced to anonymous users as they approach or exhaust the free
 * token allowance.
 *
 * Renders nothing while usage is below `warningThreshold` or after the user
 * dismisses it (dismissal is per-mount component state, not persisted here).
 * Between the threshold and the limit it shows a dismissible amber notice;
 * at/over the limit it shows a non-dismissible red notice with a sign-up CTA.
 */
export function QuotaWarningBanner({
	used,
	limit,
	warningThreshold,
	className,
}: QuotaWarningBannerProps) {
	const [dismissed, setDismissed] = useState(false);

	const isWarning = used >= warningThreshold && used < limit;
	const isExceeded = used >= limit;

	if (dismissed || (!isWarning && !isExceeded)) return null;

	if (isExceeded) {
		// Hard stop: limit reached — red variant, no dismiss button.
		return (
			<div
				className={cn(
					"rounded-lg border border-red-200 bg-red-50 dark:border-red-800 dark:bg-red-950/50 p-4",
					className
				)}
			>
				<div className="flex items-start gap-3">
					<OctagonAlert className="h-5 w-5 text-red-500 shrink-0 mt-0.5" />
					<div className="flex-1 space-y-2">
						<p className="text-sm font-medium text-red-800 dark:text-red-200">
							Free token limit reached
						</p>
						<p className="text-xs text-red-600 dark:text-red-300">
							You&apos;ve used all {limit.toLocaleString()} free tokens. Create a free account to
							get 5 million tokens and access to all models.
						</p>
						<Link
							href="/register"
							className="inline-flex items-center gap-1.5 rounded-md bg-linear-to-r from-purple-600 to-blue-600 px-4 py-2 text-sm font-medium text-white transition-opacity hover:opacity-90"
						>
							<Orbit className="h-4 w-4" />
							Create Free Account
						</Link>
					</div>
				</div>
			</div>
		);
	}

	// Soft warning: approaching the limit — amber variant with dismiss button.
	return (
		<div
			className={cn(
				"rounded-lg border border-amber-200 bg-amber-50 dark:border-amber-800 dark:bg-amber-950/50 p-3",
				className
			)}
		>
			<div className="flex items-center gap-3">
				<OctagonAlert className="h-4 w-4 text-amber-500 shrink-0" />
				<p className="flex-1 text-xs text-amber-700 dark:text-amber-300">
					You&apos;ve used {used.toLocaleString()} of {limit.toLocaleString()} free tokens.{" "}
					<Link href="/register" className="font-medium underline hover:no-underline">
						Create an account
					</Link>{" "}
					for 5M free tokens.
				</p>
				<button
					type="button"
					onClick={() => setDismissed(true)}
					className="text-amber-400 hover:text-amber-600 dark:hover:text-amber-200"
				>
					<X className="h-4 w-4" />
				</button>
			</div>
		</div>
	);
}

View file

@ -35,6 +35,7 @@ export const Navbar = ({ scrolledBgClassName }: NavbarProps = {}) => {
const [isScrolled, setIsScrolled] = useState(false); const [isScrolled, setIsScrolled] = useState(false);
const navItems = [ const navItems = [
{ name: "Free\u00A0AI", link: "/free" },
{ name: "Pricing", link: "/pricing" }, { name: "Pricing", link: "/pricing" },
{ name: "Blog", link: "/blog" }, { name: "Blog", link: "/blog" },
{ name: "Changelog", link: "/changelog" }, { name: "Changelog", link: "/changelog" },

View file

@ -0,0 +1,142 @@
"use client";
import { Inbox, Megaphone, SquareLibrary } from "lucide-react";
import { useRouter } from "next/navigation";
import type { ReactNode } from "react";
import { Fragment, useCallback, useEffect, useMemo, useState } from "react";
import { useAnonymousMode } from "@/contexts/anonymous-mode";
import { useLoginGate } from "@/contexts/login-gate";
import { useIsMobile } from "@/hooks/use-mobile";
import { anonymousChatApiService } from "@/lib/apis/anonymous-chat-api.service";
import type { ChatItem, NavItem, PageUsage, SearchSpace } from "../types/layout.types";
import { LayoutShell } from "../ui/shell";
/** Props for the anonymous layout data provider. */
interface FreeLayoutDataProviderProps {
	children: ReactNode;
}

// Static stand-in search space displayed to anonymous visitors. It is not
// backed by the server; every interaction with it is gated behind sign-up.
const GUEST_SPACE: SearchSpace = {
	id: 0,
	name: "SurfSense Free",
	description: "Free AI chat without login",
	isOwner: false,
	memberCount: 1,
};
/**
 * Layout data provider for the no-login ("/free") experience.
 *
 * Feeds LayoutShell with static guest data (a single fake search space, no
 * chats) and wires nearly every action to the login gate so any privileged
 * interaction prompts the visitor to create an account. The only live data is
 * the anonymous token quota fetched once on mount.
 */
export function FreeLayoutDataProvider({ children }: FreeLayoutDataProviderProps) {
	const router = useRouter();
	const { gate } = useLoginGate();
	const anonMode = useAnonymousMode();
	const isMobile = useIsMobile();
	const [quota, setQuota] = useState<{ used: number; limit: number } | null>(null);
	const [isDocsSidebarOpen, setIsDocsSidebarOpen] = useState(false);

	// Keep docs sidebar closed on mobile; auto-open only on desktop after hydration
	useEffect(() => {
		setIsDocsSidebarOpen(!isMobile);
	}, [isMobile]);

	// One-shot quota fetch; failures are silently ignored (quota UI just stays hidden).
	useEffect(() => {
		anonymousChatApiService
			.getQuota()
			.then((q) => {
				setQuota({ used: q.used, limit: q.limit });
			})
			.catch(() => {});
	}, []);

	// "New chat" simply resets the in-memory anonymous conversation.
	const resetChat = useCallback(() => {
		if (anonMode.isAnonymous) {
			anonMode.resetChat();
		}
	}, [anonMode]);

	// Factory producing a click handler that opens the login gate for `feature`.
	const gatedAction = useCallback((feature: string) => () => gate(feature), [gate]);

	// Nav entries; "Documents" only appears on mobile (desktop has a docked panel).
	const navItems: NavItem[] = useMemo(
		() =>
			[
				{
					title: "Inbox",
					url: "#inbox",
					icon: Inbox,
					isActive: false,
				},
				isMobile
					? {
							title: "Documents",
							url: "#documents",
							icon: SquareLibrary,
							isActive: false,
						}
					: null,
				{
					title: "Announcements",
					url: "#announcements",
					icon: Megaphone,
					isActive: false,
				},
			].filter((item): item is NavItem => item !== null),
		[isMobile]
	);

	// Reuse the page-usage slot of the shell to surface the token quota.
	const pageUsage: PageUsage | undefined = quota
		? { pagesUsed: quota.used, pagesLimit: quota.limit }
		: undefined;

	const handleChatSelect = useCallback((_chat: ChatItem) => gate("view chat history"), [gate]);

	// Documents toggles the local sidebar; everything else is gated.
	const handleNavItemClick = useCallback(
		(item: NavItem) => {
			if (item.title === "Inbox") gate("use the inbox");
			else if (item.title === "Documents") setIsDocsSidebarOpen((v) => !v);
			else if (item.title === "Announcements") gate("view announcements");
		},
		[gate]
	);

	const handleSearchSpaceSelect = useCallback(
		(_id: number) => gate("switch search spaces"),
		[gate]
	);

	return (
		<LayoutShell
			searchSpaces={[GUEST_SPACE]}
			activeSearchSpaceId={0}
			onSearchSpaceSelect={handleSearchSpaceSelect}
			onSearchSpaceSettings={gatedAction("search space settings")}
			onAddSearchSpace={gatedAction("create search spaces")}
			searchSpace={GUEST_SPACE}
			navItems={navItems}
			onNavItemClick={handleNavItemClick}
			chats={[]}
			sharedChats={[]}
			activeChatId={null}
			onNewChat={resetChat}
			onChatSelect={handleChatSelect}
			onChatRename={gatedAction("rename chats")}
			onChatDelete={gatedAction("delete chats")}
			onChatArchive={gatedAction("archive chats")}
			onViewAllSharedChats={gatedAction("view shared chats")}
			onViewAllPrivateChats={gatedAction("view chat history")}
			user={{
				email: "Guest",
				name: "Guest",
			}}
			onSettings={gatedAction("search space settings")}
			onManageMembers={gatedAction("team management")}
			onUserSettings={gatedAction("account settings")}
			onLogout={() => router.push("/register")}
			pageUsage={pageUsage}
			isChatPage
			isLoadingChats={false}
			documentsPanel={{
				open: isDocsSidebarOpen,
				onOpenChange: setIsDocsSidebarOpen,
			}}
		>
			<Fragment>{children}</Fragment>
		</LayoutShell>
	);
}

View file

@ -22,6 +22,7 @@ export function Header({ mobileMenuTrigger }: HeaderProps) {
const activeTab = useAtomValue(activeTabAtom); const activeTab = useAtomValue(activeTabAtom);
const tabs = useAtomValue(tabsAtom); const tabs = useAtomValue(tabsAtom);
const isFreePage = pathname?.startsWith("/free") ?? false;
const isChatPage = pathname?.includes("/new-chat") ?? false; const isChatPage = pathname?.includes("/new-chat") ?? false;
const isDocumentTab = activeTab?.type === "document"; const isDocumentTab = activeTab?.type === "document";
const hasTabBar = tabs.length > 1; const hasTabBar = tabs.length > 1;
@ -30,6 +31,16 @@ export function Header({ mobileMenuTrigger }: HeaderProps) {
const hasThread = isChatPage && !isDocumentTab && currentThreadState.id !== null; const hasThread = isChatPage && !isDocumentTab && currentThreadState.id !== null;
// Free chat pages have their own header with model selector; only render mobile trigger
if (isFreePage) {
if (!mobileMenuTrigger) return null;
return (
<header className="sticky top-0 z-10 flex h-14 shrink-0 items-center gap-2 bg-main-panel/95 backdrop-blur supports-backdrop-filter:bg-main-panel/60 px-4">
{mobileMenuTrigger}
</header>
);
}
const threadForButton: ThreadRecord | null = const threadForButton: ThreadRecord | null =
hasThread && currentThreadState.id !== null hasThread && currentThreadState.id !== null
? { ? {

View file

@ -2,10 +2,23 @@
import { useQuery } from "@rocicorp/zero/react"; import { useQuery } from "@rocicorp/zero/react";
import { useAtom, useAtomValue, useSetAtom } from "jotai"; import { useAtom, useAtomValue, useSetAtom } from "jotai";
import { ChevronLeft, ChevronRight, FolderClock, Trash2, Unplug } from "lucide-react"; import {
ChevronLeft,
ChevronRight,
FileText,
FolderClock,
Lock,
Paperclip,
Trash2,
Unplug,
Upload,
X,
} from "lucide-react";
import Link from "next/link";
import { useParams } from "next/navigation"; import { useParams } from "next/navigation";
import { useTranslations } from "next-intl"; import { useTranslations } from "next-intl";
import { useCallback, useEffect, useMemo, useState } from "react"; import type React from "react";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { toast } from "sonner"; import { toast } from "sonner";
import { sidebarSelectedDocumentsAtom } from "@/atoms/chat/mentioned-documents.atom"; import { sidebarSelectedDocumentsAtom } from "@/atoms/chat/mentioned-documents.atom";
import { connectorDialogOpenAtom } from "@/atoms/connector-dialog/connector-dialog.atoms"; import { connectorDialogOpenAtom } from "@/atoms/connector-dialog/connector-dialog.atoms";
@ -45,8 +58,11 @@ import {
} from "@/components/ui/alert-dialog"; } from "@/components/ui/alert-dialog";
import { Avatar, AvatarFallback, AvatarGroup } from "@/components/ui/avatar"; import { Avatar, AvatarFallback, AvatarGroup } from "@/components/ui/avatar";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import { Drawer, DrawerContent, DrawerHandle, DrawerTitle } from "@/components/ui/drawer";
import { Spinner } from "@/components/ui/spinner"; import { Spinner } from "@/components/ui/spinner";
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
import { useAnonymousMode, useIsAnonymous } from "@/contexts/anonymous-mode";
import { useLoginGate } from "@/contexts/login-gate";
import { getConnectorIcon } from "@/contracts/enums/connectorIcons"; import { getConnectorIcon } from "@/contracts/enums/connectorIcons";
import type { DocumentTypeEnum } from "@/contracts/types/document.types"; import type { DocumentTypeEnum } from "@/contracts/types/document.types";
import { useDebouncedValue } from "@/hooks/use-debounced-value"; import { useDebouncedValue } from "@/hooks/use-debounced-value";
@ -56,6 +72,7 @@ import { documentsApiService } from "@/lib/apis/documents-api.service";
import { foldersApiService } from "@/lib/apis/folders-api.service"; import { foldersApiService } from "@/lib/apis/folders-api.service";
import { searchSpacesApiService } from "@/lib/apis/search-spaces-api.service"; import { searchSpacesApiService } from "@/lib/apis/search-spaces-api.service";
import { authenticatedFetch } from "@/lib/auth-utils"; import { authenticatedFetch } from "@/lib/auth-utils";
import { BACKEND_URL } from "@/lib/env-config";
import { uploadFolderScan } from "@/lib/folder-sync-upload"; import { uploadFolderScan } from "@/lib/folder-sync-upload";
import { getSupportedExtensionsSet } from "@/lib/supported-extensions"; import { getSupportedExtensionsSet } from "@/lib/supported-extensions";
import { queries } from "@/zero/queries/index"; import { queries } from "@/zero/queries/index";
@ -86,7 +103,15 @@ interface DocumentsSidebarProps {
headerAction?: React.ReactNode; headerAction?: React.ReactNode;
} }
export function DocumentsSidebar({ export function DocumentsSidebar(props: DocumentsSidebarProps) {
const isAnonymous = useIsAnonymous();
if (isAnonymous) {
return <AnonymousDocumentsSidebar {...props} />;
}
return <AuthenticatedDocumentsSidebar {...props} />;
}
function AuthenticatedDocumentsSidebar({
open, open,
onOpenChange, onOpenChange,
isDocked = false, isDocked = false,
@ -1166,3 +1191,430 @@ export function DocumentsSidebar({
</SidebarSlideOutPanel> </SidebarSlideOutPanel>
); );
} }
// ---------------------------------------------------------------------------
// Anonymous Documents Sidebar
// ---------------------------------------------------------------------------
// Plain-text/code file extensions an anonymous visitor may upload. Binary
// formats (PDF, Word, images, audio) are reserved for registered accounts —
// uploads outside this set trigger the login gate instead of the upload.
const ANON_ALLOWED_EXTENSIONS = new Set([
	// Markdown / plain text
	".md", ".markdown", ".txt", ".text",
	// Structured data & config
	".json", ".jsonl", ".yaml", ".yml", ".toml", ".ini", ".cfg", ".conf", ".xml",
	// Styles
	".css", ".scss",
	// Source code
	".py", ".js", ".jsx", ".ts", ".tsx", ".java", ".kt", ".go", ".rs", ".rb",
	".php", ".c", ".h", ".cpp", ".hpp", ".cs", ".swift", ".sh", ".sql",
	// Docs / misc text
	".log", ".rst", ".tex", ".vue", ".svelte", ".astro", ".tf", ".proto",
	// Tabular & markup
	".csv", ".tsv", ".html", ".htm", ".xhtml",
]);

// Comma-separated extension list for the hidden <input type="file" accept=...>.
const ANON_ACCEPT = Array.from(ANON_ALLOWED_EXTENSIONS).join(",");
/**
 * Documents sidebar variant for anonymous visitors.
 *
 * Mirrors the authenticated sidebar's chrome (header, connectors strip,
 * filters, folder tree, docked/drawer/slide-out layouts) but supports exactly
 * ONE uploaded text document, stored in the anonymous-mode context rather
 * than the server's document store. Every other capability (connectors,
 * folders, preview, export, a second upload, non-text formats) routes through
 * the login gate to prompt account creation.
 */
function AnonymousDocumentsSidebar({
	open,
	onOpenChange,
	isDocked = false,
	onDockedChange,
	embedded = false,
	headerAction,
}: DocumentsSidebarProps) {
	const t = useTranslations("documents");
	const tSidebar = useTranslations("sidebar");
	const isMobile = !useMediaQuery("(min-width: 640px)");
	const setRightPanelCollapsed = useSetAtom(rightPanelCollapsedAtom);
	const anonMode = useAnonymousMode();
	const { gate } = useLoginGate();
	const fileInputRef = useRef<HTMLInputElement>(null);
	const [isUploading, setIsUploading] = useState(false);
	const [search, setSearch] = useState("");
	// Documents currently "mentioned" (attached) to the chat composer.
	const [sidebarDocs, setSidebarDocs] = useAtom(sidebarSelectedDocumentsAtom);

	const mentionedDocIds = useMemo(() => new Set(sidebarDocs.map((d) => d.id)), [sidebarDocs]);

	// Toggle a document in/out of the chat-mention list (idempotent on add).
	const handleToggleChatMention = useCallback(
		(doc: { id: number; title: string; document_type: string }, isMentioned: boolean) => {
			if (isMentioned) {
				setSidebarDocs((prev) => prev.filter((d) => d.id !== doc.id));
			} else {
				setSidebarDocs((prev) => {
					if (prev.some((d) => d.id === doc.id)) return prev;
					return [
						...prev,
						{ id: doc.id, title: doc.title, document_type: doc.document_type as DocumentTypeEnum },
					];
				});
			}
		},
		[setSidebarDocs]
	);

	// Anonymous sessions hold at most one uploaded document, kept in context state.
	const uploadedDoc = anonMode.isAnonymous ? anonMode.uploadedDoc : null;
	const hasDoc = uploadedDoc !== null;

	// A second upload attempt is gated; otherwise open the hidden file picker.
	const handleAnonUploadClick = useCallback(() => {
		if (hasDoc) {
			gate("upload more documents");
			return;
		}
		fileInputRef.current?.click();
	}, [hasDoc, gate]);

	/**
	 * Validate the chosen file's extension, then POST it to the public
	 * anon-chat upload endpoint. 409 means the session already has a document
	 * (server-side limit) and is surfaced via the login gate; other failures
	 * surface as toasts.
	 */
	const handleFileChange = useCallback(
		async (e: React.ChangeEvent<HTMLInputElement>) => {
			const file = e.target.files?.[0];
			if (!file) return;
			// Clear the input so re-selecting the same file fires change again.
			e.target.value = "";

			// NOTE(review): a dot-less filename like "README" produces ".readme"
			// here, which simply fails the allow-list — acceptable but worth knowing.
			const ext = `.${file.name.split(".").pop()?.toLowerCase()}`;
			if (!ANON_ALLOWED_EXTENSIONS.has(ext)) {
				gate("upload PDFs, Word documents, images, and more");
				return;
			}

			setIsUploading(true);
			try {
				const formData = new FormData();
				formData.append("file", file);
				const res = await fetch(`${BACKEND_URL}/api/v1/public/anon-chat/upload`, {
					method: "POST",
					credentials: "include",
					body: formData,
				});
				if (res.status === 409) {
					gate("upload more documents");
					return;
				}
				if (!res.ok) {
					const body = await res.json().catch(() => ({}));
					throw new Error(body.detail || `Upload failed: ${res.status}`);
				}
				const data = await res.json();
				if (anonMode.isAnonymous) {
					anonMode.setUploadedDoc({
						filename: data.filename,
						sizeBytes: data.size_bytes,
					});
				}
				toast.success(`Uploaded "${data.filename}"`);
			} catch (err) {
				console.error("Upload failed:", err);
				toast.error(err instanceof Error ? err.message : "Upload failed");
			} finally {
				setIsUploading(false);
			}
		},
		[gate, anonMode]
	);

	// Drop the single anonymous document from context (client-side only here).
	const handleRemoveDoc = useCallback(() => {
		if (anonMode.isAnonymous) {
			anonMode.setUploadedDoc(null);
		}
	}, [anonMode]);

	// Present the (at most one) anonymous document in the shape the folder tree
	// expects; sentinel id -1 marks it as not a real server document.
	const treeDocuments: DocumentNodeDoc[] = useMemo(() => {
		if (!anonMode.isAnonymous || !anonMode.uploadedDoc) return [];
		return [
			{
				id: -1,
				title: anonMode.uploadedDoc.filename,
				document_type: "FILE",
				folderId: null,
				status: { state: "ready" } as { state: string; reason?: string | null },
			},
		];
	}, [anonMode]);

	// Case-insensitive title filter driven by the search box.
	const searchFilteredDocs = useMemo(() => {
		const q = search.trim().toLowerCase();
		if (!q) return treeDocuments;
		return treeDocuments.filter((d) => d.title.toLowerCase().includes(q));
	}, [treeDocuments, search]);

	// Escape closes the drawer on mobile; on desktop it collapses the panel.
	useEffect(() => {
		const handleEscape = (e: KeyboardEvent) => {
			if (e.key === "Escape" && open) {
				if (isMobile) {
					onOpenChange(false);
				} else {
					setRightPanelCollapsed(true);
				}
			}
		};
		document.addEventListener("keydown", handleEscape);
		return () => document.removeEventListener("keydown", handleEscape);
	}, [open, onOpenChange, isMobile, setRightPanelCollapsed]);

	// Shared panel body, re-wrapped below per layout mode (embedded / docked /
	// mobile drawer / slide-out).
	const documentsContent = (
		<>
			<input
				ref={fileInputRef}
				type="file"
				accept={ANON_ACCEPT}
				className="hidden"
				onChange={handleFileChange}
				disabled={isUploading}
			/>
			{/* Header */}
			<div className="shrink-0 flex h-14 items-center px-4">
				<div className="flex w-full items-center justify-between">
					<div className="flex items-center gap-2">
						<h2 className="select-none text-lg font-semibold">{t("title") || "Documents"}</h2>
					</div>
					<div className="flex items-center gap-1">
						{isMobile && (
							<Button
								variant="ghost"
								size="icon"
								className="h-8 w-8 rounded-full"
								onClick={() => onOpenChange(false)}
							>
								<X className="h-4 w-4 text-muted-foreground" />
								<span className="sr-only">{tSidebar("close") || "Close"}</span>
							</Button>
						)}
						{!isMobile && onDockedChange && (
							<Tooltip>
								<TooltipTrigger asChild>
									<Button
										variant="ghost"
										size="icon"
										className="h-8 w-8 rounded-full"
										onClick={() => {
											if (isDocked) {
												onDockedChange(false);
												onOpenChange(false);
											} else {
												onDockedChange(true);
											}
										}}
									>
										{isDocked ? (
											<ChevronLeft className="h-4 w-4 text-muted-foreground" />
										) : (
											<ChevronRight className="h-4 w-4 text-muted-foreground" />
										)}
										<span className="sr-only">{isDocked ? "Collapse panel" : "Expand panel"}</span>
									</Button>
								</TooltipTrigger>
								<TooltipContent className="z-80">
									{isDocked ? "Collapse panel" : "Expand panel"}
								</TooltipContent>
							</Tooltip>
						)}
						{headerAction}
					</div>
				</div>
			</div>
			{/* Connectors strip (gated) */}
			<div className="shrink-0 mx-4 mt-4 mb-4 flex select-none items-center gap-2 rounded-lg border bg-muted/50 transition-colors hover:bg-muted/80">
				<button
					type="button"
					onClick={() => gate("connect your data sources")}
					className="flex items-center gap-2 min-w-0 flex-1 text-left px-3 py-2"
				>
					<Unplug className="size-4 shrink-0 text-muted-foreground" />
					<span className="truncate text-xs text-muted-foreground">Connect your connectors</span>
					<AvatarGroup className="ml-auto shrink-0">
						{(isMobile ? SHOWCASE_CONNECTORS.slice(0, 5) : SHOWCASE_CONNECTORS).map(
							({ type, label }, i) => {
								const avatar = (
									<Avatar
										key={type}
										className="size-6"
										style={{ zIndex: SHOWCASE_CONNECTORS.length - i }}
									>
										<AvatarFallback className="bg-muted text-[10px]">
											{getConnectorIcon(type, "size-3.5")}
										</AvatarFallback>
									</Avatar>
								);
								// Tooltips are skipped on mobile (no hover affordance).
								if (isMobile) return avatar;
								return (
									<Tooltip key={type}>
										<TooltipTrigger asChild>{avatar}</TooltipTrigger>
										<TooltipContent side="top" className="text-xs">
											{label}
										</TooltipContent>
									</Tooltip>
								);
							}
						)}
					</AvatarGroup>
				</button>
			</div>
			{/* Filters & upload */}
			<div className="flex-1 min-h-0 pt-0 flex flex-col">
				<div className="px-4 pb-2">
					<DocumentsFilters
						typeCounts={hasDoc ? { FILE: 1 } : {}}
						onSearch={setSearch}
						searchValue={search}
						onToggleType={() => {}}
						activeTypes={[]}
						onCreateFolder={() => gate("create folders")}
						aiSortEnabled={false}
						onUploadClick={handleAnonUploadClick}
					/>
				</div>
				<div className="relative flex-1 min-h-0 overflow-auto">
					{/* Folder tree with all folder/document management actions gated;
					    only delete works, and it clears the local doc + mentions. */}
					<FolderTreeView
						folders={[]}
						documents={searchFilteredDocs}
						expandedIds={new Set()}
						onToggleExpand={() => {}}
						mentionedDocIds={mentionedDocIds}
						onToggleChatMention={handleToggleChatMention}
						onToggleFolderSelect={() => {}}
						onRenameFolder={() => gate("rename folders")}
						onDeleteFolder={() => gate("delete folders")}
						onMoveFolder={() => gate("organize folders")}
						onCreateFolder={() => gate("create folders")}
						searchQuery={search.trim() || undefined}
						onPreviewDocument={() => gate("preview documents")}
						onEditDocument={() => gate("edit documents")}
						onDeleteDocument={async () => {
							handleRemoveDoc();
							setSidebarDocs((prev) => prev.filter((d) => d.id !== -1));
							return true;
						}}
						onMoveDocument={() => gate("organize documents")}
						onExportDocument={() => gate("export documents")}
						onVersionHistory={() => gate("view version history")}
						activeTypes={[]}
						onDropIntoFolder={async () => gate("organize documents")}
						onReorderFolder={async () => gate("organize folders")}
						watchedFolderIds={new Set()}
						onRescanFolder={() => gate("watch local folders")}
						onStopWatchingFolder={() => gate("watch local folders")}
						onExportFolder={() => gate("export folders")}
					/>
					{!hasDoc && (
						<div className="px-4 py-8 text-center">
							<button
								type="button"
								onClick={handleAnonUploadClick}
								disabled={isUploading}
								className="flex w-full items-center justify-center gap-2 rounded-lg border-2 border-dashed border-primary/30 px-4 py-6 text-sm text-primary transition-colors hover:border-primary/60 hover:bg-primary/5 cursor-pointer disabled:opacity-50 disabled:pointer-events-none"
							>
								<Upload className="size-4" />
								{isUploading ? "Uploading..." : "Upload a document"}
							</button>
							<p className="mt-2 text-[11px] text-muted-foreground leading-relaxed">
								Text, code, CSV, and HTML files only. Create an account for PDFs, images, and 30+
								connectors.
							</p>
						</div>
					)}
				</div>
			</div>
			{/* CTA footer */}
			<div className="border-t p-4 space-y-3">
				<div className="flex items-center gap-2 text-xs text-muted-foreground">
					<Lock className="size-3.5 shrink-0" />
					<span>Create an account to unlock:</span>
				</div>
				<ul className="space-y-1.5 text-xs text-muted-foreground pl-5">
					<li className="flex items-center gap-1.5">
						<Paperclip className="size-3 shrink-0" /> PDF, Word, images, audio uploads
					</li>
					<li className="flex items-center gap-1.5">
						<FileText className="size-3 shrink-0" /> Unlimited documents
					</li>
				</ul>
				<Button size="sm" className="w-full" asChild>
					<Link href="/register">Create Free Account</Link>
				</Button>
			</div>
		</>
	);

	// Layout wrappers, matching the authenticated sidebar's modes.
	if (embedded) {
		return (
			<div className="flex h-full flex-col bg-sidebar text-sidebar-foreground">
				{documentsContent}
			</div>
		);
	}

	if (isDocked && open && !isMobile) {
		return (
			<aside
				className="h-full w-[380px] shrink-0 bg-sidebar text-sidebar-foreground flex flex-col border-r"
				aria-label={t("title") || "Documents"}
			>
				{documentsContent}
			</aside>
		);
	}

	if (isMobile) {
		return (
			<Drawer open={open} onOpenChange={onOpenChange}>
				<DrawerContent className="max-h-[75vh] flex flex-col">
					<DrawerTitle className="sr-only">{t("title") || "Documents"}</DrawerTitle>
					<DrawerHandle />
					<div className="flex-1 min-h-0 flex flex-col overflow-hidden">{documentsContent}</div>
				</DrawerContent>
			</Drawer>
		);
	}

	return (
		<SidebarSlideOutPanel
			open={open}
			onOpenChange={onOpenChange}
			ariaLabel={t("title") || "Documents"}
			width={380}
		>
			{documentsContent}
		</SidebarSlideOutPanel>
	);
}

View file

@ -1,12 +1,6 @@
"use client"; "use client";
import { useQuery } from "@tanstack/react-query";
import { CreditCard, Zap } from "lucide-react";
import Link from "next/link";
import { useParams } from "next/navigation";
import { Badge } from "@/components/ui/badge";
import { Progress } from "@/components/ui/progress"; import { Progress } from "@/components/ui/progress";
import { stripeApiService } from "@/lib/apis/stripe-api.service";
interface PageUsageDisplayProps { interface PageUsageDisplayProps {
pagesUsed: number; pagesUsed: number;
@ -14,50 +8,17 @@ interface PageUsageDisplayProps {
} }
export function PageUsageDisplay({ pagesUsed, pagesLimit }: PageUsageDisplayProps) { export function PageUsageDisplay({ pagesUsed, pagesLimit }: PageUsageDisplayProps) {
const params = useParams();
const searchSpaceId = params?.search_space_id ?? "";
const usagePercentage = (pagesUsed / pagesLimit) * 100; const usagePercentage = (pagesUsed / pagesLimit) * 100;
const { data: stripeStatus } = useQuery({
queryKey: ["stripe-status"],
queryFn: () => stripeApiService.getStatus(),
});
const pageBuyingEnabled = stripeStatus?.page_buying_enabled ?? true;
return ( return (
<div className="px-3 py-3 border-t"> <div className="space-y-1.5">
<div className="space-y-1.5"> <div className="flex justify-between items-center text-xs">
<div className="flex justify-between items-center text-xs"> <span className="text-muted-foreground">
<span className="text-muted-foreground"> {pagesUsed.toLocaleString()} / {pagesLimit.toLocaleString()} pages
{pagesUsed.toLocaleString()} / {pagesLimit.toLocaleString()} pages </span>
</span> <span className="font-medium">{usagePercentage.toFixed(0)}%</span>
<span className="font-medium">{usagePercentage.toFixed(0)}%</span>
</div>
<Progress value={usagePercentage} className="h-1.5" />
<Link
href={`/dashboard/${searchSpaceId}/more-pages`}
className="group flex w-[calc(100%+0.75rem)] items-center justify-between rounded-md px-1.5 py-1 -mx-1.5 transition-colors hover:bg-accent"
>
<span className="flex items-center gap-1.5 text-xs text-muted-foreground group-hover:text-accent-foreground">
<Zap className="h-3 w-3 shrink-0" />
Get Free Pages
</span>
<Badge className="h-4 rounded px-1 text-[10px] font-semibold leading-none bg-emerald-600 text-white border-transparent hover:bg-emerald-600">
FREE
</Badge>
</Link>
{pageBuyingEnabled && (
<Link
href={`/dashboard/${searchSpaceId}/buy-pages`}
className="group flex w-[calc(100%+0.75rem)] items-center justify-between rounded-md px-1.5 py-1 -mx-1.5 transition-colors hover:bg-accent"
>
<span className="flex items-center gap-1.5 text-xs text-muted-foreground group-hover:text-accent-foreground">
<CreditCard className="h-3 w-3 shrink-0" />
Buy Pages
</span>
<span className="text-[10px] font-medium text-muted-foreground">$1/1k</span>
</Link>
)}
</div> </div>
<Progress value={usagePercentage} className="h-1.5" />
</div> </div>
); );
} }

View file

@ -0,0 +1,42 @@
"use client";
import { useQuery } from "@tanstack/react-query";
import { Progress } from "@/components/ui/progress";
import { useIsAnonymous } from "@/contexts/anonymous-mode";
import { stripeApiService } from "@/lib/apis/stripe-api.service";
/**
 * Sidebar meter for premium-model token usage.
 *
 * Fetches token status via react-query (skipped entirely for anonymous
 * visitors) and renders nothing until data arrives.
 */
export function PremiumTokenUsageDisplay() {
	const isAnonymous = useIsAnonymous();
	// Data is considered fresh for 60s; the query never runs while anonymous.
	const { data: tokenStatus } = useQuery({
		queryKey: ["token-status"],
		queryFn: () => stripeApiService.getTokenStatus(),
		staleTime: 60_000,
		enabled: !isAnonymous,
	});

	if (!tokenStatus) return null;

	// Math.max(..., 1) guards a zero limit against division by zero;
	// Math.min caps the bar at 100% when usage exceeds the limit.
	const usagePercentage = Math.min(
		(tokenStatus.premium_tokens_used / Math.max(tokenStatus.premium_tokens_limit, 1)) * 100,
		100
	);

	// Compact human-readable token count ("1.5M", "250K", else locale number).
	const formatTokens = (n: number) => {
		if (n >= 1_000_000) return `${(n / 1_000_000).toFixed(1)}M`;
		if (n >= 1_000) return `${(n / 1_000).toFixed(0)}K`;
		return n.toLocaleString();
	};

	return (
		<div className="space-y-1.5">
			<div className="flex justify-between items-center text-xs">
				<span className="text-muted-foreground">
					{formatTokens(tokenStatus.premium_tokens_used)} /{" "}
					{formatTokens(tokenStatus.premium_tokens_limit)} tokens
				</span>
				<span className="font-medium">{usagePercentage.toFixed(0)}%</span>
			</div>
			<Progress value={usagePercentage} className="h-1.5 [&>div]:bg-purple-500" />
		</div>
	);
}

View file

@ -1,15 +1,21 @@
"use client"; "use client";
import { PenSquare } from "lucide-react"; import { CreditCard, PenSquare, Zap } from "lucide-react";
import Link from "next/link";
import { useParams } from "next/navigation";
import { useTranslations } from "next-intl"; import { useTranslations } from "next-intl";
import { useState } from "react"; import { useState } from "react";
import { Badge } from "@/components/ui/badge";
import { Progress } from "@/components/ui/progress";
import { Skeleton } from "@/components/ui/skeleton"; import { Skeleton } from "@/components/ui/skeleton";
import { useIsAnonymous } from "@/contexts/anonymous-mode";
import { cn } from "@/lib/utils"; import { cn } from "@/lib/utils";
import { SIDEBAR_MIN_WIDTH } from "../../hooks/useSidebarResize"; import { SIDEBAR_MIN_WIDTH } from "../../hooks/useSidebarResize";
import type { ChatItem, NavItem, PageUsage, SearchSpace, User } from "../../types/layout.types"; import type { ChatItem, NavItem, PageUsage, SearchSpace, User } from "../../types/layout.types";
import { ChatListItem } from "./ChatListItem"; import { ChatListItem } from "./ChatListItem";
import { NavSection } from "./NavSection"; import { NavSection } from "./NavSection";
import { PageUsageDisplay } from "./PageUsageDisplay"; import { PageUsageDisplay } from "./PageUsageDisplay";
import { PremiumTokenUsageDisplay } from "./PremiumTokenUsageDisplay";
import { SidebarButton } from "./SidebarButton"; import { SidebarButton } from "./SidebarButton";
import { SidebarCollapseButton } from "./SidebarCollapseButton"; import { SidebarCollapseButton } from "./SidebarCollapseButton";
import { SidebarHeader } from "./SidebarHeader"; import { SidebarHeader } from "./SidebarHeader";
@ -267,9 +273,7 @@ export function Sidebar({
<NavSection items={navItems} onItemClick={onNavItemClick} isCollapsed={isCollapsed} /> <NavSection items={navItems} onItemClick={onNavItemClick} isCollapsed={isCollapsed} />
)} )}
{pageUsage && !isCollapsed && ( <SidebarUsageFooter pageUsage={pageUsage} isCollapsed={isCollapsed} />
<PageUsageDisplay pagesUsed={pageUsage.pagesUsed} pagesLimit={pageUsage.pagesLimit} />
)}
<SidebarUserProfile <SidebarUserProfile
user={user} user={user}
@ -283,3 +287,86 @@ export function Sidebar({
</div> </div>
); );
} }
/**
 * Bottom-of-sidebar usage summary.
 *
 * Anonymous visitors see a single quota meter (driven by `pageUsage`) plus a
 * sign-up CTA; authenticated users see premium-token usage, page usage, and
 * links to the free-pages and purchase flows. Hidden entirely when collapsed.
 */
function SidebarUsageFooter({
	pageUsage,
	isCollapsed,
}: {
	pageUsage?: PageUsage;
	isCollapsed: boolean;
}) {
	const params = useParams();
	// NOTE(review): route params can be string | string[]; assumes a single segment — confirm.
	const searchSpaceId = params?.search_space_id ?? "";
	const isAnonymous = useIsAnonymous();

	if (isCollapsed) return null;

	// Computed once instead of repeating the expression for label and bar.
	// Clamped to [0, 100]; Math.max guards against a zero limit.
	const anonUsagePct = pageUsage
		? Math.min((pageUsage.pagesUsed / Math.max(pageUsage.pagesLimit, 1)) * 100, 100)
		: 0;

	if (isAnonymous) {
		return (
			<div className="px-3 py-3 border-t space-y-3">
				{pageUsage && (
					<div className="space-y-1.5">
						<div className="flex justify-between items-center text-xs">
							<span className="text-muted-foreground">
								{pageUsage.pagesUsed.toLocaleString()} / {pageUsage.pagesLimit.toLocaleString()}{" "}
								tokens
							</span>
							<span className="font-medium">
								{anonUsagePct.toFixed(0)}
								%
							</span>
						</div>
						<Progress value={anonUsagePct} className="h-1.5" />
					</div>
				)}
				<Link
					href="/register"
					className="flex items-center justify-center gap-1.5 rounded-md bg-primary px-3 py-1.5 text-xs font-medium text-primary-foreground transition-opacity hover:opacity-90"
				>
					Create Free Account
				</Link>
			</div>
		);
	}

	return (
		<div className="px-3 py-3 border-t space-y-3">
			<PremiumTokenUsageDisplay />
			{pageUsage && (
				<PageUsageDisplay pagesUsed={pageUsage.pagesUsed} pagesLimit={pageUsage.pagesLimit} />
			)}
			<div className="space-y-0.5">
				<Link
					href={`/dashboard/${searchSpaceId}/more-pages`}
					className="group flex w-full items-center justify-between rounded-md px-1.5 py-1 transition-colors hover:bg-accent"
				>
					<span className="flex items-center gap-1.5 text-xs text-muted-foreground group-hover:text-accent-foreground">
						<Zap className="h-3 w-3 shrink-0" />
						Get Free Pages
					</span>
					<Badge className="h-4 rounded px-1 text-[10px] font-semibold leading-none bg-emerald-600 text-white border-transparent hover:bg-emerald-600">
						FREE
					</Badge>
				</Link>
				<Link
					href={`/dashboard/${searchSpaceId}/buy-more`}
					className="group flex w-full items-center justify-between rounded-md px-1.5 py-1 transition-colors hover:bg-accent"
				>
					<span className="flex items-center gap-1.5 text-xs text-muted-foreground group-hover:text-accent-foreground">
						<CreditCard className="h-3 w-3 shrink-0" />
						Buy More
					</span>
					<span className="text-[10px] font-medium text-muted-foreground">
						$1/1k &middot; $1/1M
					</span>
				</Link>
			</div>
		</div>
	);
}

View file

@ -859,6 +859,14 @@ export function ModelSelector({
Recommended Recommended
</Badge> </Badge>
)} )}
{"is_premium" in config && (config as Record<string, unknown>).is_premium && (
<Badge
variant="secondary"
className="text-[9px] px-1 py-0 h-3.5 bg-purple-100 text-purple-700 dark:bg-purple-900/50 dark:text-purple-300 border-0"
>
Premium
</Badge>
)}
</div> </div>
<div className="flex items-center gap-1.5 mt-0.5"> <div className="flex items-center gap-1.5 mt-0.5">
<span className="text-xs text-muted-foreground truncate"> <span className="text-xs text-muted-foreground truncate">

View file

@ -53,7 +53,7 @@ export function ZeroProvider({ children }: { children: React.ReactNode }) {
const context = useMemo( const context = useMemo(
() => (hasUser ? { userId: String(userId) } : undefined), () => (hasUser ? { userId: String(userId) } : undefined),
[hasUser, userId], [hasUser, userId]
); );
const opts = useMemo( const opts = useMemo(
@ -65,7 +65,7 @@ export function ZeroProvider({ children }: { children: React.ReactNode }) {
cacheURL, cacheURL,
auth, auth,
}), }),
[userID, context, auth], [userID, context, auth]
); );
return ( return (

View file

@ -0,0 +1,156 @@
"use client";
import { useMutation, useQuery } from "@tanstack/react-query";
import { Minus, Plus } from "lucide-react";
import { useParams } from "next/navigation";
import { useState } from "react";
import { toast } from "sonner";
import { Button } from "@/components/ui/button";
import { Progress } from "@/components/ui/progress";
import { Spinner } from "@/components/ui/spinner";
import { stripeApiService } from "@/lib/apis/stripe-api.service";
import { AppError } from "@/lib/error";
import { cn } from "@/lib/utils";
// One purchasable pack is 1M premium tokens at $1 per pack.
const TOKEN_PACK_SIZE = 1_000_000;
const PRICE_PER_PACK_USD = 1;
// Quick-select quantities shown below the stepper (in packs).
const PRESET_MULTIPLIERS = [1, 2, 5, 10, 25, 50] as const;

/**
 * Premium-token purchase page: shows current usage, a pack-quantity stepper
 * (1-100 packs), preset quantity buttons, and a Stripe checkout CTA that
 * redirects to the checkout URL returned by the backend.
 */
export function BuyTokensContent() {
	const params = useParams();
	// NOTE(review): assumes a single numeric route segment; Number() yields NaN
	// for a missing/array param — presumably the backend rejects that, confirm.
	const searchSpaceId = Number(params?.search_space_id);
	// Quantity in packs (1 pack = TOKEN_PACK_SIZE tokens).
	const [quantity, setQuantity] = useState(1);

	// Current premium-token usage and whether token buying is enabled server-side.
	const { data: tokenStatus } = useQuery({
		queryKey: ["token-status"],
		queryFn: () => stripeApiService.getTokenStatus(),
	});

	// Creates a Stripe checkout session and hard-navigates to its URL on success.
	const purchaseMutation = useMutation({
		mutationFn: stripeApiService.createTokenCheckoutSession,
		onSuccess: (response) => {
			window.location.assign(response.checkout_url);
		},
		onError: (error) => {
			// Surface the backend's message when available, otherwise a generic toast.
			if (error instanceof AppError && error.message) {
				toast.error(error.message);
				return;
			}
			toast.error("Failed to start checkout. Please try again.");
		},
	});

	const totalTokens = quantity * TOKEN_PACK_SIZE;
	const totalPrice = quantity * PRICE_PER_PACK_USD;

	// Feature-flag gate: only shown once status has loaded and buying is disabled.
	// (All hooks above run unconditionally, so this early return is hook-safe.)
	if (tokenStatus && !tokenStatus.token_buying_enabled) {
		return (
			<div className="w-full space-y-3 text-center">
				<h2 className="text-xl font-bold tracking-tight">Buy Premium Tokens</h2>
				<p className="text-sm text-muted-foreground">
					Token purchases are temporarily unavailable.
				</p>
			</div>
		);
	}

	// Clamped to [0, 100]; Math.max guards against a zero limit.
	const usagePercentage = tokenStatus
		? Math.min(
				(tokenStatus.premium_tokens_used / Math.max(tokenStatus.premium_tokens_limit, 1)) * 100,
				100
			)
		: 0;

	return (
		<div className="w-full space-y-5">
			<div className="text-center">
				<h2 className="text-xl font-bold tracking-tight">Buy Premium Tokens</h2>
				<p className="mt-1 text-sm text-muted-foreground">$1 per 1M tokens, pay as you go</p>
			</div>

			{/* Current usage summary (hidden while the status query is loading). */}
			{tokenStatus && (
				<div className="rounded-lg border bg-muted/20 p-3 space-y-1.5">
					<div className="flex justify-between items-center text-xs">
						<span className="text-muted-foreground">
							{tokenStatus.premium_tokens_used.toLocaleString()} /{" "}
							{tokenStatus.premium_tokens_limit.toLocaleString()} premium tokens
						</span>
						<span className="font-medium">{usagePercentage.toFixed(0)}%</span>
					</div>
					<Progress value={usagePercentage} className="h-1.5" />
					<p className="text-[11px] text-muted-foreground">
						{tokenStatus.premium_tokens_remaining.toLocaleString()} tokens remaining
					</p>
				</div>
			)}

			<div className="space-y-3">
				{/* Stepper: quantity is clamped to [1, 100] packs. */}
				<div className="flex items-center justify-center gap-3">
					<button
						type="button"
						onClick={() => setQuantity((q) => Math.max(1, q - 1))}
						disabled={quantity <= 1 || purchaseMutation.isPending}
						className="flex h-8 w-8 items-center justify-center rounded-md border transition-colors hover:bg-muted disabled:opacity-40"
					>
						<Minus className="h-3.5 w-3.5" />
					</button>
					<span className="min-w-32 text-center text-lg font-semibold tabular-nums">
						{(totalTokens / 1_000_000).toFixed(0)}M tokens
					</span>
					<button
						type="button"
						onClick={() => setQuantity((q) => Math.min(100, q + 1))}
						disabled={quantity >= 100 || purchaseMutation.isPending}
						className="flex h-8 w-8 items-center justify-center rounded-md border transition-colors hover:bg-muted disabled:opacity-40"
					>
						<Plus className="h-3.5 w-3.5" />
					</button>
				</div>

				{/* One-click preset quantities; the active preset is highlighted. */}
				<div className="flex flex-wrap justify-center gap-1.5">
					{PRESET_MULTIPLIERS.map((m) => (
						<button
							key={m}
							type="button"
							onClick={() => setQuantity(m)}
							disabled={purchaseMutation.isPending}
							className={cn(
								"rounded-md border px-2.5 py-1 text-xs font-medium tabular-nums transition-colors disabled:opacity-60",
								quantity === m
									? "border-purple-500 bg-purple-500/10 text-purple-600 dark:text-purple-400"
									: "border-border hover:border-purple-500/40 hover:bg-muted/40"
							)}
						>
							{m}M
						</button>
					))}
				</div>

				{/* Order summary line. */}
				<div className="flex items-center justify-between rounded-lg border bg-muted/30 px-3 py-2">
					<span className="text-sm font-medium tabular-nums">
						{(totalTokens / 1_000_000).toFixed(0)}M premium tokens
					</span>
					<span className="text-sm font-semibold tabular-nums">${totalPrice}</span>
				</div>

				{/* Checkout CTA: kicks off the mutation, which redirects on success. */}
				<Button
					className="w-full bg-purple-600 text-white hover:bg-purple-700"
					disabled={purchaseMutation.isPending}
					onClick={() => purchaseMutation.mutate({ quantity, search_space_id: searchSpaceId })}
				>
					{purchaseMutation.isPending ? (
						<>
							<Spinner size="xs" />
							Redirecting
						</>
					) : (
						<>
							Buy {(totalTokens / 1_000_000).toFixed(0)}M Tokens for ${totalPrice}
						</>
					)}
				</Button>
				<p className="text-center text-[11px] text-muted-foreground">Secure checkout via Stripe</p>
			</div>
		</div>
	);
}

View file

@ -0,0 +1,74 @@
"use client";
import { createContext, type ReactNode, useContext, useEffect, useMemo, useState } from "react";
import { anonymousChatApiService } from "@/lib/apis/anonymous-chat-api.service";
/** Context shape exposed while the visitor is unauthenticated. */
export interface AnonymousModeContextValue {
	isAnonymous: true;
	// Slug of the model the anonymous chat should use.
	modelSlug: string;
	setModelSlug: (slug: string) => void;
	// The single document an anonymous session may have uploaded, or null.
	uploadedDoc: { filename: string; sizeBytes: number } | null;
	setUploadedDoc: (doc: { filename: string; sizeBytes: number } | null) => void;
	// Incremented by resetChat(); consumers key off it to remount the chat UI.
	resetKey: number;
	resetChat: () => void;
}

/** Context shape for signed-in users — no anonymous state is exposed. */
interface AuthenticatedContextValue {
	isAnonymous: false;
}

type ContextValue = AnonymousModeContextValue | AuthenticatedContextValue;

// Default used when no provider is mounted: treat the user as authenticated.
const DEFAULT_VALUE: AuthenticatedContextValue = { isAnonymous: false };

const AnonymousModeContext = createContext<ContextValue>(DEFAULT_VALUE);
/**
 * Provides anonymous-session state: the selected model slug, the single
 * uploaded document (hydrated once from the backend), and a reset counter
 * consumers can bump to remount the chat UI.
 */
export function AnonymousModeProvider({
	initialModelSlug,
	children,
}: {
	initialModelSlug: string;
	children: ReactNode;
}) {
	const [modelSlug, setModelSlug] = useState(initialModelSlug);
	const [uploadedDoc, setUploadedDoc] = useState<{ filename: string; sizeBytes: number } | null>(
		null
	);
	const [resetKey, setResetKey] = useState(0);

	// Best-effort hydration of a previously uploaded document; failures
	// (no session, network error) are deliberately ignored.
	useEffect(() => {
		anonymousChatApiService
			.getDocument()
			.then((doc) => {
				if (doc) {
					setUploadedDoc({ filename: doc.filename, sizeBytes: doc.size_bytes });
				}
			})
			.catch(() => {});
	}, []);

	// resetChat is created inside the memo factory so the dependency list is
	// complete (state setters are identity-stable) and the callback is not
	// recreated on every render only to be captured by a stale memo.
	const value = useMemo<AnonymousModeContextValue>(
		() => ({
			isAnonymous: true,
			modelSlug,
			setModelSlug,
			uploadedDoc,
			setUploadedDoc,
			resetKey,
			resetChat: () => setResetKey((k) => k + 1),
		}),
		[modelSlug, uploadedDoc, resetKey]
	);

	return <AnonymousModeContext.Provider value={value}>{children}</AnonymousModeContext.Provider>;
}
/** Read the full context; discriminate on `isAnonymous` before using anon fields. */
export function useAnonymousMode(): ContextValue {
	const ctx = useContext(AnonymousModeContext);
	return ctx;
}
/** Convenience flag: true only when rendered under an anonymous session. */
export function useIsAnonymous(): boolean {
	const { isAnonymous } = useContext(AnonymousModeContext);
	return isAnonymous;
}

View file

@ -0,0 +1,84 @@
"use client";
import Link from "next/link";
import { createContext, type ReactNode, useCallback, useContext, useMemo, useState } from "react";
import { Button } from "@/components/ui/button";
import {
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import { useIsAnonymous } from "./anonymous-mode";
// Contract for gating features behind sign-up for anonymous visitors.
interface LoginGateContextValue {
	// Opens the sign-up dialog describing `feature`; no-op for signed-in users.
	gate: (feature: string) => void;
}

// Default no-op so useLoginGate is safe to call without a provider mounted.
const LoginGateContext = createContext<LoginGateContextValue>({
	gate: () => {},
});
/**
 * Hosts the login-gate dialog. Calling `gate(feature)` while the visitor is
 * anonymous opens a sign-up prompt tailored to that feature; for signed-in
 * users it does nothing.
 */
export function LoginGateProvider({ children }: { children: ReactNode }) {
	const isAnonymous = useIsAnonymous();
	// Feature name currently being gated; null means the dialog is closed.
	const [feature, setFeature] = useState<string | null>(null);

	const gate = useCallback(
		(feat: string) => {
			if (isAnonymous) {
				setFeature(feat);
			}
		},
		[isAnonymous]
	);

	const close = () => setFeature(null);

	// Memoize the context value so consumers don't re-render every time this
	// provider re-renders (previously a fresh object was created per render).
	const value = useMemo(() => ({ gate }), [gate]);

	return (
		<LoginGateContext.Provider value={value}>
			{children}
			<Dialog open={feature !== null} onOpenChange={(open) => !open && close()}>
				<DialogContent className="sm:max-w-md">
					<DialogHeader>
						<DialogTitle>Create a free account to {feature}</DialogTitle>
						<DialogDescription>
							Get 5 million tokens, save chat history, upload documents, use all AI tools, and
							connect 30+ integrations.
						</DialogDescription>
					</DialogHeader>
					<DialogFooter className="flex flex-col gap-2 sm:flex-row">
						<Button asChild>
							<Link href="/register">Create Free Account</Link>
						</Button>
						<Button variant="outline" asChild>
							<Link href="/login">Log In</Link>
						</Button>
					</DialogFooter>
				</DialogContent>
			</Dialog>
		</LoginGateContext.Provider>
	);
}
/** Access the login gate; falls back to a no-op gate without a provider. */
export function useLoginGate(): LoginGateContextValue {
	const ctx = useContext(LoginGateContext);
	return ctx;
}
/**
 * Wrap a click handler so anonymous users get the login gate instead.
 * Authenticated users fall through to the original `handler` (if provided).
 */
export function useGatedHandler(handler: (() => void) | undefined, feature: string): () => void {
	const { gate } = useLoginGate();
	const isAnonymous = useIsAnonymous();

	return useCallback(() => {
		if (!isAnonymous) {
			handler?.();
			return;
		}
		gate(feature);
	}, [isAnonymous, gate, feature, handler]);
}

View file

@ -0,0 +1,49 @@
import { z } from "zod";
// Validation contracts for the public (unauthenticated) anon-chat API.

// A model exposed to anonymous visitors, including billing tier and SEO fields.
export const anonModel = z.object({
	id: z.number(),
	name: z.string(),
	description: z.string().nullable().optional(),
	provider: z.string(),
	model_name: z.string(),
	billing_tier: z.string().default("free"),
	is_premium: z.boolean().default(false),
	seo_slug: z.string().nullable().optional(),
	seo_enabled: z.boolean().default(false),
	seo_title: z.string().nullable().optional(),
	seo_description: z.string().nullable().optional(),
	quota_reserve_tokens: z.number().nullable().optional(),
});

export const getAnonModelsResponse = z.array(anonModel);
export const getAnonModelResponse = anonModel;

// Anonymous token-quota status, including whether a CAPTCHA is now required.
export const anonQuotaResponse = z.object({
	used: z.number(),
	limit: z.number(),
	remaining: z.number(),
	status: z.string(),
	warning_threshold: z.number(),
	captcha_required: z.boolean().default(false),
});

// Streaming chat request body; at least one message is required, and the
// optional turnstile_token carries the CAPTCHA proof when one was demanded.
export const anonChatRequest = z.object({
	model_slug: z.string().max(100),
	messages: z
		.array(
			z.object({
				role: z.enum(["system", "user", "assistant"]),
				content: z.string(),
			})
		)
		.min(1),
	disabled_tools: z.array(z.string()).optional(),
	turnstile_token: z.string().optional(),
});

export type AnonModel = z.infer<typeof anonModel>;
export type GetAnonModelsResponse = z.infer<typeof getAnonModelsResponse>;
export type GetAnonModelResponse = z.infer<typeof getAnonModelResponse>;
export type AnonQuotaResponse = z.infer<typeof anonQuotaResponse>;
export type AnonChatRequest = z.infer<typeof anonChatRequest>;

View file

@ -162,6 +162,16 @@ export const globalNewLLMConfig = z.object({
is_global: z.literal(true), is_global: z.literal(true),
is_auto_mode: z.boolean().optional().default(false), // True only for Auto mode (ID 0) is_auto_mode: z.boolean().optional().default(false), // True only for Auto mode (ID 0)
// Token quota and billing policy
billing_tier: z.string().default("free"),
is_premium: z.boolean().default(false),
anonymous_enabled: z.boolean().default(false),
seo_enabled: z.boolean().default(false),
seo_slug: z.string().nullable().optional(),
seo_title: z.string().nullable().optional(),
seo_description: z.string().nullable().optional(),
quota_reserve_tokens: z.number().nullable().optional(),
}); });
export const getGlobalNewLLMConfigsResponse = z.array(globalNewLLMConfig); export const getGlobalNewLLMConfigsResponse = z.array(globalNewLLMConfig);

View file

@ -32,9 +32,48 @@ export const getPagePurchasesResponse = z.object({
purchases: z.array(pagePurchase), purchases: z.array(pagePurchase),
}); });
// Premium token purchases ($1 per 1M-token pack; see backend Stripe config).

// Checkout request: quantity is in packs, bounded 1-100 to match the UI stepper.
export const createTokenCheckoutSessionRequest = z.object({
	quantity: z.number().int().min(1).max(100),
	search_space_id: z.number().int().min(1),
});

// Checkout response: the Stripe-hosted URL the client redirects to.
export const createTokenCheckoutSessionResponse = z.object({
	checkout_url: z.string(),
});

// Feature flag plus current premium-token usage figures.
export const tokenStripeStatusResponse = z.object({
	token_buying_enabled: z.boolean(),
	premium_tokens_used: z.number().default(0),
	premium_tokens_limit: z.number().default(0),
	premium_tokens_remaining: z.number().default(0),
});

// A single historical token purchase as recorded from Stripe webhooks.
export const tokenPurchase = z.object({
	id: z.uuid(),
	stripe_checkout_session_id: z.string(),
	stripe_payment_intent_id: z.string().nullable(),
	quantity: z.number(),
	tokens_granted: z.number(),
	// Amount in Stripe's smallest currency unit — presumably cents; confirm.
	amount_total: z.number().nullable(),
	currency: z.string().nullable(),
	status: z.string(),
	completed_at: z.string().nullable(),
	created_at: z.string(),
});

export const getTokenPurchasesResponse = z.object({
	purchases: z.array(tokenPurchase),
});
export type PagePurchaseStatus = z.infer<typeof pagePurchaseStatusEnum>; export type PagePurchaseStatus = z.infer<typeof pagePurchaseStatusEnum>;
export type CreateCheckoutSessionRequest = z.infer<typeof createCheckoutSessionRequest>; export type CreateCheckoutSessionRequest = z.infer<typeof createCheckoutSessionRequest>;
export type CreateCheckoutSessionResponse = z.infer<typeof createCheckoutSessionResponse>; export type CreateCheckoutSessionResponse = z.infer<typeof createCheckoutSessionResponse>;
export type StripeStatusResponse = z.infer<typeof stripeStatusResponse>; export type StripeStatusResponse = z.infer<typeof stripeStatusResponse>;
export type PagePurchase = z.infer<typeof pagePurchase>; export type PagePurchase = z.infer<typeof pagePurchase>;
export type GetPagePurchasesResponse = z.infer<typeof getPagePurchasesResponse>; export type GetPagePurchasesResponse = z.infer<typeof getPagePurchasesResponse>;
export type CreateTokenCheckoutSessionRequest = z.infer<typeof createTokenCheckoutSessionRequest>;
export type CreateTokenCheckoutSessionResponse = z.infer<typeof createTokenCheckoutSessionResponse>;
export type TokenStripeStatusResponse = z.infer<typeof tokenStripeStatusResponse>;
export type TokenPurchase = z.infer<typeof tokenPurchase>;
export type GetTokenPurchasesResponse = z.infer<typeof getTokenPurchasesResponse>;

View file

@ -0,0 +1,97 @@
import {
type AnonChatRequest,
type AnonModel,
type AnonQuotaResponse,
anonChatRequest,
anonQuotaResponse,
getAnonModelResponse,
getAnonModelsResponse,
} from "@/contracts/types/anonymous-chat.types";
import { BACKEND_URL } from "../env-config";
import { ValidationError } from "../error";
const BASE = "/api/v1/public/anon-chat";
/**
 * Thin fetch client for the public anon-chat endpoints. Responses are
 * validated leniently: schema mismatches are logged, but the raw payload is
 * still returned so the UI keeps working against a drifting backend.
 */
class AnonymousChatApiService {
	constructor(private baseUrl: string) {}

	/** Join base URL, API prefix, and endpoint path. */
	private fullUrl(path: string): string {
		return `${this.baseUrl}${BASE}${path}`;
	}

	/** List the models available to anonymous visitors. */
	getModels = async (): Promise<AnonModel[]> => {
		const response = await fetch(this.fullUrl("/models"), { credentials: "include" });
		if (!response.ok) throw new Error(`Failed to fetch models: ${response.status}`);
		const payload = await response.json();
		const checked = getAnonModelsResponse.safeParse(payload);
		if (!checked.success) console.error("Invalid anon models response:", checked.error);
		return payload;
	};

	/** Fetch one model by its SEO slug; throws "Model not found" on 404. */
	getModel = async (slug: string): Promise<AnonModel> => {
		const response = await fetch(this.fullUrl(`/models/${encodeURIComponent(slug)}`), {
			credentials: "include",
		});
		if (!response.ok) {
			if (response.status === 404) throw new Error("Model not found");
			throw new Error(`Failed to fetch model: ${response.status}`);
		}
		const payload = await response.json();
		const checked = getAnonModelResponse.safeParse(payload);
		if (!checked.success) console.error("Invalid anon model response:", checked.error);
		return payload;
	};

	/** Fetch the anonymous session's token quota. */
	getQuota = async (): Promise<AnonQuotaResponse> => {
		const response = await fetch(this.fullUrl("/quota"), { credentials: "include" });
		if (!response.ok) throw new Error(`Failed to fetch quota: ${response.status}`);
		const payload = await response.json();
		const checked = anonQuotaResponse.safeParse(payload);
		if (!checked.success) console.error("Invalid anon quota response:", checked.error);
		return payload;
	};

	/**
	 * Start a streaming chat. The request is validated client-side first;
	 * the raw Response is returned so the caller can consume the stream.
	 */
	streamChat = async (request: AnonChatRequest): Promise<Response> => {
		const validated = anonChatRequest.safeParse(request);
		if (!validated.success) {
			const detail = validated.error.issues.map((i) => i.message).join(", ");
			throw new ValidationError(`Invalid request: ${detail}`);
		}
		return fetch(this.fullUrl("/stream"), {
			method: "POST",
			headers: { "Content-Type": "application/json" },
			credentials: "include",
			body: JSON.stringify(validated.data),
		});
	};

	/** Upload the session's single document; surfaces the backend's `detail` on error. */
	uploadDocument = async (file: File): Promise<{ filename: string; size_bytes: number }> => {
		const form = new FormData();
		form.append("file", file);
		const response = await fetch(this.fullUrl("/upload"), {
			method: "POST",
			credentials: "include",
			body: form,
		});
		if (response.ok) return response.json();
		const body = await response.json().catch(() => ({}));
		throw new Error(body.detail || `Upload failed: ${response.status}`);
	};

	/** Fetch the session's uploaded document, or null when none exists (404). */
	getDocument = async (): Promise<{ filename: string; size_bytes: number } | null> => {
		const response = await fetch(this.fullUrl("/document"), { credentials: "include" });
		if (response.status === 404) return null;
		if (!response.ok) throw new Error(`Failed to fetch document: ${response.status}`);
		return response.json();
	};
}
export const anonymousChatApiService = new AnonymousChatApiService(BACKEND_URL);

View file

@ -1,11 +1,18 @@
import { import {
type CreateCheckoutSessionRequest, type CreateCheckoutSessionRequest,
type CreateCheckoutSessionResponse, type CreateCheckoutSessionResponse,
type CreateTokenCheckoutSessionRequest,
type CreateTokenCheckoutSessionResponse,
createCheckoutSessionResponse, createCheckoutSessionResponse,
createTokenCheckoutSessionResponse,
type GetPagePurchasesResponse, type GetPagePurchasesResponse,
type GetTokenPurchasesResponse,
getPagePurchasesResponse, getPagePurchasesResponse,
getTokenPurchasesResponse,
type StripeStatusResponse, type StripeStatusResponse,
stripeStatusResponse, stripeStatusResponse,
type TokenStripeStatusResponse,
tokenStripeStatusResponse,
} from "@/contracts/types/stripe.types"; } from "@/contracts/types/stripe.types";
import { baseApiService } from "./base-api.service"; import { baseApiService } from "./base-api.service";
@ -29,6 +36,24 @@ class StripeApiService {
getStatus = async (): Promise<StripeStatusResponse> => { getStatus = async (): Promise<StripeStatusResponse> => {
return baseApiService.get("/api/v1/stripe/status", stripeStatusResponse); return baseApiService.get("/api/v1/stripe/status", stripeStatusResponse);
}; };
createTokenCheckoutSession = async (
request: CreateTokenCheckoutSessionRequest
): Promise<CreateTokenCheckoutSessionResponse> => {
return baseApiService.post(
"/api/v1/stripe/create-token-checkout-session",
createTokenCheckoutSessionResponse,
{ body: request }
);
};
getTokenStatus = async (): Promise<TokenStripeStatusResponse> => {
return baseApiService.get("/api/v1/stripe/token-status", tokenStripeStatusResponse);
};
getTokenPurchases = async (): Promise<GetTokenPurchasesResponse> => {
return baseApiService.get("/api/v1/stripe/token-purchases", getTokenPurchasesResponse);
};
} }
export const stripeApiService = new StripeApiService(); export const stripeApiService = new StripeApiService();

View file

@ -18,6 +18,7 @@ const PUBLIC_ROUTE_PREFIXES = [
"/desktop/login", "/desktop/login",
"/docs", "/docs",
"/public", "/public",
"/free",
"/invite", "/invite",
"/contact", "/contact",
"/pricing", "/pricing",

View file

@ -27,6 +27,7 @@
"@assistant-ui/react-markdown": "^0.12.6", "@assistant-ui/react-markdown": "^0.12.6",
"@babel/standalone": "^7.29.2", "@babel/standalone": "^7.29.2",
"@hookform/resolvers": "^5.2.2", "@hookform/resolvers": "^5.2.2",
"@marsidev/react-turnstile": "^1.5.0",
"@number-flow/react": "^0.5.10", "@number-flow/react": "^0.5.10",
"@platejs/autoformat": "^52.0.11", "@platejs/autoformat": "^52.0.11",
"@platejs/basic-nodes": "^52.0.11", "@platejs/basic-nodes": "^52.0.11",

View file

@ -26,6 +26,9 @@ importers:
'@hookform/resolvers': '@hookform/resolvers':
specifier: ^5.2.2 specifier: ^5.2.2
version: 5.2.2(react-hook-form@7.71.2(react@19.2.4)) version: 5.2.2(react-hook-form@7.71.2(react@19.2.4))
'@marsidev/react-turnstile':
specifier: ^1.5.0
version: 1.5.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
'@number-flow/react': '@number-flow/react':
specifier: ^0.5.10 specifier: ^0.5.10
version: 0.5.14(react-dom@19.2.4(react@19.2.4))(react@19.2.4) version: 0.5.14(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
@ -1954,6 +1957,12 @@ packages:
'@juggle/resize-observer@3.4.0': '@juggle/resize-observer@3.4.0':
resolution: {integrity: sha512-dfLbk+PwWvFzSxwk3n5ySL0hfBog779o8h68wK/7/APo/7cgyWp5jcXockbxdk5kFRkbeXWm4Fbi9FrdN381sA==} resolution: {integrity: sha512-dfLbk+PwWvFzSxwk3n5ySL0hfBog779o8h68wK/7/APo/7cgyWp5jcXockbxdk5kFRkbeXWm4Fbi9FrdN381sA==}
'@marsidev/react-turnstile@1.5.0':
resolution: {integrity: sha512-Ph6mcj8u9WBDsBO7s9jKPsyRDz1sBPBJwrk+Ngx09vFInvKsQ6U6kW5amEcGq4dHOreB6DgFrOJk7/fy318YlQ==}
peerDependencies:
react: ^17.0.2 || ^18.0.0 || ^19.0
react-dom: ^17.0.2 || ^18.0.0 || ^19.0
'@mdx-js/mdx@3.1.1': '@mdx-js/mdx@3.1.1':
resolution: {integrity: sha512-f6ZO2ifpwAQIpzGWaBQT2TXxPv6z3RBzQKpVftEWN78Vl/YweF1uwussDx8ECAXVtr3Rs89fKyG9YlzUs9DyGQ==} resolution: {integrity: sha512-f6ZO2ifpwAQIpzGWaBQT2TXxPv6z3RBzQKpVftEWN78Vl/YweF1uwussDx8ECAXVtr3Rs89fKyG9YlzUs9DyGQ==}
@ -9936,6 +9945,11 @@ snapshots:
'@juggle/resize-observer@3.4.0': {} '@juggle/resize-observer@3.4.0': {}
'@marsidev/react-turnstile@1.5.0(react-dom@19.2.4(react@19.2.4))(react@19.2.4)':
dependencies:
react: 19.2.4
react-dom: 19.2.4(react@19.2.4)
'@mdx-js/mdx@3.1.1': '@mdx-js/mdx@3.1.1':
dependencies: dependencies:
'@types/estree': 1.0.8 '@types/estree': 1.0.8