From c007f0e056a5f501d9f8f39fe26d06dd1aa80afd Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Tue, 24 Feb 2026 16:36:11 -0800
Subject: [PATCH 1/9] feat: init microsandbox integration
---
Dockerfile.allinone | 13 +-
docker-compose.yml | 42 +++
scripts/docker/entrypoint-allinone.sh | 12 +
scripts/docker/supervisor-allinone.conf | 17 +-
.../app/agents/new_chat/chat_deepagent.py | 15 +-
.../app/agents/new_chat/sandbox.py | 69 +++++
.../app/agents/new_chat/system_prompt.py | 74 ++++-
.../app/tasks/chat/stream_new_chat.py | 80 ++++++
surfsense_backend/pyproject.toml | 1 +
surfsense_backend/uv.lock | 98 ++++---
.../new-chat/[[...chat_id]]/page.tsx | 3 +
surfsense_web/components/tool-ui/index.ts | 7 +
.../components/tool-ui/sandbox-execute.tsx | 266 ++++++++++++++++++
13 files changed, 651 insertions(+), 46 deletions(-)
create mode 100644 surfsense_backend/app/agents/new_chat/sandbox.py
create mode 100644 surfsense_web/components/tool-ui/sandbox-execute.tsx
diff --git a/Dockerfile.allinone b/Dockerfile.allinone
index e96618adc..a51e31814 100644
--- a/Dockerfile.allinone
+++ b/Dockerfile.allinone
@@ -216,6 +216,10 @@ RUN pip install --no-cache-dir playwright \
&& playwright install chromium \
&& rm -rf /root/.cache/ms-playwright/ffmpeg*
+# Install Microsandbox (optional secure code execution for deep agent).
+# Requires --device /dev/kvm at runtime. Enable via MICROSANDBOX_ENABLED=TRUE.
+RUN curl -sSL https://get.microsandbox.dev | sh || true
+
# Copy backend source
COPY surfsense_backend/ ./
@@ -260,6 +264,11 @@ ENV NEXT_PUBLIC_FASTAPI_BACKEND_URL=http://localhost:8000
ENV NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE=LOCAL
ENV NEXT_PUBLIC_ETL_SERVICE=DOCLING
+# Microsandbox (optional - requires --device /dev/kvm and --privileged at runtime)
+ENV MICROSANDBOX_ENABLED=FALSE
+ENV MICROSANDBOX_SERVER_URL=http://localhost:5555
+# MICROSANDBOX_API_KEY is intentionally unset; set at runtime for production.
+
# Electric SQL configuration (ELECTRIC_DATABASE_URL is built dynamically by entrypoint from these values)
ENV ELECTRIC_DB_USER=electric
ENV ELECTRIC_DB_PASSWORD=electric_password
@@ -274,8 +283,8 @@ ENV NEXT_PUBLIC_ELECTRIC_AUTH_MODE=insecure
# Data volume
VOLUME ["/data"]
-# Expose ports (Frontend: 3000, Backend: 8000, Electric: 5133)
-EXPOSE 3000 8000 5133
+# Expose ports (Frontend: 3000, Backend: 8000, Electric: 5133, Microsandbox: 5555)
+EXPOSE 3000 8000 5133 5555
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=120s --retries=3 \
diff --git a/docker-compose.yml b/docker-compose.yml
index a94cea2e5..04231ff20 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -65,9 +65,14 @@ services:
- ELECTRIC_DB_PASSWORD=${ELECTRIC_DB_PASSWORD:-electric_password}
- AUTH_TYPE=${AUTH_TYPE:-LOCAL}
- NEXT_FRONTEND_URL=${NEXT_FRONTEND_URL:-http://localhost:3000}
+ # Microsandbox – uncomment when microsandbox service is enabled
+ # - MICROSANDBOX_ENABLED=TRUE
+ # - MICROSANDBOX_SERVER_URL=http://microsandbox:5555
+ # - MICROSANDBOX_API_KEY=${MICROSANDBOX_API_KEY:-}
depends_on:
- db
- redis
+ # - microsandbox
# Run these services separately in production
# celery_worker:
@@ -124,6 +129,42 @@ services:
# - redis
# - celery_worker
+ # ============================================================
+ # Microsandbox (optional - secure code execution for deep agent)
+ # ============================================================
+ # Requires a Linux host with KVM support (/dev/kvm).
+ # To enable:
+ # 1. Uncomment this service
+ # 2. Set MICROSANDBOX_ENABLED=TRUE in surfsense_backend/.env
+ # 3. Run with: docker compose up -d
+ # The first sandbox creation will pull the OCI image (e.g. microsandbox/python),
+ # so the initial run takes a bit longer.
+ #
+ # microsandbox:
+ # image: ubuntu:22.04
+ # ports:
+ # - "${MICROSANDBOX_PORT:-5555}:5555"
+ # volumes:
+ # - microsandbox_data:/root/.microsandbox
+ # privileged: true
+ # devices:
+ # - /dev/kvm:/dev/kvm
+ # entrypoint: ["/bin/bash", "-c"]
+ # command:
+ # - |
+ # set -e
+ # if ! command -v msb &>/dev/null; then
+ # apt-get update && apt-get install -y --no-install-recommends curl ca-certificates
+ # curl -sSL https://get.microsandbox.dev | sh
+ # fi
+ # exec msb server start --dev
+ # restart: unless-stopped
+ # healthcheck:
+ # test: ["CMD", "curl", "-f", "http://localhost:5555/health"]
+ # interval: 10s
+ # timeout: 5s
+ # retries: 5
+
electric:
image: electricsql/electric:latest
ports:
@@ -165,3 +206,4 @@ volumes:
pgadmin_data:
redis_data:
shared_temp:
+ # microsandbox_data:
diff --git a/scripts/docker/entrypoint-allinone.sh b/scripts/docker/entrypoint-allinone.sh
index 4f88b3382..9ca653979 100644
--- a/scripts/docker/entrypoint-allinone.sh
+++ b/scripts/docker/entrypoint-allinone.sh
@@ -42,6 +42,17 @@ if [ -z "$STT_SERVICE" ]; then
echo "✅ Using default STT_SERVICE: local/base"
fi
+# ================================================
+# Microsandbox (optional secure sandbox server)
+# ================================================
+if [ "${MICROSANDBOX_ENABLED:-FALSE}" = "TRUE" ]; then
+ export MICROSANDBOX_AUTOSTART=true
+ echo "✅ Microsandbox enabled (requires --device /dev/kvm)"
+else
+ export MICROSANDBOX_AUTOSTART=false
+ echo "ℹ️ Microsandbox disabled (set MICROSANDBOX_ENABLED=TRUE to enable)"
+fi
+
# ================================================
# Set Electric SQL configuration
# ================================================
@@ -232,6 +243,7 @@ echo " Auth Type: ${NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE}"
echo " ETL Service: ${NEXT_PUBLIC_ETL_SERVICE}"
echo " TTS Service: ${TTS_SERVICE}"
echo " STT Service: ${STT_SERVICE}"
+echo " Microsandbox: ${MICROSANDBOX_ENABLED:-FALSE}"
echo "==========================================="
echo ""
diff --git a/scripts/docker/supervisor-allinone.conf b/scripts/docker/supervisor-allinone.conf
index 1a21fcc04..b935737d9 100644
--- a/scripts/docker/supervisor-allinone.conf
+++ b/scripts/docker/supervisor-allinone.conf
@@ -114,8 +114,23 @@ stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
environment=NODE_ENV="production",PORT="3000",HOSTNAME="0.0.0.0"
+# Microsandbox (secure code execution sandbox server)
+# Autostart is controlled by the entrypoint based on MICROSANDBOX_ENABLED env var.
+# Requires --device /dev/kvm and --privileged when running the container.
+[program:microsandbox]
+command=msb server start --dev
+autostart=%(ENV_MICROSANDBOX_AUTOSTART)s
+autorestart=true
+priority=25
+startsecs=5
+startretries=3
+stdout_logfile=/dev/stdout
+stdout_logfile_maxbytes=0
+stderr_logfile=/dev/stderr
+stderr_logfile_maxbytes=0
+
# Process Groups
[group:surfsense]
-programs=postgresql,redis,electric,backend,celery-worker,celery-beat,frontend
+programs=postgresql,redis,electric,backend,celery-worker,celery-beat,frontend,microsandbox
priority=999
diff --git a/surfsense_backend/app/agents/new_chat/chat_deepagent.py b/surfsense_backend/app/agents/new_chat/chat_deepagent.py
index f4af16b78..dbb1d4b4a 100644
--- a/surfsense_backend/app/agents/new_chat/chat_deepagent.py
+++ b/surfsense_backend/app/agents/new_chat/chat_deepagent.py
@@ -10,6 +10,7 @@ from collections.abc import Sequence
from typing import Any
from deepagents import create_deep_agent
+from deepagents.backends.protocol import SandboxBackendProtocol
from langchain_core.language_models import BaseChatModel
from langchain_core.tools import BaseTool
from langgraph.types import Checkpointer
@@ -128,6 +129,7 @@ async def create_surfsense_deep_agent(
additional_tools: Sequence[BaseTool] | None = None,
firecrawl_api_key: str | None = None,
thread_visibility: ChatVisibility | None = None,
+ sandbox_backend: SandboxBackendProtocol | None = None,
):
"""
Create a SurfSense deep agent with configurable tools and prompts.
@@ -167,6 +169,9 @@ async def create_surfsense_deep_agent(
These are always added regardless of enabled/disabled settings.
firecrawl_api_key: Optional Firecrawl API key for premium web scraping.
Falls back to Chromium/Trafilatura if not provided.
+ sandbox_backend: Optional sandbox backend (e.g. MicrosandboxBackend) for
+ secure code execution. When provided, the agent gets an
+ isolated ``execute`` tool for running shell commands.
Returns:
CompiledStateGraph: The configured deep agent
@@ -277,19 +282,26 @@ async def create_surfsense_deep_agent(
)
# Build system prompt based on agent_config
+ _sandbox_enabled = sandbox_backend is not None
if agent_config is not None:
- # Use configurable prompt with settings from NewLLMConfig
system_prompt = build_configurable_system_prompt(
custom_system_instructions=agent_config.system_instructions,
use_default_system_instructions=agent_config.use_default_system_instructions,
citations_enabled=agent_config.citations_enabled,
thread_visibility=thread_visibility,
+ sandbox_enabled=_sandbox_enabled,
)
else:
system_prompt = build_surfsense_system_prompt(
thread_visibility=thread_visibility,
+ sandbox_enabled=_sandbox_enabled,
)
+ # Build optional kwargs for the deep agent
+ deep_agent_kwargs: dict[str, Any] = {}
+ if sandbox_backend is not None:
+ deep_agent_kwargs["backend"] = sandbox_backend
+
# Create the deep agent with system prompt and checkpointer
# Note: TodoListMiddleware (write_todos) is included by default in create_deep_agent
agent = create_deep_agent(
@@ -298,6 +310,7 @@ async def create_surfsense_deep_agent(
system_prompt=system_prompt,
context_schema=SurfSenseContextSchema,
checkpointer=checkpointer,
+ **deep_agent_kwargs,
)
return agent
diff --git a/surfsense_backend/app/agents/new_chat/sandbox.py b/surfsense_backend/app/agents/new_chat/sandbox.py
new file mode 100644
index 000000000..53e71329a
--- /dev/null
+++ b/surfsense_backend/app/agents/new_chat/sandbox.py
@@ -0,0 +1,69 @@
+"""
+Microsandbox provider for SurfSense deep agent.
+
+Manages the lifecycle of sandboxed code execution environments.
+Each conversation thread gets its own isolated sandbox instance.
+"""
+
+import logging
+import os
+
+from deepagents_microsandbox import MicrosandboxBackend, MicrosandboxProvider
+
+logger = logging.getLogger(__name__)
+
+_provider: MicrosandboxProvider | None = None
+
+
+def is_sandbox_enabled() -> bool:
+ return os.environ.get("MICROSANDBOX_ENABLED", "FALSE").upper() == "TRUE"
+
+
+def _get_provider() -> MicrosandboxProvider:
+ global _provider
+ if _provider is None:
+ server_url = os.environ.get(
+ "MICROSANDBOX_SERVER_URL", "http://127.0.0.1:5555"
+ )
+ api_key = os.environ.get("MICROSANDBOX_API_KEY")
+ _provider = MicrosandboxProvider(
+ server_url=server_url,
+ api_key=api_key,
+ namespace="surfsense",
+ )
+ return _provider
+
+
+async def get_or_create_sandbox(thread_id: int | str) -> MicrosandboxBackend:
+ """Get or create a sandbox for a conversation thread.
+
+ Uses the thread_id as the sandbox name so the same sandbox persists
+ across multiple messages within the same conversation.
+
+ Args:
+ thread_id: The conversation thread identifier.
+
+ Returns:
+ MicrosandboxBackend connected to the sandbox.
+ """
+ provider = _get_provider()
+ sandbox_name = f"thread-{thread_id}"
+ sandbox = await provider.aget_or_create(
+ sandbox_id=sandbox_name,
+ timeout=120,
+ memory=512,
+ cpus=1.0,
+ )
+ logger.info("Sandbox ready: %s", sandbox.id)
+ return sandbox
+
+
+async def delete_sandbox(thread_id: int | str) -> None:
+ """Delete the sandbox for a conversation thread."""
+ provider = _get_provider()
+ sandbox_name = f"thread-{thread_id}"
+ try:
+ await provider.adelete(sandbox_id=sandbox_name)
+ logger.info("Sandbox deleted: surfsense/%s", sandbox_name)
+ except Exception:
+ logger.warning("Failed to delete sandbox surfsense/%s", sandbox_name, exc_info=True)
diff --git a/surfsense_backend/app/agents/new_chat/system_prompt.py b/surfsense_backend/app/agents/new_chat/system_prompt.py
index c8dcf5154..a965a0bca 100644
--- a/surfsense_backend/app/agents/new_chat/system_prompt.py
+++ b/surfsense_backend/app/agents/new_chat/system_prompt.py
@@ -645,6 +645,63 @@ However, from your video learning, it's important to note that asyncio is not su
"""
+# Sandbox / code execution instructions — appended when sandbox backend is enabled.
+# Inspired by Claude's computer-use prompt, scoped to code execution & data analytics.
+SANDBOX_EXECUTION_INSTRUCTIONS = """
+
+You have access to a secure, isolated Linux sandbox environment for running code and shell commands.
+This gives you the `execute` tool alongside the standard filesystem tools (`ls`, `read_file`, `write_file`, `edit_file`, `glob`, `grep`).
+
+## When to Use Code Execution
+
+Use the sandbox when the task benefits from actually running code rather than just describing it:
+- **Data analysis**: Load CSVs/JSON, compute statistics, filter/aggregate data, pivot tables
+- **Visualization**: Generate charts and plots (matplotlib, plotly, seaborn)
+- **Calculations**: Math, financial modeling, unit conversions, simulations
+- **Code validation**: Run and test code snippets the user provides or asks about
+- **File processing**: Parse, transform, or convert data files
+- **Quick prototyping**: Demonstrate working code for the user's problem
+- **Package exploration**: Install and test libraries the user is evaluating
+
+## When NOT to Use Code Execution
+
+Do not use the sandbox for:
+- Answering factual questions from your own knowledge
+- Summarizing or explaining concepts
+- Simple formatting or text generation tasks
+- Tasks that don't require running code to answer
+
+## Package Management
+
+- Use `pip install <package-name>` to install Python packages as needed
+- Common data/analytics packages (pandas, numpy, matplotlib, scipy, scikit-learn) may need to be installed on first use
+- Always verify a package installed successfully before using it
+
+## Working Guidelines
+
+- **Working directory**: Use `/home` or `/tmp` for all work
+- **Iterative approach**: For complex tasks, break work into steps — write code, run it, check output, refine
+- **Error handling**: If code fails, read the error, fix the issue, and retry. Don't just report the error without attempting a fix.
+- **Show results**: When generating plots or outputs, present the key findings directly in your response. For plots, save to a file and describe the results.
+- **Be efficient**: Install packages once per session. Combine related commands when possible.
+- **Large outputs**: If command output is very large, use `head`, `tail`, or save to a file and read selectively.
+
+## Data Analytics Best Practices
+
+When the user asks you to analyze data:
+1. First, inspect the data structure (`head`, `shape`, `dtypes`, `describe()`)
+2. Clean and validate before computing (handle nulls, check types)
+3. Perform the analysis and present results clearly
+4. Offer follow-up insights or visualizations when appropriate
+
+## Security Notes
+
+- The sandbox is fully isolated — you cannot access the host system, the user's local files, or any secrets
+- Each conversation thread has its own sandbox environment
+- Installed packages and created files can persist for the thread while its sandbox is active; cleanup depends on sandbox lifecycle/deletion policy
+
+"""
+
# Anti-citation prompt - used when citations are disabled
# This explicitly tells the model NOT to include citations
SURFSENSE_NO_CITATION_INSTRUCTIONS = """
@@ -670,6 +727,7 @@ Your goal is to provide helpful, informative answers in a clean, readable format
def build_surfsense_system_prompt(
today: datetime | None = None,
thread_visibility: ChatVisibility | None = None,
+ sandbox_enabled: bool = False,
) -> str:
"""
Build the SurfSense system prompt with default settings.
@@ -678,10 +736,12 @@ def build_surfsense_system_prompt(
- Default system instructions
- Tools instructions (always included)
- Citation instructions enabled
+ - Sandbox execution instructions (when sandbox_enabled=True)
Args:
today: Optional datetime for today's date (defaults to current UTC date)
thread_visibility: Optional; when provided, used for conditional prompt (e.g. private vs shared memory wording). Defaults to private behavior when None.
+ sandbox_enabled: Whether the sandbox backend is active (adds code execution instructions).
Returns:
Complete system prompt string
@@ -691,7 +751,8 @@ def build_surfsense_system_prompt(
system_instructions = _get_system_instructions(visibility, today)
tools_instructions = _get_tools_instructions(visibility)
citation_instructions = SURFSENSE_CITATION_INSTRUCTIONS
- return system_instructions + tools_instructions + citation_instructions
+ sandbox_instructions = SANDBOX_EXECUTION_INSTRUCTIONS if sandbox_enabled else ""
+ return system_instructions + tools_instructions + citation_instructions + sandbox_instructions
def build_configurable_system_prompt(
@@ -700,14 +761,16 @@ def build_configurable_system_prompt(
citations_enabled: bool = True,
today: datetime | None = None,
thread_visibility: ChatVisibility | None = None,
+ sandbox_enabled: bool = False,
) -> str:
"""
Build a configurable SurfSense system prompt based on NewLLMConfig settings.
- The prompt is composed of three parts:
+ The prompt is composed of up to four parts:
1. System Instructions - either custom or default SURFSENSE_SYSTEM_INSTRUCTIONS
2. Tools Instructions - always included (SURFSENSE_TOOLS_INSTRUCTIONS)
3. Citation Instructions - either SURFSENSE_CITATION_INSTRUCTIONS or SURFSENSE_NO_CITATION_INSTRUCTIONS
+ 4. Sandbox Execution Instructions - when sandbox_enabled=True
Args:
custom_system_instructions: Custom system instructions to use. If empty/None and
@@ -719,6 +782,7 @@ def build_configurable_system_prompt(
anti-citation instructions (False).
today: Optional datetime for today's date (defaults to current UTC date)
thread_visibility: Optional; when provided, used for conditional prompt (e.g. private vs shared memory wording). Defaults to private behavior when None.
+ sandbox_enabled: Whether the sandbox backend is active (adds code execution instructions).
Returns:
Complete system prompt string
@@ -727,7 +791,6 @@ def build_configurable_system_prompt(
# Determine system instructions
if custom_system_instructions and custom_system_instructions.strip():
- # Use custom instructions, injecting the date placeholder if present
system_instructions = custom_system_instructions.format(
resolved_today=resolved_today
)
@@ -735,7 +798,6 @@ def build_configurable_system_prompt(
visibility = thread_visibility or ChatVisibility.PRIVATE
system_instructions = _get_system_instructions(visibility, today)
else:
- # No system instructions (edge case)
system_instructions = ""
# Tools instructions: conditional on thread_visibility (private vs shared memory wording)
@@ -748,7 +810,9 @@ def build_configurable_system_prompt(
else SURFSENSE_NO_CITATION_INSTRUCTIONS
)
- return system_instructions + tools_instructions + citation_instructions
+ sandbox_instructions = SANDBOX_EXECUTION_INSTRUCTIONS if sandbox_enabled else ""
+
+ return system_instructions + tools_instructions + citation_instructions + sandbox_instructions
def get_default_system_instructions() -> str:
diff --git a/surfsense_backend/app/tasks/chat/stream_new_chat.py b/surfsense_backend/app/tasks/chat/stream_new_chat.py
index 4ba12c171..ecf04ce08 100644
--- a/surfsense_backend/app/tasks/chat/stream_new_chat.py
+++ b/surfsense_backend/app/tasks/chat/stream_new_chat.py
@@ -10,6 +10,7 @@ Supports loading LLM configurations from:
"""
import json
+import re
from collections.abc import AsyncGenerator
from dataclasses import dataclass
from typing import Any
@@ -404,6 +405,21 @@ async def _stream_agent_events(
status="in_progress",
items=last_active_step_items,
)
+ elif tool_name == "execute":
+ cmd = (
+ tool_input.get("command", "")
+ if isinstance(tool_input, dict)
+ else str(tool_input)
+ )
+ display_cmd = cmd[:80] + ("…" if len(cmd) > 80 else "")
+ last_active_step_title = "Running command"
+ last_active_step_items = [f"$ {display_cmd}"]
+ yield streaming_service.format_thinking_step(
+ step_id=tool_step_id,
+ title="Running command",
+ status="in_progress",
+ items=last_active_step_items,
+ )
else:
last_active_step_title = f"Using {tool_name.replace('_', ' ')}"
last_active_step_items = []
@@ -620,6 +636,26 @@ async def _stream_agent_events(
status="completed",
items=completed_items,
)
+ elif tool_name == "execute":
+ raw_text = (
+ tool_output.get("result", "")
+ if isinstance(tool_output, dict)
+ else str(tool_output)
+ )
+ m = re.match(r"^Exit code:\s*(\d+)", raw_text)
+ exit_code_val = int(m.group(1)) if m else None
+ if exit_code_val is not None and exit_code_val == 0:
+ completed_items = [*last_active_step_items, "Completed successfully"]
+ elif exit_code_val is not None:
+ completed_items = [*last_active_step_items, f"Exit code: {exit_code_val}"]
+ else:
+ completed_items = [*last_active_step_items, "Finished"]
+ yield streaming_service.format_thinking_step(
+ step_id=original_step_id,
+ title="Running command",
+ status="completed",
+ items=completed_items,
+ )
elif tool_name == "ls":
if isinstance(tool_output, dict):
ls_output = tool_output.get("result", "")
@@ -811,6 +847,26 @@ async def _stream_agent_events(
if isinstance(tool_output, dict)
else {"result": tool_output},
)
+ elif tool_name == "execute":
+ raw_text = (
+ tool_output.get("result", "")
+ if isinstance(tool_output, dict)
+ else str(tool_output)
+ )
+ exit_code: int | None = None
+ output_text = raw_text
+ m = re.match(r"^Exit code:\s*(\d+)", raw_text)
+ if m:
+ exit_code = int(m.group(1))
+ om = re.search(r"\nOutput:\n([\s\S]*)", raw_text)
+ output_text = om.group(1) if om else ""
+ yield streaming_service.format_tool_output_available(
+ tool_call_id,
+ {
+ "exit_code": exit_code,
+ "output": output_text,
+ },
+ )
else:
yield streaming_service.format_tool_output_available(
tool_call_id,
@@ -975,6 +1031,17 @@ async def stream_new_chat(
# Get the PostgreSQL checkpointer for persistent conversation memory
checkpointer = await get_checkpointer()
+ # Optionally provision a sandboxed code execution environment
+ sandbox_backend = None
+ from app.agents.new_chat.sandbox import is_sandbox_enabled, get_or_create_sandbox
+ if is_sandbox_enabled():
+ try:
+ sandbox_backend = await get_or_create_sandbox(chat_id)
+ except Exception as sandbox_err:
+ logging.getLogger(__name__).warning(
+ "Sandbox creation failed, continuing without execute tool: %s", sandbox_err
+ )
+
visibility = thread_visibility or ChatVisibility.PRIVATE
agent = await create_surfsense_deep_agent(
llm=llm,
@@ -987,6 +1054,7 @@ async def stream_new_chat(
agent_config=agent_config,
firecrawl_api_key=firecrawl_api_key,
thread_visibility=visibility,
+ sandbox_backend=sandbox_backend,
)
# Build input with message history
@@ -1352,6 +1420,17 @@ async def stream_resume_chat(
firecrawl_api_key = webcrawler_connector.config.get("FIRECRAWL_API_KEY")
checkpointer = await get_checkpointer()
+
+ sandbox_backend = None
+ from app.agents.new_chat.sandbox import is_sandbox_enabled, get_or_create_sandbox
+ if is_sandbox_enabled():
+ try:
+ sandbox_backend = await get_or_create_sandbox(chat_id)
+ except Exception as sandbox_err:
+ logging.getLogger(__name__).warning(
+ "Sandbox creation failed, continuing without execute tool: %s", sandbox_err
+ )
+
visibility = thread_visibility or ChatVisibility.PRIVATE
agent = await create_surfsense_deep_agent(
@@ -1365,6 +1444,7 @@ async def stream_resume_chat(
agent_config=agent_config,
firecrawl_api_key=firecrawl_api_key,
thread_visibility=visibility,
+ sandbox_backend=sandbox_backend,
)
# Release the transaction before streaming (same rationale as stream_new_chat).
diff --git a/surfsense_backend/pyproject.toml b/surfsense_backend/pyproject.toml
index 7f52d4881..3df84141d 100644
--- a/surfsense_backend/pyproject.toml
+++ b/surfsense_backend/pyproject.toml
@@ -65,6 +65,7 @@ dependencies = [
"pypandoc_binary>=1.16.2",
"typst>=0.14.0",
"deepagents>=0.4.3",
+ "deepagents-microsandbox>=1.0.1",
]
[dependency-groups]
diff --git a/surfsense_backend/uv.lock b/surfsense_backend/uv.lock
index 8a6b7138a..50ed66617 100644
--- a/surfsense_backend/uv.lock
+++ b/surfsense_backend/uv.lock
@@ -67,7 +67,7 @@ wheels = [
[[package]]
name = "aiohttp"
-version = "3.12.13"
+version = "3.10.11"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohappyeyeballs" },
@@ -75,45 +75,40 @@ dependencies = [
{ name = "attrs" },
{ name = "frozenlist" },
{ name = "multidict" },
- { name = "propcache" },
{ name = "yarl" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/42/6e/ab88e7cb2a4058bed2f7870276454f85a7c56cd6da79349eb314fc7bbcaa/aiohttp-3.12.13.tar.gz", hash = "sha256:47e2da578528264a12e4e3dd8dd72a7289e5f812758fe086473fab037a10fcce", size = 7819160 }
+sdist = { url = "https://files.pythonhosted.org/packages/25/a8/8e2ba36c6e3278d62e0c88aa42bb92ddbef092ac363b390dab4421da5cf5/aiohttp-3.10.11.tar.gz", hash = "sha256:9dc2b8f3dcab2e39e0fa309c8da50c3b55e6f34ab25f1a71d3288f24924d33a7", size = 7551886 }
wheels = [
- { url = "https://files.pythonhosted.org/packages/b4/6a/ce40e329788013cd190b1d62bbabb2b6a9673ecb6d836298635b939562ef/aiohttp-3.12.13-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0aa580cf80558557285b49452151b9c69f2fa3ad94c5c9e76e684719a8791b73", size = 700491 },
- { url = "https://files.pythonhosted.org/packages/28/d9/7150d5cf9163e05081f1c5c64a0cdf3c32d2f56e2ac95db2a28fe90eca69/aiohttp-3.12.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b103a7e414b57e6939cc4dece8e282cfb22043efd0c7298044f6594cf83ab347", size = 475104 },
- { url = "https://files.pythonhosted.org/packages/f8/91/d42ba4aed039ce6e449b3e2db694328756c152a79804e64e3da5bc19dffc/aiohttp-3.12.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f64e748e9e741d2eccff9597d09fb3cd962210e5b5716047cbb646dc8fe06f", size = 467948 },
- { url = "https://files.pythonhosted.org/packages/99/3b/06f0a632775946981d7c4e5a865cddb6e8dfdbaed2f56f9ade7bb4a1039b/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c955989bf4c696d2ededc6b0ccb85a73623ae6e112439398935362bacfaaf6", size = 1714742 },
- { url = "https://files.pythonhosted.org/packages/92/a6/2552eebad9ec5e3581a89256276009e6a974dc0793632796af144df8b740/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d640191016763fab76072c87d8854a19e8e65d7a6fcfcbf017926bdbbb30a7e5", size = 1697393 },
- { url = "https://files.pythonhosted.org/packages/d8/9f/bd08fdde114b3fec7a021381b537b21920cdd2aa29ad48c5dffd8ee314f1/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dc507481266b410dede95dd9f26c8d6f5a14315372cc48a6e43eac652237d9b", size = 1752486 },
- { url = "https://files.pythonhosted.org/packages/f7/e1/affdea8723aec5bd0959171b5490dccd9a91fcc505c8c26c9f1dca73474d/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8a94daa873465d518db073bd95d75f14302e0208a08e8c942b2f3f1c07288a75", size = 1798643 },
- { url = "https://files.pythonhosted.org/packages/f3/9d/666d856cc3af3a62ae86393baa3074cc1d591a47d89dc3bf16f6eb2c8d32/aiohttp-3.12.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f52420cde4ce0bb9425a375d95577fe082cb5721ecb61da3049b55189e4e6", size = 1718082 },
- { url = "https://files.pythonhosted.org/packages/f3/ce/3c185293843d17be063dada45efd2712bb6bf6370b37104b4eda908ffdbd/aiohttp-3.12.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f7df1f620ec40f1a7fbcb99ea17d7326ea6996715e78f71a1c9a021e31b96b8", size = 1633884 },
- { url = "https://files.pythonhosted.org/packages/3a/5b/f3413f4b238113be35dfd6794e65029250d4b93caa0974ca572217745bdb/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3062d4ad53b36e17796dce1c0d6da0ad27a015c321e663657ba1cc7659cfc710", size = 1694943 },
- { url = "https://files.pythonhosted.org/packages/82/c8/0e56e8bf12081faca85d14a6929ad5c1263c146149cd66caa7bc12255b6d/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:8605e22d2a86b8e51ffb5253d9045ea73683d92d47c0b1438e11a359bdb94462", size = 1716398 },
- { url = "https://files.pythonhosted.org/packages/ea/f3/33192b4761f7f9b2f7f4281365d925d663629cfaea093a64b658b94fc8e1/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:54fbbe6beafc2820de71ece2198458a711e224e116efefa01b7969f3e2b3ddae", size = 1657051 },
- { url = "https://files.pythonhosted.org/packages/5e/0b/26ddd91ca8f84c48452431cb4c5dd9523b13bc0c9766bda468e072ac9e29/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:050bd277dfc3768b606fd4eae79dd58ceda67d8b0b3c565656a89ae34525d15e", size = 1736611 },
- { url = "https://files.pythonhosted.org/packages/c3/8d/e04569aae853302648e2c138a680a6a2f02e374c5b6711732b29f1e129cc/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2637a60910b58f50f22379b6797466c3aa6ae28a6ab6404e09175ce4955b4e6a", size = 1764586 },
- { url = "https://files.pythonhosted.org/packages/ac/98/c193c1d1198571d988454e4ed75adc21c55af247a9fda08236602921c8c8/aiohttp-3.12.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e986067357550d1aaa21cfe9897fa19e680110551518a5a7cf44e6c5638cb8b5", size = 1724197 },
- { url = "https://files.pythonhosted.org/packages/e7/9e/07bb8aa11eec762c6b1ff61575eeeb2657df11ab3d3abfa528d95f3e9337/aiohttp-3.12.13-cp312-cp312-win32.whl", hash = "sha256:ac941a80aeea2aaae2875c9500861a3ba356f9ff17b9cb2dbfb5cbf91baaf5bf", size = 421771 },
- { url = "https://files.pythonhosted.org/packages/52/66/3ce877e56ec0813069cdc9607cd979575859c597b6fb9b4182c6d5f31886/aiohttp-3.12.13-cp312-cp312-win_amd64.whl", hash = "sha256:671f41e6146a749b6c81cb7fd07f5a8356d46febdaaaf07b0e774ff04830461e", size = 447869 },
- { url = "https://files.pythonhosted.org/packages/11/0f/db19abdf2d86aa1deec3c1e0e5ea46a587b97c07a16516b6438428b3a3f8/aiohttp-3.12.13-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d4a18e61f271127465bdb0e8ff36e8f02ac4a32a80d8927aa52371e93cd87938", size = 694910 },
- { url = "https://files.pythonhosted.org/packages/d5/81/0ab551e1b5d7f1339e2d6eb482456ccbe9025605b28eed2b1c0203aaaade/aiohttp-3.12.13-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:532542cb48691179455fab429cdb0d558b5e5290b033b87478f2aa6af5d20ace", size = 472566 },
- { url = "https://files.pythonhosted.org/packages/34/3f/6b7d336663337672d29b1f82d1f252ec1a040fe2d548f709d3f90fa2218a/aiohttp-3.12.13-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d7eea18b52f23c050ae9db5d01f3d264ab08f09e7356d6f68e3f3ac2de9dfabb", size = 464856 },
- { url = "https://files.pythonhosted.org/packages/26/7f/32ca0f170496aa2ab9b812630fac0c2372c531b797e1deb3deb4cea904bd/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad7c8e5c25f2a26842a7c239de3f7b6bfb92304593ef997c04ac49fb703ff4d7", size = 1703683 },
- { url = "https://files.pythonhosted.org/packages/ec/53/d5513624b33a811c0abea8461e30a732294112318276ce3dbf047dbd9d8b/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6af355b483e3fe9d7336d84539fef460120c2f6e50e06c658fe2907c69262d6b", size = 1684946 },
- { url = "https://files.pythonhosted.org/packages/37/72/4c237dd127827b0247dc138d3ebd49c2ded6114c6991bbe969058575f25f/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a95cf9f097498f35c88e3609f55bb47b28a5ef67f6888f4390b3d73e2bac6177", size = 1737017 },
- { url = "https://files.pythonhosted.org/packages/0d/67/8a7eb3afa01e9d0acc26e1ef847c1a9111f8b42b82955fcd9faeb84edeb4/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8ed8c38a1c584fe99a475a8f60eefc0b682ea413a84c6ce769bb19a7ff1c5ef", size = 1786390 },
- { url = "https://files.pythonhosted.org/packages/48/19/0377df97dd0176ad23cd8cad4fd4232cfeadcec6c1b7f036315305c98e3f/aiohttp-3.12.13-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0b9170d5d800126b5bc89d3053a2363406d6e327afb6afaeda2d19ee8bb103", size = 1708719 },
- { url = "https://files.pythonhosted.org/packages/61/97/ade1982a5c642b45f3622255173e40c3eed289c169f89d00eeac29a89906/aiohttp-3.12.13-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:372feeace612ef8eb41f05ae014a92121a512bd5067db8f25101dd88a8db11da", size = 1622424 },
- { url = "https://files.pythonhosted.org/packages/99/ab/00ad3eea004e1d07ccc406e44cfe2b8da5acb72f8c66aeeb11a096798868/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a946d3702f7965d81f7af7ea8fb03bb33fe53d311df48a46eeca17e9e0beed2d", size = 1675447 },
- { url = "https://files.pythonhosted.org/packages/3f/fe/74e5ce8b2ccaba445fe0087abc201bfd7259431d92ae608f684fcac5d143/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a0c4725fae86555bbb1d4082129e21de7264f4ab14baf735278c974785cd2041", size = 1707110 },
- { url = "https://files.pythonhosted.org/packages/ef/c4/39af17807f694f7a267bd8ab1fbacf16ad66740862192a6c8abac2bff813/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b28ea2f708234f0a5c44eb6c7d9eb63a148ce3252ba0140d050b091b6e842d1", size = 1649706 },
- { url = "https://files.pythonhosted.org/packages/38/e8/f5a0a5f44f19f171d8477059aa5f28a158d7d57fe1a46c553e231f698435/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d4f5becd2a5791829f79608c6f3dc745388162376f310eb9c142c985f9441cc1", size = 1725839 },
- { url = "https://files.pythonhosted.org/packages/fd/ac/81acc594c7f529ef4419d3866913f628cd4fa9cab17f7bf410a5c3c04c53/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:60f2ce6b944e97649051d5f5cc0f439360690b73909230e107fd45a359d3e911", size = 1759311 },
- { url = "https://files.pythonhosted.org/packages/38/0d/aabe636bd25c6ab7b18825e5a97d40024da75152bec39aa6ac8b7a677630/aiohttp-3.12.13-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:69fc1909857401b67bf599c793f2183fbc4804717388b0b888f27f9929aa41f3", size = 1708202 },
- { url = "https://files.pythonhosted.org/packages/1f/ab/561ef2d8a223261683fb95a6283ad0d36cb66c87503f3a7dde7afe208bb2/aiohttp-3.12.13-cp313-cp313-win32.whl", hash = "sha256:7d7e68787a2046b0e44ba5587aa723ce05d711e3a3665b6b7545328ac8e3c0dd", size = 420794 },
- { url = "https://files.pythonhosted.org/packages/9d/47/b11d0089875a23bff0abd3edb5516bcd454db3fefab8604f5e4b07bd6210/aiohttp-3.12.13-cp313-cp313-win_amd64.whl", hash = "sha256:5a178390ca90419bfd41419a809688c368e63c86bd725e1186dd97f6b89c2706", size = 446735 },
+ { url = "https://files.pythonhosted.org/packages/01/16/077057ef3bd684dbf9a8273a5299e182a8d07b4b252503712ff8b5364fd1/aiohttp-3.10.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7480519f70e32bfb101d71fb9a1f330fbd291655a4c1c922232a48c458c52710", size = 584830 },
+ { url = "https://files.pythonhosted.org/packages/2c/cf/348b93deb9597c61a51b6682e81f7c7d79290249e886022ef0705d858d90/aiohttp-3.10.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f65267266c9aeb2287a6622ee2bb39490292552f9fbf851baabc04c9f84e048d", size = 397090 },
+ { url = "https://files.pythonhosted.org/packages/70/bf/903df5cd739dfaf5b827b3d8c9d68ff4fcea16a0ca1aeb948c9da30f56c8/aiohttp-3.10.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7400a93d629a0608dc1d6c55f1e3d6e07f7375745aaa8bd7f085571e4d1cee97", size = 392361 },
+ { url = "https://files.pythonhosted.org/packages/fb/97/e4792675448a2ac5bd56f377a095233b805dd1315235c940c8ba5624e3cb/aiohttp-3.10.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f34b97e4b11b8d4eb2c3a4f975be626cc8af99ff479da7de49ac2c6d02d35725", size = 1309839 },
+ { url = "https://files.pythonhosted.org/packages/96/d0/ba19b1260da6fbbda4d5b1550d8a53ba3518868f2c143d672aedfdbc6172/aiohttp-3.10.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e7b825da878464a252ccff2958838f9caa82f32a8dbc334eb9b34a026e2c636", size = 1348116 },
+ { url = "https://files.pythonhosted.org/packages/b3/b9/15100ee7113a2638bfdc91aecc54641609a92a7ce4fe533ebeaa8d43ff93/aiohttp-3.10.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9f92a344c50b9667827da308473005f34767b6a2a60d9acff56ae94f895f385", size = 1391402 },
+ { url = "https://files.pythonhosted.org/packages/c5/36/831522618ac0dcd0b28f327afd18df7fb6bbf3eaf302f912a40e87714846/aiohttp-3.10.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc6f1ab987a27b83c5268a17218463c2ec08dbb754195113867a27b166cd6087", size = 1304239 },
+ { url = "https://files.pythonhosted.org/packages/60/9f/b7230d0c48b076500ae57adb717aa0656432acd3d8febb1183dedfaa4e75/aiohttp-3.10.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1dc0f4ca54842173d03322793ebcf2c8cc2d34ae91cc762478e295d8e361e03f", size = 1256565 },
+ { url = "https://files.pythonhosted.org/packages/63/c2/35c7b4699f4830b3b0a5c3d5619df16dca8052ae8b488e66065902d559f6/aiohttp-3.10.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7ce6a51469bfaacff146e59e7fb61c9c23006495d11cc24c514a455032bcfa03", size = 1269285 },
+ { url = "https://files.pythonhosted.org/packages/51/48/bc20ea753909bdeb09f9065260aefa7453e3a57f6a51f56f5216adc1a5e7/aiohttp-3.10.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:aad3cd91d484d065ede16f3cf15408254e2469e3f613b241a1db552c5eb7ab7d", size = 1276716 },
+ { url = "https://files.pythonhosted.org/packages/0c/7b/a8708616b3810f55ead66f8e189afa9474795760473aea734bbea536cd64/aiohttp-3.10.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f4df4b8ca97f658c880fb4b90b1d1ec528315d4030af1ec763247ebfd33d8b9a", size = 1315023 },
+ { url = "https://files.pythonhosted.org/packages/2a/d6/dfe9134a921e05b01661a127a37b7d157db93428905450e32f9898eef27d/aiohttp-3.10.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2e4e18a0a2d03531edbc06c366954e40a3f8d2a88d2b936bbe78a0c75a3aab3e", size = 1342735 },
+ { url = "https://files.pythonhosted.org/packages/ca/1a/3bd7f18e3909eabd57e5d17ecdbf5ea4c5828d91341e3676a07de7c76312/aiohttp-3.10.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6ce66780fa1a20e45bc753cda2a149daa6dbf1561fc1289fa0c308391c7bc0a4", size = 1302618 },
+ { url = "https://files.pythonhosted.org/packages/cf/51/d063133781cda48cfdd1e11fc8ef45ab3912b446feba41556385b3ae5087/aiohttp-3.10.11-cp312-cp312-win32.whl", hash = "sha256:a919c8957695ea4c0e7a3e8d16494e3477b86f33067478f43106921c2fef15bb", size = 360497 },
+ { url = "https://files.pythonhosted.org/packages/55/4e/f29def9ed39826fe8f85955f2e42fe5cc0cbe3ebb53c97087f225368702e/aiohttp-3.10.11-cp312-cp312-win_amd64.whl", hash = "sha256:b5e29706e6389a2283a91611c91bf24f218962717c8f3b4e528ef529d112ee27", size = 380577 },
+ { url = "https://files.pythonhosted.org/packages/1f/63/654c185dfe3cf5d4a0d35b6ee49ee6ca91922c694eaa90732e1ba4b40ef1/aiohttp-3.10.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:703938e22434d7d14ec22f9f310559331f455018389222eed132808cd8f44127", size = 577381 },
+ { url = "https://files.pythonhosted.org/packages/4e/c4/ee9c350acb202ba2eb0c44b0f84376b05477e870444192a9f70e06844c28/aiohttp-3.10.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9bc50b63648840854e00084c2b43035a62e033cb9b06d8c22b409d56eb098413", size = 393289 },
+ { url = "https://files.pythonhosted.org/packages/3d/7c/30d161a7e3b208cef1b922eacf2bbb8578b7e5a62266a6a2245a1dd044dc/aiohttp-3.10.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f0463bf8b0754bc744e1feb61590706823795041e63edf30118a6f0bf577461", size = 388859 },
+ { url = "https://files.pythonhosted.org/packages/79/10/8d050e04be447d3d39e5a4a910fa289d930120cebe1b893096bd3ee29063/aiohttp-3.10.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6c6dec398ac5a87cb3a407b068e1106b20ef001c344e34154616183fe684288", size = 1280983 },
+ { url = "https://files.pythonhosted.org/packages/31/b3/977eca40afe643dcfa6b8d8bb9a93f4cba1d8ed1ead22c92056b08855c7a/aiohttp-3.10.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcaf2d79104d53d4dcf934f7ce76d3d155302d07dae24dff6c9fffd217568067", size = 1317132 },
+ { url = "https://files.pythonhosted.org/packages/1a/43/b5ee8e697ed0f96a2b3d80b3058fa7590cda508e9cd256274246ba1cf37a/aiohttp-3.10.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25fd5470922091b5a9aeeb7e75be609e16b4fba81cdeaf12981393fb240dd10e", size = 1362630 },
+ { url = "https://files.pythonhosted.org/packages/28/20/3ae8e993b2990fa722987222dea74d6bac9331e2f530d086f309b4aa8847/aiohttp-3.10.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbde2ca67230923a42161b1f408c3992ae6e0be782dca0c44cb3206bf330dee1", size = 1276865 },
+ { url = "https://files.pythonhosted.org/packages/02/08/1afb0ab7dcff63333b683e998e751aa2547d1ff897b577d2244b00e6fe38/aiohttp-3.10.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:249c8ff8d26a8b41a0f12f9df804e7c685ca35a207e2410adbd3e924217b9006", size = 1230448 },
+ { url = "https://files.pythonhosted.org/packages/c6/fd/ccd0ff842c62128d164ec09e3dd810208a84d79cd402358a3038ae91f3e9/aiohttp-3.10.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:878ca6a931ee8c486a8f7b432b65431d095c522cbeb34892bee5be97b3481d0f", size = 1244626 },
+ { url = "https://files.pythonhosted.org/packages/9f/75/30e9537ab41ed7cb062338d8df7c4afb0a715b3551cd69fc4ea61cfa5a95/aiohttp-3.10.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8663f7777ce775f0413324be0d96d9730959b2ca73d9b7e2c2c90539139cbdd6", size = 1243608 },
+ { url = "https://files.pythonhosted.org/packages/c2/e0/3e7a62d99b9080793affddc12a82b11c9bc1312916ad849700d2bddf9786/aiohttp-3.10.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6cd3f10b01f0c31481fba8d302b61603a2acb37b9d30e1d14e0f5a58b7b18a31", size = 1286158 },
+ { url = "https://files.pythonhosted.org/packages/71/b8/df67886802e71e976996ed9324eb7dc379e53a7d972314e9c7fe3f6ac6bc/aiohttp-3.10.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4e8d8aad9402d3aa02fdc5ca2fe68bcb9fdfe1f77b40b10410a94c7f408b664d", size = 1313636 },
+ { url = "https://files.pythonhosted.org/packages/3c/3b/aea9c3e70ff4e030f46902df28b4cdf486695f4d78fd9c6698827e2bafab/aiohttp-3.10.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:38e3c4f80196b4f6c3a85d134a534a56f52da9cb8d8e7af1b79a32eefee73a00", size = 1273772 },
+ { url = "https://files.pythonhosted.org/packages/e9/9e/4b4c5705270d1c4ee146516ad288af720798d957ba46504aaf99b86e85d9/aiohttp-3.10.11-cp313-cp313-win32.whl", hash = "sha256:fc31820cfc3b2863c6e95e14fcf815dc7afe52480b4dc03393c4873bb5599f71", size = 358679 },
+ { url = "https://files.pythonhosted.org/packages/28/1d/18ef37549901db94717d4389eb7be807acbfbdeab48a73ff2993fc909118/aiohttp-3.10.11-cp313-cp313-win_amd64.whl", hash = "sha256:4996ff1345704ffdd6d75fb06ed175938c133425af616142e7187f28dc75f14e", size = 378073 },
]
[[package]]
@@ -1261,6 +1256,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/58/f8/c076a841b68cc13d89c395cc97965b37751ed008691a304119efa0f5717e/deepagents-0.4.3-py3-none-any.whl", hash = "sha256:298d19c5c0b4c6fc6a74b68049a7bfea0ba481aece7201ab21e7172b71ee61b9", size = 94882 },
]
+[[package]]
+name = "deepagents-microsandbox"
+version = "1.0.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "deepagents" },
+ { name = "microsandbox" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/8d/d5/77562772b7bf868478e5e3badb4f66e60171c6b740be4cf9fd5ffa0c37e5/deepagents_microsandbox-1.0.1.tar.gz", hash = "sha256:b9471f251597fc56b9b2bc5f41a478cd6b87db2641a1e91210978b4abeeb1600", size = 140696 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e8/e5/7fc618dfa08d60a954bf3b13cb9c765ecb37cd3ad8c2174171dcbff8b00b/deepagents_microsandbox-1.0.1-py3-none-any.whl", hash = "sha256:8173ce8dbdf290a0fb5bf83f204814b587470ba9b93fcdad8980ca85e46604b1", size = 9736 },
+]
+
[[package]]
name = "defusedxml"
version = "0.7.1"
@@ -3703,6 +3711,20 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 },
]
+[[package]]
+name = "microsandbox"
+version = "0.1.8"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohttp" },
+ { name = "frozenlist" },
+ { name = "python-dotenv" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/bf/ad/200f7d89d9ae6f6066ee71e2dff3b3becece1858e8d795f8cc8a66c94516/microsandbox-0.1.8.tar.gz", hash = "sha256:38eac3310f05a238fc49c27cd9c6064a767ccb6f8a53c118b7ecfccb5df58b7a", size = 8949 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/3c/89/2d6653e4c6bfa535da59d84d7c8bcc1678b35299ed43c1d11fb1c07a2179/microsandbox-0.1.8-py3-none-any.whl", hash = "sha256:b4503f6efd0f58e1acbac782399d3020cc704031279637fe5c60bdb5da267cd8", size = 12112 },
+]
+
[[package]]
name = "misaki"
version = "0.9.4"
@@ -6845,6 +6867,7 @@ dependencies = [
{ name = "composio" },
{ name = "datasets" },
{ name = "deepagents" },
+ { name = "deepagents-microsandbox" },
{ name = "discord-py" },
{ name = "docling" },
{ name = "elasticsearch" },
@@ -6915,6 +6938,7 @@ requires-dist = [
{ name = "composio", specifier = ">=0.10.9" },
{ name = "datasets", specifier = ">=2.21.0" },
{ name = "deepagents", specifier = ">=0.4.3" },
+ { name = "deepagents-microsandbox", specifier = ">=1.0.1" },
{ name = "discord-py", specifier = ">=2.5.2" },
{ name = "docling", specifier = ">=2.15.0" },
{ name = "elasticsearch", specifier = ">=9.1.1" },
diff --git a/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx
index dd11382a8..8720078cc 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/new-chat/[[...chat_id]]/page.tsx
@@ -49,6 +49,7 @@ import {
DeleteNotionPageToolUI,
UpdateNotionPageToolUI,
} from "@/components/tool-ui/notion";
+import { SandboxExecuteToolUI } from "@/components/tool-ui/sandbox-execute";
import { ScrapeWebpageToolUI } from "@/components/tool-ui/scrape-webpage";
import { RecallMemoryToolUI, SaveMemoryToolUI } from "@/components/tool-ui/user-memory";
import { Skeleton } from "@/components/ui/skeleton";
@@ -151,6 +152,7 @@ const TOOLS_WITH_UI = new Set([
"create_linear_issue",
"update_linear_issue",
"delete_linear_issue",
+ "execute",
// "write_todos", // Disabled for now
]);
@@ -1664,6 +1666,7 @@ export default function NewChatPage() {
+					<SandboxExecuteToolUI />
{/* Disabled for now */}
diff --git a/surfsense_web/components/tool-ui/index.ts b/surfsense_web/components/tool-ui/index.ts
index 93b6229a0..c4f0dbde5 100644
--- a/surfsense_web/components/tool-ui/index.ts
+++ b/surfsense_web/components/tool-ui/index.ts
@@ -97,4 +97,11 @@ export {
SaveMemoryResultSchema,
SaveMemoryToolUI,
} from "./user-memory";
+export {
+ type ExecuteArgs,
+ ExecuteArgsSchema,
+ type ExecuteResult,
+ ExecuteResultSchema,
+ SandboxExecuteToolUI,
+} from "./sandbox-execute";
export { type WriteTodosData, WriteTodosSchema, WriteTodosToolUI } from "./write-todos";
diff --git a/surfsense_web/components/tool-ui/sandbox-execute.tsx b/surfsense_web/components/tool-ui/sandbox-execute.tsx
new file mode 100644
index 000000000..0dd853218
--- /dev/null
+++ b/surfsense_web/components/tool-ui/sandbox-execute.tsx
@@ -0,0 +1,266 @@
+"use client";
+
+import { makeAssistantToolUI } from "@assistant-ui/react";
+import {
+ AlertCircleIcon,
+ CheckCircle2Icon,
+ ChevronRightIcon,
+ Loader2Icon,
+ TerminalIcon,
+ XCircleIcon,
+} from "lucide-react";
+import { useMemo, useState } from "react";
+import { z } from "zod";
+import { Badge } from "@/components/ui/badge";
+import {
+ Collapsible,
+ CollapsibleContent,
+ CollapsibleTrigger,
+} from "@/components/ui/collapsible";
+import { cn } from "@/lib/utils";
+
+// ============================================================================
+// Zod Schemas
+// ============================================================================
+
+const ExecuteArgsSchema = z.object({
+ command: z.string(),
+ timeout: z.number().nullish(),
+});
+
+const ExecuteResultSchema = z.object({
+ result: z.string().nullish(),
+ exit_code: z.number().nullish(),
+ output: z.string().nullish(),
+ error: z.string().nullish(),
+ status: z.string().nullish(),
+});
+
+// ============================================================================
+// Types
+// ============================================================================
+
+type ExecuteArgs = z.infer<typeof ExecuteArgsSchema>;
+type ExecuteResult = z.infer<typeof ExecuteResultSchema>;
+
+interface ParsedOutput {
+ exitCode: number | null;
+ output: string;
+ truncated: boolean;
+ isError: boolean;
+}
+
+// ============================================================================
+// Helpers
+// ============================================================================
+
+function parseExecuteResult(result: ExecuteResult): ParsedOutput {
+ const raw = result.result || result.output || "";
+
+ if (result.error) {
+ return { exitCode: null, output: result.error, truncated: false, isError: true };
+ }
+
+ if (result.exit_code !== undefined && result.exit_code !== null) {
+ return {
+ exitCode: result.exit_code,
+ output: raw,
+ truncated: raw.includes("[Output was truncated"),
+ isError: result.exit_code !== 0,
+ };
+ }
+
+ const exitMatch = raw.match(/^Exit code:\s*(\d+)/);
+ if (exitMatch) {
+ const exitCode = parseInt(exitMatch[1], 10);
+ const outputMatch = raw.match(/\nOutput:\n([\s\S]*)/);
+ const output = outputMatch ? outputMatch[1] : "";
+ return {
+ exitCode,
+ output,
+ truncated: raw.includes("[Output was truncated"),
+ isError: exitCode !== 0,
+ };
+ }
+
+ if (raw.startsWith("Error:")) {
+ return { exitCode: null, output: raw, truncated: false, isError: true };
+ }
+
+ return { exitCode: null, output: raw, truncated: false, isError: false };
+}
+
+function truncateCommand(command: string, maxLen = 80): string {
+ if (command.length <= maxLen) return command;
+ return command.slice(0, maxLen) + "…";
+}
+
+// ============================================================================
+// Sub-Components
+// ============================================================================
+
+function ExecuteLoading({ command }: { command: string }) {
+ return (
+
+
+
+ {truncateCommand(command)}
+
+
+ );
+}
+
+function ExecuteErrorState({ command, error }: { command: string; error: string }) {
+ return (
+
+
+
+
+
Execution failed
+
+ $ {command}
+
+
{error}
+
+
+
+ );
+}
+
+function ExecuteCancelledState({ command }: { command: string }) {
+ return (
+
+ );
+}
+
+function ExecuteResult({
+ command,
+ parsed,
+}: {
+ command: string;
+ parsed: ParsedOutput;
+}) {
+ const [open, setOpen] = useState(false);
+ const hasOutput = parsed.output.trim().length > 0;
+
+ const exitBadge = useMemo(() => {
+ if (parsed.exitCode === null) return null;
+ const success = parsed.exitCode === 0;
+ return (
+
+ {success ? (
+
+ ) : (
+
+ )}
+ {parsed.exitCode}
+
+ );
+ }, [parsed.exitCode]);
+
+ return (
+
+
+
+
+
+
+ {truncateCommand(command)}
+
+ {exitBadge}
+
+
+
+
+
+ {parsed.output}
+
+ {parsed.truncated && (
+
+ Output was truncated due to size limits
+
+ )}
+
+
+
+
+ );
+}
+
+// ============================================================================
+// Tool UI
+// ============================================================================
+
+export const SandboxExecuteToolUI = makeAssistantToolUI<ExecuteArgs, ExecuteResult>({
+ toolName: "execute",
+ render: function SandboxExecuteUI({ args, result, status }) {
+ const command = args.command || "…";
+
+ if (status.type === "running" || status.type === "requires-action") {
+      return <ExecuteLoading command={command} />;
+ }
+
+ if (status.type === "incomplete") {
+ if (status.reason === "cancelled") {
+        return <ExecuteCancelledState command={command} />;
+ }
+ if (status.reason === "error") {
+ return (
+
+ );
+ }
+ }
+
+ if (!result) {
+      return <ExecuteLoading command={command} />;
+ }
+
+ if (result.error && !result.result && !result.output) {
+      return <ExecuteErrorState command={command} error={result.error} />;
+ }
+
+ const parsed = parseExecuteResult(result);
+    return <ExecuteResult command={command} parsed={parsed} />;
+ },
+});
+
+export {
+ ExecuteArgsSchema,
+ ExecuteResultSchema,
+ type ExecuteArgs,
+ type ExecuteResult,
+};
From 8095cec37ee4337561485e395c07b73c15ff4fd2 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Tue, 24 Feb 2026 23:26:31 -0800
Subject: [PATCH 2/9] refactor: enable microsandbox service in docker-compose
with updated installation commands and healthcheck
---
docker-compose.yml | 51 +++++++++++++++++++++++-----------------------
1 file changed, 26 insertions(+), 25 deletions(-)
diff --git a/docker-compose.yml b/docker-compose.yml
index 04231ff20..5bdd390c0 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -140,30 +140,31 @@ services:
# The first sandbox creation will pull the OCI image (e.g. microsandbox/python),
# so the initial run takes a bit longer.
#
- # microsandbox:
- # image: ubuntu:22.04
- # ports:
- # - "${MICROSANDBOX_PORT:-5555}:5555"
- # volumes:
- # - microsandbox_data:/root/.microsandbox
- # privileged: true
- # devices:
- # - /dev/kvm:/dev/kvm
- # entrypoint: ["/bin/bash", "-c"]
- # command:
- # - |
- # set -e
- # if ! command -v msb &>/dev/null; then
- # apt-get update && apt-get install -y --no-install-recommends curl ca-certificates
- # curl -sSL https://get.microsandbox.dev | sh
- # fi
- # exec msb server start --dev
- # restart: unless-stopped
- # healthcheck:
- # test: ["CMD", "curl", "-f", "http://localhost:5555/health"]
- # interval: 10s
- # timeout: 5s
- # retries: 5
+ microsandbox:
+ image: ubuntu:22.04
+ ports:
+ - "${MICROSANDBOX_PORT:-5555}:5555"
+ volumes:
+ - microsandbox_data:/root/.microsandbox
+ privileged: true
+ devices:
+ - /dev/kvm:/dev/kvm
+ entrypoint: ["/bin/bash", "-c"]
+ command:
+ - |
+ set -e
+ export PATH="$$HOME/.local/bin:$$PATH"
+ if ! command -v msb &>/dev/null; then
+ apt-get update && apt-get install -y --no-install-recommends curl ca-certificates libdigest-sha-perl
+ curl -sSL https://get.microsandbox.dev | sh
+ fi
+ exec msb server start --dev
+ restart: unless-stopped
+ healthcheck:
+ test: ["CMD", "curl", "-f", "http://localhost:5555/health"]
+ interval: 10s
+ timeout: 5s
+ retries: 5
electric:
image: electricsql/electric:latest
@@ -206,4 +207,4 @@ volumes:
pgadmin_data:
redis_data:
shared_temp:
- # microsandbox_data:
+ microsandbox_data:
From 421bb29466bcbdd651f7ff6699257ef336d848f9 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Tue, 24 Feb 2026 23:53:03 -0800
Subject: [PATCH 3/9] chore: update microsandbox command to bind to all
interfaces and add compatibility shims for missing types
---
docker-compose.yml | 2 +-
scripts/docker/supervisor-allinone.conf | 2 +-
.../app/agents/new_chat/sandbox.py | 65 ++++++++++++++++++-
surfsense_backend/pyproject.toml | 1 +
surfsense_backend/uv.lock | 2 +
5 files changed, 68 insertions(+), 4 deletions(-)
diff --git a/docker-compose.yml b/docker-compose.yml
index 5bdd390c0..942a3de09 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -158,7 +158,7 @@ services:
apt-get update && apt-get install -y --no-install-recommends curl ca-certificates libdigest-sha-perl
curl -sSL https://get.microsandbox.dev | sh
fi
- exec msb server start --dev
+ exec msb server start --dev --host 0.0.0.0
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:5555/health"]
diff --git a/scripts/docker/supervisor-allinone.conf b/scripts/docker/supervisor-allinone.conf
index b935737d9..2a0c4fe81 100644
--- a/scripts/docker/supervisor-allinone.conf
+++ b/scripts/docker/supervisor-allinone.conf
@@ -118,7 +118,7 @@ environment=NODE_ENV="production",PORT="3000",HOSTNAME="0.0.0.0"
# Autostart is controlled by the entrypoint based on MICROSANDBOX_ENABLED env var.
# Requires --device /dev/kvm and --privileged when running the container.
[program:microsandbox]
-command=msb server start --dev
+command=msb server start --dev --host 0.0.0.0
autostart=%(ENV_MICROSANDBOX_AUTOSTART)s
autorestart=true
priority=25
diff --git a/surfsense_backend/app/agents/new_chat/sandbox.py b/surfsense_backend/app/agents/new_chat/sandbox.py
index 53e71329a..84ba9fac1 100644
--- a/surfsense_backend/app/agents/new_chat/sandbox.py
+++ b/surfsense_backend/app/agents/new_chat/sandbox.py
@@ -5,13 +5,74 @@ Manages the lifecycle of sandboxed code execution environments.
Each conversation thread gets its own isolated sandbox instance.
"""
+from __future__ import annotations
+
import logging
import os
-from deepagents_microsandbox import MicrosandboxBackend, MicrosandboxProvider
-
logger = logging.getLogger(__name__)
+# ---------------------------------------------------------------------------
+# Compatibility shim
+# ---------------------------------------------------------------------------
+# deepagents-microsandbox imports SandboxInfo, SandboxListResponse, and
+# SandboxProvider from deepagents.backends.sandbox. These types were added
+# in a fork and have not yet landed in the official deepagents package.
+# We inject minimal stubs so the import succeeds without patching the venv.
+# ---------------------------------------------------------------------------
+
+def _ensure_sandbox_provider_types() -> None:
+ """Inject missing SandboxProvider / SandboxInfo types if absent."""
+ import importlib
+ sandbox_mod = importlib.import_module("deepagents.backends.sandbox")
+
+ if hasattr(sandbox_mod, "SandboxProvider"):
+ return # Already present – nothing to do.
+
+ from abc import ABC, abstractmethod
+ from dataclasses import dataclass, field
+ from typing import Any, Generic, TypeVar
+
+ _M = TypeVar("_M")
+
+ @dataclass
+ class SandboxInfo(Generic[_M]):
+ sandbox_id: str
+ metadata: _M = field(default_factory=dict) # type: ignore[assignment]
+
+ @dataclass
+ class SandboxListResponse(Generic[_M]):
+ items: list[SandboxInfo[_M]] = field(default_factory=list)
+ cursor: str | None = None
+
+ class SandboxProvider(ABC, Generic[_M]):
+ @abstractmethod
+ def list(self, *, cursor: str | None = None, **kwargs: Any) -> SandboxListResponse[_M]: ...
+
+ @abstractmethod
+ async def alist(self, *, cursor: str | None = None, **kwargs: Any) -> SandboxListResponse[_M]: ...
+
+ @abstractmethod
+ def get_or_create(self, *, sandbox_id: str | None = None, **kwargs: Any) -> Any: ...
+
+ @abstractmethod
+ async def aget_or_create(self, *, sandbox_id: str | None = None, **kwargs: Any) -> Any: ...
+
+ @abstractmethod
+ def delete(self, *, sandbox_id: str, **kwargs: Any) -> None: ...
+
+ @abstractmethod
+ async def adelete(self, *, sandbox_id: str, **kwargs: Any) -> None: ...
+
+ sandbox_mod.SandboxInfo = SandboxInfo # type: ignore[attr-defined]
+ sandbox_mod.SandboxListResponse = SandboxListResponse # type: ignore[attr-defined]
+ sandbox_mod.SandboxProvider = SandboxProvider # type: ignore[attr-defined]
+
+
+_ensure_sandbox_provider_types()
+
+from deepagents_microsandbox import MicrosandboxBackend, MicrosandboxProvider # noqa: E402
+
_provider: MicrosandboxProvider | None = None
diff --git a/surfsense_backend/pyproject.toml b/surfsense_backend/pyproject.toml
index 5f79c3154..f9359c9b5 100644
--- a/surfsense_backend/pyproject.toml
+++ b/surfsense_backend/pyproject.toml
@@ -67,6 +67,7 @@ dependencies = [
"typst>=0.14.0",
"deepagents>=0.4.3",
"deepagents-microsandbox>=1.0.1",
+ "microsandbox>=0.1.8",
]
[dependency-groups]
diff --git a/surfsense_backend/uv.lock b/surfsense_backend/uv.lock
index 68e7d1f1b..ef3d0fbe0 100644
--- a/surfsense_backend/uv.lock
+++ b/surfsense_backend/uv.lock
@@ -6895,6 +6895,7 @@ dependencies = [
{ name = "markdown" },
{ name = "markdownify" },
{ name = "mcp" },
+ { name = "microsandbox" },
{ name = "notion-client" },
{ name = "numpy" },
{ name = "pgvector" },
@@ -6967,6 +6968,7 @@ requires-dist = [
{ name = "markdown", specifier = ">=3.7" },
{ name = "markdownify", specifier = ">=0.14.1" },
{ name = "mcp", specifier = ">=1.25.0" },
+ { name = "microsandbox", specifier = ">=0.1.8" },
{ name = "notion-client", specifier = ">=2.3.0" },
{ name = "numpy", specifier = ">=1.24.0" },
{ name = "pgvector", specifier = ">=0.3.6" },
From a6563f396ad13817d1f8d9d6e1215201e601d320 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Wed, 25 Feb 2026 00:38:27 -0800
Subject: [PATCH 4/9] chore: move to Daytona due to lack of Windows support in
microsandbox
---
Dockerfile.allinone | 15 +-
docker-compose.yml | 48 +--
scripts/docker/entrypoint-allinone.sh | 13 +-
scripts/docker/supervisor-allinone.conf | 17 +-
.../app/agents/new_chat/chat_deepagent.py | 2 +-
.../app/agents/new_chat/sandbox.py | 151 ++++-----
surfsense_backend/pyproject.toml | 3 +-
surfsense_backend/uv.lock | 294 ++++++++++++++++--
.../components/tool-ui/sandbox-execute.tsx | 36 ++-
9 files changed, 357 insertions(+), 222 deletions(-)
diff --git a/Dockerfile.allinone b/Dockerfile.allinone
index a51e31814..6bcf78459 100644
--- a/Dockerfile.allinone
+++ b/Dockerfile.allinone
@@ -216,10 +216,6 @@ RUN pip install --no-cache-dir playwright \
&& playwright install chromium \
&& rm -rf /root/.cache/ms-playwright/ffmpeg*
-# Install Microsandbox (optional secure code execution for deep agent).
-# Requires --device /dev/kvm at runtime. Enable via MICROSANDBOX_ENABLED=TRUE.
-RUN curl -sSL https://get.microsandbox.dev | sh || true
-
# Copy backend source
COPY surfsense_backend/ ./
@@ -264,10 +260,9 @@ ENV NEXT_PUBLIC_FASTAPI_BACKEND_URL=http://localhost:8000
ENV NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE=LOCAL
ENV NEXT_PUBLIC_ETL_SERVICE=DOCLING
-# Microsandbox (optional - requires --device /dev/kvm and --privileged at runtime)
-ENV MICROSANDBOX_ENABLED=FALSE
-ENV MICROSANDBOX_SERVER_URL=http://localhost:5555
-# MICROSANDBOX_API_KEY is intentionally unset; set at runtime for production.
+# Daytona Sandbox (cloud code execution — no local server needed)
+ENV DAYTONA_SANDBOX_ENABLED=FALSE
+# DAYTONA_API_KEY, DAYTONA_API_URL, DAYTONA_TARGET: set at runtime for production.
# Electric SQL configuration (ELECTRIC_DATABASE_URL is built dynamically by entrypoint from these values)
ENV ELECTRIC_DB_USER=electric
@@ -283,8 +278,8 @@ ENV NEXT_PUBLIC_ELECTRIC_AUTH_MODE=insecure
# Data volume
VOLUME ["/data"]
-# Expose ports (Frontend: 3000, Backend: 8000, Electric: 5133, Microsandbox: 5555)
-EXPOSE 3000 8000 5133 5555
+# Expose ports (Frontend: 3000, Backend: 8000, Electric: 5133)
+EXPOSE 3000 8000 5133
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=120s --retries=3 \
diff --git a/docker-compose.yml b/docker-compose.yml
index 942a3de09..50abb8548 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -65,14 +65,14 @@ services:
- ELECTRIC_DB_PASSWORD=${ELECTRIC_DB_PASSWORD:-electric_password}
- AUTH_TYPE=${AUTH_TYPE:-LOCAL}
- NEXT_FRONTEND_URL=${NEXT_FRONTEND_URL:-http://localhost:3000}
- # Microsandbox – uncomment when microsandbox service is enabled
- # - MICROSANDBOX_ENABLED=TRUE
- # - MICROSANDBOX_SERVER_URL=http://microsandbox:5555
- # - MICROSANDBOX_API_KEY=${MICROSANDBOX_API_KEY:-}
+ # Daytona Sandbox – uncomment and set credentials to enable cloud code execution
+ # - DAYTONA_SANDBOX_ENABLED=TRUE
+ # - DAYTONA_API_KEY=${DAYTONA_API_KEY:-}
+ # - DAYTONA_API_URL=${DAYTONA_API_URL:-https://app.daytona.io/api}
+ # - DAYTONA_TARGET=${DAYTONA_TARGET:-us}
depends_on:
- db
- redis
- # - microsandbox
# Run these services separately in production
# celery_worker:
@@ -129,43 +129,6 @@ services:
# - redis
# - celery_worker
- # ============================================================
- # Microsandbox (optional - secure code execution for deep agent)
- # ============================================================
- # Requires a Linux host with KVM support (/dev/kvm).
- # To enable:
- # 1. Uncomment this service
- # 2. Set MICROSANDBOX_ENABLED=TRUE in surfsense_backend/.env
- # 3. Run with: docker compose up -d
- # The first sandbox creation will pull the OCI image (e.g. microsandbox/python),
- # so the initial run takes a bit longer.
- #
- microsandbox:
- image: ubuntu:22.04
- ports:
- - "${MICROSANDBOX_PORT:-5555}:5555"
- volumes:
- - microsandbox_data:/root/.microsandbox
- privileged: true
- devices:
- - /dev/kvm:/dev/kvm
- entrypoint: ["/bin/bash", "-c"]
- command:
- - |
- set -e
- export PATH="$$HOME/.local/bin:$$PATH"
- if ! command -v msb &>/dev/null; then
- apt-get update && apt-get install -y --no-install-recommends curl ca-certificates libdigest-sha-perl
- curl -sSL https://get.microsandbox.dev | sh
- fi
- exec msb server start --dev --host 0.0.0.0
- restart: unless-stopped
- healthcheck:
- test: ["CMD", "curl", "-f", "http://localhost:5555/health"]
- interval: 10s
- timeout: 5s
- retries: 5
-
electric:
image: electricsql/electric:latest
ports:
@@ -207,4 +170,3 @@ volumes:
pgadmin_data:
redis_data:
shared_temp:
- microsandbox_data:
diff --git a/scripts/docker/entrypoint-allinone.sh b/scripts/docker/entrypoint-allinone.sh
index 9ca653979..7c232a079 100644
--- a/scripts/docker/entrypoint-allinone.sh
+++ b/scripts/docker/entrypoint-allinone.sh
@@ -42,17 +42,6 @@ if [ -z "$STT_SERVICE" ]; then
echo "✅ Using default STT_SERVICE: local/base"
fi
-# ================================================
-# Microsandbox (optional secure sandbox server)
-# ================================================
-if [ "${MICROSANDBOX_ENABLED:-FALSE}" = "TRUE" ]; then
- export MICROSANDBOX_AUTOSTART=true
- echo "✅ Microsandbox enabled (requires --device /dev/kvm)"
-else
- export MICROSANDBOX_AUTOSTART=false
- echo "ℹ️ Microsandbox disabled (set MICROSANDBOX_ENABLED=TRUE to enable)"
-fi
-
# ================================================
# Set Electric SQL configuration
# ================================================
@@ -243,7 +232,7 @@ echo " Auth Type: ${NEXT_PUBLIC_FASTAPI_BACKEND_AUTH_TYPE}"
echo " ETL Service: ${NEXT_PUBLIC_ETL_SERVICE}"
echo " TTS Service: ${TTS_SERVICE}"
echo " STT Service: ${STT_SERVICE}"
-echo " Microsandbox: ${MICROSANDBOX_ENABLED:-FALSE}"
+echo " Daytona Sandbox: ${DAYTONA_SANDBOX_ENABLED:-FALSE}"
echo "==========================================="
echo ""
diff --git a/scripts/docker/supervisor-allinone.conf b/scripts/docker/supervisor-allinone.conf
index 2a0c4fe81..1a21fcc04 100644
--- a/scripts/docker/supervisor-allinone.conf
+++ b/scripts/docker/supervisor-allinone.conf
@@ -114,23 +114,8 @@ stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
environment=NODE_ENV="production",PORT="3000",HOSTNAME="0.0.0.0"
-# Microsandbox (secure code execution sandbox server)
-# Autostart is controlled by the entrypoint based on MICROSANDBOX_ENABLED env var.
-# Requires --device /dev/kvm and --privileged when running the container.
-[program:microsandbox]
-command=msb server start --dev --host 0.0.0.0
-autostart=%(ENV_MICROSANDBOX_AUTOSTART)s
-autorestart=true
-priority=25
-startsecs=5
-startretries=3
-stdout_logfile=/dev/stdout
-stdout_logfile_maxbytes=0
-stderr_logfile=/dev/stderr
-stderr_logfile_maxbytes=0
-
# Process Groups
[group:surfsense]
-programs=postgresql,redis,electric,backend,celery-worker,celery-beat,frontend,microsandbox
+programs=postgresql,redis,electric,backend,celery-worker,celery-beat,frontend
priority=999
diff --git a/surfsense_backend/app/agents/new_chat/chat_deepagent.py b/surfsense_backend/app/agents/new_chat/chat_deepagent.py
index dbb1d4b4a..5fcb8236d 100644
--- a/surfsense_backend/app/agents/new_chat/chat_deepagent.py
+++ b/surfsense_backend/app/agents/new_chat/chat_deepagent.py
@@ -169,7 +169,7 @@ async def create_surfsense_deep_agent(
These are always added regardless of enabled/disabled settings.
firecrawl_api_key: Optional Firecrawl API key for premium web scraping.
Falls back to Chromium/Trafilatura if not provided.
- sandbox_backend: Optional sandbox backend (e.g. MicrosandboxBackend) for
+ sandbox_backend: Optional sandbox backend (e.g. DaytonaSandbox) for
secure code execution. When provided, the agent gets an
isolated ``execute`` tool for running shell commands.
diff --git a/surfsense_backend/app/agents/new_chat/sandbox.py b/surfsense_backend/app/agents/new_chat/sandbox.py
index 84ba9fac1..959ec6949 100644
--- a/surfsense_backend/app/agents/new_chat/sandbox.py
+++ b/surfsense_backend/app/agents/new_chat/sandbox.py
@@ -1,130 +1,89 @@
"""
-Microsandbox provider for SurfSense deep agent.
+Daytona sandbox provider for SurfSense deep agent.
Manages the lifecycle of sandboxed code execution environments.
-Each conversation thread gets its own isolated sandbox instance.
+Each conversation thread gets its own isolated sandbox instance
+via the Daytona cloud API, identified by labels.
"""
from __future__ import annotations
+import asyncio
import logging
import os
+from daytona import CreateSandboxFromSnapshotParams, Daytona, DaytonaConfig
+from langchain_daytona import DaytonaSandbox
+
logger = logging.getLogger(__name__)
-# ---------------------------------------------------------------------------
-# Compatibility shim
-# ---------------------------------------------------------------------------
-# deepagents-microsandbox imports SandboxInfo, SandboxListResponse, and
-# SandboxProvider from deepagents.backends.sandbox. These types were added
-# in a fork and have not yet landed in the official deepagents package.
-# We inject minimal stubs so the import succeeds without patching the venv.
-# ---------------------------------------------------------------------------
-
-def _ensure_sandbox_provider_types() -> None:
- """Inject missing SandboxProvider / SandboxInfo types if absent."""
- import importlib
- sandbox_mod = importlib.import_module("deepagents.backends.sandbox")
-
- if hasattr(sandbox_mod, "SandboxProvider"):
- return # Already present – nothing to do.
-
- from abc import ABC, abstractmethod
- from dataclasses import dataclass, field
- from typing import Any, Generic, TypeVar
-
- _M = TypeVar("_M")
-
- @dataclass
- class SandboxInfo(Generic[_M]):
- sandbox_id: str
- metadata: _M = field(default_factory=dict) # type: ignore[assignment]
-
- @dataclass
- class SandboxListResponse(Generic[_M]):
- items: list[SandboxInfo[_M]] = field(default_factory=list)
- cursor: str | None = None
-
- class SandboxProvider(ABC, Generic[_M]):
- @abstractmethod
- def list(self, *, cursor: str | None = None, **kwargs: Any) -> SandboxListResponse[_M]: ...
-
- @abstractmethod
- async def alist(self, *, cursor: str | None = None, **kwargs: Any) -> SandboxListResponse[_M]: ...
-
- @abstractmethod
- def get_or_create(self, *, sandbox_id: str | None = None, **kwargs: Any) -> Any: ...
-
- @abstractmethod
- async def aget_or_create(self, *, sandbox_id: str | None = None, **kwargs: Any) -> Any: ...
-
- @abstractmethod
- def delete(self, *, sandbox_id: str, **kwargs: Any) -> None: ...
-
- @abstractmethod
- async def adelete(self, *, sandbox_id: str, **kwargs: Any) -> None: ...
-
- sandbox_mod.SandboxInfo = SandboxInfo # type: ignore[attr-defined]
- sandbox_mod.SandboxListResponse = SandboxListResponse # type: ignore[attr-defined]
- sandbox_mod.SandboxProvider = SandboxProvider # type: ignore[attr-defined]
-
-
-_ensure_sandbox_provider_types()
-
-from deepagents_microsandbox import MicrosandboxBackend, MicrosandboxProvider # noqa: E402
-
-_provider: MicrosandboxProvider | None = None
+_daytona_client: Daytona | None = None
+THREAD_LABEL_KEY = "surfsense_thread"
def is_sandbox_enabled() -> bool:
- return os.environ.get("MICROSANDBOX_ENABLED", "FALSE").upper() == "TRUE"
+ return os.environ.get("DAYTONA_SANDBOX_ENABLED", "FALSE").upper() == "TRUE"
-def _get_provider() -> MicrosandboxProvider:
- global _provider
- if _provider is None:
- server_url = os.environ.get(
- "MICROSANDBOX_SERVER_URL", "http://127.0.0.1:5555"
+def _get_client() -> Daytona:
+ global _daytona_client
+ if _daytona_client is None:
+ config = DaytonaConfig(
+ api_key=os.environ.get("DAYTONA_API_KEY", ""),
+ api_url=os.environ.get("DAYTONA_API_URL", "https://app.daytona.io/api"),
+ target=os.environ.get("DAYTONA_TARGET", "us"),
)
- api_key = os.environ.get("MICROSANDBOX_API_KEY")
- _provider = MicrosandboxProvider(
- server_url=server_url,
- api_key=api_key,
- namespace="surfsense",
+ _daytona_client = Daytona(config)
+ return _daytona_client
+
+
+def _find_or_create(thread_id: str) -> DaytonaSandbox:
+ """Find an existing sandbox for *thread_id*, or create a new one."""
+ client = _get_client()
+ labels = {THREAD_LABEL_KEY: thread_id}
+
+ try:
+ sandbox = client.find_one(labels=labels)
+ logger.info("Reusing existing sandbox: %s", sandbox.id)
+ except Exception:
+ sandbox = client.create(
+ CreateSandboxFromSnapshotParams(language="python", labels=labels)
)
- return _provider
+ logger.info("Created new sandbox: %s", sandbox.id)
+
+ return DaytonaSandbox(sandbox=sandbox)
-async def get_or_create_sandbox(thread_id: int | str) -> MicrosandboxBackend:
+async def get_or_create_sandbox(thread_id: int | str) -> DaytonaSandbox:
"""Get or create a sandbox for a conversation thread.
- Uses the thread_id as the sandbox name so the same sandbox persists
+ Uses the thread_id as a label so the same sandbox persists
across multiple messages within the same conversation.
Args:
thread_id: The conversation thread identifier.
Returns:
- MicrosandboxBackend connected to the sandbox.
+ DaytonaSandbox connected to the sandbox.
"""
- provider = _get_provider()
- sandbox_name = f"thread-{thread_id}"
- sandbox = await provider.aget_or_create(
- sandbox_id=sandbox_name,
- timeout=120,
- memory=512,
- cpus=1.0,
- )
- logger.info("Sandbox ready: %s", sandbox.id)
- return sandbox
+ return await asyncio.to_thread(_find_or_create, str(thread_id))
async def delete_sandbox(thread_id: int | str) -> None:
"""Delete the sandbox for a conversation thread."""
- provider = _get_provider()
- sandbox_name = f"thread-{thread_id}"
- try:
- await provider.adelete(sandbox_id=sandbox_name)
- logger.info("Sandbox deleted: surfsense/%s", sandbox_name)
- except Exception:
- logger.warning("Failed to delete sandbox surfsense/%s", sandbox_name, exc_info=True)
+
+ def _delete() -> None:
+ client = _get_client()
+ labels = {THREAD_LABEL_KEY: str(thread_id)}
+ try:
+ sandbox = client.find_one(labels=labels)
+ client.delete(sandbox)
+ logger.info("Sandbox deleted: %s", sandbox.id)
+ except Exception:
+ logger.warning(
+ "Failed to delete sandbox for thread %s",
+ thread_id,
+ exc_info=True,
+ )
+
+ await asyncio.to_thread(_delete)
diff --git a/surfsense_backend/pyproject.toml b/surfsense_backend/pyproject.toml
index f9359c9b5..1319f4519 100644
--- a/surfsense_backend/pyproject.toml
+++ b/surfsense_backend/pyproject.toml
@@ -66,8 +66,7 @@ dependencies = [
"pypandoc_binary>=1.16.2",
"typst>=0.14.0",
"deepagents>=0.4.3",
- "deepagents-microsandbox>=1.0.1",
- "microsandbox>=0.1.8",
+ "langchain-daytona>=0.0.2",
]
[dependency-groups]
diff --git a/surfsense_backend/uv.lock b/surfsense_backend/uv.lock
index ef3d0fbe0..c6ee66d89 100644
--- a/surfsense_backend/uv.lock
+++ b/surfsense_backend/uv.lock
@@ -49,11 +49,11 @@ wheels = [
[[package]]
name = "aiofiles"
-version = "25.1.0"
+version = "24.1.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/41/c3/534eac40372d8ee36ef40df62ec129bee4fdb5ad9706e58a29be53b2c970/aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2", size = 46354 }
+sdist = { url = "https://files.pythonhosted.org/packages/0b/03/a88171e277e8caa88a4c77808c20ebb04ba74cc4681bf1e9416c862de237/aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c", size = 30247 }
wheels = [
- { url = "https://files.pythonhosted.org/packages/bc/8a/340a1555ae33d7354dbca4faa54948d76d89a27ceef032c8c3bc661d003e/aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695", size = 14668 },
+ { url = "https://files.pythonhosted.org/packages/a5/45/30bb92d442636f570cb5651bc661f52b610e2eec3f891a5dc3a4c3667db0/aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5", size = 15896 },
]
[[package]]
@@ -111,6 +111,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/28/1d/18ef37549901db94717d4389eb7be807acbfbdeab48a73ff2993fc909118/aiohttp-3.10.11-cp313-cp313-win_amd64.whl", hash = "sha256:4996ff1345704ffdd6d75fb06ed175938c133425af616142e7187f28dc75f14e", size = 378073 },
]
+[[package]]
+name = "aiohttp-retry"
+version = "2.9.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohttp" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/9d/61/ebda4d8e3d8cfa1fd3db0fb428db2dd7461d5742cea35178277ad180b033/aiohttp_retry-2.9.1.tar.gz", hash = "sha256:8eb75e904ed4ee5c2ec242fefe85bf04240f685391c4879d8f541d6028ff01f1", size = 13608 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1a/99/84ba7273339d0f3dfa57901b846489d2e5c2cd731470167757f1935fffbd/aiohttp_retry-2.9.1-py3-none-any.whl", hash = "sha256:66d2759d1921838256a05a3f80ad7e724936f083e35be5abb5e16eed6be6dc54", size = 9981 },
+]
+
[[package]]
name = "aiolimiter"
version = "1.2.1"
@@ -1240,6 +1252,98 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/87/22/f020c047ae1346613db9322638186468238bcfa8849b4668a22b97faad65/dateparser-1.2.2-py3-none-any.whl", hash = "sha256:5a5d7211a09013499867547023a2a0c91d5a27d15dd4dbcea676ea9fe66f2482", size = 315453 },
]
+[[package]]
+name = "daytona"
+version = "0.145.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiofiles" },
+ { name = "daytona-api-client" },
+ { name = "daytona-api-client-async" },
+ { name = "daytona-toolbox-api-client" },
+ { name = "daytona-toolbox-api-client-async" },
+ { name = "deprecated" },
+ { name = "environs" },
+ { name = "httpx" },
+ { name = "multipart" },
+ { name = "obstore" },
+ { name = "opentelemetry-api" },
+ { name = "opentelemetry-exporter-otlp-proto-http" },
+ { name = "opentelemetry-instrumentation-aiohttp-client" },
+ { name = "opentelemetry-sdk" },
+ { name = "pydantic" },
+ { name = "toml" },
+ { name = "websockets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/00/ff/d0c4d6295c7da4e32fa7ea7d4b319f9c9ac22023448dfb45ce160bd1d807/daytona-0.145.0.tar.gz", hash = "sha256:717ba4b59732839eec6c8d97b7069520129f7ebaea32d643e99a049dfcf69671", size = 125342 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d0/5b/66f790ef3188718f2e42abb562af212454c278120c0407880542ad5689d3/daytona-0.145.0-py3-none-any.whl", hash = "sha256:2f0ed0384ea6b662fb3c8dacd21c6bb91f0c138161f654034a4d8666030e8118", size = 155401 },
+]
+
+[[package]]
+name = "daytona-api-client"
+version = "0.145.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pydantic" },
+ { name = "python-dateutil" },
+ { name = "typing-extensions" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d0/f4/2a75eb88a32d0da2a53be703daf7f02a1a5fe3332844ac84712701109880/daytona_api_client-0.145.0.tar.gz", hash = "sha256:40e6be54c5fe23cb9884629b1ac948d6528262d635f540990e51c50830b04526", size = 140299 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/df/18/47cc59737237a34f6a6d2df361251f7512b8a199ed995c8c1f3d543efd18/daytona_api_client-0.145.0-py3-none-any.whl", hash = "sha256:578e2c7e6af72a2c36a8de55f9c6539ba192faf1e1e1037906b05350cb369f0e", size = 393463 },
+]
+
+[[package]]
+name = "daytona-api-client-async"
+version = "0.145.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohttp" },
+ { name = "aiohttp-retry" },
+ { name = "pydantic" },
+ { name = "python-dateutil" },
+ { name = "typing-extensions" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/64/98/fcd1c3f23843c3c7b5bdd6a6d56289e6c6f14d5a1026878f3a45cdd6712f/daytona_api_client_async-0.145.0.tar.gz", hash = "sha256:bb78da16e445e0d5eed59368737290abfe9073e04a19885fcc71e32bd452eb69", size = 140342 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/55/98/4d97c5b27b14464dcfecdffe41e9f6f9df8fc020f020021814be81942090/daytona_api_client_async-0.145.0-py3-none-any.whl", hash = "sha256:2b3a98588f89ecb2d948d705f1ed847fd5d69abb1185e2b75461ee0b75ee25f9", size = 396425 },
+]
+
+[[package]]
+name = "daytona-toolbox-api-client"
+version = "0.145.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "pydantic" },
+ { name = "python-dateutil" },
+ { name = "typing-extensions" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e3/12/4d565376366376d7e767e69d87a0bd82593ca41c5168a0acbebcde48155d/daytona_toolbox_api_client-0.145.0.tar.gz", hash = "sha256:a1cb9f1a4ed699fee8cd0cb11d6d452d238d3c1ccf04c8452b4b77db7c223622", size = 64785 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/95/74/865219b984d78d3b4df8cf4806d2d29426c7b9c24d9f82b5b55766905621/daytona_toolbox_api_client-0.145.0-py3-none-any.whl", hash = "sha256:d1418a207ff46a1fb48bd511d28a93336f0a2b6b2c1a7c8d0b218f4c08f8b2b3", size = 174400 },
+]
+
+[[package]]
+name = "daytona-toolbox-api-client-async"
+version = "0.145.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "aiohttp" },
+ { name = "aiohttp-retry" },
+ { name = "pydantic" },
+ { name = "python-dateutil" },
+ { name = "typing-extensions" },
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c0/ab/63acd0e6fb0e2d8f4c9e3d9f94782f15ae2fa6d91dac6e165f949fb92ce7/daytona_toolbox_api_client_async-0.145.0.tar.gz", hash = "sha256:070876471653e4f54af0a5e6c2d56d10b298ce4c24d62c635e57b80713501ee2", size = 61835 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/35/ca/48fcdc463376bd0f317c6dfb28511d0f49d241dfa95c012ca3a27912d8fa/daytona_toolbox_api_client_async-0.145.0-py3-none-any.whl", hash = "sha256:fa2b0ab87f4a4f9e243a5c2906bdf6829a56c6c30f474dcb9f28adfcfa29d263", size = 175774 },
+]
+
[[package]]
name = "deepagents"
version = "0.4.3"
@@ -1256,19 +1360,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/58/f8/c076a841b68cc13d89c395cc97965b37751ed008691a304119efa0f5717e/deepagents-0.4.3-py3-none-any.whl", hash = "sha256:298d19c5c0b4c6fc6a74b68049a7bfea0ba481aece7201ab21e7172b71ee61b9", size = 94882 },
]
-[[package]]
-name = "deepagents-microsandbox"
-version = "1.0.1"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "deepagents" },
- { name = "microsandbox" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/8d/d5/77562772b7bf868478e5e3badb4f66e60171c6b740be4cf9fd5ffa0c37e5/deepagents_microsandbox-1.0.1.tar.gz", hash = "sha256:b9471f251597fc56b9b2bc5f41a478cd6b87db2641a1e91210978b4abeeb1600", size = 140696 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/e8/e5/7fc618dfa08d60a954bf3b13cb9c765ecb37cd3ad8c2174171dcbff8b00b/deepagents_microsandbox-1.0.1-py3-none-any.whl", hash = "sha256:8173ce8dbdf290a0fb5bf83f204814b587470ba9b93fcdad8980ca85e46604b1", size = 9736 },
-]
-
[[package]]
name = "defusedxml"
version = "0.7.1"
@@ -1602,6 +1693,19 @@ wheels = [
{ url = "https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.8.0/en_core_web_sm-3.8.0-py3-none-any.whl", hash = "sha256:1932429db727d4bff3deed6b34cfc05df17794f4a52eeb26cf8928f7c1a0fb85" },
]
+[[package]]
+name = "environs"
+version = "14.5.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "marshmallow" },
+ { name = "python-dotenv" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/aa/75/06801d5beeb398ed3903167af9376bb81c4ac41c44a53d45193065ebb1a8/environs-14.5.0.tar.gz", hash = "sha256:f7b8f6fcf3301bc674bc9c03e39b5986d116126ffb96764efd34c339ed9464ee", size = 35426 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d3/f3/6961beb9a1e77d01dee1dd48f00fb3064429c8abcfa26aa863eb7cb2b6dd/environs-14.5.0-py3-none-any.whl", hash = "sha256:1abd3e3a5721fb09797438d6c902bc2f35d4580dfaffe68b8ee588b67b504e13", size = 17202 },
+]
+
[[package]]
name = "espeakng-loader"
version = "0.2.4"
@@ -3052,6 +3156,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/71/41/fe6ae9065b866b1397adbfc98db5e1648e8dcd78126b8e1266fcbe2d6395/langchain_core-1.2.14-py3-none-any.whl", hash = "sha256:b349ca28c057ac1f9b5280ea091bddb057db24d0f1c3c89bbb590713e1715838", size = 501411 },
]
+[[package]]
+name = "langchain-daytona"
+version = "0.0.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "daytona" },
+ { name = "deepagents" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/84/f1/0440d3bf4c49ca7e07dd42a5756bc73500b4a41e49ba49c15b9c8f927eb0/langchain_daytona-0.0.2.tar.gz", hash = "sha256:0a849a4a27776434c9c29d40d3c2161f6e6354bcd30e11014c72023dc94107f5", size = 188358 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/af/92/1d3af3134e79bb0f19b9c12bdf987b0e786084b948584c51b9328cd3cf2a/langchain_daytona-0.0.2-py3-none-any.whl", hash = "sha256:cc3cf13cc7c2558f22cc255ffed3be6726e860756e15232799524b7ec0f92091", size = 4065 },
+]
+
[[package]]
name = "langchain-google-genai"
version = "4.2.1"
@@ -3711,20 +3828,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 },
]
-[[package]]
-name = "microsandbox"
-version = "0.1.8"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "aiohttp" },
- { name = "frozenlist" },
- { name = "python-dotenv" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/bf/ad/200f7d89d9ae6f6066ee71e2dff3b3becece1858e8d795f8cc8a66c94516/microsandbox-0.1.8.tar.gz", hash = "sha256:38eac3310f05a238fc49c27cd9c6064a767ccb6f8a53c118b7ecfccb5df58b7a", size = 8949 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/3c/89/2d6653e4c6bfa535da59d84d7c8bcc1678b35299ed43c1d11fb1c07a2179/microsandbox-0.1.8-py3-none-any.whl", hash = "sha256:b4503f6efd0f58e1acbac782399d3020cc704031279637fe5c60bdb5da267cd8", size = 12112 },
-]
-
[[package]]
name = "misaki"
version = "0.9.4"
@@ -4021,6 +4124,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/44/d8/45e8fc9892a7386d074941429e033adb4640e59ff0780d96a8cf46fe788e/multidict-6.5.0-py3-none-any.whl", hash = "sha256:5634b35f225977605385f56153bd95a7133faffc0ffe12ad26e10517537e8dfc", size = 12181 },
]
+[[package]]
+name = "multipart"
+version = "1.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/6d/c9/c6f5ab81bae667d4fe42a58df29f4c2db6ad8377cfd0e9baa729e4fa3ebb/multipart-1.3.0.tar.gz", hash = "sha256:a46bd6b0eb4c1ba865beb88ddd886012a3da709b6e7b86084fc37e99087e5cf1", size = 38816 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9a/d6/d547a7004b81fa0b2aafa143b09196f6635e4105cd9d2c641fa8a4051c05/multipart-1.3.0-py3-none-any.whl", hash = "sha256:439bf4b00fd7cb2dbff08ae13f49f4f49798931ecd8d496372c63537fa19f304", size = 14938 },
+]
+
[[package]]
name = "multiprocess"
version = "0.70.16"
@@ -4387,6 +4499,55 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e1/3d/760b1456010ed11ce87c0109007f0166078dfdada7597f0091ae76eb7305/oauthlib-3.3.0-py3-none-any.whl", hash = "sha256:a2b3a0a2a4ec2feb4b9110f56674a39b2cc2f23e14713f4ed20441dfba14e934", size = 165155 },
]
+[[package]]
+name = "obstore"
+version = "0.8.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/a3/8c/9ec984edd0f3b72226adfaa19b1c61b15823b35b52f311ca4af36d009d15/obstore-0.8.2.tar.gz", hash = "sha256:a467bc4e97169e2ba749981b4fd0936015428d9b8f3fb83a5528536b1b6f377f", size = 168852 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2b/dc/60fefbb5736e69eab56657bca04ca64dc07fdeccb3814164a31b62ad066b/obstore-0.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:bb70ce297a47392b1d9a3e310f18d59cd5ebbb9453428210fef02ed60e4d75d1", size = 3612955 },
+ { url = "https://files.pythonhosted.org/packages/d2/8b/844e8f382e5a12b8a3796a05d76a03e12c7aedc13d6900419e39207d7868/obstore-0.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1619bf618428abf1f607e0b219b2e230a966dcf697b717deccfa0983dd91f646", size = 3346564 },
+ { url = "https://files.pythonhosted.org/packages/89/73/8537f99e09a38a54a6a15ede907aa25d4da089f767a808f0b2edd9c03cec/obstore-0.8.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a4605c3ed7c9515aeb4c619b5f7f2c9986ed4a79fe6045e536b5e59b804b1476", size = 3460809 },
+ { url = "https://files.pythonhosted.org/packages/b4/99/7714dec721e43f521d6325a82303a002cddad089437640f92542b84e9cc8/obstore-0.8.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce42670417876dd8668cbb8659e860e9725e5f26bbc86449fd259970e2dd9d18", size = 3692081 },
+ { url = "https://files.pythonhosted.org/packages/ec/bd/4ac4175fe95a24c220a96021c25c432bcc0c0212f618be0737184eebbaad/obstore-0.8.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4a3e893b2a06585f651c541c1972fe1e3bf999ae2a5fda052ee55eb7e6516f5", size = 3957466 },
+ { url = "https://files.pythonhosted.org/packages/4e/04/caa288fb735484fc5cb019bdf3d896eaccfae0ac4622e520d05692c46790/obstore-0.8.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08462b32f95a9948ed56ed63e88406e2e5a4cae1fde198f9682e0fb8487100ed", size = 3951293 },
+ { url = "https://files.pythonhosted.org/packages/44/2f/d380239da2d6a1fda82e17df5dae600a404e8a93a065784518ff8325d5f6/obstore-0.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a0bf7763292a8fc47d01cd66e6f19002c5c6ad4b3ed4e6b2729f5e190fa8a0d", size = 3766199 },
+ { url = "https://files.pythonhosted.org/packages/28/41/d391be069d3da82969b54266948b2582aeca5dd735abeda4d63dba36e07b/obstore-0.8.2-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:bcd47f8126cb192cbe86942b8f73b1c45a651ce7e14c9a82c5641dfbf8be7603", size = 3529678 },
+ { url = "https://files.pythonhosted.org/packages/b9/4c/4862fdd1a3abde459ee8eea699b1797df638a460af235b18ca82c8fffb72/obstore-0.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57eda9fd8c757c3b4fe36cf3918d7e589cc1286591295cc10b34122fa36dd3fd", size = 3698079 },
+ { url = "https://files.pythonhosted.org/packages/68/ca/014e747bc53b570059c27e3565b2316fbe5c107d4134551f4cd3e24aa667/obstore-0.8.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ea44442aad8992166baa69f5069750979e4c5d9ffce772e61565945eea5774b9", size = 3687154 },
+ { url = "https://files.pythonhosted.org/packages/6f/89/6db5f8edd93028e5b8bfbeee15e6bd3e56f72106107d31cb208b57659de4/obstore-0.8.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:41496a3ab8527402db4142aaaf0d42df9d7d354b13ba10d9c33e0e48dd49dd96", size = 3773444 },
+ { url = "https://files.pythonhosted.org/packages/26/e5/c9e2cc540689c873beb61246e1615d6e38301e6a34dec424f5a5c63c1afd/obstore-0.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:43da209803f052df96c7c3cbec512d310982efd2407e4a435632841a51143170", size = 3939315 },
+ { url = "https://files.pythonhosted.org/packages/4d/c9/bb53280ca50103c1ffda373cdc9b0f835431060039c2897cbc87ddd92e42/obstore-0.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:1836f5dcd49f9f2950c75889ab5c51fb290d3ea93cdc39a514541e0be3af016e", size = 3978234 },
+ { url = "https://files.pythonhosted.org/packages/f0/5d/8c3316cc958d386d5e6ab03e9db9ddc27f8e2141cee4a6777ae5b92f3aac/obstore-0.8.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:212f033e53fe6e53d64957923c5c88949a400e9027f7038c705ec2e9038be563", size = 3612027 },
+ { url = "https://files.pythonhosted.org/packages/ea/4d/699359774ce6330130536d008bfc32827fab0c25a00238d015a5974a3d1d/obstore-0.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bee21fa4ba148d08fa90e47a96df11161661ed31e09c056a373cb2154b0f2852", size = 3344686 },
+ { url = "https://files.pythonhosted.org/packages/82/37/55437341f10512906e02fd9fa69a8a95ad3f2f6a916d3233fda01763d110/obstore-0.8.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4c66594b59832ff1ced4c72575d9beb8b5f9b4e404ac1150a42bfb226617fd50", size = 3459860 },
+ { url = "https://files.pythonhosted.org/packages/7a/51/4245a616c94ee4851965e33f7a563ab4090cc81f52cc73227ff9ceca2e46/obstore-0.8.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:089f33af5c2fe132d00214a0c1f40601b28f23a38e24ef9f79fb0576f2730b74", size = 3691648 },
+ { url = "https://files.pythonhosted.org/packages/4e/f1/4e2fb24171e3ca3641a4653f006be826e7e17634b11688a5190553b00b83/obstore-0.8.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d87f658dfd340d5d9ea2d86a7c90d44da77a0db9e00c034367dca335735110cf", size = 3956867 },
+ { url = "https://files.pythonhosted.org/packages/42/f5/b703115361c798c9c1744e1e700d5908d904a8c2e2bd38bec759c9ffb469/obstore-0.8.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e2e4fa92828c4fbc2d487f3da2d3588701a1b67d9f6ca3c97cc2afc912e9c63", size = 3950599 },
+ { url = "https://files.pythonhosted.org/packages/53/20/08c6dc0f20c1394e2324b9344838e4e7af770cdcb52c30757a475f50daeb/obstore-0.8.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab440e89c5c37a8ec230857dd65147d4b923e0cada33297135d05e0f937d696a", size = 3765865 },
+ { url = "https://files.pythonhosted.org/packages/77/20/77907765e29b2eba6bd8821872284d91170d7084f670855b2dfcb249ea14/obstore-0.8.2-cp313-cp313-manylinux_2_24_aarch64.whl", hash = "sha256:b9beed107c5c9cd995d4a73263861fcfbc414d58773ed65c14f80eb18258a932", size = 3529807 },
+ { url = "https://files.pythonhosted.org/packages/a5/f5/f629d39cc30d050f52b1bf927e4d65c1cc7d7ffbb8a635cd546b5c5219a0/obstore-0.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b75b4e7746292c785e31edcd5aadc8b758238372a19d4c5e394db5c305d7d175", size = 3693629 },
+ { url = "https://files.pythonhosted.org/packages/30/ff/106763fd10f2a1cb47f2ef1162293c78ad52f4e73223d8d43fc6b755445d/obstore-0.8.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:f33e6c366869d05ab0b7f12efe63269e631c5450d95d6b4ba4c5faf63f69de70", size = 3686176 },
+ { url = "https://files.pythonhosted.org/packages/ce/0c/d2ccb6f32feeca906d5a7c4255340df5262af8838441ca06c9e4e37b67d5/obstore-0.8.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:12c885a9ce5ceb09d13cc186586c0c10b62597eff21b985f6ce8ff9dab963ad3", size = 3773081 },
+ { url = "https://files.pythonhosted.org/packages/fa/79/40d1cc504cefc89c9b3dd8874287f3fddc7d963a8748d6dffc5880222013/obstore-0.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4accc883b93349a81c9931e15dd318cc703b02bbef2805d964724c73d006d00e", size = 3938589 },
+ { url = "https://files.pythonhosted.org/packages/14/dd/916c6777222db3271e9fb3cf9a97ed92b3a9b3e465bdeec96de9ab809d53/obstore-0.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:ec850adf9980e5788a826ccfd5819989724e2a2f712bfa3258e85966c8d9981e", size = 3977768 },
+ { url = "https://files.pythonhosted.org/packages/f1/61/66f8dc98bbf5613bbfe5bf21747b4c8091442977f4bd897945895ab7325c/obstore-0.8.2-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:1431e40e9bb4773a261e51b192ea6489d0799b9d4d7dbdf175cdf813eb8c0503", size = 3623364 },
+ { url = "https://files.pythonhosted.org/packages/1a/66/6d527b3027e42f625c8fc816ac7d19b0d6228f95bfe7666e4d6b081d2348/obstore-0.8.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ddb39d4da303f50b959da000aa42734f6da7ac0cc0be2d5a7838b62c97055bb9", size = 3347764 },
+ { url = "https://files.pythonhosted.org/packages/0d/79/c00103302b620192ea447a948921ad3fed031ce3d19e989f038e1183f607/obstore-0.8.2-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e01f4e13783db453e17e005a4a3ceff09c41c262e44649ba169d253098c775e8", size = 3460981 },
+ { url = "https://files.pythonhosted.org/packages/3d/d9/bfe4ed4b1aebc45b56644dd5b943cf8e1673505cccb352e66878a457e807/obstore-0.8.2-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df0fc2d0bc17caff9b538564ddc26d7616f7e8b7c65b1a3c90b5048a8ad2e797", size = 3692711 },
+ { url = "https://files.pythonhosted.org/packages/13/47/cd6c2cbb18e1f40c77e7957a4a03d2d83f1859a2e876a408f1ece81cad4c/obstore-0.8.2-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e439d06c99a140348f046c9f598ee349cc2dcd9105c15540a4b231f9cc48bbae", size = 3958362 },
+ { url = "https://files.pythonhosted.org/packages/3d/ea/5ee82bf23abd71c7d6a3f2d008197ae8f8f569d41314c26a8f75318245be/obstore-0.8.2-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e37d9046669fcc59522d0faf1d105fcbfd09c84cccaaa1e809227d8e030f32c", size = 3957082 },
+ { url = "https://files.pythonhosted.org/packages/cb/ee/46650405e50fdaa8d95f30375491f9c91fac9517980e8a28a4a6af66927f/obstore-0.8.2-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2646fdcc4bbe92dc2bb5bcdff15574da1211f5806c002b66d514cee2a23c7cb8", size = 3775539 },
+ { url = "https://files.pythonhosted.org/packages/35/d6/348a7ebebe2ca3d94dfc75344ea19675ae45472823e372c1852844078307/obstore-0.8.2-cp314-cp314-manylinux_2_24_aarch64.whl", hash = "sha256:e31a7d37675056d93dfc244605089dee67f5bba30f37c88436623c8c5ad9ba9d", size = 3535048 },
+ { url = "https://files.pythonhosted.org/packages/41/07/b7a16cc0da91a4b902d47880ad24016abfe7880c63f7cdafda45d89a2f91/obstore-0.8.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:656313dd8170dde0f0cd471433283337a63912e8e790a121f7cc7639c83e3816", size = 3699035 },
+ { url = "https://files.pythonhosted.org/packages/7f/74/3269a3a58347e0b019742d888612c4b765293c9c75efa44e144b1e884c0d/obstore-0.8.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:329038c9645d6d1741e77fe1a53e28a14b1a5c1461cfe4086082ad39ebabf981", size = 3687307 },
+ { url = "https://files.pythonhosted.org/packages/01/f9/4fd4819ad6a49d2f462a45be453561f4caebded0dc40112deeffc34b89b1/obstore-0.8.2-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:1e4df99b369790c97c752d126b286dc86484ea49bff5782843a265221406566f", size = 3776076 },
+ { url = "https://files.pythonhosted.org/packages/14/dd/7c4f958fa0b9fc4778fb3d232e38b37db8c6b260f641022fbba48b049d7e/obstore-0.8.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9e1c65c65e20cc990414a8a9af88209b1bbc0dd9521b5f6b0293c60e19439bb7", size = 3947445 },
+]
+
[[package]]
name = "olefile"
version = "0.47"
@@ -4566,6 +4727,55 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/81/a3/cc9b66575bd6597b98b886a2067eea2693408d2d5f39dad9ab7fc264f5f3/opentelemetry_exporter_otlp_proto_grpc-1.39.1-py3-none-any.whl", hash = "sha256:fa1c136a05c7e9b4c09f739469cbdb927ea20b34088ab1d959a849b5cc589c18", size = 19766 },
]
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-http"
+version = "1.39.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "googleapis-common-protos" },
+ { name = "opentelemetry-api" },
+ { name = "opentelemetry-exporter-otlp-proto-common" },
+ { name = "opentelemetry-proto" },
+ { name = "opentelemetry-sdk" },
+ { name = "requests" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/80/04/2a08fa9c0214ae38880df01e8bfae12b067ec0793446578575e5080d6545/opentelemetry_exporter_otlp_proto_http-1.39.1.tar.gz", hash = "sha256:31bdab9745c709ce90a49a0624c2bd445d31a28ba34275951a6a362d16a0b9cb", size = 17288 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/95/f1/b27d3e2e003cd9a3592c43d099d2ed8d0a947c15281bf8463a256db0b46c/opentelemetry_exporter_otlp_proto_http-1.39.1-py3-none-any.whl", hash = "sha256:d9f5207183dd752a412c4cd564ca8875ececba13be6e9c6c370ffb752fd59985", size = 19641 },
+]
+
+[[package]]
+name = "opentelemetry-instrumentation"
+version = "0.60b1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "opentelemetry-api" },
+ { name = "opentelemetry-semantic-conventions" },
+ { name = "packaging" },
+ { name = "wrapt" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/41/0f/7e6b713ac117c1f5e4e3300748af699b9902a2e5e34c9cf443dde25a01fa/opentelemetry_instrumentation-0.60b1.tar.gz", hash = "sha256:57ddc7974c6eb35865af0426d1a17132b88b2ed8586897fee187fd5b8944bd6a", size = 31706 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/77/d2/6788e83c5c86a2690101681aeef27eeb2a6bf22df52d3f263a22cee20915/opentelemetry_instrumentation-0.60b1-py3-none-any.whl", hash = "sha256:04480db952b48fb1ed0073f822f0ee26012b7be7c3eac1a3793122737c78632d", size = 33096 },
+]
+
+[[package]]
+name = "opentelemetry-instrumentation-aiohttp-client"
+version = "0.60b1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "opentelemetry-api" },
+ { name = "opentelemetry-instrumentation" },
+ { name = "opentelemetry-semantic-conventions" },
+ { name = "opentelemetry-util-http" },
+ { name = "wrapt" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c0/79/95be90c555fd7efde79dcba36ea5c668815aa2d0a4250b63687e0f91c74a/opentelemetry_instrumentation_aiohttp_client-0.60b1.tar.gz", hash = "sha256:d0e7d5aa057791ca4d9090b0d3c9982f253c1a24b6bc78a734fc18d8dd97927b", size = 15907 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/ca/f4/1a1ec632c86269750ae833c8fbdd4c8d15316eb1c21e3544e34791c805ee/opentelemetry_instrumentation_aiohttp_client-0.60b1-py3-none-any.whl", hash = "sha256:34c5097256a30b16c5a2a88a409ed82b92972a494c43212c85632d204a78c2a1", size = 12694 },
+]
+
[[package]]
name = "opentelemetry-proto"
version = "1.39.1"
@@ -4606,6 +4816,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/7a/5e/5958555e09635d09b75de3c4f8b9cae7335ca545d77392ffe7331534c402/opentelemetry_semantic_conventions-0.60b1-py3-none-any.whl", hash = "sha256:9fa8c8b0c110da289809292b0591220d3a7b53c1526a23021e977d68597893fb", size = 219982 },
]
+[[package]]
+name = "opentelemetry-util-http"
+version = "0.60b1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/50/fc/c47bb04a1d8a941a4061307e1eddfa331ed4d0ab13d8a9781e6db256940a/opentelemetry_util_http-0.60b1.tar.gz", hash = "sha256:0d97152ca8c8a41ced7172d29d3622a219317f74ae6bb3027cfbdcf22c3cc0d6", size = 11053 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/16/5c/d3f1733665f7cd582ef0842fb1d2ed0bc1fba10875160593342d22bba375/opentelemetry_util_http-0.60b1-py3-none-any.whl", hash = "sha256:66381ba28550c91bee14dcba8979ace443444af1ed609226634596b4b0faf199", size = 8947 },
+]
+
[[package]]
name = "orjson"
version = "3.10.18"
@@ -6867,7 +7086,6 @@ dependencies = [
{ name = "composio" },
{ name = "datasets" },
{ name = "deepagents" },
- { name = "deepagents-microsandbox" },
{ name = "discord-py" },
{ name = "docling" },
{ name = "elasticsearch" },
@@ -6885,6 +7103,7 @@ dependencies = [
{ name = "kokoro" },
{ name = "langchain" },
{ name = "langchain-community" },
+ { name = "langchain-daytona" },
{ name = "langchain-litellm" },
{ name = "langchain-unstructured" },
{ name = "langgraph" },
@@ -6895,7 +7114,6 @@ dependencies = [
{ name = "markdown" },
{ name = "markdownify" },
{ name = "mcp" },
- { name = "microsandbox" },
{ name = "notion-client" },
{ name = "numpy" },
{ name = "pgvector" },
@@ -6940,7 +7158,6 @@ requires-dist = [
{ name = "composio", specifier = ">=0.10.9" },
{ name = "datasets", specifier = ">=2.21.0" },
{ name = "deepagents", specifier = ">=0.4.3" },
- { name = "deepagents-microsandbox", specifier = ">=1.0.1" },
{ name = "discord-py", specifier = ">=2.5.2" },
{ name = "docling", specifier = ">=2.15.0" },
{ name = "elasticsearch", specifier = ">=9.1.1" },
@@ -6958,6 +7175,7 @@ requires-dist = [
{ name = "kokoro", specifier = ">=0.9.4" },
{ name = "langchain", specifier = ">=1.2.6" },
{ name = "langchain-community", specifier = ">=0.3.31" },
+ { name = "langchain-daytona", specifier = ">=0.0.2" },
{ name = "langchain-litellm", specifier = ">=0.3.5" },
{ name = "langchain-unstructured", specifier = ">=1.0.1" },
{ name = "langgraph", specifier = ">=1.0.5" },
@@ -6968,7 +7186,6 @@ requires-dist = [
{ name = "markdown", specifier = ">=3.7" },
{ name = "markdownify", specifier = ">=0.14.1" },
{ name = "mcp", specifier = ">=1.25.0" },
- { name = "microsandbox", specifier = ">=0.1.8" },
{ name = "notion-client", specifier = ">=2.3.0" },
{ name = "numpy", specifier = ">=1.24.0" },
{ name = "pgvector", specifier = ">=0.3.6" },
@@ -7176,6 +7393,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e6/b6/072a8e053ae600dcc2ac0da81a23548e3b523301a442a6ca900e92ac35be/tokenizers-0.21.1-cp39-abi3-win_amd64.whl", hash = "sha256:0f0dcbcc9f6e13e675a66d7a5f2f225a736745ce484c1a4e07476a89ccdad382", size = 2435481 },
]
+[[package]]
+name = "toml"
+version = "0.10.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588 },
+]
+
[[package]]
name = "torch"
version = "2.7.1"
diff --git a/surfsense_web/components/tool-ui/sandbox-execute.tsx b/surfsense_web/components/tool-ui/sandbox-execute.tsx
index 0dd853218..4cb3ba63c 100644
--- a/surfsense_web/components/tool-ui/sandbox-execute.tsx
+++ b/surfsense_web/components/tool-ui/sandbox-execute.tsx
@@ -148,7 +148,8 @@ function ExecuteResult({
parsed: ParsedOutput;
}) {
const [open, setOpen] = useState(false);
- const hasOutput = parsed.output.trim().length > 0;
+ const isLongCommand = command.length > 80 || command.includes("\n");
+ const hasContent = parsed.output.trim().length > 0 || isLongCommand;
const exitBadge = useMemo(() => {
if (parsed.exitCode === null) return null;
@@ -180,13 +181,13 @@ function ExecuteResult({
open && "rounded-b-none border-b-0",
parsed.isError && "border-destructive/20"
)}
- disabled={!hasOutput}
+ disabled={!hasContent}
>
@@ -199,15 +200,34 @@ function ExecuteResult({
-
- {parsed.output}
-
+ {isLongCommand && (
+
+
+ Command
+
+
+ {command}
+
+
+ )}
+ {parsed.output.trim().length > 0 && (
+
+ {isLongCommand && (
+
+ Output
+
+ )}
+
+ {parsed.output}
+
+
+ )}
{parsed.truncated && (
-
+
Output was truncated due to size limits
)}
From d570cae3c60eeb94de3da2328aa19c21372c273e Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Wed, 25 Feb 2026 01:36:30 -0800
Subject: [PATCH 5/9] feat: added file handling for daytona sandboxes
- Added _TimeoutAwareSandbox class to handle per-command timeouts in DaytonaSandbox.
- Updated _find_or_create function to manage sandbox states and restart stopped/archived sandboxes.
- Enhanced get_or_create_sandbox to return the new sandbox class.
- Introduced file download functionality in the frontend, allowing users to download generated files from the sandbox.
- Updated system prompt to include guidelines for sharing generated files.
---
.../app/agents/new_chat/sandbox.py | 65 +++++++-
.../app/agents/new_chat/system_prompt.py | 10 +-
surfsense_backend/app/routes/__init__.py | 2 +
.../app/routes/sandbox_routes.py | 91 +++++++++++
.../app/tasks/chat/stream_new_chat.py | 2 +
.../components/tool-ui/sandbox-execute.tsx | 154 ++++++++++++++++--
6 files changed, 307 insertions(+), 17 deletions(-)
create mode 100644 surfsense_backend/app/routes/sandbox_routes.py
diff --git a/surfsense_backend/app/agents/new_chat/sandbox.py b/surfsense_backend/app/agents/new_chat/sandbox.py
index 959ec6949..d2afd5df0 100644
--- a/surfsense_backend/app/agents/new_chat/sandbox.py
+++ b/surfsense_backend/app/agents/new_chat/sandbox.py
@@ -12,11 +12,35 @@ import asyncio
import logging
import os
-from daytona import CreateSandboxFromSnapshotParams, Daytona, DaytonaConfig
+from daytona import CreateSandboxFromSnapshotParams, Daytona, DaytonaConfig, SandboxState
+from deepagents.backends.protocol import ExecuteResponse
from langchain_daytona import DaytonaSandbox
logger = logging.getLogger(__name__)
+
+class _TimeoutAwareSandbox(DaytonaSandbox):
+ """DaytonaSandbox subclass that accepts the per-command *timeout*
+ kwarg required by the deepagents middleware.
+
+ The upstream ``langchain-daytona`` ``execute()`` ignores timeout,
+ so deepagents raises *"This sandbox backend does not support
+ per-command timeout overrides"* on every first call. This thin
+ wrapper forwards the parameter to the Daytona SDK.
+ """
+
+ def execute(self, command: str, *, timeout: int | None = None) -> ExecuteResponse:
+ t = timeout if timeout is not None else self._timeout
+ result = self._sandbox.process.exec(command, timeout=t)
+ return ExecuteResponse(
+ output=result.result,
+ exit_code=result.exit_code,
+ truncated=False,
+ )
+
+ async def aexecute(self, command: str, *, timeout: int | None = None) -> ExecuteResponse: # type: ignore[override]
+ return await asyncio.to_thread(self.execute, command, timeout=timeout)
+
_daytona_client: Daytona | None = None
THREAD_LABEL_KEY = "surfsense_thread"
@@ -37,24 +61,53 @@ def _get_client() -> Daytona:
return _daytona_client
-def _find_or_create(thread_id: str) -> DaytonaSandbox:
- """Find an existing sandbox for *thread_id*, or create a new one."""
+def _find_or_create(thread_id: str) -> _TimeoutAwareSandbox:
+ """Find an existing sandbox for *thread_id*, or create a new one.
+
+ If an existing sandbox is found but is stopped/archived, it will be
+ restarted automatically before returning.
+ """
client = _get_client()
labels = {THREAD_LABEL_KEY: thread_id}
try:
sandbox = client.find_one(labels=labels)
- logger.info("Reusing existing sandbox: %s", sandbox.id)
+ logger.info(
+ "Found existing sandbox %s (state=%s)", sandbox.id, sandbox.state
+ )
+
+ if sandbox.state in (
+ SandboxState.STOPPED,
+ SandboxState.STOPPING,
+ SandboxState.ARCHIVED,
+ ):
+ logger.info("Starting stopped sandbox %s …", sandbox.id)
+ sandbox.start(timeout=60)
+ logger.info("Sandbox %s is now started", sandbox.id)
+ elif sandbox.state in (SandboxState.ERROR, SandboxState.BUILD_FAILED, SandboxState.DESTROYED):
+ logger.warning(
+ "Sandbox %s in unrecoverable state %s — creating a new one",
+ sandbox.id,
+ sandbox.state,
+ )
+ sandbox = client.create(
+ CreateSandboxFromSnapshotParams(language="python", labels=labels)
+ )
+ logger.info("Created replacement sandbox: %s", sandbox.id)
+ elif sandbox.state != SandboxState.STARTED:
+ sandbox.wait_for_sandbox_start(timeout=60)
+
except Exception:
+ logger.info("No existing sandbox for thread %s — creating one", thread_id)
sandbox = client.create(
CreateSandboxFromSnapshotParams(language="python", labels=labels)
)
logger.info("Created new sandbox: %s", sandbox.id)
- return DaytonaSandbox(sandbox=sandbox)
+ return _TimeoutAwareSandbox(sandbox=sandbox)
-async def get_or_create_sandbox(thread_id: int | str) -> DaytonaSandbox:
+async def get_or_create_sandbox(thread_id: int | str) -> _TimeoutAwareSandbox:
"""Get or create a sandbox for a conversation thread.
Uses the thread_id as a label so the same sandbox persists
diff --git a/surfsense_backend/app/agents/new_chat/system_prompt.py b/surfsense_backend/app/agents/new_chat/system_prompt.py
index a965a0bca..be592b9eb 100644
--- a/surfsense_backend/app/agents/new_chat/system_prompt.py
+++ b/surfsense_backend/app/agents/new_chat/system_prompt.py
@@ -679,13 +679,21 @@ Do not use the sandbox for:
## Working Guidelines
-- **Working directory**: Use `/home` or `/tmp` for all work
+- **Working directory**: The shell starts in the sandbox user's home directory (e.g. `/home/daytona`). Use **relative paths** or `/tmp/` for all files you create. NEVER write directly to `/home/` — that is the parent directory and is not writable. Use `pwd` if you need to discover the current working directory.
- **Iterative approach**: For complex tasks, break work into steps — write code, run it, check output, refine
- **Error handling**: If code fails, read the error, fix the issue, and retry. Don't just report the error without attempting a fix.
- **Show results**: When generating plots or outputs, present the key findings directly in your response. For plots, save to a file and describe the results.
- **Be efficient**: Install packages once per session. Combine related commands when possible.
- **Large outputs**: If command output is very large, use `head`, `tail`, or save to a file and read selectively.
+## Sharing Generated Files
+
+When your code creates output files (images, CSVs, PDFs, etc.) in the sandbox:
+- **Print the absolute path** at the end of your script so the user can download the file. Example: `print("SANDBOX_FILE: /tmp/chart.png")`
+- **DO NOT call `display_image`** for files created inside the sandbox. Sandbox files are not accessible via public URLs, so `display_image` will always show "Image not available".
+- You can output multiple files, one per line: `print("SANDBOX_FILE: /tmp/report.csv")`, `print("SANDBOX_FILE: /tmp/chart.png")`
+- Always describe what the file contains in your response text so the user knows what they are downloading.
+
## Data Analytics Best Practices
When the user asks you to analyze data:
diff --git a/surfsense_backend/app/routes/__init__.py b/surfsense_backend/app/routes/__init__.py
index f8e22f872..6114dd207 100644
--- a/surfsense_backend/app/routes/__init__.py
+++ b/surfsense_backend/app/routes/__init__.py
@@ -36,6 +36,7 @@ from .podcasts_routes import router as podcasts_router
from .public_chat_routes import router as public_chat_router
from .rbac_routes import router as rbac_router
from .reports_routes import router as reports_router
+from .sandbox_routes import router as sandbox_router
from .search_source_connectors_routes import router as search_source_connectors_router
from .search_spaces_routes import router as search_spaces_router
from .slack_add_connector_route import router as slack_add_connector_router
@@ -50,6 +51,7 @@ router.include_router(editor_router)
router.include_router(documents_router)
router.include_router(notes_router)
router.include_router(new_chat_router) # Chat with assistant-ui persistence
+router.include_router(sandbox_router) # Sandbox file downloads (Daytona)
router.include_router(chat_comments_router)
router.include_router(podcasts_router) # Podcast task status and audio
router.include_router(reports_router) # Report CRUD and export (PDF/DOCX)
diff --git a/surfsense_backend/app/routes/sandbox_routes.py b/surfsense_backend/app/routes/sandbox_routes.py
new file mode 100644
index 000000000..af13e48fc
--- /dev/null
+++ b/surfsense_backend/app/routes/sandbox_routes.py
@@ -0,0 +1,91 @@
+"""Routes for downloading files from Daytona sandbox environments."""
+
+from __future__ import annotations
+
+import asyncio
+import logging
+
+from fastapi import APIRouter, Depends, HTTPException, Query
+from fastapi.responses import Response
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.future import select
+
+from app.db import NewChatThread, Permission, User, get_async_session
+from app.users import current_active_user
+from app.utils.rbac import check_permission
+
+logger = logging.getLogger(__name__)
+
+router = APIRouter()
+
+MIME_TYPES: dict[str, str] = {
+ ".png": "image/png",
+ ".jpg": "image/jpeg",
+ ".jpeg": "image/jpeg",
+ ".gif": "image/gif",
+ ".webp": "image/webp",
+ ".svg": "image/svg+xml",
+ ".pdf": "application/pdf",
+ ".csv": "text/csv",
+ ".json": "application/json",
+ ".txt": "text/plain",
+ ".html": "text/html",
+ ".md": "text/markdown",
+ ".py": "text/x-python",
+ ".xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
+ ".zip": "application/zip",
+}
+
+
+def _guess_media_type(filename: str) -> str:
+ ext = ("." + filename.rsplit(".", 1)[-1].lower()) if "." in filename else ""
+ return MIME_TYPES.get(ext, "application/octet-stream")
+
+
+@router.get("/threads/{thread_id}/sandbox/download")
+async def download_sandbox_file(
+ thread_id: int,
+ path: str = Query(..., description="Absolute path of the file inside the sandbox"),
+ session: AsyncSession = Depends(get_async_session),
+ user: User = Depends(current_active_user),
+):
+ """Download a file from the Daytona sandbox associated with a chat thread."""
+
+ from app.agents.new_chat.sandbox import get_or_create_sandbox, is_sandbox_enabled
+
+ if not is_sandbox_enabled():
+ raise HTTPException(status_code=404, detail="Sandbox is not enabled")
+
+ result = await session.execute(
+ select(NewChatThread).filter(NewChatThread.id == thread_id)
+ )
+ thread = result.scalars().first()
+ if not thread:
+ raise HTTPException(status_code=404, detail="Thread not found")
+
+ await check_permission(
+ session,
+ user,
+ thread.search_space_id,
+ Permission.CHATS_READ.value,
+ "You don't have permission to access files in this thread",
+ )
+
+ try:
+ sandbox = await get_or_create_sandbox(thread_id)
+ raw_sandbox = sandbox._sandbox # noqa: SLF001
+ content: bytes = await asyncio.to_thread(raw_sandbox.fs.download_file, path)
+ except Exception as exc:
+ logger.warning("Sandbox file download failed for %s: %s", path, exc)
+ raise HTTPException(
+ status_code=404, detail=f"Could not download file: {exc}"
+ ) from exc
+
+ filename = path.rsplit("/", 1)[-1] if "/" in path else path
+ media_type = _guess_media_type(filename)
+
+ return Response(
+ content=content,
+ media_type=media_type,
+ headers={"Content-Disposition": f'attachment; filename="{filename}"'},
+ )
diff --git a/surfsense_backend/app/tasks/chat/stream_new_chat.py b/surfsense_backend/app/tasks/chat/stream_new_chat.py
index f5df5862b..327aa7977 100644
--- a/surfsense_backend/app/tasks/chat/stream_new_chat.py
+++ b/surfsense_backend/app/tasks/chat/stream_new_chat.py
@@ -862,11 +862,13 @@ async def _stream_agent_events(
exit_code = int(m.group(1))
om = re.search(r"\nOutput:\n([\s\S]*)", raw_text)
output_text = om.group(1) if om else ""
+ thread_id_str = config.get("configurable", {}).get("thread_id", "")
yield streaming_service.format_tool_output_available(
tool_call_id,
{
"exit_code": exit_code,
"output": output_text,
+ "thread_id": thread_id_str,
},
)
else:
diff --git a/surfsense_web/components/tool-ui/sandbox-execute.tsx b/surfsense_web/components/tool-ui/sandbox-execute.tsx
index 4cb3ba63c..dd1477d74 100644
--- a/surfsense_web/components/tool-ui/sandbox-execute.tsx
+++ b/surfsense_web/components/tool-ui/sandbox-execute.tsx
@@ -5,19 +5,24 @@ import {
AlertCircleIcon,
CheckCircle2Icon,
ChevronRightIcon,
+ DownloadIcon,
+ FileIcon,
Loader2Icon,
TerminalIcon,
XCircleIcon,
} from "lucide-react";
-import { useMemo, useState } from "react";
+import { useCallback, useMemo, useState } from "react";
import { z } from "zod";
import { Badge } from "@/components/ui/badge";
+import { Button } from "@/components/ui/button";
import {
Collapsible,
CollapsibleContent,
CollapsibleTrigger,
} from "@/components/ui/collapsible";
import { cn } from "@/lib/utils";
+import { getBearerToken } from "@/lib/auth-utils";
+import { BACKEND_URL } from "@/lib/env-config";
// ============================================================================
// Zod Schemas
@@ -34,6 +39,7 @@ const ExecuteResultSchema = z.object({
output: z.string().nullish(),
error: z.string().nullish(),
status: z.string().nullish(),
+ thread_id: z.string().nullish(),
});
// ============================================================================
@@ -43,30 +49,63 @@ const ExecuteResultSchema = z.object({
type ExecuteArgs = z.infer<typeof ExecuteArgsSchema>;
type ExecuteResult = z.infer<typeof ExecuteResultSchema>;
+interface SandboxFile {
+ path: string;
+ name: string;
+}
+
interface ParsedOutput {
exitCode: number | null;
output: string;
+ displayOutput: string;
truncated: boolean;
isError: boolean;
+ files: SandboxFile[];
}
// ============================================================================
// Helpers
// ============================================================================
+const SANDBOX_FILE_RE = /^SANDBOX_FILE:\s*(.+)$/gm;
+
+function extractSandboxFiles(text: string): SandboxFile[] {
+ const files: SandboxFile[] = [];
+ let match: RegExpExecArray | null;
+ while ((match = SANDBOX_FILE_RE.exec(text)) !== null) {
+ const filePath = match[1].trim();
+ if (filePath) {
+ const name = filePath.includes("/")
+ ? filePath.split("/").pop() || filePath
+ : filePath;
+ files.push({ path: filePath, name });
+ }
+ }
+ SANDBOX_FILE_RE.lastIndex = 0;
+ return files;
+}
+
+function stripSandboxFileLines(text: string): string {
+ return text.replace(/^SANDBOX_FILE:\s*.+$/gm, "").replace(/\n{3,}/g, "\n\n").trim();
+}
+
function parseExecuteResult(result: ExecuteResult): ParsedOutput {
const raw = result.result || result.output || "";
if (result.error) {
- return { exitCode: null, output: result.error, truncated: false, isError: true };
+ return { exitCode: null, output: result.error, displayOutput: result.error, truncated: false, isError: true, files: [] };
}
if (result.exit_code !== undefined && result.exit_code !== null) {
+ const files = extractSandboxFiles(raw);
+ const displayOutput = stripSandboxFileLines(raw);
return {
exitCode: result.exit_code,
output: raw,
+ displayOutput,
truncated: raw.includes("[Output was truncated"),
isError: result.exit_code !== 0,
+ files,
};
}
@@ -75,19 +114,25 @@ function parseExecuteResult(result: ExecuteResult): ParsedOutput {
const exitCode = parseInt(exitMatch[1], 10);
const outputMatch = raw.match(/\nOutput:\n([\s\S]*)/);
const output = outputMatch ? outputMatch[1] : "";
+ const files = extractSandboxFiles(output);
+ const displayOutput = stripSandboxFileLines(output);
return {
exitCode,
output,
+ displayOutput,
truncated: raw.includes("[Output was truncated"),
isError: exitCode !== 0,
+ files,
};
}
if (raw.startsWith("Error:")) {
- return { exitCode: null, output: raw, truncated: false, isError: true };
+ return { exitCode: null, output: raw, displayOutput: raw, truncated: false, isError: true, files: [] };
}
- return { exitCode: null, output: raw, truncated: false, isError: false };
+ const files = extractSandboxFiles(raw);
+ const displayOutput = stripSandboxFileLines(raw);
+ return { exitCode: null, output: raw, displayOutput, truncated: false, isError: false, files };
}
function truncateCommand(command: string, maxLen = 80): string {
@@ -95,6 +140,30 @@ function truncateCommand(command: string, maxLen = 80): string {
return command.slice(0, maxLen) + "…";
}
+// ============================================================================
+// Download helper
+// ============================================================================
+
+async function downloadSandboxFile(threadId: string, filePath: string, fileName: string) {
+ const token = getBearerToken();
+ const url = `${BACKEND_URL}/api/v1/threads/${threadId}/sandbox/download?path=${encodeURIComponent(filePath)}`;
+ const res = await fetch(url, {
+ headers: { Authorization: `Bearer ${token || ""}` },
+ });
+ if (!res.ok) {
+ throw new Error(`Download failed: ${res.statusText}`);
+ }
+ const blob = await res.blob();
+ const blobUrl = URL.createObjectURL(blob);
+ const a = document.createElement("a");
+ a.href = blobUrl;
+ a.download = fileName;
+ document.body.appendChild(a);
+ a.click();
+ a.remove();
+ URL.revokeObjectURL(blobUrl);
+}
+
// ============================================================================
// Sub-Components
// ============================================================================
@@ -140,16 +209,58 @@ function ExecuteCancelledState({ command }: { command: string }) {
);
}
-function ExecuteResult({
+function SandboxFileDownload({ file, threadId }: { file: SandboxFile; threadId: string }) {
+ const [downloading, setDownloading] = useState(false);
+  const [error, setError] = useState<string | null>(null);
+
+ const handleDownload = useCallback(async () => {
+ setDownloading(true);
+ setError(null);
+ try {
+ await downloadSandboxFile(threadId, file.path, file.name);
+ } catch (e) {
+ setError(e instanceof Error ? e.message : "Download failed");
+ } finally {
+ setDownloading(false);
+ }
+ }, [threadId, file.path, file.name]);
+
+ return (
+
+ );
+}
+
+function ExecuteCompleted({
command,
parsed,
+ threadId,
}: {
command: string;
parsed: ParsedOutput;
+ threadId: string | null;
}) {
const [open, setOpen] = useState(false);
const isLongCommand = command.length > 80 || command.includes("\n");
- const hasContent = parsed.output.trim().length > 0 || isLongCommand;
+ const hasTextContent = parsed.displayOutput.trim().length > 0 || isLongCommand;
+ const hasFiles = parsed.files.length > 0 && !!threadId;
+ const hasContent = hasTextContent || hasFiles;
const exitBadge = useMemo(() => {
if (parsed.exitCode === null) return null;
@@ -194,6 +305,12 @@ function ExecuteResult({
{truncateCommand(command)}
+ {hasFiles && !open && (
+
+
+ {parsed.files.length}
+
+ )}
{exitBadge}
@@ -214,15 +331,15 @@ function ExecuteResult({
)}
- {parsed.output.trim().length > 0 && (
+ {parsed.displayOutput.trim().length > 0 && (
- {isLongCommand && (
+ {(isLongCommand || hasFiles) && (
Output
)}
- {parsed.output}
+ {parsed.displayOutput}
)}
@@ -231,6 +348,22 @@ function ExecuteResult({
Output was truncated due to size limits
)}
+ {hasFiles && threadId && (
+
+
+ Files
+
+
+ {parsed.files.map((file) => (
+
+ ))}
+
+
+ )}
@@ -274,7 +407,8 @@ export const SandboxExecuteToolUI = makeAssistantToolUI
;
+ const threadId = result.thread_id || null;
+ return
;
},
});
From 70686a1eb2663191f793db35e7ec747fbdcfcbd2 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Wed, 25 Feb 2026 01:48:54 -0800
Subject: [PATCH 6/9] feat: enhance code execution system prompt
- Added detailed instructions for prioritizing code execution over text responses in specific scenarios, such as data visualization and file generation.
- Included examples to illustrate correct and incorrect approaches to handling user requests involving code execution.
- Emphasized the importance of printing file paths for user downloads and clarified the handling of sandbox-generated files.
---
.../app/agents/new_chat/system_prompt.py | 24 ++++++++++++++++++-
1 file changed, 23 insertions(+), 1 deletion(-)
diff --git a/surfsense_backend/app/agents/new_chat/system_prompt.py b/surfsense_backend/app/agents/new_chat/system_prompt.py
index be592b9eb..c55a9c756 100644
--- a/surfsense_backend/app/agents/new_chat/system_prompt.py
+++ b/surfsense_backend/app/agents/new_chat/system_prompt.py
@@ -652,6 +652,27 @@ SANDBOX_EXECUTION_INSTRUCTIONS = """
You have access to a secure, isolated Linux sandbox environment for running code and shell commands.
This gives you the `execute` tool alongside the standard filesystem tools (`ls`, `read_file`, `write_file`, `edit_file`, `glob`, `grep`).
+## CRITICAL — CODE-FIRST RULE
+
+ALWAYS prefer executing code over giving a text-only response when the user's request involves ANY of the following:
+- **Creating a chart, plot, graph, or visualization** → Write Python code and generate the actual file. NEVER describe percentages or data in text and offer to "paste into Excel". Just produce the chart.
+- **Data analysis, statistics, or computation** → Write code to compute the answer. Do not do math by hand in text.
+- **Generating or transforming files** (CSV, PDF, images, etc.) → Write code to create the file.
+- **Running, testing, or debugging code** → Execute it in the sandbox.
+
+This applies even when you first retrieve data from the knowledge base. After `search_knowledge_base` returns relevant data, **immediately proceed to write and execute code** if the user's request matches any of the categories above. Do NOT stop at a text summary and wait for the user to ask you to "use Python" — that extra round-trip is a poor experience.
+
+Example (CORRECT):
+ User: "Create a pie chart of my benefits"
+ → 1. search_knowledge_base → retrieve benefits data
+ → 2. Immediately execute Python code (matplotlib) to generate the pie chart
+ → 3. Return the downloadable file + brief description
+
+Example (WRONG):
+ User: "Create a pie chart of my benefits"
+ → 1. search_knowledge_base → retrieve benefits data
+ → 2. Print a text table with percentages and ask the user if they want a chart ← NEVER do this
+
## When to Use Code Execution
Use the sandbox when the task benefits from actually running code rather than just describing it:
@@ -690,9 +711,10 @@ Do not use the sandbox for:
When your code creates output files (images, CSVs, PDFs, etc.) in the sandbox:
- **Print the absolute path** at the end of your script so the user can download the file. Example: `print("SANDBOX_FILE: /tmp/chart.png")`
-- **DO NOT call `display_image`** for files created inside the sandbox. Sandbox files are not accessible via public URLs, so `display_image` will always show "Image not available".
+- **DO NOT call `display_image`** for files created inside the sandbox. Sandbox files are not accessible via public URLs, so `display_image` will always show "Image not available". The frontend automatically renders a download button from the `SANDBOX_FILE:` marker.
- You can output multiple files, one per line: `print("SANDBOX_FILE: /tmp/report.csv")`, `print("SANDBOX_FILE: /tmp/chart.png")`
- Always describe what the file contains in your response text so the user knows what they are downloading.
+- IMPORTANT: Every `execute` call that saves a file MUST print the `SANDBOX_FILE: ` marker. Without it the user cannot download the file.
## Data Analytics Best Practices
From d198c8b89d392aa659ddf1cebdf9c7e2984a0f72 Mon Sep 17 00:00:00 2001
From: "DESKTOP-RTLN3BA\\$punk"
Date: Wed, 25 Feb 2026 01:50:28 -0800
Subject: [PATCH 7/9] chore: linting
---
.../app/agents/new_chat/sandbox.py | 22 +++-
.../app/agents/new_chat/system_prompt.py | 14 +-
.../tools/google_drive/create_file.py | 24 +++-
.../new_chat/tools/google_drive/trash_file.py | 29 ++++-
.../app/agents/new_chat/tools/registry.py | 8 +-
.../app/routes/sandbox_routes.py | 2 +-
.../app/tasks/chat/stream_new_chat.py | 39 ++++--
.../dashboard/[search_space_id]/team/page.tsx | 2 +-
.../assistant-ui/tooltip-icon-button.tsx | 2 +-
.../components/settings/roles-manager.tsx | 6 +-
.../tool-ui/google-drive/create-file.tsx | 120 +++++++++---------
.../tool-ui/google-drive/trash-file.tsx | 4 +-
surfsense_web/components/tool-ui/index.ts | 14 +-
.../components/tool-ui/sandbox-execute.tsx | 66 +++++-----
.../components/ui/expanded-gif-overlay.tsx | 2 +-
surfsense_web/components/ui/hero-carousel.tsx | 41 +++---
16 files changed, 228 insertions(+), 167 deletions(-)
diff --git a/surfsense_backend/app/agents/new_chat/sandbox.py b/surfsense_backend/app/agents/new_chat/sandbox.py
index d2afd5df0..996414557 100644
--- a/surfsense_backend/app/agents/new_chat/sandbox.py
+++ b/surfsense_backend/app/agents/new_chat/sandbox.py
@@ -12,7 +12,12 @@ import asyncio
import logging
import os
-from daytona import CreateSandboxFromSnapshotParams, Daytona, DaytonaConfig, SandboxState
+from daytona import (
+ CreateSandboxFromSnapshotParams,
+ Daytona,
+ DaytonaConfig,
+ SandboxState,
+)
from deepagents.backends.protocol import ExecuteResponse
from langchain_daytona import DaytonaSandbox
@@ -38,9 +43,12 @@ class _TimeoutAwareSandbox(DaytonaSandbox):
truncated=False,
)
- async def aexecute(self, command: str, *, timeout: int | None = None) -> ExecuteResponse: # type: ignore[override]
+ async def aexecute(
+ self, command: str, *, timeout: int | None = None
+ ) -> ExecuteResponse: # type: ignore[override]
return await asyncio.to_thread(self.execute, command, timeout=timeout)
+
_daytona_client: Daytona | None = None
THREAD_LABEL_KEY = "surfsense_thread"
@@ -72,9 +80,7 @@ def _find_or_create(thread_id: str) -> _TimeoutAwareSandbox:
try:
sandbox = client.find_one(labels=labels)
- logger.info(
- "Found existing sandbox %s (state=%s)", sandbox.id, sandbox.state
- )
+ logger.info("Found existing sandbox %s (state=%s)", sandbox.id, sandbox.state)
if sandbox.state in (
SandboxState.STOPPED,
@@ -84,7 +90,11 @@ def _find_or_create(thread_id: str) -> _TimeoutAwareSandbox:
logger.info("Starting stopped sandbox %s …", sandbox.id)
sandbox.start(timeout=60)
logger.info("Sandbox %s is now started", sandbox.id)
- elif sandbox.state in (SandboxState.ERROR, SandboxState.BUILD_FAILED, SandboxState.DESTROYED):
+ elif sandbox.state in (
+ SandboxState.ERROR,
+ SandboxState.BUILD_FAILED,
+ SandboxState.DESTROYED,
+ ):
logger.warning(
"Sandbox %s in unrecoverable state %s — creating a new one",
sandbox.id,
diff --git a/surfsense_backend/app/agents/new_chat/system_prompt.py b/surfsense_backend/app/agents/new_chat/system_prompt.py
index c55a9c756..39ef7b70a 100644
--- a/surfsense_backend/app/agents/new_chat/system_prompt.py
+++ b/surfsense_backend/app/agents/new_chat/system_prompt.py
@@ -782,7 +782,12 @@ def build_surfsense_system_prompt(
tools_instructions = _get_tools_instructions(visibility)
citation_instructions = SURFSENSE_CITATION_INSTRUCTIONS
sandbox_instructions = SANDBOX_EXECUTION_INSTRUCTIONS if sandbox_enabled else ""
- return system_instructions + tools_instructions + citation_instructions + sandbox_instructions
+ return (
+ system_instructions
+ + tools_instructions
+ + citation_instructions
+ + sandbox_instructions
+ )
def build_configurable_system_prompt(
@@ -842,7 +847,12 @@ def build_configurable_system_prompt(
sandbox_instructions = SANDBOX_EXECUTION_INSTRUCTIONS if sandbox_enabled else ""
- return system_instructions + tools_instructions + citation_instructions + sandbox_instructions
+ return (
+ system_instructions
+ + tools_instructions
+ + citation_instructions
+ + sandbox_instructions
+ )
def get_default_system_instructions() -> str:
diff --git a/surfsense_backend/app/agents/new_chat/tools/google_drive/create_file.py b/surfsense_backend/app/agents/new_chat/tools/google_drive/create_file.py
index 0dd683f7e..af93ddc8f 100644
--- a/surfsense_backend/app/agents/new_chat/tools/google_drive/create_file.py
+++ b/surfsense_backend/app/agents/new_chat/tools/google_drive/create_file.py
@@ -58,7 +58,9 @@ def create_create_google_drive_file_tool(
- "Create a Google Doc called 'Meeting Notes'"
- "Create a spreadsheet named 'Budget 2026' with some sample data"
"""
- logger.info(f"create_google_drive_file called: name='{name}', type='{file_type}'")
+ logger.info(
+ f"create_google_drive_file called: name='{name}', type='{file_type}'"
+ )
if db_session is None or search_space_id is None or user_id is None:
return {
@@ -74,7 +76,9 @@ def create_create_google_drive_file_tool(
try:
metadata_service = GoogleDriveToolMetadataService(db_session)
- context = await metadata_service.get_creation_context(search_space_id, user_id)
+ context = await metadata_service.get_creation_context(
+ search_space_id, user_id
+ )
if "error" in context:
logger.error(f"Failed to fetch creation context: {context['error']}")
@@ -100,8 +104,12 @@ def create_create_google_drive_file_tool(
}
)
- decisions_raw = approval.get("decisions", []) if isinstance(approval, dict) else []
- decisions = decisions_raw if isinstance(decisions_raw, list) else [decisions_raw]
+ decisions_raw = (
+ approval.get("decisions", []) if isinstance(approval, dict) else []
+ )
+ decisions = (
+ decisions_raw if isinstance(decisions_raw, list) else [decisions_raw]
+ )
decisions = [d for d in decisions if isinstance(d, dict)]
if not decisions:
logger.warning("No approval decision received")
@@ -183,7 +191,9 @@ def create_create_google_drive_file_tool(
logger.info(
f"Creating Google Drive file: name='{final_name}', type='{final_file_type}', connector={actual_connector_id}"
)
- client = GoogleDriveClient(session=db_session, connector_id=actual_connector_id)
+ client = GoogleDriveClient(
+ session=db_session, connector_id=actual_connector_id
+ )
try:
created = await client.create_file(
name=final_name,
@@ -203,7 +213,9 @@ def create_create_google_drive_file_tool(
}
raise
- logger.info(f"Google Drive file created: id={created.get('id')}, name={created.get('name')}")
+ logger.info(
+ f"Google Drive file created: id={created.get('id')}, name={created.get('name')}"
+ )
return {
"status": "success",
"file_id": created.get("id"),
diff --git a/surfsense_backend/app/agents/new_chat/tools/google_drive/trash_file.py b/surfsense_backend/app/agents/new_chat/tools/google_drive/trash_file.py
index 600aae983..917ba3376 100644
--- a/surfsense_backend/app/agents/new_chat/tools/google_drive/trash_file.py
+++ b/surfsense_backend/app/agents/new_chat/tools/google_drive/trash_file.py
@@ -52,7 +52,9 @@ def create_delete_google_drive_file_tool(
- "Delete the 'Meeting Notes' file from Google Drive"
- "Trash the 'Old Budget' spreadsheet"
"""
- logger.info(f"delete_google_drive_file called: file_name='{file_name}', delete_from_kb={delete_from_kb}")
+ logger.info(
+ f"delete_google_drive_file called: file_name='{file_name}', delete_from_kb={delete_from_kb}"
+ )
if db_session is None or search_space_id is None or user_id is None:
return {
@@ -103,8 +105,12 @@ def create_delete_google_drive_file_tool(
}
)
- decisions_raw = approval.get("decisions", []) if isinstance(approval, dict) else []
- decisions = decisions_raw if isinstance(decisions_raw, list) else [decisions_raw]
+ decisions_raw = (
+ approval.get("decisions", []) if isinstance(approval, dict) else []
+ )
+ decisions = (
+ decisions_raw if isinstance(decisions_raw, list) else [decisions_raw]
+ )
decisions = [d for d in decisions if isinstance(d, dict)]
if not decisions:
logger.warning("No approval decision received")
@@ -130,11 +136,16 @@ def create_delete_google_drive_file_tool(
final_params = decision["args"]
final_file_id = final_params.get("file_id", file_id)
- final_connector_id = final_params.get("connector_id", connector_id_from_context)
+ final_connector_id = final_params.get(
+ "connector_id", connector_id_from_context
+ )
final_delete_from_kb = final_params.get("delete_from_kb", delete_from_kb)
if not final_connector_id:
- return {"status": "error", "message": "No connector found for this file."}
+ return {
+ "status": "error",
+ "message": "No connector found for this file.",
+ }
from sqlalchemy.future import select
@@ -174,7 +185,9 @@ def create_delete_google_drive_file_tool(
}
raise
- logger.info(f"Google Drive file deleted (moved to trash): file_id={final_file_id}")
+ logger.info(
+ f"Google Drive file deleted (moved to trash): file_id={final_file_id}"
+ )
trash_result: dict[str, Any] = {
"status": "success",
@@ -195,7 +208,9 @@ def create_delete_google_drive_file_tool(
await db_session.delete(document)
await db_session.commit()
deleted_from_kb = True
- logger.info(f"Deleted document {document_id} from knowledge base")
+ logger.info(
+ f"Deleted document {document_id} from knowledge base"
+ )
else:
logger.warning(f"Document {document_id} not found in KB")
except Exception as e:
diff --git a/surfsense_backend/app/agents/new_chat/tools/registry.py b/surfsense_backend/app/agents/new_chat/tools/registry.py
index 01342e920..dffed5e86 100644
--- a/surfsense_backend/app/agents/new_chat/tools/registry.py
+++ b/surfsense_backend/app/agents/new_chat/tools/registry.py
@@ -47,6 +47,10 @@ from app.db import ChatVisibility
from .display_image import create_display_image_tool
from .generate_image import create_generate_image_tool
+from .google_drive import (
+ create_create_google_drive_file_tool,
+ create_delete_google_drive_file_tool,
+)
from .knowledge_base import create_search_knowledge_base_tool
from .linear import (
create_create_linear_issue_tool,
@@ -55,10 +59,6 @@ from .linear import (
)
from .link_preview import create_link_preview_tool
from .mcp_tool import load_mcp_tools
-from .google_drive import (
- create_create_google_drive_file_tool,
- create_delete_google_drive_file_tool,
-)
from .notion import (
create_create_notion_page_tool,
create_delete_notion_page_tool,
diff --git a/surfsense_backend/app/routes/sandbox_routes.py b/surfsense_backend/app/routes/sandbox_routes.py
index af13e48fc..428eea09e 100644
--- a/surfsense_backend/app/routes/sandbox_routes.py
+++ b/surfsense_backend/app/routes/sandbox_routes.py
@@ -73,7 +73,7 @@ async def download_sandbox_file(
try:
sandbox = await get_or_create_sandbox(thread_id)
- raw_sandbox = sandbox._sandbox # noqa: SLF001
+ raw_sandbox = sandbox._sandbox
content: bytes = await asyncio.to_thread(raw_sandbox.fs.download_file, path)
except Exception as exc:
logger.warning("Sandbox file download failed for %s: %s", path, exc)
diff --git a/surfsense_backend/app/tasks/chat/stream_new_chat.py b/surfsense_backend/app/tasks/chat/stream_new_chat.py
index 327aa7977..ae04a6bee 100644
--- a/surfsense_backend/app/tasks/chat/stream_new_chat.py
+++ b/surfsense_backend/app/tasks/chat/stream_new_chat.py
@@ -10,14 +10,13 @@ Supports loading LLM configurations from:
"""
import json
+import logging
import re
from collections.abc import AsyncGenerator
from dataclasses import dataclass
from typing import Any
from uuid import UUID
-import logging
-
from langchain_core.messages import HumanMessage
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
@@ -31,7 +30,13 @@ from app.agents.new_chat.llm_config import (
load_agent_config,
load_llm_config_from_yaml,
)
-from app.db import ChatVisibility, Document, Report, SurfsenseDocsDocument, async_session_maker
+from app.db import (
+ ChatVisibility,
+ Document,
+ Report,
+ SurfsenseDocsDocument,
+ async_session_maker,
+)
from app.prompts import TITLE_GENERATION_PROMPT_TEMPLATE
from app.services.chat_session_state_service import (
clear_ai_responding,
@@ -645,9 +650,15 @@ async def _stream_agent_events(
m = re.match(r"^Exit code:\s*(\d+)", raw_text)
exit_code_val = int(m.group(1)) if m else None
if exit_code_val is not None and exit_code_val == 0:
- completed_items = [*last_active_step_items, "Completed successfully"]
+ completed_items = [
+ *last_active_step_items,
+ "Completed successfully",
+ ]
elif exit_code_val is not None:
- completed_items = [*last_active_step_items, f"Exit code: {exit_code_val}"]
+ completed_items = [
+ *last_active_step_items,
+ f"Exit code: {exit_code_val}",
+ ]
else:
completed_items = [*last_active_step_items, "Finished"]
yield streaming_service.format_thinking_step(
@@ -1037,13 +1048,18 @@ async def stream_new_chat(
# Optionally provision a sandboxed code execution environment
sandbox_backend = None
- from app.agents.new_chat.sandbox import is_sandbox_enabled, get_or_create_sandbox
+ from app.agents.new_chat.sandbox import (
+ get_or_create_sandbox,
+ is_sandbox_enabled,
+ )
+
if is_sandbox_enabled():
try:
sandbox_backend = await get_or_create_sandbox(chat_id)
except Exception as sandbox_err:
logging.getLogger(__name__).warning(
- "Sandbox creation failed, continuing without execute tool: %s", sandbox_err
+ "Sandbox creation failed, continuing without execute tool: %s",
+ sandbox_err,
)
visibility = thread_visibility or ChatVisibility.PRIVATE
@@ -1426,13 +1442,18 @@ async def stream_resume_chat(
checkpointer = await get_checkpointer()
sandbox_backend = None
- from app.agents.new_chat.sandbox import is_sandbox_enabled, get_or_create_sandbox
+ from app.agents.new_chat.sandbox import (
+ get_or_create_sandbox,
+ is_sandbox_enabled,
+ )
+
if is_sandbox_enabled():
try:
sandbox_backend = await get_or_create_sandbox(chat_id)
except Exception as sandbox_err:
logging.getLogger(__name__).warning(
- "Sandbox creation failed, continuing without execute tool: %s", sandbox_err
+ "Sandbox creation failed, continuing without execute tool: %s",
+ sandbox_err,
)
visibility = thread_visibility or ChatVisibility.PRIVATE
diff --git a/surfsense_web/app/dashboard/[search_space_id]/team/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/team/page.tsx
index f8a9cecd0..c33c2e341 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/team/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/team/page.tsx
@@ -16,8 +16,8 @@ import {
Link2,
ShieldUser,
Trash2,
- UserPlus,
User,
+ UserPlus,
Users,
} from "lucide-react";
import { motion } from "motion/react";
diff --git a/surfsense_web/components/assistant-ui/tooltip-icon-button.tsx b/surfsense_web/components/assistant-ui/tooltip-icon-button.tsx
index 55f7c6a2e..0ca96e912 100644
--- a/surfsense_web/components/assistant-ui/tooltip-icon-button.tsx
+++ b/surfsense_web/components/assistant-ui/tooltip-icon-button.tsx
@@ -1,7 +1,7 @@
"use client";
import { Slottable } from "@radix-ui/react-slot";
-import { type ComponentPropsWithRef, type ReactNode, forwardRef } from "react";
+import { type ComponentPropsWithRef, forwardRef, type ReactNode } from "react";
import { Button } from "@/components/ui/button";
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
import { cn } from "@/lib/utils";
diff --git a/surfsense_web/components/settings/roles-manager.tsx b/surfsense_web/components/settings/roles-manager.tsx
index 96ab2551f..820fd11e5 100644
--- a/surfsense_web/components/settings/roles-manager.tsx
+++ b/surfsense_web/components/settings/roles-manager.tsx
@@ -7,6 +7,7 @@ import {
Edit2,
FileText,
Globe,
+ Logs,
type LucideIcon,
MessageCircle,
MessageSquare,
@@ -14,7 +15,6 @@ import {
MoreHorizontal,
Plug,
Plus,
- Logs,
Settings,
Shield,
Trash2,
@@ -23,13 +23,13 @@ import {
import { motion } from "motion/react";
import { useCallback, useEffect, useMemo, useState } from "react";
import { toast } from "sonner";
+import { myAccessAtom } from "@/atoms/members/members-query.atoms";
+import { permissionsAtom } from "@/atoms/permissions/permissions-query.atoms";
import {
createRoleMutationAtom,
deleteRoleMutationAtom,
updateRoleMutationAtom,
} from "@/atoms/roles/roles-mutation.atoms";
-import { permissionsAtom } from "@/atoms/permissions/permissions-query.atoms";
-import { myAccessAtom } from "@/atoms/members/members-query.atoms";
import {
AlertDialog,
AlertDialogAction,
diff --git a/surfsense_web/components/tool-ui/google-drive/create-file.tsx b/surfsense_web/components/tool-ui/google-drive/create-file.tsx
index d6f08653d..f2cc97dcf 100644
--- a/surfsense_web/components/tool-ui/google-drive/create-file.tsx
+++ b/surfsense_web/components/tool-ui/google-drive/create-file.tsx
@@ -253,29 +253,31 @@ function ApprovalCard({
)}
- {/* Display mode */}
- {!isEditing && (
-
-
-
Name
-
{committedArgs?.name ?? args.name}
-
-
-
Type
-
- {FILE_TYPE_LABELS[committedArgs?.file_type ?? args.file_type] ?? committedArgs?.file_type ?? args.file_type}
-
-
- {(committedArgs?.content ?? args.content) && (
+ {/* Display mode */}
+ {!isEditing && (
+
-
Content
-
- {committedArgs?.content ?? args.content}
+
Name
+
{committedArgs?.name ?? args.name}
+
+
+
Type
+
+ {FILE_TYPE_LABELS[committedArgs?.file_type ?? args.file_type] ??
+ committedArgs?.file_type ??
+ args.file_type}
- )}
-
- )}
+ {(committedArgs?.content ?? args.content) && (
+
+
Content
+
+ {committedArgs?.content ?? args.content}
+
+
+ )}
+
+ )}
{/* Edit mode */}
{isEditing && !decided && (
@@ -341,26 +343,26 @@ function ApprovalCard({
) : isEditing ? (
<>
-
+