mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-04-25 00:36:31 +02:00
feat: added file handling for daytona sandboxes
- Added _TimeoutAwareSandbox class to handle per-command timeouts in DaytonaSandbox.
- Updated _find_or_create function to manage sandbox states and restart stopped/archived sandboxes.
- Enhanced get_or_create_sandbox to return the new sandbox class.
- Introduced file download functionality in the frontend, allowing users to download generated files from the sandbox.
- Updated system prompt to include guidelines for sharing generated files.
This commit is contained in:
parent
a6563f396a
commit
d570cae3c6
6 changed files with 307 additions and 17 deletions
|
|
@ -12,11 +12,35 @@ import asyncio
|
|||
import logging
|
||||
import os
|
||||
|
||||
from daytona import CreateSandboxFromSnapshotParams, Daytona, DaytonaConfig
|
||||
from daytona import CreateSandboxFromSnapshotParams, Daytona, DaytonaConfig, SandboxState
|
||||
from deepagents.backends.protocol import ExecuteResponse
|
||||
from langchain_daytona import DaytonaSandbox
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class _TimeoutAwareSandbox(DaytonaSandbox):
    """DaytonaSandbox subclass that accepts the per-command *timeout*
    kwarg required by the deepagents middleware.

    The upstream ``langchain-daytona`` ``execute()`` ignores timeout,
    so deepagents raises *"This sandbox backend does not support
    per-command timeout overrides"* on every first call. This thin
    wrapper forwards the parameter to the Daytona SDK.
    """

    def execute(self, command: str, *, timeout: int | None = None) -> ExecuteResponse:
        """Run *command* in the sandbox, honouring a per-call timeout.

        Args:
            command: Shell command to execute inside the sandbox.
            timeout: Per-command timeout in seconds; falls back to the
                instance-level ``self._timeout`` when not supplied.

        Returns:
            ExecuteResponse with the command's output and exit code.
        """
        effective_timeout = timeout if timeout is not None else self._timeout
        result = self._sandbox.process.exec(command, timeout=effective_timeout)
        return ExecuteResponse(
            output=result.result,
            exit_code=result.exit_code,
            # The SDK returns the full output here; nothing is clipped.
            truncated=False,
        )

    async def aexecute(self, command: str, *, timeout: int | None = None) -> ExecuteResponse:  # type: ignore[override]
        """Async variant: run the blocking ``execute`` in a worker thread."""
        return await asyncio.to_thread(self.execute, command, timeout=timeout)
|
||||
|
||||
_daytona_client: Daytona | None = None
|
||||
THREAD_LABEL_KEY = "surfsense_thread"
|
||||
|
||||
|
|
@ -37,24 +61,53 @@ def _get_client() -> Daytona:
|
|||
return _daytona_client
|
||||
|
||||
|
||||
def _find_or_create(thread_id: str) -> _TimeoutAwareSandbox:
    """Find an existing sandbox for *thread_id*, or create a new one.

    If an existing sandbox is found but is stopped/archived, it is
    restarted automatically before returning.  Sandboxes in an
    unrecoverable state (error / build failed / destroyed) are replaced
    by a fresh one carrying the same thread label.

    Args:
        thread_id: Conversation thread identifier used as a sandbox label.

    Returns:
        A ``_TimeoutAwareSandbox`` wrapping a started Daytona sandbox.
    """
    client = _get_client()
    labels = {THREAD_LABEL_KEY: thread_id}

    try:
        sandbox = client.find_one(labels=labels)
        logger.info(
            "Found existing sandbox %s (state=%s)", sandbox.id, sandbox.state
        )

        if sandbox.state in (
            SandboxState.STOPPED,
            SandboxState.STOPPING,
            SandboxState.ARCHIVED,
        ):
            logger.info("Starting stopped sandbox %s …", sandbox.id)
            sandbox.start(timeout=60)
            logger.info("Sandbox %s is now started", sandbox.id)
        elif sandbox.state in (
            SandboxState.ERROR,
            SandboxState.BUILD_FAILED,
            SandboxState.DESTROYED,
        ):
            logger.warning(
                "Sandbox %s in unrecoverable state %s — creating a new one",
                sandbox.id,
                sandbox.state,
            )
            sandbox = client.create(
                CreateSandboxFromSnapshotParams(language="python", labels=labels)
            )
            logger.info("Created replacement sandbox: %s", sandbox.id)
        elif sandbox.state != SandboxState.STARTED:
            # Transitional state (e.g. creating/starting): wait until ready.
            sandbox.wait_for_sandbox_start(timeout=60)

    except Exception as exc:
        # NOTE: this also catches failures from start()/create() above, not
        # only a missing sandbox — include the cause in the log so a real
        # error is not silently mislabelled as "no existing sandbox".
        logger.info(
            "No usable sandbox for thread %s (%s) — creating one", thread_id, exc
        )
        sandbox = client.create(
            CreateSandboxFromSnapshotParams(language="python", labels=labels)
        )
        logger.info("Created new sandbox: %s", sandbox.id)

    return _TimeoutAwareSandbox(sandbox=sandbox)
|
||||
|
||||
|
||||
async def get_or_create_sandbox(thread_id: int | str) -> DaytonaSandbox:
|
||||
async def get_or_create_sandbox(thread_id: int | str) -> _TimeoutAwareSandbox:
|
||||
"""Get or create a sandbox for a conversation thread.
|
||||
|
||||
Uses the thread_id as a label so the same sandbox persists
|
||||
|
|
|
|||
|
|
@ -679,13 +679,21 @@ Do not use the sandbox for:
|
|||
|
||||
## Working Guidelines
|
||||
|
||||
- **Working directory**: Use `/home` or `/tmp` for all work
|
||||
- **Working directory**: The shell starts in the sandbox user's home directory (e.g. `/home/daytona`). Use **relative paths** or `/tmp/` for all files you create. NEVER write directly to `/home/` — that is the parent directory and is not writable. Use `pwd` if you need to discover the current working directory.
|
||||
- **Iterative approach**: For complex tasks, break work into steps — write code, run it, check output, refine
|
||||
- **Error handling**: If code fails, read the error, fix the issue, and retry. Don't just report the error without attempting a fix.
|
||||
- **Show results**: When generating plots or outputs, present the key findings directly in your response. For plots, save to a file and describe the results.
|
||||
- **Be efficient**: Install packages once per session. Combine related commands when possible.
|
||||
- **Large outputs**: If command output is very large, use `head`, `tail`, or save to a file and read selectively.
|
||||
|
||||
## Sharing Generated Files
|
||||
|
||||
When your code creates output files (images, CSVs, PDFs, etc.) in the sandbox:
|
||||
- **Print the absolute path** at the end of your script so the user can download the file. Example: `print("SANDBOX_FILE: /tmp/chart.png")`
|
||||
- **DO NOT call `display_image`** for files created inside the sandbox. Sandbox files are not accessible via public URLs, so `display_image` will always show "Image not available".
|
||||
- You can output multiple files, one per line: `print("SANDBOX_FILE: /tmp/report.csv")`, `print("SANDBOX_FILE: /tmp/chart.png")`
|
||||
- Always describe what the file contains in your response text so the user knows what they are downloading.
|
||||
|
||||
## Data Analytics Best Practices
|
||||
|
||||
When the user asks you to analyze data:
|
||||
|
|
|
|||
|
|
@ -36,6 +36,7 @@ from .podcasts_routes import router as podcasts_router
|
|||
from .public_chat_routes import router as public_chat_router
|
||||
from .rbac_routes import router as rbac_router
|
||||
from .reports_routes import router as reports_router
|
||||
from .sandbox_routes import router as sandbox_router
|
||||
from .search_source_connectors_routes import router as search_source_connectors_router
|
||||
from .search_spaces_routes import router as search_spaces_router
|
||||
from .slack_add_connector_route import router as slack_add_connector_router
|
||||
|
|
@ -50,6 +51,7 @@ router.include_router(editor_router)
|
|||
router.include_router(documents_router)
|
||||
router.include_router(notes_router)
|
||||
router.include_router(new_chat_router) # Chat with assistant-ui persistence
|
||||
router.include_router(sandbox_router) # Sandbox file downloads (Daytona)
|
||||
router.include_router(chat_comments_router)
|
||||
router.include_router(podcasts_router) # Podcast task status and audio
|
||||
router.include_router(reports_router) # Report CRUD and export (PDF/DOCX)
|
||||
|
|
|
|||
91
surfsense_backend/app/routes/sandbox_routes.py
Normal file
91
surfsense_backend/app/routes/sandbox_routes.py
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
"""Routes for downloading files from Daytona sandbox environments."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from fastapi.responses import Response
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.future import select
|
||||
|
||||
from app.db import NewChatThread, Permission, User, get_async_session
|
||||
from app.users import current_active_user
|
||||
from app.utils.rbac import check_permission
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
MIME_TYPES: dict[str, str] = {
|
||||
".png": "image/png",
|
||||
".jpg": "image/jpeg",
|
||||
".jpeg": "image/jpeg",
|
||||
".gif": "image/gif",
|
||||
".webp": "image/webp",
|
||||
".svg": "image/svg+xml",
|
||||
".pdf": "application/pdf",
|
||||
".csv": "text/csv",
|
||||
".json": "application/json",
|
||||
".txt": "text/plain",
|
||||
".html": "text/html",
|
||||
".md": "text/markdown",
|
||||
".py": "text/x-python",
|
||||
".xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
|
||||
".zip": "application/zip",
|
||||
}
|
||||
|
||||
|
||||
def _guess_media_type(filename: str) -> str:
|
||||
ext = ("." + filename.rsplit(".", 1)[-1].lower()) if "." in filename else ""
|
||||
return MIME_TYPES.get(ext, "application/octet-stream")
|
||||
|
||||
|
||||
@router.get("/threads/{thread_id}/sandbox/download")
async def download_sandbox_file(
    thread_id: int,
    path: str = Query(..., description="Absolute path of the file inside the sandbox"),
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
):
    """Download a file from the Daytona sandbox associated with a chat thread.

    Raises:
        HTTPException(404): sandbox feature disabled, thread not found, or
            the file could not be downloaded from the sandbox.
        HTTPException: raised by ``check_permission`` when the user lacks
            CHATS_READ on the thread's search space.
    """
    # Imported lazily so the module loads even when the sandbox feature
    # is disabled/unconfigured.
    from app.agents.new_chat.sandbox import get_or_create_sandbox, is_sandbox_enabled

    if not is_sandbox_enabled():
        raise HTTPException(status_code=404, detail="Sandbox is not enabled")

    result = await session.execute(
        select(NewChatThread).filter(NewChatThread.id == thread_id)
    )
    thread = result.scalars().first()
    if not thread:
        raise HTTPException(status_code=404, detail="Thread not found")

    await check_permission(
        session,
        user,
        thread.search_space_id,
        Permission.CHATS_READ.value,
        "You don't have permission to access files in this thread",
    )

    try:
        sandbox = await get_or_create_sandbox(thread_id)
        raw_sandbox = sandbox._sandbox  # noqa: SLF001
        # fs.download_file is a blocking SDK call; keep it off the event loop.
        content: bytes = await asyncio.to_thread(raw_sandbox.fs.download_file, path)
    except Exception as exc:
        logger.warning("Sandbox file download failed for %s: %s", path, exc)
        raise HTTPException(
            status_code=404, detail=f"Could not download file: {exc}"
        ) from exc

    # rsplit already handles both "a/b/c.png" and a bare "c.png".
    filename = path.rsplit("/", 1)[-1]
    media_type = _guess_media_type(filename)

    # BUG FIX: the header previously hard-coded a placeholder instead of
    # interpolating the real filename, so every download was misnamed.
    # Double quotes are stripped to keep the header value well-formed.
    safe_name = filename.replace('"', "")
    return Response(
        content=content,
        media_type=media_type,
        headers={"Content-Disposition": f'attachment; filename="{safe_name}"'},
    )
|
||||
|
|
@ -862,11 +862,13 @@ async def _stream_agent_events(
|
|||
exit_code = int(m.group(1))
|
||||
om = re.search(r"\nOutput:\n([\s\S]*)", raw_text)
|
||||
output_text = om.group(1) if om else ""
|
||||
thread_id_str = config.get("configurable", {}).get("thread_id", "")
|
||||
yield streaming_service.format_tool_output_available(
|
||||
tool_call_id,
|
||||
{
|
||||
"exit_code": exit_code,
|
||||
"output": output_text,
|
||||
"thread_id": thread_id_str,
|
||||
},
|
||||
)
|
||||
else:
|
||||
|
|
|
|||
|
|
@ -5,19 +5,24 @@ import {
|
|||
AlertCircleIcon,
|
||||
CheckCircle2Icon,
|
||||
ChevronRightIcon,
|
||||
DownloadIcon,
|
||||
FileIcon,
|
||||
Loader2Icon,
|
||||
TerminalIcon,
|
||||
XCircleIcon,
|
||||
} from "lucide-react";
|
||||
import { useMemo, useState } from "react";
|
||||
import { useCallback, useMemo, useState } from "react";
|
||||
import { z } from "zod";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import {
|
||||
Collapsible,
|
||||
CollapsibleContent,
|
||||
CollapsibleTrigger,
|
||||
} from "@/components/ui/collapsible";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { getBearerToken } from "@/lib/auth-utils";
|
||||
import { BACKEND_URL } from "@/lib/env-config";
|
||||
|
||||
// ============================================================================
|
||||
// Zod Schemas
|
||||
|
|
@ -34,6 +39,7 @@ const ExecuteResultSchema = z.object({
|
|||
output: z.string().nullish(),
|
||||
error: z.string().nullish(),
|
||||
status: z.string().nullish(),
|
||||
thread_id: z.string().nullish(),
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
|
|
@ -43,30 +49,63 @@ const ExecuteResultSchema = z.object({
|
|||
type ExecuteArgs = z.infer<typeof ExecuteArgsSchema>;
|
||||
type ExecuteResult = z.infer<typeof ExecuteResultSchema>;
|
||||
|
||||
interface SandboxFile {
|
||||
path: string;
|
||||
name: string;
|
||||
}
|
||||
|
||||
interface ParsedOutput {
|
||||
exitCode: number | null;
|
||||
output: string;
|
||||
displayOutput: string;
|
||||
truncated: boolean;
|
||||
isError: boolean;
|
||||
files: SandboxFile[];
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Helpers
|
||||
// ============================================================================
|
||||
|
||||
const SANDBOX_FILE_RE = /^SANDBOX_FILE:\s*(.+)$/gm;
|
||||
|
||||
function extractSandboxFiles(text: string): SandboxFile[] {
|
||||
const files: SandboxFile[] = [];
|
||||
let match: RegExpExecArray | null;
|
||||
while ((match = SANDBOX_FILE_RE.exec(text)) !== null) {
|
||||
const filePath = match[1].trim();
|
||||
if (filePath) {
|
||||
const name = filePath.includes("/")
|
||||
? filePath.split("/").pop() || filePath
|
||||
: filePath;
|
||||
files.push({ path: filePath, name });
|
||||
}
|
||||
}
|
||||
SANDBOX_FILE_RE.lastIndex = 0;
|
||||
return files;
|
||||
}
|
||||
|
||||
function stripSandboxFileLines(text: string): string {
|
||||
return text.replace(/^SANDBOX_FILE:\s*.+$/gm, "").replace(/\n{3,}/g, "\n\n").trim();
|
||||
}
|
||||
|
||||
function parseExecuteResult(result: ExecuteResult): ParsedOutput {
|
||||
const raw = result.result || result.output || "";
|
||||
|
||||
if (result.error) {
|
||||
return { exitCode: null, output: result.error, truncated: false, isError: true };
|
||||
return { exitCode: null, output: result.error, displayOutput: result.error, truncated: false, isError: true, files: [] };
|
||||
}
|
||||
|
||||
if (result.exit_code !== undefined && result.exit_code !== null) {
|
||||
const files = extractSandboxFiles(raw);
|
||||
const displayOutput = stripSandboxFileLines(raw);
|
||||
return {
|
||||
exitCode: result.exit_code,
|
||||
output: raw,
|
||||
displayOutput,
|
||||
truncated: raw.includes("[Output was truncated"),
|
||||
isError: result.exit_code !== 0,
|
||||
files,
|
||||
};
|
||||
}
|
||||
|
||||
|
|
@ -75,19 +114,25 @@ function parseExecuteResult(result: ExecuteResult): ParsedOutput {
|
|||
const exitCode = parseInt(exitMatch[1], 10);
|
||||
const outputMatch = raw.match(/\nOutput:\n([\s\S]*)/);
|
||||
const output = outputMatch ? outputMatch[1] : "";
|
||||
const files = extractSandboxFiles(output);
|
||||
const displayOutput = stripSandboxFileLines(output);
|
||||
return {
|
||||
exitCode,
|
||||
output,
|
||||
displayOutput,
|
||||
truncated: raw.includes("[Output was truncated"),
|
||||
isError: exitCode !== 0,
|
||||
files,
|
||||
};
|
||||
}
|
||||
|
||||
if (raw.startsWith("Error:")) {
|
||||
return { exitCode: null, output: raw, truncated: false, isError: true };
|
||||
return { exitCode: null, output: raw, displayOutput: raw, truncated: false, isError: true, files: [] };
|
||||
}
|
||||
|
||||
return { exitCode: null, output: raw, truncated: false, isError: false };
|
||||
const files = extractSandboxFiles(raw);
|
||||
const displayOutput = stripSandboxFileLines(raw);
|
||||
return { exitCode: null, output: raw, displayOutput, truncated: false, isError: false, files };
|
||||
}
|
||||
|
||||
function truncateCommand(command: string, maxLen = 80): string {
|
||||
|
|
@ -95,6 +140,30 @@ function truncateCommand(command: string, maxLen = 80): string {
|
|||
return command.slice(0, maxLen) + "…";
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Download helper
|
||||
// ============================================================================
|
||||
|
||||
async function downloadSandboxFile(threadId: string, filePath: string, fileName: string) {
|
||||
const token = getBearerToken();
|
||||
const url = `${BACKEND_URL}/api/v1/threads/${threadId}/sandbox/download?path=${encodeURIComponent(filePath)}`;
|
||||
const res = await fetch(url, {
|
||||
headers: { Authorization: `Bearer ${token || ""}` },
|
||||
});
|
||||
if (!res.ok) {
|
||||
throw new Error(`Download failed: ${res.statusText}`);
|
||||
}
|
||||
const blob = await res.blob();
|
||||
const blobUrl = URL.createObjectURL(blob);
|
||||
const a = document.createElement("a");
|
||||
a.href = blobUrl;
|
||||
a.download = fileName;
|
||||
document.body.appendChild(a);
|
||||
a.click();
|
||||
a.remove();
|
||||
URL.revokeObjectURL(blobUrl);
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Sub-Components
|
||||
// ============================================================================
|
||||
|
|
@ -140,16 +209,58 @@ function ExecuteCancelledState({ command }: { command: string }) {
|
|||
);
|
||||
}
|
||||
|
||||
function ExecuteResult({
|
||||
/**
 * Button that downloads one generated sandbox file, showing a spinner
 * while the transfer runs and an inline message if it fails.
 */
function SandboxFileDownload({ file, threadId }: { file: SandboxFile; threadId: string }) {
	const [isDownloading, setIsDownloading] = useState(false);
	const [errorMessage, setErrorMessage] = useState<string | null>(null);

	const onClick = useCallback(async () => {
		setIsDownloading(true);
		setErrorMessage(null);
		try {
			await downloadSandboxFile(threadId, file.path, file.name);
		} catch (err) {
			setErrorMessage(err instanceof Error ? err.message : "Download failed");
		} finally {
			setIsDownloading(false);
		}
	}, [threadId, file.path, file.name]);

	const icon = isDownloading ? (
		<Loader2Icon className="size-3.5 animate-spin" />
	) : (
		<DownloadIcon className="size-3.5" />
	);

	return (
		<Button
			variant="ghost"
			size="sm"
			className="h-8 gap-2 rounded-lg bg-zinc-800/60 hover:bg-zinc-700/60 text-zinc-200 text-xs font-mono px-3"
			onClick={onClick}
			disabled={isDownloading}
		>
			{icon}
			<FileIcon className="size-3 text-zinc-400" />
			<span className="truncate max-w-[200px]">{file.name}</span>
			{errorMessage && (
				<span className="text-destructive text-[10px] ml-1">{errorMessage}</span>
			)}
		</Button>
	);
}
|
||||
|
||||
function ExecuteCompleted({
|
||||
command,
|
||||
parsed,
|
||||
threadId,
|
||||
}: {
|
||||
command: string;
|
||||
parsed: ParsedOutput;
|
||||
threadId: string | null;
|
||||
}) {
|
||||
const [open, setOpen] = useState(false);
|
||||
const isLongCommand = command.length > 80 || command.includes("\n");
|
||||
const hasContent = parsed.output.trim().length > 0 || isLongCommand;
|
||||
const hasTextContent = parsed.displayOutput.trim().length > 0 || isLongCommand;
|
||||
const hasFiles = parsed.files.length > 0 && !!threadId;
|
||||
const hasContent = hasTextContent || hasFiles;
|
||||
|
||||
const exitBadge = useMemo(() => {
|
||||
if (parsed.exitCode === null) return null;
|
||||
|
|
@ -194,6 +305,12 @@ function ExecuteResult({
|
|||
<code className="min-w-0 flex-1 truncate text-sm font-mono">
|
||||
{truncateCommand(command)}
|
||||
</code>
|
||||
{hasFiles && !open && (
|
||||
<Badge variant="outline" className="gap-1 text-[10px] px-1.5 py-0 border-blue-500/30 text-blue-500">
|
||||
<FileIcon className="size-2.5" />
|
||||
{parsed.files.length}
|
||||
</Badge>
|
||||
)}
|
||||
{exitBadge}
|
||||
</CollapsibleTrigger>
|
||||
|
||||
|
|
@ -214,15 +331,15 @@ function ExecuteResult({
|
|||
</pre>
|
||||
</div>
|
||||
)}
|
||||
{parsed.output.trim().length > 0 && (
|
||||
{parsed.displayOutput.trim().length > 0 && (
|
||||
<div>
|
||||
{isLongCommand && (
|
||||
{(isLongCommand || hasFiles) && (
|
||||
<p className="mb-1.5 text-[10px] font-medium uppercase tracking-wider text-zinc-500">
|
||||
Output
|
||||
</p>
|
||||
)}
|
||||
<pre className="max-h-80 overflow-auto whitespace-pre-wrap break-all text-xs font-mono text-zinc-300 leading-relaxed">
|
||||
{parsed.output}
|
||||
{parsed.displayOutput}
|
||||
</pre>
|
||||
</div>
|
||||
)}
|
||||
|
|
@ -231,6 +348,22 @@ function ExecuteResult({
|
|||
Output was truncated due to size limits
|
||||
</p>
|
||||
)}
|
||||
{hasFiles && threadId && (
|
||||
<div>
|
||||
<p className="mb-1.5 text-[10px] font-medium uppercase tracking-wider text-zinc-500">
|
||||
Files
|
||||
</p>
|
||||
<div className="flex flex-wrap gap-2">
|
||||
{parsed.files.map((file) => (
|
||||
<SandboxFileDownload
|
||||
key={file.path}
|
||||
file={file}
|
||||
threadId={threadId}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</CollapsibleContent>
|
||||
</Collapsible>
|
||||
|
|
@ -274,7 +407,8 @@ export const SandboxExecuteToolUI = makeAssistantToolUI<ExecuteArgs, ExecuteResu
|
|||
}
|
||||
|
||||
const parsed = parseExecuteResult(result);
|
||||
return <ExecuteResult command={command} parsed={parsed} />;
|
||||
const threadId = result.thread_id || null;
|
||||
return <ExecuteCompleted command={command} parsed={parsed} threadId={threadId} />;
|
||||
},
|
||||
});
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue