mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-04-29 10:56:24 +02:00
Minor fixes and renaming to new chat ui components
This commit is contained in:
parent
5197269c43
commit
246e3d61ed
8 changed files with 616 additions and 473 deletions
|
|
@ -2,6 +2,13 @@
|
|||
|
||||
import { SuggestedQuestions } from "@llamaindex/chat-ui/widgets";
|
||||
import { getAnnotationData, Message, useChatUI } from "@llamaindex/chat-ui";
|
||||
import {
|
||||
Accordion,
|
||||
AccordionContent,
|
||||
AccordionItem,
|
||||
AccordionTrigger,
|
||||
} from "@/components/ui/accordion";
|
||||
import { Card } from "../ui/card";
|
||||
|
||||
export const ChatFurtherQuestions: React.FC<{message: Message}> = ({message}) => {
|
||||
const annotations: string[][] = getAnnotationData(message, "FURTHER_QUESTIONS");
|
||||
|
|
@ -14,5 +21,16 @@ export const ChatFurtherQuestions: React.FC<{message: Message}> = ({message}) =>
|
|||
return <></>;
|
||||
}
|
||||
|
||||
return <SuggestedQuestions questions={annotations[0]} append={append} requestData={requestData} />;
|
||||
return (
|
||||
<Accordion type="single" collapsible className="w-full px-2 border-2 rounded-lg shadow-lg">
|
||||
<AccordionItem value="suggested-questions">
|
||||
<AccordionTrigger className="text-sm font-semibold">
|
||||
Suggested Questions
|
||||
</AccordionTrigger>
|
||||
<AccordionContent>
|
||||
<SuggestedQuestions questions={annotations[0]} append={append} requestData={requestData} />
|
||||
</AccordionContent>
|
||||
</AccordionItem>
|
||||
</Accordion>
|
||||
);
|
||||
};
|
||||
|
|
@ -601,7 +601,7 @@ const CustomChatInputOptions = React.memo(
|
|||
|
||||
CustomChatInputOptions.displayName = "CustomChatInputOptions";
|
||||
|
||||
export const CustomChatInput = React.memo(
|
||||
export const ChatInputUI = React.memo(
|
||||
({
|
||||
onDocumentSelectionChange,
|
||||
selectedDocuments,
|
||||
|
|
@ -642,4 +642,4 @@ export const CustomChatInput = React.memo(
|
|||
}
|
||||
);
|
||||
|
||||
CustomChatInput.displayName = "CustomChatInput";
|
||||
ChatInputUI.displayName = "ChatInputUI";
|
||||
|
|
|
|||
|
|
@ -2,21 +2,13 @@
|
|||
|
||||
import React from "react";
|
||||
import {
|
||||
ChatSection,
|
||||
ChatSection as LlamaIndexChatSection,
|
||||
ChatHandler,
|
||||
ChatCanvas,
|
||||
ChatMessages,
|
||||
useChatUI,
|
||||
ChatMessage,
|
||||
Message,
|
||||
} from "@llamaindex/chat-ui";
|
||||
import { Document } from "@/hooks/use-documents";
|
||||
import { CustomChatInput } from "@/components/chat_v2/ChatInputGroup";
|
||||
import { ChatInputUI } from "@/components/chat_v2/ChatInputGroup";
|
||||
import { ResearchMode } from "@/components/chat";
|
||||
import TerminalDisplay from "@/components/chat_v2/ChatTerminal";
|
||||
import ChatSourcesDisplay from "@/components/chat_v2/ChatSources";
|
||||
import { CitationDisplay } from "@/components/chat_v2/ChatCitation";
|
||||
import { ChatFurtherQuestions } from "@/components/chat_v2/ChatFurtherQuestions";
|
||||
import { ChatMessagesUI } from "@/components/chat_v2/ChatMessages";
|
||||
|
||||
interface ChatInterfaceProps {
|
||||
handler: ChatHandler;
|
||||
|
|
@ -30,60 +22,6 @@ interface ChatInterfaceProps {
|
|||
onResearchModeChange?: (mode: ResearchMode) => void;
|
||||
}
|
||||
|
||||
|
||||
/**
 * Renders a single chat message.
 *
 * User (non-assistant) messages render as plain markdown. Assistant
 * messages additionally get the agent terminal, the source list,
 * citation-aware markdown, follow-up questions (last message only)
 * and the message action row.
 */
function ChatMessageDisplay({
  message,
  isLast,
}: {
  message: Message;
  isLast: boolean;
}) {
  // Plain markdown body for user messages.
  const userContent = (
    <ChatMessage.Content className="flex-1">
      <ChatMessage.Content.Markdown />
    </ChatMessage.Content>
  );

  // Rich assistant layout: terminal, sources, cited markdown, then the
  // follow-up questions (only while this is the newest message) and actions.
  const assistantContent = (
    <div className="flex-1 flex flex-col space-y-4">
      <TerminalDisplay message={message} open={isLast} />
      <ChatSourcesDisplay message={message} />
      <ChatMessage.Content className="flex-1">
        <ChatMessage.Content.Markdown citationComponent={CitationDisplay} />
      </ChatMessage.Content>
      <div className="flex flex-row justify-end gap-2">
        {isLast && <ChatFurtherQuestions message={message} />}
        <ChatMessage.Actions className="flex-1 flex-row" />
      </div>
    </div>
  );

  return (
    <ChatMessage message={message} isLast={isLast} className="flex flex-col ">
      {message.role === "assistant" ? assistantContent : userContent}
    </ChatMessage>
  );
}
|
||||
|
||||
/**
 * Scrollable list of every message in the current chat session, followed
 * by the streaming/loading indicator. Message state comes from the
 * surrounding chat-ui context.
 */
function ChatMessagesDisplay() {
  const { messages } = useChatUI();
  const lastIndex = messages.length - 1;

  return (
    <ChatMessages className="flex-1">
      <ChatMessages.List className="p-4">
        {messages.map((msg, idx) => (
          <ChatMessageDisplay
            key={`Message-${idx}`}
            message={msg}
            isLast={idx === lastIndex}
          />
        ))}
      </ChatMessages.List>
      <ChatMessages.Loading />
    </ChatMessages>
  );
}
|
||||
|
||||
export default function ChatInterface({
|
||||
handler,
|
||||
onDocumentSelectionChange,
|
||||
|
|
@ -96,11 +34,11 @@ export default function ChatInterface({
|
|||
onResearchModeChange,
|
||||
}: ChatInterfaceProps) {
|
||||
return (
|
||||
<ChatSection handler={handler} className="flex h-full">
|
||||
<LlamaIndexChatSection handler={handler} className="flex h-full">
|
||||
<div className="flex flex-1 flex-col">
|
||||
<ChatMessagesDisplay />
|
||||
<ChatMessagesUI />
|
||||
<div className="border-t p-4">
|
||||
<CustomChatInput
|
||||
<ChatInputUI
|
||||
onDocumentSelectionChange={onDocumentSelectionChange}
|
||||
selectedDocuments={selectedDocuments}
|
||||
onConnectorSelectionChange={onConnectorSelectionChange}
|
||||
|
|
@ -113,7 +51,6 @@ export default function ChatInterface({
|
|||
</div>
|
||||
</div>
|
||||
|
||||
<ChatCanvas className="w-1/2 border-l" />
|
||||
</ChatSection>
|
||||
</LlamaIndexChatSection>
|
||||
);
|
||||
}
|
||||
|
|
|
|||
71
surfsense_web/components/chat_v2/ChatMessages.tsx
Normal file
71
surfsense_web/components/chat_v2/ChatMessages.tsx
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
"use client";
|
||||
|
||||
import React from "react";
|
||||
import { ChatMessage as LlamaIndexChatMessage, ChatMessages as LlamaIndexChatMessages, Message, useChatUI } from "@llamaindex/chat-ui";
|
||||
import TerminalDisplay from "@/components/chat_v2/ChatTerminal";
|
||||
import ChatSourcesDisplay from "@/components/chat_v2/ChatSources";
|
||||
import { CitationDisplay } from "@/components/chat_v2/ChatCitation";
|
||||
import { ChatFurtherQuestions } from "@/components/chat_v2/ChatFurtherQuestions";
|
||||
|
||||
/**
 * Scrollable list of all chat messages plus the loading indicator.
 * Reads the message array from the enclosing chat-ui context provider.
 */
export function ChatMessagesUI() {
  const { messages } = useChatUI();
  const lastIndex = messages.length - 1;

  return (
    <LlamaIndexChatMessages className="flex-1">
      <LlamaIndexChatMessages.List className="p-4">
        {messages.map((msg, idx) => (
          <ChatMessageUI
            key={`Message-${idx}`}
            message={msg}
            isLast={idx === lastIndex}
          />
        ))}
      </LlamaIndexChatMessages.List>
      <LlamaIndexChatMessages.Loading />
    </LlamaIndexChatMessages>
  );
}
|
||||
|
||||
/**
 * Renders one chat message.
 *
 * Assistant messages include the agent terminal, source list,
 * citation-aware markdown, follow-up questions (last message only) and
 * the action row. User messages render plain markdown. While this is
 * the last message, content updates smooth-scroll an anchor div into
 * view so streamed output stays visible.
 */
function ChatMessageUI({
  message,
  isLast,
}: {
  message: Message;
  isLast: boolean;
}) {
  // Scroll anchor rendered beneath the assistant content.
  const bottomRef = React.useRef<HTMLDivElement>(null);

  React.useEffect(() => {
    // Only auto-scroll the message currently being streamed (the last one).
    // The ref is attached only in the assistant branch, so this is a
    // no-op for user messages.
    if (isLast && bottomRef.current) {
      bottomRef.current.scrollIntoView({ behavior: "smooth" });
    }
    // FIX: `isLast` is read inside the effect but was missing from the
    // dependency array ([message] only); the scroll could be skipped when
    // a message becomes last without its object identity changing.
  }, [message, isLast]);

  return (
    <LlamaIndexChatMessage
      message={message}
      isLast={isLast}
      className="flex flex-col "
    >
      {message.role === "assistant" ? (
        <div className="flex-1 flex flex-col space-y-4">
          <TerminalDisplay message={message} open={isLast} />
          <ChatSourcesDisplay message={message} />
          <LlamaIndexChatMessage.Content className="flex-1">
            <LlamaIndexChatMessage.Content.Markdown citationComponent={CitationDisplay} />
          </LlamaIndexChatMessage.Content>
          <div ref={bottomRef} />
          <div className="flex flex-row justify-end gap-2">
            {isLast && <ChatFurtherQuestions message={message} />}
            <LlamaIndexChatMessage.Actions className="flex-1 flex-col" />
          </div>
        </div>
      ) : (
        <LlamaIndexChatMessage.Content className="flex-1">
          <LlamaIndexChatMessage.Content.Markdown />
        </LlamaIndexChatMessage.Content>
      )}
    </LlamaIndexChatMessage>
  );
}
|
||||
|
|
@ -6,6 +6,8 @@ import { getAnnotationData, Message } from "@llamaindex/chat-ui";
|
|||
export default function TerminalDisplay({ message, open }: { message: Message, open: boolean }) {
|
||||
const [isCollapsed, setIsCollapsed] = React.useState(!open);
|
||||
|
||||
const bottomRef = React.useRef<HTMLDivElement>(null);
|
||||
|
||||
// Get the last assistant message that's not being typed
|
||||
if (!message) {
|
||||
return <></>;
|
||||
|
|
@ -26,6 +28,12 @@ export default function TerminalDisplay({ message, open }: { message: Message, o
|
|||
return <></>;
|
||||
}
|
||||
|
||||
React.useEffect(() => {
|
||||
if (bottomRef.current) {
|
||||
bottomRef.current.scrollTo({ top: bottomRef.current.scrollHeight, behavior: "smooth" });
|
||||
}
|
||||
}, [events]);
|
||||
|
||||
return (
|
||||
<div className="bg-gray-900 rounded-lg border border-gray-700 overflow-hidden font-mono text-sm shadow-lg">
|
||||
{/* Terminal Header */}
|
||||
|
|
@ -76,7 +84,7 @@ export default function TerminalDisplay({ message, open }: { message: Message, o
|
|||
|
||||
{/* Terminal Content */}
|
||||
{!isCollapsed && (
|
||||
<div className="h-64 overflow-y-auto p-4 space-y-1 bg-gray-900">
|
||||
<div ref={bottomRef} className="h-64 overflow-y-auto p-4 space-y-1 bg-gray-900">
|
||||
{events.map((event, index) => (
|
||||
<div
|
||||
key={`${event.id}-${index}`}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue