Mirror of https://github.com/MODSetter/SurfSense.git (synced 2026-05-08 23:32:40 +02:00)
fix: moved chat history into system prompts
parent a5bd1ebe4f
commit 4264c0248f
5 changed files with 164 additions and 81 deletions
```diff
@@ -8,6 +8,7 @@ from app.services.reranker_service import RerankerService
 from ..utils import (
     calculate_token_count,
     format_documents_section,
+    langchain_chat_history_to_str,
     optimize_documents_for_token_limit,
 )
 from .configuration import Configuration
```
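The new import pulls in `langchain_chat_history_to_str`, whose implementation is not part of this diff. A minimal sketch of what such a helper could look like, assuming it flattens LangChain `BaseMessage` objects into a plain-text transcript:

```python
# Hypothetical sketch of langchain_chat_history_to_str; the real helper
# lives in the repo's utils module and is not shown in this diff.
from langchain_core.messages import BaseMessage


def langchain_chat_history_to_str(chat_history: list[BaseMessage]) -> str:
    """Flatten LangChain chat messages into a plain-text transcript."""
    lines = []
    for message in chat_history:
        # BaseMessage.type is "human", "ai", "system", etc.
        lines.append(f"{message.type.upper()}: {message.content}")
    return "\n".join(lines)
```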
```diff
@@ -110,6 +111,7 @@ async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any
 
     # Determine if we have documents and optimize for token limits
     has_documents_initially = documents and len(documents) > 0
+    chat_history_str = langchain_chat_history_to_str(state.chat_history)
 
     if has_documents_initially:
         # Create base message template for token calculation (without documents)
```
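Serializing the history once before the branch means the same string can feed whichever prompt variant is chosen later. Illustrative usage, assuming the helper sketch above (message contents are made up for the example):

```python
from langchain_core.messages import AIMessage, HumanMessage

# Invented conversation, purely to show the serialized form.
history = [
    HumanMessage(content="What does SurfSense index?"),
    AIMessage(content="It indexes documents from your connected sources."),
]
print(langchain_chat_history_to_str(history))
# HUMAN: What does SurfSense index?
# AI: It indexes documents from your connected sources.
```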
```diff
@@ -124,9 +126,8 @@ async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any
         """
 
         # Use initial system prompt for token calculation
-        initial_system_prompt = get_qna_citation_system_prompt()
+        initial_system_prompt = get_qna_citation_system_prompt(chat_history_str)
         base_messages = [
-            *state.chat_history,
             SystemMessage(content=initial_system_prompt),
             HumanMessage(content=base_human_message_template),
         ]
```
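`get_qna_citation_system_prompt` now takes the serialized history, and `*state.chat_history` is dropped from the message list, so the transcript no longer counts twice against the token budget. A hedged guess at the updated builder's shape; the actual prompt template lives in SurfSense's prompts module and is not shown in this diff:

```python
# Hypothetical shape of the updated prompt builder, not verbatim repo code.
def get_qna_citation_system_prompt(chat_history_str: str = "") -> str:
    """Build the citation Q&A system prompt, optionally embedding history."""
    history_block = (
        f"\n<chat_history>\n{chat_history_str}\n</chat_history>\n"
        if chat_history_str
        else ""
    )
    return (
        "You are a Q&A assistant. Answer from the provided documents and "
        "cite your sources." + history_block
    )
```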
```diff
@@ -144,9 +145,9 @@ async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any
 
     # Choose system prompt based on final document availability
     system_prompt = (
-        get_qna_citation_system_prompt()
+        get_qna_citation_system_prompt(chat_history_str)
         if has_documents
-        else get_qna_no_documents_system_prompt()
+        else get_qna_no_documents_system_prompt(chat_history_str)
     )
 
     # Generate documents section
```
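Both prompt variants now receive `chat_history_str`, so the model keeps conversational context whether or not any documents survive token-limit optimization; presumably `get_qna_no_documents_system_prompt` gained the same parameter as the citation variant sketched above.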
```diff
@@ -178,7 +179,6 @@ async def answer_question(state: State, config: RunnableConfig) -> dict[str, Any
 
     # Create final messages for the LLM
     messages_with_chat_history = [
-        *state.chat_history,
         SystemMessage(content=system_prompt),
         HumanMessage(content=human_message_content),
     ]
```
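Taken together, the commit's net effect on message construction can be sketched as follows, under the same assumptions as the helper and prompt-builder sketches above (`build_final_messages` and its inputs are illustrative, not repo code):

```python
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage


def build_final_messages(
    chat_history: list[BaseMessage], human_message_content: str
) -> list[BaseMessage]:
    # Old behavior: [*chat_history, SystemMessage(...), HumanMessage(...)]
    # New behavior: serialize the history once and fold it into the system
    # prompt, so it is not duplicated as standalone messages.
    chat_history_str = langchain_chat_history_to_str(chat_history)
    system_prompt = get_qna_citation_system_prompt(chat_history_str)
    return [
        SystemMessage(content=system_prompt),
        HumanMessage(content=human_message_content),
    ]
```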