making Messages.Content optional, and having the upstream LLM fail if the right fields aren't set (#699)

Co-authored-by: Salman Paracha <salmanparacha@MacBook-Pro-342.local>
This commit is contained in:
Salman Paracha 2026-01-16 16:24:03 -08:00 committed by GitHub
parent 626f556cc6
commit cdc1d7cee2
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
17 changed files with 294 additions and 133 deletions

View file

@@ -402,7 +402,7 @@ async fn handle_agent_chat(
// and add it to the conversation history
current_messages.push(OpenAIMessage {
role: hermesllm::apis::openai::Role::Assistant,
content: hermesllm::apis::openai::MessageContent::Text(response_text),
content: Some(hermesllm::apis::openai::MessageContent::Text(response_text)),
name: Some(agent_name.clone()),
tool_calls: None,
tool_call_id: None,

View file

@@ -638,7 +638,7 @@ impl ArchFunctionHandler {
let system_prompt = self.format_system_prompt(tools)?;
processed_messages.push(Message {
role: Role::System,
content: MessageContent::Text(system_prompt),
content: Some(MessageContent::Text(system_prompt)),
name: None,
tool_calls: None,
tool_call_id: None,
@@ -649,8 +649,9 @@ impl ArchFunctionHandler {
for (idx, message) in messages.iter().enumerate() {
let mut role = message.role.clone();
let mut content = match &message.content {
MessageContent::Text(text) => text.clone(),
MessageContent::Parts(_) => String::new(),
Some(MessageContent::Text(text)) => text.clone(),
Some(MessageContent::Parts(_)) => String::new(),
None => String::new(),
};
// Handle tool calls
@@ -675,7 +676,8 @@ impl ArchFunctionHandler {
} else {
// Get the tool call from previous message
if idx > 0 {
if let MessageContent::Text(prev_content) = &messages[idx - 1].content {
if let Some(MessageContent::Text(prev_content)) = &messages[idx - 1].content
{
let mut tool_call_msg = prev_content.clone();
// Strip markdown code blocks
@@ -721,7 +723,7 @@ impl ArchFunctionHandler {
processed_messages.push(Message {
role,
content: MessageContent::Text(content),
content: Some(MessageContent::Text(content)),
name: message.name.clone(),
tool_calls: None,
tool_call_id: None,
@@ -740,7 +742,7 @@ impl ArchFunctionHandler {
// Add extra instruction if provided
if let Some(instruction) = extra_instruction {
if let Some(last) = processed_messages.last_mut() {
if let MessageContent::Text(content) = &mut last.content {
if let Some(MessageContent::Text(content)) = &mut last.content {
content.push('\n');
content.push_str(instruction);
}
@@ -761,7 +763,7 @@ impl ArchFunctionHandler {
// Keep system message if present
if let Some(first) = messages.first() {
if first.role == Role::System {
if let MessageContent::Text(content) = &first.content {
if let Some(MessageContent::Text(content)) = &first.content {
num_tokens += content.len() / 4; // Approximate 4 chars per token
}
conversation_idx = 1;
@@ -772,7 +774,7 @@ impl ArchFunctionHandler {
// Start with message_idx pointing past the end (will be used if no truncation needed)
let mut message_idx = messages.len();
for i in (conversation_idx..messages.len()).rev() {
if let MessageContent::Text(content) = &messages[i].content {
if let Some(MessageContent::Text(content)) = &messages[i].content {
num_tokens += content.len() / 4;
if num_tokens >= max_tokens && messages[i].role == Role::User {
// Set message_idx to current position and break
@@ -802,7 +804,7 @@ impl ArchFunctionHandler {
pub fn prefill_message(&self, mut messages: Vec<Message>, prefill: &str) -> Vec<Message> {
messages.push(Message {
role: Role::Assistant,
content: MessageContent::Text(prefill.to_string()),
content: Some(MessageContent::Text(prefill.to_string())),
name: None,
tool_calls: None,
tool_call_id: None,

View file

@@ -28,7 +28,7 @@ mod tests {
fn create_test_message(role: Role, content: &str) -> Message {
Message {
role,
content: MessageContent::Text(content.to_string()),
content: Some(MessageContent::Text(content.to_string())),
name: None,
tool_calls: None,
tool_call_id: None,
@@ -129,7 +129,7 @@ mod tests {
let processed_messages = result.unwrap();
// With empty filter chain, should return the original messages unchanged
assert_eq!(processed_messages.len(), 1);
if let MessageContent::Text(content) = &processed_messages[0].content {
if let Some(MessageContent::Text(content)) = &processed_messages[0].content {
assert_eq!(content, "Hello world!");
} else {
panic!("Expected text content");

View file

@@ -887,7 +887,7 @@ mod tests {
fn create_test_message(role: Role, content: &str) -> Message {
Message {
role,
content: MessageContent::Text(content.to_string()),
content: Some(MessageContent::Text(content.to_string())),
name: None,
tool_calls: None,
tool_call_id: None,

View file

@@ -95,7 +95,9 @@ pub async fn router_chat_get_upstream_model(
.messages
.last()
.map_or("None".to_string(), |msg| {
msg.content.to_string().replace('\n', "\\n")
msg.content
.as_ref()
.map_or("None".to_string(), |c| c.to_string().replace('\n', "\\n"))
});
const MAX_MESSAGE_LENGTH: usize = 50;