Introduce hermesllm library to handle llm message translation (#501)

This commit is contained in:
Adil Hafeez 2025-06-10 12:53:27 -07:00 committed by GitHub
parent 96b583c819
commit 6c53510f49
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
33 changed files with 1693 additions and 690 deletions

View file

@@ -1,10 +1,10 @@
use std::sync::Arc;
use common::{
api::open_ai::{ChatCompletionsResponse, ContentType, Message},
configuration::{LlmProvider, LlmRoute},
consts::ARCH_PROVIDER_HINT_HEADER,
};
use hermesllm::providers::openai::types::{ChatCompletionsResponse, ContentType, Message};
use hyper::header;
use thiserror::Error;
use tracing::{debug, info, warn};
@@ -136,6 +136,11 @@ impl RouterService {
}
};
if chat_completion_response.choices.is_empty() {
warn!("No choices in router response: {}", body);
return Ok(None);
}
if let Some(ContentType::Text(content)) =
&chat_completion_response.choices[0].message.content
{

View file

@@ -1,4 +1,4 @@
use common::api::open_ai::{ChatCompletionsRequest, Message};
use hermesllm::providers::openai::types::{ChatCompletionsRequest, Message};
use thiserror::Error;
#[derive(Debug, Error)]

View file

@@ -1,8 +1,8 @@
use common::{
api::open_ai::{ChatCompletionsRequest, ContentType, Message},
configuration::LlmRoute,
consts::{SYSTEM_ROLE, TOOL_ROLE, USER_ROLE},
};
use hermesllm::providers::openai::types::{ChatCompletionsRequest, ContentType, Message};
use serde::{Deserialize, Serialize};
use tracing::{debug, warn};
@@ -121,11 +121,13 @@ impl RouterModel for RouterModelV1 {
.iter()
.rev()
.map(|message| {
Message::new(
message.role.clone(),
Message {
role: message.role.clone(),
// we can unwrap here because we have already filtered out messages without content
message.content.as_ref().unwrap().to_string(),
)
content: Some(ContentType::Text(
message.content.as_ref().unwrap().to_string(),
)),
}
})
.collect::<Vec<Message>>();
@@ -141,14 +143,8 @@ impl RouterModel for RouterModelV1 {
messages: vec![Message {
content: Some(ContentType::Text(messages_content)),
role: USER_ROLE.to_string(),
model: None,
tool_calls: None,
tool_call_id: None,
}],
tools: None,
stream: false,
stream_options: None,
metadata: None,
..Default::default()
}
}