Introduce hermesllm library to handle llm message translation (#501)

This commit is contained in:
Adil Hafeez 2025-06-10 12:53:27 -07:00 committed by GitHub
parent 96b583c819
commit 6c53510f49
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
33 changed files with 1693 additions and 690 deletions

View file

@@ -1,14 +1,13 @@
use std::sync::Arc;
use bytes::Bytes;
use common::api::open_ai::ChatCompletionsRequest;
use common::consts::ARCH_PROVIDER_HINT_HEADER;
use hermesllm::providers::openai::types::ChatCompletionsRequest;
use http_body_util::combinators::BoxBody;
use http_body_util::{BodyExt, Full, StreamBody};
use hyper::body::Frame;
use hyper::header::{self};
use hyper::{Request, Response, StatusCode};
use serde_json::Value;
use tokio::sync::mpsc;
use tokio_stream::wrappers::ReceiverStream;
use tokio_stream::StreamExt;
@@ -32,13 +31,15 @@ pub async fn chat_completions(
let chat_request_bytes = request.collect().await?.to_bytes();
let chat_completion_request: ChatCompletionsRequest =
match serde_json::from_slice(&chat_request_bytes) {
match ChatCompletionsRequest::try_from(chat_request_bytes.as_ref()) {
Ok(request) => request,
Err(err) => {
let v: Value = serde_json::from_slice(&chat_request_bytes).unwrap();
warn!(
"arch-router request body string: {}",
String::from_utf8_lossy(&chat_request_bytes)
);
let err_msg = format!("Failed to parse request body: {}", err);
warn!("{}", err_msg);
warn!("arch-router request body: {}", v.to_string());
let mut bad_request = Response::new(full(err_msg));
*bad_request.status_mut() = StatusCode::BAD_REQUEST;
return Ok(bad_request);

View file

@@ -1,6 +1,6 @@
use bytes::Bytes;
use common::api::open_ai::Models;
use common::configuration::LlmProvider;
use common::configuration::{IntoModels, LlmProvider};
use hermesllm::providers::openai::types::Models;
use http_body_util::{combinators::BoxBody, BodyExt, Full};
use hyper::{Response, StatusCode};
use serde_json;
@@ -11,7 +11,7 @@ pub async fn list_models(
) -> Response<BoxBody<Bytes, hyper::Error>> {
let prov = llm_providers.clone();
let providers = (*prov).clone();
let openai_models = Models::from(providers);
let openai_models: Models = providers.into_models();
match serde_json::to_string(&openai_models) {
Ok(json) => {