add support for v1/messages and transformations (#558)

* pushing draft PR

* transformations are working. Now need to add some tests next

* updated tests and added necessary response transformations for Anthropic's message response object

* fixed bugs for integration tests

* fixed doc tests

* fixed serialization issues with enums on response

* adding some debug logs to help

* fixed issues with non-streaming responses

* updated the stream_context to update response bytes

* the serialized bytes length must be set in the response side

* fixed the debug statement that was causing the integration tests for wasm to fail

* fixing json parsing errors

* intentionally removing the headers

* making sure that we convert the raw bytes to the correct provider type upstream

* fixing non-streaming responses to transform correctly

* /v1/messages works with transformations to and from /v1/chat/completions

* updating the CLI and demos to support anthropic vs. claude

* adding the anthropic key to the preference based routing tests

* fixed test cases and added more structured logs

* fixed integration tests and cleaned up logs

* added python client tests for anthropic and openai

* cleaned up logs and fixed issue with connectivity for llm gateway in weather forecast demo

* fixing the tests. python dependency order was broken

* updated the openAI client to fix demos

* removed the raw response debug statement

* fixed the dup cloning issue and cleaned up the ProviderRequestType enum and traits

* fixing logs

* moved away from string literals to consts

* fixed streaming from Anthropic Client to OpenAI

* removed debug statement that would likely trip up integration tests

* fixed integration tests for llm_gateway

* cleaned up test cases and removed unnecessary crates

* fixing comments from PR

* fixed bug whereby we were sending an OpenAIChatCompletions request object to llm_gateway even though the request may have been AnthropicMessages

---------

Co-authored-by: Salman Paracha <salmanparacha@MacBook-Pro-4.local>
Co-authored-by: Salman Paracha <salmanparacha@MacBook-Pro-9.local>
Co-authored-by: Salman Paracha <salmanparacha@MacBook-Pro-10.local>
Co-authored-by: Salman Paracha <salmanparacha@MacBook-Pro-41.local>
Co-authored-by: Salman Paracha <salmanparacha@MacBook-Pro-136.local>
This commit is contained in:
Salman Paracha 2025-09-10 07:40:30 -07:00 committed by GitHub
parent bb71d041a0
commit fb0581fd39
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
38 changed files with 2842 additions and 919 deletions

View file

@ -6,12 +6,13 @@
//! # Examples
//!
//! ```rust
//! use hermesllm::clients::endpoints::supported_endpoints;
//!
//! // Check if we support an endpoint
//! use hermesllm::clients::endpoints::SupportedAPIs;
//! assert!(SupportedAPIs::from_endpoint("/v1/chat/completions").is_some());
//! assert!(SupportedAPIs::from_endpoint("/v1/messages").is_some());
//! assert!(!SupportedAPIs::from_endpoint("/v1/unknown").is_some());
//!
//! // Get all supported endpoints
//! let endpoints = supported_endpoints();
@ -20,23 +21,81 @@
//! assert!(endpoints.contains(&"/v1/messages"));
//! ```
use crate::apis::{AnthropicApi, OpenAIApi, ApiDefinition};
use crate::{apis::{AnthropicApi, ApiDefinition, OpenAIApi}, ProviderId};
use std::fmt;
/// Check if the given endpoint path is supported
pub fn is_supported_endpoint(endpoint: &str) -> bool {
// Try OpenAI APIs
if OpenAIApi::from_endpoint(endpoint).is_some() {
return true;
}
// Try Anthropic APIs
if AnthropicApi::from_endpoint(endpoint).is_some() {
return true;
}
false
/// Unified enum representing all supported API endpoints across providers
#[derive(Debug, Clone, PartialEq)]
pub enum SupportedAPIs {
    /// OpenAI-compatible chat-completions API (e.g. `/v1/chat/completions`)
    OpenAIChatCompletions(OpenAIApi),
    /// Anthropic Messages API (e.g. `/v1/messages`)
    AnthropicMessagesAPI(AnthropicApi),
}
impl fmt::Display for SupportedAPIs {
    /// Human-readable form: provider label plus the concrete endpoint path.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Resolve the provider label and endpoint first, then format once.
        let (label, endpoint) = match self {
            SupportedAPIs::OpenAIChatCompletions(api) => ("OpenAI API", api.endpoint()),
            SupportedAPIs::AnthropicMessagesAPI(api) => ("Anthropic API", api.endpoint()),
        };
        write!(f, "{} ({})", label, endpoint)
    }
}
impl SupportedAPIs {
    /// Create a `SupportedAPIs` from an endpoint path, trying each provider's
    /// endpoint table in turn (OpenAI first, then Anthropic).
    pub fn from_endpoint(endpoint: &str) -> Option<Self> {
        if let Some(openai_api) = OpenAIApi::from_endpoint(endpoint) {
            return Some(SupportedAPIs::OpenAIChatCompletions(openai_api));
        }
        if let Some(anthropic_api) = AnthropicApi::from_endpoint(endpoint) {
            return Some(SupportedAPIs::AnthropicMessagesAPI(anthropic_api));
        }
        None
    }

    /// Get the endpoint path for this API
    pub fn endpoint(&self) -> &'static str {
        match self {
            SupportedAPIs::OpenAIChatCompletions(api) => api.endpoint(),
            SupportedAPIs::AnthropicMessagesAPI(api) => api.endpoint(),
        }
    }

    /// Resolve the upstream endpoint to call for `provider_id`, given the
    /// client-facing API (`self`) and the original `request_path`.
    ///
    /// Most providers speak the OpenAI chat-completions dialect, so that is the
    /// fallback; Anthropic keeps the native Messages path, and Groq/Gemini host
    /// their OpenAI-compatible surfaces under provider-specific prefixes.
    pub fn target_endpoint_for_provider(&self, provider_id: &ProviderId, request_path: &str) -> String {
        // Shared fallback; kept as a const so no String is allocated on paths
        // that never return it, and so the literal appears exactly once.
        const DEFAULT_ENDPOINT: &str = "/v1/chat/completions";
        match self {
            SupportedAPIs::AnthropicMessagesAPI(AnthropicApi::Messages) => match provider_id {
                // Only Anthropic itself accepts the native Messages API.
                ProviderId::Anthropic => "/v1/messages".to_string(),
                _ => DEFAULT_ENDPOINT.to_string(),
            },
            _ => match provider_id {
                // Groq serves the OpenAI-compatible API under an /openai prefix.
                ProviderId::Groq if request_path.starts_with("/v1/") => {
                    format!("/openai{}", request_path)
                }
                // Gemini's OpenAI-compatible endpoint lives under /v1beta/openai/.
                ProviderId::Gemini if request_path.starts_with("/v1/") => {
                    "/v1beta/openai/chat/completions".to_string()
                }
                _ => DEFAULT_ENDPOINT.to_string(),
            },
        }
    }
}
/// Get all supported endpoint paths
pub fn supported_endpoints() -> Vec<&'static str> {
let mut endpoints = Vec::new();
@ -74,15 +133,15 @@ mod tests {
#[test]
fn test_is_supported_endpoint() {
    // NOTE(review): the previous version also asserted via the removed
    // free function `is_supported_endpoint`; only the `SupportedAPIs`
    // entry point exists now, so the stale duplicate asserts are gone.

    // OpenAI endpoints
    assert!(SupportedAPIs::from_endpoint("/v1/chat/completions").is_some());

    // Anthropic endpoints
    assert!(SupportedAPIs::from_endpoint("/v1/messages").is_some());

    // Unsupported endpoints — prefer `is_none()` over `!…is_some()`.
    assert!(SupportedAPIs::from_endpoint("/v1/unknown").is_none());
    assert!(SupportedAPIs::from_endpoint("/v2/chat").is_none());
    assert!(SupportedAPIs::from_endpoint("").is_none());
}
#[test]

View file

@ -4,6 +4,6 @@ pub mod endpoints;
// Re-export the main items for easier access.
// NOTE(review): dropped the stale duplicate re-export of the removed
// `is_supported_endpoint`/`supported_endpoints` free functions.
pub use lib::*;
pub use endpoints::{SupportedAPIs, identify_provider};
// Note: transformer module contains TryFrom trait implementations that are automatically available

View file

@ -44,8 +44,6 @@
use serde_json::Value;
use std::time::{SystemTime, UNIX_EPOCH};
// Import centralized types
use crate::apis::*;
use super::TransformError;
@ -61,7 +59,7 @@ const DEFAULT_MAX_TOKENS: u32 = 4096;
// ============================================================================
/// Trait for extracting text content from various types
trait ExtractText {
pub trait ExtractText {
fn extract_text(&self) -> String;
}
@ -213,6 +211,7 @@ impl TryFrom<MessagesResponse> for ChatCompletionsResponse {
choices: vec![choice],
usage,
system_fingerprint: None,
service_tier: None,
})
}
}
@ -541,40 +540,6 @@ impl Into<Role> for MessagesRole {
}
}
// Content Extraction
impl ExtractText for MessageContent {
    /// Return the plain text of this content: the string itself for the
    /// `Text` form, or the joined text parts for the `Parts` form.
    fn extract_text(&self) -> String {
        if let MessageContent::Text(raw) = self {
            raw.clone()
        } else if let MessageContent::Parts(segments) = self {
            segments.extract_text()
        } else {
            unreachable!()
        }
    }
}
impl ExtractText for Vec<ContentPart> {
    /// Collect the text of every `Text` part (other part kinds are skipped)
    /// and join them with newlines.
    fn extract_text(&self) -> String {
        let fragments: Vec<&str> = self
            .iter()
            .filter_map(|part| {
                if let ContentPart::Text { text } = part {
                    Some(text.as_str())
                } else {
                    None
                }
            })
            .collect();
        fragments.join("\n")
    }
}
impl ExtractText for Vec<MessagesContentBlock> {
    /// Gather the text of every `Text` block (non-text blocks are ignored)
    /// and join the pieces with newlines.
    fn extract_text(&self) -> String {
        let mut pieces: Vec<&str> = Vec::new();
        for block in self {
            if let MessagesContentBlock::Text { text } = block {
                pieces.push(text.as_str());
            }
        }
        pieces.join("\n")
    }
}
// Content Utilities
impl ContentUtils<ToolCall> for Vec<MessagesContentBlock> {
fn extract_tool_calls(&self) -> Result<Option<Vec<ToolCall>>, TransformError> {