diff --git a/crates/hermesllm/src/apis/openai_responses.rs b/crates/hermesllm/src/apis/openai_responses.rs
index 424430b6..eac8a452 100644
--- a/crates/hermesllm/src/apis/openai_responses.rs
+++ b/crates/hermesllm/src/apis/openai_responses.rs
@@ -1036,6 +1036,30 @@ pub struct ListInputItemsResponse {
 // ProviderRequest Implementation
 // ============================================================================
 
+fn append_input_content_text(buffer: &mut String, content: &InputContent) {
+    match content {
+        InputContent::InputText { text } => buffer.push_str(text),
+        InputContent::InputImage { .. } => buffer.push_str("[Image]"),
+        InputContent::InputFile { .. } => buffer.push_str("[File]"),
+        InputContent::InputAudio { .. } => buffer.push_str("[Audio]"),
+    }
+}
+
+fn append_content_items_text(buffer: &mut String, content_items: &[InputContent]) {
+    for content in content_items {
+        // Preserve existing behavior: each content item is prefixed with a space.
+        buffer.push(' ');
+        append_input_content_text(buffer, content);
+    }
+}
+
+fn append_message_content_text(buffer: &mut String, content: &MessageContent) {
+    match content {
+        MessageContent::Text(text) => buffer.push_str(text),
+        MessageContent::Items(content_items) => append_content_items_text(buffer, content_items),
+    }
+}
+
 impl ProviderRequest for ResponsesAPIRequest {
     fn model(&self) -> &str {
         &self.model
@@ -1057,29 +1081,7 @@ impl ProviderRequest for ResponsesAPIRequest {
             match item {
                 InputItem::Message(msg) => {
                     let mut extracted = String::new();
-                    match &msg.content {
-                        MessageContent::Text(text) => extracted.push_str(text),
-                        MessageContent::Items(content_items) => {
-                            for content in content_items {
-                                // Preserve existing behavior: each content item is prefixed with a space.
-                                extracted.push(' ');
-                                match content {
-                                    InputContent::InputText { text } => {
-                                        extracted.push_str(text)
-                                    }
-                                    InputContent::InputImage { .. } => {
-                                        extracted.push_str("[Image]")
-                                    }
-                                    InputContent::InputFile { .. } => {
-                                        extracted.push_str("[File]")
-                                    }
-                                    InputContent::InputAudio { .. } => {
-                                        extracted.push_str("[Audio]")
-                                    }
-                                }
-                            }
-                        }
-                    }
+                    append_message_content_text(&mut extracted, &msg.content);
                     extracted
                 }
                 _ => String::new(),
@@ -1091,29 +1093,7 @@ impl ProviderRequest for ResponsesAPIRequest {
             if let InputItem::Message(msg) = item {
                 // Preserve existing behavior: each message is prefixed with a space.
                 extracted.push(' ');
-                match &msg.content {
-                    MessageContent::Text(text) => extracted.push_str(text),
-                    MessageContent::Items(content_items) => {
-                        for content in content_items {
-                            // Preserve existing behavior: each content item is prefixed with a space.
-                            extracted.push(' ');
-                            match content {
-                                InputContent::InputText { text } => {
-                                    extracted.push_str(text)
-                                }
-                                InputContent::InputImage { .. } => {
-                                    extracted.push_str("[Image]")
-                                }
-                                InputContent::InputFile { .. } => {
-                                    extracted.push_str("[File]")
-                                }
-                                InputContent::InputAudio { .. } => {
-                                    extracted.push_str("[Audio]")
-                                }
-                            }
-                        }
-                    }
-                }
+                append_message_content_text(&mut extracted, &msg.content);
             }
         }
         extracted
diff --git a/crates/hermesllm/src/clients/endpoints.rs b/crates/hermesllm/src/clients/endpoints.rs
index 26240ffb..23e14604 100644
--- a/crates/hermesllm/src/clients/endpoints.rs
+++ b/crates/hermesllm/src/clients/endpoints.rs
@@ -185,7 +185,7 @@ impl SupportedAPIsFromClient {
         // For Responses API, check if provider supports it, otherwise translate to chat/completions
         match provider_id {
             // Providers that support /v1/responses natively
-            ProviderId::OpenAI => route_by_provider("/responses"),
+            ProviderId::OpenAI | ProviderId::XAI => route_by_provider("/responses"),
             // All other providers: translate to /chat/completions
             _ => route_by_provider("/chat/completions"),
         }
@@ -656,7 +656,7 @@ mod tests {
     }
 
     #[test]
-    fn test_responses_api_targets_xai_chat_completions_endpoint() {
+    fn test_responses_api_targets_xai_native_responses_endpoint() {
         let api = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
         assert_eq!(
             api.target_endpoint_for_provider(
@@ -666,7 +666,7 @@ mod tests {
                 false,
                 None
             ),
-            "/v1/chat/completions"
+            "/v1/responses"
         );
     }
 }
diff --git a/crates/hermesllm/src/providers/id.rs b/crates/hermesllm/src/providers/id.rs
index fe76b8b8..11008711 100644
--- a/crates/hermesllm/src/providers/id.rs
+++ b/crates/hermesllm/src/providers/id.rs
@@ -166,10 +166,11 @@ impl ProviderId {
                 SupportedAPIsFromClient::OpenAIChatCompletions(_),
             ) => SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
 
-            // OpenAI Responses API
-            (ProviderId::OpenAI, SupportedAPIsFromClient::OpenAIResponsesAPI(_)) => {
-                SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
-            }
+            // OpenAI Responses API - OpenAI and xAI support this natively
+            (
+                ProviderId::OpenAI | ProviderId::XAI,
+                SupportedAPIsFromClient::OpenAIResponsesAPI(_),
+            ) => SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses),
 
             // Amazon Bedrock natively supports Bedrock APIs
             (ProviderId::AmazonBedrock, SupportedAPIsFromClient::OpenAIChatCompletions(_)) => {
@@ -330,14 +331,14 @@ mod tests {
     }
 
     #[test]
-    fn test_xai_uses_chat_completions_for_responses_clients() {
+    fn test_xai_uses_responses_api_for_responses_clients() {
         use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
 
         let client_api = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
         let upstream = ProviderId::XAI.compatible_api_for_client(&client_api, false);
         assert!(matches!(
             upstream,
-            SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions)
+            SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
         ));
     }
 }