Add xAI support for the Responses API by routing to its native endpoint; refactor shared content-extraction code

This commit is contained in:
Musa 2026-03-10 12:18:03 -07:00
parent fdef2b9d61
commit fe1cc31849
No known key found for this signature in database
3 changed files with 36 additions and 55 deletions

View file

@ -1036,6 +1036,30 @@ pub struct ListInputItemsResponse {
// ProviderRequest Implementation
// ============================================================================
/// Appends a plain-text rendering of a single [`InputContent`] item to `buffer`.
///
/// Text content is appended verbatim; non-text content (image, file, audio)
/// collapses to a bracketed placeholder tag.
fn append_input_content_text(buffer: &mut String, content: &InputContent) {
    let rendered: &str = match content {
        InputContent::InputText { text } => text,
        InputContent::InputImage { .. } => "[Image]",
        InputContent::InputFile { .. } => "[File]",
        InputContent::InputAudio { .. } => "[Audio]",
    };
    buffer.push_str(rendered);
}
/// Appends the plain-text rendering of each content item in `content_items`
/// to `buffer`, delegating per-item rendering to [`append_input_content_text`].
fn append_content_items_text(buffer: &mut String, content_items: &[InputContent]) {
    content_items.iter().for_each(|item| {
        // Preserve existing behavior: each content item is prefixed with a space.
        buffer.push(' ');
        append_input_content_text(buffer, item);
    });
}
/// Appends the plain-text rendering of a message body to `buffer`.
///
/// A message body is either raw text (appended verbatim) or a list of
/// structured content items (rendered via [`append_content_items_text`]).
fn append_message_content_text(buffer: &mut String, content: &MessageContent) {
    match content {
        MessageContent::Items(items) => append_content_items_text(buffer, items),
        MessageContent::Text(body) => buffer.push_str(body),
    }
}
impl ProviderRequest for ResponsesAPIRequest {
fn model(&self) -> &str {
&self.model
@ -1057,29 +1081,7 @@ impl ProviderRequest for ResponsesAPIRequest {
match item {
InputItem::Message(msg) => {
let mut extracted = String::new();
match &msg.content {
MessageContent::Text(text) => extracted.push_str(text),
MessageContent::Items(content_items) => {
for content in content_items {
// Preserve existing behavior: each content item is prefixed with a space.
extracted.push(' ');
match content {
InputContent::InputText { text } => {
extracted.push_str(text)
}
InputContent::InputImage { .. } => {
extracted.push_str("[Image]")
}
InputContent::InputFile { .. } => {
extracted.push_str("[File]")
}
InputContent::InputAudio { .. } => {
extracted.push_str("[Audio]")
}
}
}
}
}
append_message_content_text(&mut extracted, &msg.content);
extracted
}
_ => String::new(),
@ -1091,29 +1093,7 @@ impl ProviderRequest for ResponsesAPIRequest {
if let InputItem::Message(msg) = item {
// Preserve existing behavior: each message is prefixed with a space.
extracted.push(' ');
match &msg.content {
MessageContent::Text(text) => extracted.push_str(text),
MessageContent::Items(content_items) => {
for content in content_items {
// Preserve existing behavior: each content item is prefixed with a space.
extracted.push(' ');
match content {
InputContent::InputText { text } => {
extracted.push_str(text)
}
InputContent::InputImage { .. } => {
extracted.push_str("[Image]")
}
InputContent::InputFile { .. } => {
extracted.push_str("[File]")
}
InputContent::InputAudio { .. } => {
extracted.push_str("[Audio]")
}
}
}
}
}
append_message_content_text(&mut extracted, &msg.content);
}
}
extracted

View file

@ -185,7 +185,7 @@ impl SupportedAPIsFromClient {
// For Responses API, check if provider supports it, otherwise translate to chat/completions
match provider_id {
// Providers that support /v1/responses natively
ProviderId::OpenAI => route_by_provider("/responses"),
ProviderId::OpenAI | ProviderId::XAI => route_by_provider("/responses"),
// All other providers: translate to /chat/completions
_ => route_by_provider("/chat/completions"),
}
@ -656,7 +656,7 @@ mod tests {
}
#[test]
fn test_responses_api_targets_xai_chat_completions_endpoint() {
fn test_responses_api_targets_xai_native_responses_endpoint() {
let api = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
assert_eq!(
api.target_endpoint_for_provider(
@ -666,7 +666,7 @@ mod tests {
false,
None
),
"/v1/chat/completions"
"/v1/responses"
);
}
}

View file

@ -166,10 +166,11 @@ impl ProviderId {
SupportedAPIsFromClient::OpenAIChatCompletions(_),
) => SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
// OpenAI Responses API
(ProviderId::OpenAI, SupportedAPIsFromClient::OpenAIResponsesAPI(_)) => {
SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
}
// OpenAI Responses API - OpenAI and xAI support this natively
(
ProviderId::OpenAI | ProviderId::XAI,
SupportedAPIsFromClient::OpenAIResponsesAPI(_),
) => SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses),
// Amazon Bedrock natively supports Bedrock APIs
(ProviderId::AmazonBedrock, SupportedAPIsFromClient::OpenAIChatCompletions(_)) => {
@ -330,14 +331,14 @@ mod tests {
}
#[test]
fn test_xai_uses_chat_completions_for_responses_clients() {
fn test_xai_uses_responses_api_for_responses_clients() {
use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
let client_api = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
let upstream = ProviderId::XAI.compatible_api_for_client(&client_api, false);
assert!(matches!(
upstream,
SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions)
SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
));
}
}