Add PR suggestions for transformations and code quality

This commit is contained in:
Musa 2026-03-09 12:23:14 -07:00
parent 6b37c5a133
commit 546ad1b8e1
No known key found for this signature in database
10 changed files with 219 additions and 255 deletions

View file

@ -147,8 +147,6 @@ pub enum InputItem {
call_id: String,
output: serde_json::Value,
},
/// Forward-compat fallback for unknown input item shapes.
Unknown(serde_json::Value),
}
/// Input message with role and content
@ -201,9 +199,6 @@ pub enum InputContent {
data: Option<String>,
format: Option<String>,
},
/// Forward-compat fallback for unknown content parts.
#[serde(other)]
Unknown,
}
/// Modality options
@ -1055,61 +1050,39 @@ impl ProviderRequest for ResponsesAPIRequest {
}
fn extract_messages_text(&self) -> String {
fn content_items_to_text(content_items: &[InputContent]) -> String {
content_items.iter().fold(String::new(), |acc, content| {
acc + " "
+ &match content {
InputContent::InputText { text } => text.clone(),
InputContent::InputImage { .. } => "[Image]".to_string(),
InputContent::InputFile { .. } => "[File]".to_string(),
InputContent::InputAudio { .. } => "[Audio]".to_string(),
}
})
}
/// Render message content as plain text: plain text passes through,
/// structured content parts are flattened via `content_items_to_text`.
fn message_content_to_text(content: &MessageContent) -> String {
    match content {
        MessageContent::Text(plain) => plain.clone(),
        MessageContent::Items(parts) => content_items_to_text(parts),
    }
}
match &self.input {
InputParam::Text(text) => text.clone(),
InputParam::SingleItem(item) => {
// Normalize single-item input for extraction behavior parity.
match item {
InputItem::Message(msg) => match &msg.content {
MessageContent::Text(text) => text.clone(),
MessageContent::Items(content_items) => {
content_items.iter().fold(String::new(), |acc, content| {
acc + " "
+ &match content {
InputContent::InputText { text } => text.clone(),
InputContent::InputImage { .. } => "[Image]".to_string(),
InputContent::InputFile { .. } => "[File]".to_string(),
InputContent::InputAudio { .. } => "[Audio]".to_string(),
InputContent::Unknown => String::new(),
}
})
}
},
InputItem::Message(msg) => message_content_to_text(&msg.content),
_ => String::new(),
}
}
InputParam::Items(items) => {
items.iter().fold(String::new(), |acc, item| {
match item {
InputItem::Message(msg) => {
let content_text = match &msg.content {
MessageContent::Text(text) => text.clone(),
MessageContent::Items(content_items) => {
content_items.iter().fold(String::new(), |acc, content| {
acc + " "
+ &match content {
InputContent::InputText { text } => text.clone(),
InputContent::InputImage { .. } => {
"[Image]".to_string()
}
InputContent::InputFile { .. } => {
"[File]".to_string()
}
InputContent::InputAudio { .. } => {
"[Audio]".to_string()
}
InputContent::Unknown => String::new(),
}
})
}
};
acc + " " + &content_text
}
// Skip non-message items (references, outputs, etc.)
_ => acc,
}
})
}
InputParam::Items(items) => items.iter().fold(String::new(), |acc, item| match item {
InputItem::Message(msg) => acc + " " + &message_content_to_text(&msg.content),
// Skip non-message items (references, outputs, etc.)
_ => acc,
}),
}
}

View file

@ -185,7 +185,7 @@ impl SupportedAPIsFromClient {
// For Responses API, check if provider supports it, otherwise translate to chat/completions
match provider_id {
// Providers that support /v1/responses natively
ProviderId::OpenAI | ProviderId::XAI => route_by_provider("/responses"),
ProviderId::OpenAI => route_by_provider("/responses"),
// All other providers: translate to /chat/completions
_ => route_by_provider("/chat/completions"),
}
@ -656,7 +656,7 @@ mod tests {
}
#[test]
fn test_responses_api_targets_xai_native_responses_endpoint() {
fn test_responses_api_targets_xai_chat_completions_endpoint() {
let api = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
assert_eq!(
api.target_endpoint_for_provider(
@ -666,7 +666,7 @@ mod tests {
false,
None
),
"/v1/responses"
"/v1/chat/completions"
);
}
}

View file

@ -166,11 +166,10 @@ impl ProviderId {
SupportedAPIsFromClient::OpenAIChatCompletions(_),
) => SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
// OpenAI Responses API - OpenAI and xAI support this natively
(
ProviderId::OpenAI | ProviderId::XAI,
SupportedAPIsFromClient::OpenAIResponsesAPI(_),
) => SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses),
// OpenAI Responses API
(ProviderId::OpenAI, SupportedAPIsFromClient::OpenAIResponsesAPI(_)) => {
SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
}
// Amazon Bedrock natively supports Bedrock APIs
(ProviderId::AmazonBedrock, SupportedAPIsFromClient::OpenAIChatCompletions(_)) => {
@ -331,14 +330,14 @@ mod tests {
}
#[test]
fn test_xai_uses_responses_api_for_responses_clients() {
fn test_xai_uses_chat_completions_for_responses_clients() {
use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
let client_api = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
let upstream = ProviderId::XAI.compatible_api_for_client(&client_api, false);
assert!(matches!(
upstream,
SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions)
));
}
}

View file

@ -5,6 +5,7 @@ use crate::apis::amazon_bedrock::{ConverseRequest, ConverseStreamRequest};
use crate::apis::openai_responses::ResponsesAPIRequest;
use crate::clients::endpoints::SupportedAPIsFromClient;
use crate::clients::endpoints::SupportedUpstreamAPIs;
use crate::ProviderId;
use serde_json::Value;
use std::collections::HashMap;
@ -70,6 +71,25 @@ impl ProviderRequestType {
Self::ResponsesAPIRequest(r) => r.set_messages(messages),
}
}
/// Apply provider-specific request normalization before sending upstream.
///
/// Currently the only adjustment is for xAI chat/completions requests,
/// where the legacy live-search options field must be dropped.
pub fn normalize_for_upstream(
    &mut self,
    provider_id: ProviderId,
    upstream_api: &SupportedUpstreamAPIs,
) {
    // Guard: only xAI requests routed to chat/completions need changes.
    let targets_xai_chat = provider_id == ProviderId::XAI
        && matches!(upstream_api, SupportedUpstreamAPIs::OpenAIChatCompletions(_));
    if !targets_xai_chat {
        return;
    }
    if let Self::ChatCompletionsRequest(req) = self {
        // xAI's legacy live-search shape is deprecated on chat/completions.
        req.web_search_options = None;
    }
}
}
impl ProviderRequest for ProviderRequestType {
@ -787,6 +807,62 @@ mod tests {
}
}
#[test]
fn test_normalize_for_upstream_xai_clears_chat_web_search_options() {
    use crate::apis::openai::{Message, MessageContent, OpenAIApi, Role};

    // A chat/completions request that still carries the legacy
    // live-search options payload.
    let user_message = Message {
        role: Role::User,
        content: Some(MessageContent::Text("hello".to_string())),
        name: None,
        tool_calls: None,
        tool_call_id: None,
    };
    let mut request = ProviderRequestType::ChatCompletionsRequest(ChatCompletionsRequest {
        model: "grok-4".to_string(),
        messages: vec![user_message],
        web_search_options: Some(serde_json::json!({"search_context_size":"medium"})),
        ..Default::default()
    });

    request.normalize_for_upstream(
        ProviderId::XAI,
        &SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
    );

    // Normalization must strip the deprecated field for xAI.
    match request {
        ProviderRequestType::ChatCompletionsRequest(req) => {
            assert!(req.web_search_options.is_none());
        }
        _ => panic!("expected chat request"),
    }
}
#[test]
fn test_normalize_for_upstream_non_xai_keeps_chat_web_search_options() {
    use crate::apis::openai::{Message, MessageContent, OpenAIApi, Role};

    // Same request shape, but targeting a non-xAI provider: the
    // web_search_options field must survive normalization untouched.
    let user_message = Message {
        role: Role::User,
        content: Some(MessageContent::Text("hello".to_string())),
        name: None,
        tool_calls: None,
        tool_call_id: None,
    };
    let mut request = ProviderRequestType::ChatCompletionsRequest(ChatCompletionsRequest {
        model: "gpt-4o".to_string(),
        messages: vec![user_message],
        web_search_options: Some(serde_json::json!({"search_context_size":"medium"})),
        ..Default::default()
    });

    request.normalize_for_upstream(
        ProviderId::OpenAI,
        &SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
    );

    match request {
        ProviderRequestType::ChatCompletionsRequest(req) => {
            assert!(req.web_search_options.is_some());
        }
        _ => panic!("expected chat request"),
    }
}
#[test]
fn test_responses_api_to_anthropic_messages_conversion() {
use crate::apis::anthropic::AnthropicApi::Messages;

View file

@ -136,7 +136,6 @@ impl TryFrom<ResponsesInputConverter> for Vec<Message> {
}
InputContent::InputFile { .. } => None, // Skip files for now
InputContent::InputAudio { .. } => None, // Skip audio for now
InputContent::Unknown => None,
})
.collect(),
)
@ -162,7 +161,6 @@ impl TryFrom<ResponsesInputConverter> for Vec<Message> {
}
InputContent::InputFile { .. } => None, // Skip files for now
InputContent::InputAudio { .. } => None, // Skip audio for now
InputContent::Unknown => None,
})
.collect(),
)
@ -228,7 +226,7 @@ impl TryFrom<ResponsesInputConverter> for Vec<Message> {
tool_calls: Some(vec![tool_call]),
});
}
InputItem::ItemReference { .. } | InputItem::Unknown(_) => {
InputItem::ItemReference { .. } => {
// Item references are metadata-like and can be skipped
// for chat-completions conversion.
}