Support for Codex via Plano (#808)

* Add Codex CLI support; xAI response improvements

* Add native Plano running check and update CLI agent error handling

* Address PR suggestions for transformations and code quality

* Improve message extraction logic in ResponsesAPIRequest

* Add xAI support for the Responses API by routing to the native endpoint; refactor related code
Musa authored 2026-03-10 20:54:14 -07:00; committed by GitHub
parent 5189f7907a
commit 6610097659
18 changed files with 1297 additions and 200 deletions
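The first hunk below extends Responses API routing so that xAI, like OpenAI, is matched to the native Responses endpoint (previously only the OpenAI arm matched). A minimal sketch of the resulting behavior, assuming the import paths visible in this diff and passing false for compatible_api_for_client's second argument as the new unit test does (its meaning is not shown in these hunks):

use crate::apis::openai::OpenAIApi;
use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
use crate::ProviderId;

/// Returns true when a Responses API client request is kept on the native
/// Responses endpoint for the given provider.
fn routes_to_native_responses(provider: ProviderId) -> bool {
    let client_api = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
    matches!(
        provider.compatible_api_for_client(&client_api, false),
        SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
    )
}

// After this change, routes_to_native_responses should hold for both
// ProviderId::OpenAI and ProviderId::XAI; previously only the OpenAI arm matched.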

@@ -166,10 +166,11 @@ impl ProviderId {
SupportedAPIsFromClient::OpenAIChatCompletions(_),
) => SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
// OpenAI Responses API - only OpenAI supports this
(ProviderId::OpenAI, SupportedAPIsFromClient::OpenAIResponsesAPI(_)) => {
SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
}
// OpenAI Responses API - OpenAI and xAI support this natively
(
ProviderId::OpenAI | ProviderId::XAI,
SupportedAPIsFromClient::OpenAIResponsesAPI(_),
) => SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses),
// Amazon Bedrock natively supports Bedrock APIs
(ProviderId::AmazonBedrock, SupportedAPIsFromClient::OpenAIChatCompletions(_)) => {
@@ -328,4 +329,16 @@ mod tests {
"AmazonBedrock should have models (mapped to amazon)"
);
}
#[test]
fn test_xai_uses_responses_api_for_responses_clients() {
use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
let client_api = SupportedAPIsFromClient::OpenAIResponsesAPI(OpenAIApi::Responses);
let upstream = ProviderId::XAI.compatible_api_for_client(&client_api, false);
assert!(matches!(
upstream,
SupportedUpstreamAPIs::OpenAIResponsesAPI(OpenAIApi::Responses)
));
}
}

@@ -5,6 +5,7 @@ use crate::apis::amazon_bedrock::{ConverseRequest, ConverseStreamRequest};
use crate::apis::openai_responses::ResponsesAPIRequest;
use crate::clients::endpoints::SupportedAPIsFromClient;
use crate::clients::endpoints::SupportedUpstreamAPIs;
use crate::ProviderId;
use serde_json::Value;
use std::collections::HashMap;
@@ -70,6 +71,25 @@ impl ProviderRequestType {
Self::ResponsesAPIRequest(r) => r.set_messages(messages),
}
}
/// Apply provider-specific request normalization before sending upstream.
pub fn normalize_for_upstream(
&mut self,
provider_id: ProviderId,
upstream_api: &SupportedUpstreamAPIs,
) {
if provider_id == ProviderId::XAI
&& matches!(
upstream_api,
SupportedUpstreamAPIs::OpenAIChatCompletions(_)
)
{
if let Self::ChatCompletionsRequest(req) = self {
// xAI's legacy live-search shape is deprecated on chat/completions.
req.web_search_options = None;
}
}
}
}
impl ProviderRequest for ProviderRequestType {
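The normalize_for_upstream hook added above is documented to run before the request is sent upstream. A hypothetical sketch of how a dispatch path might combine it with the routing decision from the first file; prepare_upstream_request and its layout are illustrative, not code from this PR:

use crate::clients::endpoints::{SupportedAPIsFromClient, SupportedUpstreamAPIs};
use crate::ProviderId;

/// Hypothetical glue: pick the upstream API shape for this provider, then
/// strip fields that upstream rejects (e.g. xAI's deprecated
/// web_search_options on chat/completions) before sending.
fn prepare_upstream_request(
    provider_id: ProviderId,
    client_api: &SupportedAPIsFromClient,
    request: &mut ProviderRequestType,
) -> SupportedUpstreamAPIs {
    // The boolean argument mirrors the routing test; its semantics are not
    // shown in these hunks. Assumes ProviderId is Copy, as its by-value
    // parameter in normalize_for_upstream suggests.
    let upstream_api = provider_id.compatible_api_for_client(client_api, false);
    request.normalize_for_upstream(provider_id, &upstream_api);
    upstream_api
}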
@@ -787,6 +807,62 @@ mod tests {
}
}
#[test]
fn test_normalize_for_upstream_xai_clears_chat_web_search_options() {
use crate::apis::openai::{Message, MessageContent, OpenAIApi, Role};
let mut request = ProviderRequestType::ChatCompletionsRequest(ChatCompletionsRequest {
model: "grok-4".to_string(),
messages: vec![Message {
role: Role::User,
content: Some(MessageContent::Text("hello".to_string())),
name: None,
tool_calls: None,
tool_call_id: None,
}],
web_search_options: Some(serde_json::json!({"search_context_size":"medium"})),
..Default::default()
});
request.normalize_for_upstream(
ProviderId::XAI,
&SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
);
let ProviderRequestType::ChatCompletionsRequest(req) = request else {
panic!("expected chat request");
};
assert!(req.web_search_options.is_none());
}
#[test]
fn test_normalize_for_upstream_non_xai_keeps_chat_web_search_options() {
use crate::apis::openai::{Message, MessageContent, OpenAIApi, Role};
let mut request = ProviderRequestType::ChatCompletionsRequest(ChatCompletionsRequest {
model: "gpt-4o".to_string(),
messages: vec![Message {
role: Role::User,
content: Some(MessageContent::Text("hello".to_string())),
name: None,
tool_calls: None,
tool_call_id: None,
}],
web_search_options: Some(serde_json::json!({"search_context_size":"medium"})),
..Default::default()
});
request.normalize_for_upstream(
ProviderId::OpenAI,
&SupportedUpstreamAPIs::OpenAIChatCompletions(OpenAIApi::ChatCompletions),
);
let ProviderRequestType::ChatCompletionsRequest(req) = request else {
panic!("expected chat request");
};
assert!(req.web_search_options.is_some());
}
#[test]
fn test_responses_api_to_anthropic_messages_conversion() {
use crate::apis::anthropic::AnthropicApi::Messages;