Mirror of https://github.com/katanemo/plano.git, synced 2026-05-08 07:12:42 +02:00
Support for Codex via Plano (#808)
* Add Codex CLI support; xAI response improvements
* Add native Plano running check and update CLI agent error handling
* Apply PR suggestions for transformations and code quality
* Message extraction logic in ResponsesAPIRequest
* xAI support for Responses API by routing to native endpoint + refactor code
parent 5189f7907a
commit 6610097659

18 changed files with 1297 additions and 200 deletions
@@ -198,6 +198,7 @@ async fn llm_chat_inner(
    let temperature = client_request.get_temperature();
    let is_streaming_request = client_request.is_streaming();
    let alias_resolved_model = resolve_model_alias(&model_from_request, &model_aliases);
    let (provider_id, _) = get_provider_info(&llm_providers, &alias_resolved_model).await;

    // Validate that the requested model exists in configuration
    // This matches the validation in llm_gateway routing.rs
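The validation the trailing comment refers to is cut off by the hunk boundary. As a rough sketch of the kind of check the comment describes, with the function name, parameter types, and error type all assumed rather than taken from the PR:

// Hypothetical sketch; the PR's actual validation mirrors
// llm_gateway routing.rs and is not shown in this hunk.
fn validate_model_exists(model: &str, configured_models: &[String]) -> Result<(), String> {
    if configured_models.iter().any(|m| m == model) {
        Ok(())
    } else {
        Err(format!("model '{}' is not found in configuration", model))
    }
}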
@@ -249,7 +250,11 @@ async fn llm_chat_inner(
    if client_request.remove_metadata_key("plano_preference_config") {
        debug!("removed plano_preference_config from metadata");
    }

    if let Some(ref client_api_kind) = client_api {
        let upstream_api =
            provider_id.compatible_api_for_client(client_api_kind, is_streaming_request);
        client_request.normalize_for_upstream(provider_id, &upstream_api);
    }
    // === v1/responses state management: Determine upstream API and combine input if needed ===
    // Do this BEFORE routing since routing consumes the request
    // Only process state if state_storage is configured
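The compatible_api_for_client / normalize_for_upstream pair decides which upstream wire format a client-facing API maps onto, which is how the commit message's "routing to native endpoint" for xAI works. Those implementations are not part of this hunk; a minimal sketch of the idea, where the enum variants, Provider struct, and mapping rules are assumptions rather than the project's actual types:

// Hypothetical sketch of client-to-upstream API selection; Plano's real
// compatible_api_for_client lives on the provider type and may differ.
#[derive(Clone, Copy, Debug, PartialEq)]
enum ApiKind {
    ChatCompletions,
    Responses,
}

struct Provider {
    supports_responses_api: bool,
}

impl Provider {
    fn compatible_api_for_client(&self, client_api: ApiKind, _streaming: bool) -> ApiKind {
        match client_api {
            // A v1/responses client stays on v1/responses only if the
            // provider speaks it natively; otherwise fall back to chat.
            ApiKind::Responses if self.supports_responses_api => ApiKind::Responses,
            ApiKind::Responses => ApiKind::ChatCompletions,
            ApiKind::ChatCompletions => ApiKind::ChatCompletions,
        }
    }
}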
@@ -496,7 +501,6 @@ async fn llm_chat_inner(
            .into_response()),
    }
}

/// Resolves model aliases by looking up the requested model in the model_aliases map.
/// Returns the target model if an alias is found, otherwise returns the original model.
fn resolve_model_alias(
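The doc comment pins down the contract for resolve_model_alias even though the hunk cuts off at the signature. A minimal sketch of a body that satisfies it, assuming model_aliases is a HashMap<String, String> keyed by alias name (the PR's actual parameter types are not visible here):

use std::collections::HashMap;

// Hypothetical signature; the real function's parameters may differ.
fn resolve_model_alias(model: &str, model_aliases: &HashMap<String, String>) -> String {
    // Known alias: return its target model. Otherwise pass the
    // requested model name through unchanged.
    model_aliases
        .get(model)
        .cloned()
        .unwrap_or_else(|| model.to_string())
}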
@@ -130,6 +130,7 @@ pub fn extract_input_items(input: &InputParam) -> Vec<InputItem> {
                }]),
            })]
        }
        InputParam::SingleItem(item) => vec![item.clone()],
        InputParam::Items(items) => items.clone(),
    }
}
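Only the tail of the Text arm survives in this hunk. Read together with the tests below, the full match plausibly looks like the following reconstruction; the Text arm is inferred from the test assertions, not copied from the PR:

use hermesllm::apis::openai_responses::{
    InputContent, InputItem, InputMessage, InputParam, MessageContent, MessageRole,
};

// Reconstructed sketch of extract_input_items, not the verbatim PR code.
pub fn extract_input_items(input: &InputParam) -> Vec<InputItem> {
    match input {
        // A bare string becomes a single user message with one text part.
        InputParam::Text(text) => {
            vec![InputItem::Message(InputMessage {
                role: MessageRole::User,
                content: MessageContent::Items(vec![InputContent::InputText {
                    text: text.clone(),
                }]),
            })]
        }
        // Single items and item lists pass through unchanged.
        InputParam::SingleItem(item) => vec![item.clone()],
        InputParam::Items(items) => items.clone(),
    }
}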
@@ -146,3 +147,101 @@ pub async fn retrieve_and_combine_input(
    let combined_input = storage.merge(&prev_state, current_input);
    Ok(combined_input)
}

#[cfg(test)]
mod tests {
    use super::extract_input_items;
    use hermesllm::apis::openai_responses::{
        InputContent, InputItem, InputMessage, InputParam, MessageContent, MessageRole,
    };

    #[test]
    fn test_extract_input_items_converts_text_to_user_message_item() {
        let extracted = extract_input_items(&InputParam::Text("hello world".to_string()));
        assert_eq!(extracted.len(), 1);

        let InputItem::Message(message) = &extracted[0] else {
            panic!("expected InputItem::Message");
        };
        assert!(matches!(message.role, MessageRole::User));

        let MessageContent::Items(items) = &message.content else {
            panic!("expected MessageContent::Items");
        };
        assert_eq!(items.len(), 1);

        let InputContent::InputText { text } = &items[0] else {
            panic!("expected InputContent::InputText");
        };
        assert_eq!(text, "hello world");
    }

    #[test]
    fn test_extract_input_items_preserves_single_item() {
        let item = InputItem::Message(InputMessage {
            role: MessageRole::Assistant,
            content: MessageContent::Items(vec![InputContent::InputText {
                text: "assistant note".to_string(),
            }]),
        });

        let extracted = extract_input_items(&InputParam::SingleItem(item.clone()));
        assert_eq!(extracted.len(), 1);
        let InputItem::Message(message) = &extracted[0] else {
            panic!("expected InputItem::Message");
        };
        assert!(matches!(message.role, MessageRole::Assistant));
        let MessageContent::Items(items) = &message.content else {
            panic!("expected MessageContent::Items");
        };
        let InputContent::InputText { text } = &items[0] else {
            panic!("expected InputContent::InputText");
        };
        assert_eq!(text, "assistant note");
    }

    #[test]
    fn test_extract_input_items_preserves_items_list() {
        let items = vec![
            InputItem::Message(InputMessage {
                role: MessageRole::User,
                content: MessageContent::Items(vec![InputContent::InputText {
                    text: "first".to_string(),
                }]),
            }),
            InputItem::Message(InputMessage {
                role: MessageRole::Assistant,
                content: MessageContent::Items(vec![InputContent::InputText {
                    text: "second".to_string(),
                }]),
            }),
        ];

        let extracted = extract_input_items(&InputParam::Items(items.clone()));
        assert_eq!(extracted.len(), items.len());

        let InputItem::Message(first) = &extracted[0] else {
            panic!("expected first item to be message");
        };
        assert!(matches!(first.role, MessageRole::User));
        let MessageContent::Items(first_items) = &first.content else {
            panic!("expected MessageContent::Items");
        };
        let InputContent::InputText { text: first_text } = &first_items[0] else {
            panic!("expected InputContent::InputText");
        };
        assert_eq!(first_text, "first");

        let InputItem::Message(second) = &extracted[1] else {
            panic!("expected second item to be message");
        };
        assert!(matches!(second.role, MessageRole::Assistant));
        let MessageContent::Items(second_items) = &second.content else {
            panic!("expected MessageContent::Items");
        };
        let InputContent::InputText { text: second_text } = &second_items[0] else {
            panic!("expected InputContent::InputText");
        };
        assert_eq!(second_text, "second");
    }
}
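The first lines of this hunk show retrieve_and_combine_input delegating to storage.merge to stitch previously stored conversation state onto the current v1/responses input before routing. The merge implementation is outside this hunk; a minimal sketch under the assumption that it simply concatenates previous items ahead of the current ones (the free function below is illustrative, not Plano's storage API):

use hermesllm::apis::openai_responses::InputItem;

// Hypothetical merge; the real storage.merge method may track more state.
fn merge(prev_state: &[InputItem], current_input: Vec<InputItem>) -> Vec<InputItem> {
    // Previous turns go first so the upstream provider sees the
    // conversation in chronological order.
    let mut combined = prev_state.to_vec();
    combined.extend(current_input);
    combined
}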