Use mcp tools for filter chain (#621)

* agents framework demo

* more changes

* add more changes

* pending changes

* fix tests

* fix more

* rebase with main and better handle error from mcp

* add trace for filters

* add test for client error, server error and for mcp error

* update schema validate code and rename kind => type in agent_filter

* fix agent description and pre-commit

* fix tests

* add provider specific request parsing in agents chat

* fix precommit and tests

* cleanup demo

* update readme

* fix pre-commit

* refactor tracing

* fix fmt

* fix: handle MessageContent enum in responses API conversion

- Update request.rs to handle new MessageContent enum structure from main
- MessageContent can now be Text(String) or Items(Vec<InputContent>)
- Handle new InputItem variants (ItemReference, FunctionCallOutput)
- Fixes compilation error after merging latest main (#632); see the sketch after this list

* address pr feedback

* fix span

* fix build

* update openai version
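
For context on the `MessageContent` fix described above: a rough sketch of the two enums involved, with shapes inferred from the hunks further down (the real definitions in `crate::apis::openai_responses` may carry more variants and fields; `Other` and `flatten` are placeholders, not repo code):

```rust
// Inferred sketch, not the actual definitions from the repo.
pub enum MessageContent {
    Text(String),             // plain text content
    Items(Vec<InputContent>), // structured content parts
}

pub enum InputContent {
    InputText { text: String },
    Other, // stand-in for the remaining variants (names not shown in this diff)
}

// Flattening strategy used by the conversion: keep only the plain-text parts.
fn flatten(content: &MessageContent) -> String {
    match content {
        MessageContent::Text(text) => text.clone(),
        MessageContent::Items(items) => items
            .iter()
            .filter_map(|c| match c {
                InputContent::InputText { text } => Some(text.clone()),
                _ => None,
            })
            .collect::<Vec<_>>()
            .join("\n"),
    }
}
```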
Author: Adil Hafeez, 2025-12-17 17:30:14 -08:00 (committed by GitHub)
Parent: cb82a83c7b
Commit: 2f9121407b
40 changed files with 4886 additions and 190 deletions
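
Each hunk below implements the same pair of new methods on the `ProviderRequest` trait. The presumed trait surface, with the signatures taken verbatim from the hunks (any other trait members are not shown in this diff):

```rust
pub trait ProviderRequest {
    // Existing member shown as context in the hunks below.
    fn get_temperature(&self) -> Option<f32>;

    /// Normalize the provider-specific body into OpenAI-style messages
    /// so filters can operate on a single representation.
    fn get_messages(&self) -> Vec<crate::apis::openai::Message>;

    /// Write (possibly filter-modified) messages back into the
    /// provider-specific body.
    fn set_messages(&mut self, messages: &[crate::apis::openai::Message]);
}
```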


@@ -233,6 +233,104 @@ impl ProviderRequest for ConverseRequest {
    fn get_temperature(&self) -> Option<f32> {
        self.inference_config.as_ref()?.temperature
    }

    fn get_messages(&self) -> Vec<crate::apis::openai::Message> {
        use crate::apis::openai::{Message, MessageContent, Role};

        let mut openai_messages = Vec::new();

        // Add system messages if present
        if let Some(system) = &self.system {
            for sys_block in system {
                match sys_block {
                    SystemContentBlock::Text { text } => {
                        openai_messages.push(Message {
                            role: Role::System,
                            content: MessageContent::Text(text.clone()),
                            name: None,
                            tool_calls: None,
                            tool_call_id: None,
                        });
                    }
                    _ => {} // Skip other system content types
                }
            }
        }

        // Convert conversation messages
        if let Some(messages) = &self.messages {
            for msg in messages {
                let role = match msg.role {
                    ConversationRole::User => Role::User,
                    ConversationRole::Assistant => Role::Assistant,
                };

                // Extract text from content blocks
                let content = msg.content.iter()
                    .filter_map(|block| {
                        if let ContentBlock::Text { text } = block {
                            Some(text.clone())
                        } else {
                            None
                        }
                    })
                    .collect::<Vec<_>>()
                    .join("\n");

                openai_messages.push(Message {
                    role,
                    content: MessageContent::Text(content),
                    name: None,
                    tool_calls: None,
                    tool_call_id: None,
                });
            }
        }

        openai_messages
    }

    fn set_messages(&mut self, messages: &[crate::apis::openai::Message]) {
        // Convert OpenAI messages to Bedrock format
        use crate::apis::amazon_bedrock::{ContentBlock, ConversationRole, SystemContentBlock};

        let mut system_blocks = Vec::new();
        let mut bedrock_messages = Vec::new();

        for msg in messages {
            match msg.role {
                crate::apis::openai::Role::System => {
                    if let crate::apis::openai::MessageContent::Text(text) = &msg.content {
                        system_blocks.push(SystemContentBlock::Text { text: text.clone() });
                    }
                }
                crate::apis::openai::Role::User | crate::apis::openai::Role::Assistant => {
                    let role = match msg.role {
                        crate::apis::openai::Role::User => ConversationRole::User,
                        crate::apis::openai::Role::Assistant => ConversationRole::Assistant,
                        _ => continue,
                    };
                    let content = if let crate::apis::openai::MessageContent::Text(text) = &msg.content {
                        vec![ContentBlock::Text { text: text.clone() }]
                    } else {
                        vec![]
                    };
                    bedrock_messages.push(crate::apis::amazon_bedrock::Message {
                        role,
                        content,
                    });
                }
                _ => {}
            }
        }

        if !system_blocks.is_empty() {
            self.system = Some(system_blocks);
        }
        self.messages = Some(bedrock_messages);
    }
}
// ============================================================================
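
Why both methods exist on every provider body: a filter in the chain can work purely on OpenAI-style messages and write the result back, regardless of whether the underlying request is Bedrock, Anthropic, or OpenAI. A minimal sketch of that usage (`prepend_system_prompt` is a made-up helper, not part of this commit; `Message`, `MessageContent`, and `Role` are the repo types shown in the hunks, and the `ProviderRequest` trait must be brought into scope from wherever it lives in the crate):

```rust
use crate::apis::openai::{Message, MessageContent, Role};
// (also `use` the ProviderRequest trait; its module path is not shown in this diff)

// Hypothetical helper, not in this commit: inject a system prompt into any
// provider-specific request via the trait methods added above.
fn prepend_system_prompt<R: ProviderRequest>(req: &mut R, prompt: &str) {
    let mut messages = req.get_messages();
    messages.insert(
        0,
        Message {
            role: Role::System,
            content: MessageContent::Text(prompt.to_string()),
            name: None,
            tool_calls: None,
            tool_call_id: None,
        },
    );
    req.set_messages(&messages);
}
```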


@@ -541,6 +541,65 @@ impl ProviderRequest for MessagesRequest {
    fn get_temperature(&self) -> Option<f32> {
        self.temperature
    }

    fn get_messages(&self) -> Vec<crate::apis::openai::Message> {
        use crate::apis::openai::Message;

        let mut openai_messages = Vec::new();

        // Add system prompt as system message if present
        if let Some(system) = &self.system {
            openai_messages.push(system.clone().into());
        }

        // Convert each Anthropic message to OpenAI format
        for msg in &self.messages {
            if let Ok(converted_msgs) = TryInto::<Vec<Message>>::try_into(msg.clone()) {
                openai_messages.extend(converted_msgs);
            }
        }

        openai_messages
    }

    fn set_messages(&mut self, messages: &[crate::apis::openai::Message]) {
        // Convert OpenAI messages to Anthropic format
        // Separate system messages from regular messages
        let mut system_messages = Vec::new();
        let mut regular_messages = Vec::new();

        for msg in messages {
            if msg.role == crate::apis::openai::Role::System {
                system_messages.push(msg.clone());
            } else {
                regular_messages.push(msg.clone());
            }
        }

        // Set system prompt if there are system messages
        if !system_messages.is_empty() {
            // Combine all system messages into one
            let system_text = system_messages.iter()
                .filter_map(|msg| {
                    if let crate::apis::openai::MessageContent::Text(text) = &msg.content {
                        Some(text.as_str())
                    } else {
                        None
                    }
                })
                .collect::<Vec<_>>()
                .join("\n");
            self.system = Some(crate::apis::anthropic::MessagesSystemPrompt::Single(system_text));
        }

        // Convert regular messages
        self.messages = regular_messages.iter()
            .filter_map(|msg| {
                msg.clone().try_into().ok()
            })
            .collect();
    }
}
impl MessagesResponse {


@@ -735,6 +735,14 @@ impl ProviderRequest for ChatCompletionsRequest {
    fn get_temperature(&self) -> Option<f32> {
        self.temperature
    }

    fn get_messages(&self) -> Vec<crate::apis::openai::Message> {
        self.messages.clone()
    }

    fn set_messages(&mut self, messages: &[crate::apis::openai::Message]) {
        self.messages = messages.to_vec();
    }
}
/// Implementation of ProviderResponse for ChatCompletionsResponse


@@ -1134,6 +1134,140 @@ impl ProviderRequest for ResponsesAPIRequest {
    fn get_temperature(&self) -> Option<f32> {
        self.temperature
    }

    fn get_messages(&self) -> Vec<crate::apis::openai::Message> {
        use crate::apis::openai::{Message, MessageContent, Role};

        let mut openai_messages = Vec::new();

        // Add instructions as system message if present
        if let Some(instructions) = &self.instructions {
            openai_messages.push(Message {
                role: Role::System,
                content: MessageContent::Text(instructions.clone()),
                name: None,
                tool_calls: None,
                tool_call_id: None,
            });
        }

        // Convert input to messages
        match &self.input {
            InputParam::Text(text) => {
                openai_messages.push(Message {
                    role: Role::User,
                    content: MessageContent::Text(text.clone()),
                    name: None,
                    tool_calls: None,
                    tool_call_id: None,
                });
            }
            InputParam::Items(items) => {
                for item in items {
                    match item {
                        InputItem::Message(msg) => {
                            // Convert message role
                            let role = match msg.role {
                                MessageRole::User => Role::User,
                                MessageRole::Assistant => Role::Assistant,
                                MessageRole::System => Role::System,
                                MessageRole::Developer => Role::System, // Map developer to system
                            };

                            // Extract text from message content
                            let content = match &msg.content {
                                crate::apis::openai_responses::MessageContent::Text(text) => text.clone(),
                                crate::apis::openai_responses::MessageContent::Items(items) => {
                                    items.iter()
                                        .filter_map(|c| {
                                            if let InputContent::InputText { text } = c {
                                                Some(text.clone())
                                            } else {
                                                None
                                            }
                                        })
                                        .collect::<Vec<_>>()
                                        .join("\n")
                                }
                            };

                            openai_messages.push(Message {
                                role,
                                content: MessageContent::Text(content),
                                name: None,
                                tool_calls: None,
                                tool_call_id: None,
                            });
                        }
                        // Skip other input item types for now
                        InputItem::ItemReference { .. } | InputItem::FunctionCallOutput { .. } => {
                            // These are not yet supported in agent framework
                        }
                    }
                }
            }
        }

        openai_messages
    }

    fn set_messages(&mut self, messages: &[crate::apis::openai::Message]) {
        // For ResponsesAPI, we need to convert messages back to input format
        // Extract system messages as instructions
        let system_text = messages.iter()
            .filter(|msg| msg.role == crate::apis::openai::Role::System)
            .filter_map(|msg| {
                if let crate::apis::openai::MessageContent::Text(text) = &msg.content {
                    Some(text.as_str())
                } else {
                    None
                }
            })
            .collect::<Vec<_>>()
            .join("\n");

        if !system_text.is_empty() {
            self.instructions = Some(system_text);
        }

        // Convert user/assistant messages to InputParam
        // For simplicity, we'll use the last user message as the input
        // or combine all non-system messages
        let input_messages: Vec<_> = messages.iter()
            .filter(|msg| msg.role != crate::apis::openai::Role::System)
            .collect();

        if !input_messages.is_empty() {
            // If there's only one message, use Text format
            if input_messages.len() == 1 {
                if let crate::apis::openai::MessageContent::Text(text) = &input_messages[0].content {
                    self.input = crate::apis::openai_responses::InputParam::Text(text.clone());
                }
            } else {
                // Multiple messages - combine them as text for now
                // A more sophisticated approach would use InputParam::Items
                let combined_text = input_messages.iter()
                    .filter_map(|msg| {
                        if let crate::apis::openai::MessageContent::Text(text) = &msg.content {
                            Some(format!("{}: {}",
                                match msg.role {
                                    crate::apis::openai::Role::User => "User",
                                    crate::apis::openai::Role::Assistant => "Assistant",
                                    _ => "Unknown",
                                },
                                text
                            ))
                        } else {
                            None
                        }
                    })
                    .collect::<Vec<_>>()
                    .join("\n");
                self.input = crate::apis::openai_responses::InputParam::Text(combined_text);
            }
        }
    }
}
// ============================================================================
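
A hedged illustration of the multi-message fallback above: system text becomes `instructions`, and the remaining turns are flattened into a single role-prefixed text input. The `openai_msg` helper and the `Default` impl on `ResponsesAPIRequest` are assumptions made for the sketch, not part of this commit.

```rust
#[cfg(test)]
mod filter_roundtrip_sketch {
    use crate::apis::openai::{Message, MessageContent, Role};
    use crate::apis::openai_responses::{InputParam, ResponsesAPIRequest};
    // (also `use` the ProviderRequest trait; its module path is not shown in this diff)

    // Assumed helper for brevity; not part of the commit.
    fn openai_msg(role: Role, text: &str) -> Message {
        Message {
            role,
            content: MessageContent::Text(text.to_string()),
            name: None,
            tool_calls: None,
            tool_call_id: None,
        }
    }

    #[test]
    fn combines_non_system_messages_into_text_input() {
        // Assumes ResponsesAPIRequest implements Default; adjust construction as needed.
        let mut req = ResponsesAPIRequest::default();
        req.set_messages(&[
            openai_msg(Role::System, "Be brief."),
            openai_msg(Role::User, "What is 2+2?"),
            openai_msg(Role::Assistant, "4"),
            openai_msg(Role::User, "And 3+3?"),
        ]);

        assert_eq!(req.instructions.as_deref(), Some("Be brief."));
        match &req.input {
            InputParam::Text(text) => {
                assert_eq!(text.as_str(), "User: What is 2+2?\nAssistant: 4\nUser: And 3+3?")
            }
            _ => panic!("expected combined text input"),
        }
    }
}
```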