mirror of https://github.com/katanemo/plano.git
synced 2026-05-09 15:52:44 +02:00
cargo clippy (#660)
This commit is contained in:
parent c75e7606f9
commit ca95ffb63d

62 changed files with 1864 additions and 1187 deletions

@@ -85,13 +85,19 @@ impl StateStorage for MemoryConversationalStorage {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use hermesllm::apis::openai_responses::{InputItem, InputMessage, MessageRole, InputContent, MessageContent};
+    use hermesllm::apis::openai_responses::{
+        InputContent, InputItem, InputMessage, MessageContent, MessageRole,
+    };

     fn create_test_state(response_id: &str, num_messages: usize) -> OpenAIConversationState {
         let mut input_items = Vec::new();
         for i in 0..num_messages {
             input_items.push(InputItem::Message(InputMessage {
-                role: if i % 2 == 0 { MessageRole::User } else { MessageRole::Assistant },
+                role: if i % 2 == 0 {
+                    MessageRole::User
+                } else {
+                    MessageRole::Assistant
+                },
                 content: MessageContent::Items(vec![InputContent::InputText {
                     text: format!("Message {}", i),
                 }]),

@@ -252,7 +258,9 @@ mod tests {
         let merged = storage.merge(&prev_state, current_input);

         // Verify order: prev messages first, then current
-        let InputItem::Message(msg) = &merged[0] else { panic!("Expected Message") };
+        let InputItem::Message(msg) = &merged[0] else {
+            panic!("Expected Message")
+        };
         match &msg.content {
             MessageContent::Items(items) => match &items[0] {
                 InputContent::InputText { text } => assert_eq!(text, "Message 0"),

@@ -261,7 +269,9 @@ mod tests {
             _ => panic!("Expected MessageContent::Items"),
         }

-        let InputItem::Message(msg) = &merged[2] else { panic!("Expected Message") };
+        let InputItem::Message(msg) = &merged[2] else {
+            panic!("Expected Message")
+        };
         match &msg.content {
             MessageContent::Items(items) => match &items[0] {
                 InputContent::InputText { text } => assert_eq!(text, "Message 2"),

@@ -404,7 +414,8 @@ mod tests {
         let current_input = vec![InputItem::Message(InputMessage {
             role: MessageRole::User,
             content: MessageContent::Items(vec![InputContent::InputText {
-                text: "Function result: {\"temperature\": 72, \"condition\": \"sunny\"}".to_string(),
+                text: "Function result: {\"temperature\": 72, \"condition\": \"sunny\"}"
+                    .to_string(),
             }]),
         })];

@@ -415,7 +426,9 @@ mod tests {
         assert_eq!(merged.len(), 3);

         // Verify the order and content
-        let InputItem::Message(msg1) = &merged[0] else { panic!("Expected Message") };
+        let InputItem::Message(msg1) = &merged[0] else {
+            panic!("Expected Message")
+        };
         assert!(matches!(msg1.role, MessageRole::User));
         match &msg1.content {
             MessageContent::Items(items) => match &items[0] {

@@ -427,7 +440,9 @@ mod tests {
             _ => panic!("Expected MessageContent::Items"),
         }

-        let InputItem::Message(msg2) = &merged[1] else { panic!("Expected Message") };
+        let InputItem::Message(msg2) = &merged[1] else {
+            panic!("Expected Message")
+        };
         assert!(matches!(msg2.role, MessageRole::Assistant));
         match &msg2.content {
             MessageContent::Items(items) => match &items[0] {

@@ -439,7 +454,9 @@ mod tests {
             _ => panic!("Expected MessageContent::Items"),
         }

-        let InputItem::Message(msg3) = &merged[2] else { panic!("Expected Message") };
+        let InputItem::Message(msg3) = &merged[2] else {
+            panic!("Expected Message")
+        };
         assert!(matches!(msg3.role, MessageRole::User));
         match &msg3.content {
             MessageContent::Items(items) => match &items[0] {

@@ -508,11 +525,15 @@ mod tests {
         assert_eq!(merged.len(), 5);

         // Verify first item is original user message
-        let InputItem::Message(first) = &merged[0] else { panic!("Expected Message") };
+        let InputItem::Message(first) = &merged[0] else {
+            panic!("Expected Message")
+        };
         assert!(matches!(first.role, MessageRole::User));

         // Verify last two are function outputs
-        let InputItem::Message(second_last) = &merged[3] else { panic!("Expected Message") };
+        let InputItem::Message(second_last) = &merged[3] else {
+            panic!("Expected Message")
+        };
         assert!(matches!(second_last.role, MessageRole::User));
         match &second_last.content {
             MessageContent::Items(items) => match &items[0] {

@@ -522,7 +543,9 @@ mod tests {
             _ => panic!("Expected MessageContent::Items"),
         }

-        let InputItem::Message(last) = &merged[4] else { panic!("Expected Message") };
+        let InputItem::Message(last) = &merged[4] else {
+            panic!("Expected Message")
+        };
         assert!(matches!(last.role, MessageRole::User));
         match &last.content {
             MessageContent::Items(items) => match &items[0] {

@@ -590,7 +613,9 @@ mod tests {
         assert_eq!(merged.len(), 5);

         // Verify the entire conversation flow is preserved
-        let InputItem::Message(first) = &merged[0] else { panic!("Expected Message") };
+        let InputItem::Message(first) = &merged[0] else {
+            panic!("Expected Message")
+        };
         match &first.content {
             MessageContent::Items(items) => match &items[0] {
                 InputContent::InputText { text } => assert!(text.contains("What's the weather")),

@@ -599,7 +624,9 @@ mod tests {
             _ => panic!("Expected MessageContent::Items"),
         }

-        let InputItem::Message(last) = &merged[4] else { panic!("Expected Message") };
+        let InputItem::Message(last) = &merged[4] else {
+            panic!("Expected Message")
+        };
         match &last.content {
             MessageContent::Items(items) => match &items[0] {
                 InputContent::InputText { text } => assert!(text.contains("umbrella")),

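Every hunk above is the same mechanical rewrite: rustfmt splits a `let ... else` binding (or an inline `if`/`else` expression) once the line would exceed its default 100-column `max_width`. For readers unfamiliar with the pattern, here is a minimal, self-contained sketch of `let ... else` as these tests use it; the `Item` enum is a stand-in, not the hermesllm type:

```rust
// Minimal sketch of the `let ... else` pattern (stable since Rust 1.65).
// `Item` stands in for hermesllm's InputItem; it is not the real type.
enum Item {
    Message(String),
    ToolCall,
}

fn first_message(items: &[Item]) -> &str {
    // The else block must diverge (panic!, return, continue, ...).
    // Written on one line, bindings like this exceed 100 columns in the
    // real tests, which is why rustfmt re-wraps them as shown in the diff.
    let Item::Message(text) = &items[0] else {
        panic!("Expected Message")
    };
    text
}

fn main() {
    let items = vec![Item::Message("hello".to_string()), Item::ToolCall];
    println!("{}", first_message(&items));
}
```

The next hunks touch the state module's top level, where two changes go beyond formatting: a module re-ordering and a method rename.
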
@@ -1,14 +1,16 @@
 use async_trait::async_trait;
-use hermesllm::apis::openai_responses::{InputItem, InputMessage, InputContent, MessageContent, MessageRole, InputParam};
+use hermesllm::apis::openai_responses::{
+    InputContent, InputItem, InputMessage, InputParam, MessageContent, MessageRole,
+};
 use serde::{Deserialize, Serialize};
 use std::error::Error;
 use std::fmt;
 use std::sync::Arc;
-use tracing::{debug};
+use tracing::debug;

 pub mod memory;
-pub mod response_state_processor;
 pub mod postgresql;
+pub mod response_state_processor;

 /// Represents the conversational state for a v1/responses request
 /// Contains the complete input/output history that can be restored

@@ -47,7 +49,9 @@ pub enum StateStorageError {
 impl fmt::Display for StateStorageError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
-            StateStorageError::NotFound(id) => write!(f, "Conversation state not found for response_id: {}", id),
+            StateStorageError::NotFound(id) => {
+                write!(f, "Conversation state not found for response_id: {}", id)
+            }
             StateStorageError::StorageError(msg) => write!(f, "Storage error: {}", msg),
             StateStorageError::SerializationError(msg) => write!(f, "Serialization error: {}", msg),
         }

@@ -96,8 +100,6 @@ pub trait StateStorage: Send + Sync {
     }
 }

-
-
 /// Storage backend type enum
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub enum StorageBackend {

@@ -106,7 +108,7 @@ pub enum StorageBackend {
 }

 impl StorageBackend {
-    pub fn from_str(s: &str) -> Option<Self> {
+    pub fn parse_backend(s: &str) -> Option<Self> {
         match s.to_lowercase().as_str() {
             "memory" => Some(StorageBackend::Memory),
             "supabase" => Some(StorageBackend::Supabase),

@@ -139,7 +141,6 @@ pub async fn retrieve_and_combine_input(
     previous_response_id: &str,
     current_input: Vec<InputItem>,
 ) -> Result<Vec<InputItem>, StateStorageError> {
-
     // First get the previous state
     let prev_state = storage.get(previous_response_id).await?;
     let combined_input = storage.merge(&prev_state, current_input);

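Among the hunks above, the rename of `from_str` to `parse_backend` is the one API change, and it matches what clippy's `should_implement_trait` lint asks for: an inherent method named `from_str` shadows `std::str::FromStr::from_str` without providing the trait. An alternative the commit does not take, sketched here under that assumption with an illustrative error type, is to implement the trait itself so callers can use `.parse()`:

```rust
use std::str::FromStr;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StorageBackend {
    Memory,
    Supabase,
}

// Sketch: implementing the standard trait instead of renaming the
// inherent method. The String error type here is illustrative, not
// what the crate uses.
impl FromStr for StorageBackend {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "memory" => Ok(StorageBackend::Memory),
            "supabase" => Ok(StorageBackend::Supabase),
            other => Err(format!("unknown storage backend: {}", other)),
        }
    }
}

fn main() {
    // `parse` now routes through the trait impl.
    let backend: StorageBackend = "memory".parse().unwrap();
    assert_eq!(backend, StorageBackend::Memory);
}
```

Either approach resolves the lint; the rename is the smaller change, since the trait version swaps `Option` for `Result` and requires `FromStr` in scope at call sites. The PostgreSQL backend's hunks follow next, with the same rustfmt treatment:
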
@@ -149,13 +149,12 @@ impl StateStorage for PostgreSQLConversationStorage {
         let provider: String = row.get("provider");

         // Deserialize input_items from JSONB
-        let input_items =
-            serde_json::from_value(input_items_json).map_err(|e| {
-                StateStorageError::StorageError(format!(
-                    "Failed to deserialize input_items: {}",
-                    e
-                ))
-            })?;
+        let input_items = serde_json::from_value(input_items_json).map_err(|e| {
+            StateStorageError::StorageError(format!(
+                "Failed to deserialize input_items: {}",
+                e
+            ))
+        })?;

         Ok(OpenAIConversationState {
             response_id,

@@ -230,7 +229,9 @@ Run that SQL file against your database before using this storage backend.
 #[cfg(test)]
 mod tests {
     use super::*;
-    use hermesllm::apis::openai_responses::{InputContent, InputItem, InputMessage, MessageContent, MessageRole};
+    use hermesllm::apis::openai_responses::{
+        InputContent, InputItem, InputMessage, MessageContent, MessageRole,
+    };

     fn create_test_state(response_id: &str) -> OpenAIConversationState {
         OpenAIConversationState {

@@ -320,7 +321,10 @@ mod tests {

         let result = storage.get("nonexistent_id").await;
         assert!(result.is_err());
-        assert!(matches!(result.unwrap_err(), StateStorageError::NotFound(_)));
+        assert!(matches!(
+            result.unwrap_err(),
+            StateStorageError::NotFound(_)
+        ));
     }

     #[tokio::test]

@@ -372,7 +376,10 @@ mod tests {

         let result = storage.delete("nonexistent_id").await;
         assert!(result.is_err());
-        assert!(matches!(result.unwrap_err(), StateStorageError::NotFound(_)));
+        assert!(matches!(
+            result.unwrap_err(),
+            StateStorageError::NotFound(_)
+        ));
     }

     #[tokio::test]

@@ -423,9 +430,13 @@ mod tests {

         println!("✅ Data written to Supabase!");
         println!("Check your Supabase dashboard:");
-        println!(" SELECT * FROM conversation_states WHERE response_id = 'manual_test_verification';");
+        println!(
+            " SELECT * FROM conversation_states WHERE response_id = 'manual_test_verification';"
+        );
         println!("\nTo cleanup, run:");
-        println!(" DELETE FROM conversation_states WHERE response_id = 'manual_test_verification';");
+        println!(
+            " DELETE FROM conversation_states WHERE response_id = 'manual_test_verification';"
+        );

         // DON'T cleanup - leave it for manual verification
     }

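Several of the test hunks only re-wrap `assert!(matches!(...))` calls across lines, but the pattern itself is worth noting: `matches!` lets a test assert on an error variant without requiring `PartialEq` on the error type. A standalone sketch, with a stand-in for `StateStorageError`:

```rust
// `StorageError` is a stand-in for StateStorageError; deriving Debug is
// enough, since `matches!` needs no PartialEq implementation.
#[derive(Debug)]
enum StorageError {
    NotFound(String),
}

fn lookup(id: &str) -> Result<String, StorageError> {
    Err(StorageError::NotFound(id.to_string()))
}

fn main() {
    let result = lookup("nonexistent_id");
    assert!(result.is_err());
    // Assert on the variant while ignoring its payload, exactly as the
    // storage tests above do.
    assert!(matches!(result.unwrap_err(), StorageError::NotFound(_)));
    println!("variant assertion passed");
}
```

The last file shown is the stream processor that captures response state:
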
@@ -1,13 +1,11 @@
 use bytes::Bytes;
 use flate2::read::GzDecoder;
-use hermesllm::apis::openai_responses::{
-    InputItem, OutputItem, ResponsesAPIStreamEvent,
-};
+use hermesllm::apis::openai_responses::{InputItem, OutputItem, ResponsesAPIStreamEvent};
 use hermesllm::apis::streaming_shapes::sse::SseStreamIter;
 use hermesllm::transforms::response::output_to_input::outputs_to_inputs;
 use std::io::Read;
 use std::sync::Arc;
-use tracing::{info, debug, warn};
+use tracing::{debug, info, warn};

 use crate::handlers::utils::StreamProcessor;
 use crate::state::{OpenAIConversationState, StateStorage};

@@ -53,6 +51,7 @@ pub struct ResponsesStateProcessor<P: StreamProcessor> {
 }

 impl<P: StreamProcessor> ResponsesStateProcessor<P> {
+    #[allow(clippy::too_many_arguments)]
     pub fn new(
         inner: P,
         storage: Arc<dyn StateStorage>,

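`clippy::too_many_arguments` fires by default once a function takes more than seven parameters, and this hunk opts out with `#[allow]` rather than restructuring the constructor. The usual refactor the lint nudges toward, sketched here with hypothetical field names (the real `new` signature is not shown in this diff), is a parameter struct:

```rust
// Sketch: bundling constructor parameters into a struct instead of
// allowing clippy::too_many_arguments. Field names are illustrative,
// not the crate's actual ones.
pub struct ProcessorConfig {
    pub request_id: String,
    pub model: String,
    pub store_state: bool,
    // ...further fields that would otherwise be positional arguments
}

pub struct Processor {
    config: ProcessorConfig,
}

impl Processor {
    // One argument instead of eight; call sites name every field.
    pub fn new(config: ProcessorConfig) -> Self {
        Self { config }
    }

    pub fn request_id(&self) -> &str {
        &self.config.request_id
    }
}

fn main() {
    let p = Processor::new(ProcessorConfig {
        request_id: "req-123".to_string(),
        model: "gpt-4o".to_string(),
        store_state: true,
    });
    println!("{}", p.request_id());
}
```
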
@@ -139,20 +138,19 @@ impl<P: StreamProcessor> ResponsesStateProcessor<P> {
         for event in sse_iter {
             // Only process data lines (skip event-only lines)
             if let Some(data_str) = &event.data {
-                // Try to parse as ResponsesAPIStreamEvent
-                if let Ok(stream_event) = serde_json::from_str::<ResponsesAPIStreamEvent>(data_str) {
-                    // Check if this is a ResponseCompleted event
-                    if let ResponsesAPIStreamEvent::ResponseCompleted { response, .. } = stream_event {
-                        info!(
-                            "[PLANO_REQ_ID:{}] | STATE_PROCESSOR | Captured streaming response.completed: response_id={}, output_items={}",
-                            self.request_id,
-                            response.id,
-                            response.output.len()
-                        );
-                        self.response_id = Some(response.id.clone());
-                        self.output_items = Some(response.output.clone());
-                        return; // Found what we need, exit early
-                    }
+                // Try to parse as ResponsesAPIStreamEvent and check if it's a ResponseCompleted event
+                if let Ok(ResponsesAPIStreamEvent::ResponseCompleted { response, .. }) =
+                    serde_json::from_str::<ResponsesAPIStreamEvent>(data_str)
+                {
+                    info!(
+                        "[PLANO_REQ_ID:{}] | STATE_PROCESSOR | Captured streaming response.completed: response_id={}, output_items={}",
+                        self.request_id,
+                        response.id,
+                        response.output.len()
+                    );
+                    self.response_id = Some(response.id.clone());
+                    self.output_items = Some(response.output.clone());
+                    return; // Found what we need, exit early
                 }
             }
         }

@@ -172,7 +170,9 @@ impl<P: StreamProcessor> ResponsesStateProcessor<P> {
         let decompressed = self.decompress_buffer();

         // Parse complete JSON response
-        match serde_json::from_slice::<hermesllm::apis::openai_responses::ResponsesAPIResponse>(&decompressed) {
+        match serde_json::from_slice::<hermesllm::apis::openai_responses::ResponsesAPIResponse>(
+            &decompressed,
+        ) {
             Ok(response) => {
                 info!(
                     "[PLANO_REQ_ID:{}] | STATE_PROCESSOR | Captured non-streaming response: response_id={}, output_items={}",
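
This last hunk is again pure re-wrapping, but the streaming hunk just before it is a real simplification: a nested `if let Ok(ev) = parse(...)` followed by `if let Variant = ev` collapses into a single pattern on the `Ok` value, the shape clippy's `collapsible_match` lint pushes toward. A self-contained sketch of the same transformation, with a stand-in event type (the real `ResponsesAPIStreamEvent` has more variants and fields):

```rust
use serde::Deserialize;

// Stand-in for ResponsesAPIStreamEvent; only the completed variant is
// interesting for state capture.
#[derive(Deserialize)]
#[serde(tag = "type")]
enum StreamEvent {
    #[serde(rename = "response.completed")]
    ResponseCompleted { response_id: String },
    #[serde(other)]
    Other,
}

fn capture(data_str: &str) -> Option<String> {
    // Before: parse into a binding, then match the variant in a second
    // `if let`. After: one pattern performs both checks at once.
    if let Ok(StreamEvent::ResponseCompleted { response_id }) =
        serde_json::from_str::<StreamEvent>(data_str)
    {
        return Some(response_id);
    }
    None
}

fn main() {
    let done = r#"{"type":"response.completed","response_id":"resp_1"}"#;
    let delta = r#"{"type":"response.output_text.delta"}"#;
    assert_eq!(capture(done), Some("resp_1".to_string()));
    assert_eq!(capture(delta), None);
}
```

The collapsed form is behavior-preserving here because the removed inner branch had no `else` and nothing else used the parsed event.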