Code refactor and some improvements - see description

- this is a follow-up to PR #190
- revert rename of files
- bring in fix for panic from https://github.com/katanemo/arch/pull/183
This commit is contained in:
Adil Hafeez 2024-10-17 17:59:59 -07:00
parent 6cd05572c4
commit 3e7f7be838
7 changed files with 49 additions and 79 deletions

View file

@@ -1,4 +1,4 @@
use crate::llm_stream_context::LlmGatewayStreamContext;
use crate::stream_context::StreamContext;
use common::configuration::Configuration;
use common::http::Client;
use common::llm_providers::LlmProviders;
@@ -28,19 +28,19 @@ impl WasmMetrics {
}
#[derive(Debug)]
pub struct FilterCallContext {}
pub struct CallContext {}
#[derive(Debug)]
pub struct LlmGatewayFilterContext {
pub struct FilterContext {
metrics: Rc<WasmMetrics>,
// callouts stores token_id to request mapping that we use during #on_http_call_response to match the response to the request.
callouts: RefCell<HashMap<u32, FilterCallContext>>,
callouts: RefCell<HashMap<u32, CallContext>>,
llm_providers: Option<Rc<LlmProviders>>,
}
impl LlmGatewayFilterContext {
pub fn new() -> LlmGatewayFilterContext {
LlmGatewayFilterContext {
impl FilterContext {
pub fn new() -> FilterContext {
FilterContext {
callouts: RefCell::new(HashMap::new()),
metrics: Rc::new(WasmMetrics::new()),
llm_providers: None,
@@ -48,8 +48,8 @@ impl LlmGatewayFilterContext {
}
}
impl Client for LlmGatewayFilterContext {
type CallContext = FilterCallContext;
impl Client for FilterContext {
type CallContext = CallContext;
fn callouts(&self) -> &RefCell<HashMap<u32, Self::CallContext>> {
&self.callouts
@@ -60,10 +60,10 @@ impl Client for LlmGatewayFilterContext {
}
}
impl Context for LlmGatewayFilterContext {}
impl Context for FilterContext {}
// RootContext allows the Rust code to reach into the Envoy Config
impl RootContext for LlmGatewayFilterContext {
impl RootContext for FilterContext {
fn on_configure(&mut self, _: usize) -> bool {
let config_bytes = self
.get_plugin_configuration()
@@ -90,8 +90,7 @@ impl RootContext for LlmGatewayFilterContext {
context_id
);
// No StreamContext can be created until the Embedding Store is fully initialized.
Some(Box::new(LlmGatewayStreamContext::new(
Some(Box::new(StreamContext::new(
context_id,
Rc::clone(&self.metrics),
Rc::clone(

View file

@@ -1,13 +1,13 @@
use llm_filter_context::LlmGatewayFilterContext;
use filter_context::FilterContext;
use proxy_wasm::traits::*;
use proxy_wasm::types::*;
mod llm_filter_context;
mod llm_stream_context;
mod filter_context;
mod stream_context;
proxy_wasm::main! {{
proxy_wasm::set_log_level(LogLevel::Trace);
proxy_wasm::set_root_context(|_| -> Box<dyn RootContext> {
Box::new(LlmGatewayFilterContext::new())
Box::new(FilterContext::new())
});
}}

View file

@@ -1,4 +1,4 @@
use crate::llm_filter_context::WasmMetrics;
use crate::filter_context::WasmMetrics;
use common::common_types::open_ai::{
ArchState, ChatCompletionChunkResponse, ChatCompletionsRequest, ChatCompletionsResponse,
Message, ToolCall, ToolCallState,
@@ -34,7 +34,7 @@ pub enum ServerError {
BadRequest { why: String },
}
pub struct LlmGatewayStreamContext {
pub struct StreamContext {
context_id: u32,
metrics: Rc<WasmMetrics>,
tool_calls: Option<Vec<ToolCall>>,
@@ -52,10 +52,10 @@ pub struct LlmGatewayStreamContext {
request_id: Option<String>,
}
impl LlmGatewayStreamContext {
impl StreamContext {
#[allow(clippy::too_many_arguments)]
pub fn new(context_id: u32, metrics: Rc<WasmMetrics>, llm_providers: Rc<LlmProviders>) -> Self {
LlmGatewayStreamContext {
StreamContext {
context_id,
metrics,
chat_completions_request: None,
@@ -160,7 +160,7 @@ impl LlmGatewayStreamContext {
}
// HttpContext is the trait that allows the Rust code to interact with HTTP objects.
impl HttpContext for LlmGatewayStreamContext {
impl HttpContext for StreamContext {
// Envoy's HTTP model is event driven. The WASM ABI has given implementors events to hook onto
// the lifecycle of the http request and response.
fn on_http_request_headers(&mut self, _num_headers: usize, _end_of_stream: bool) -> Action {
@@ -418,4 +418,4 @@ impl HttpContext for LlmGatewayStreamContext {
}
}
impl Context for LlmGatewayStreamContext {}
impl Context for StreamContext {}