pass model name in header when a route is selected while using usage preferences (#531)

This commit is contained in:
Adil Hafeez 2025-07-17 13:41:58 -07:00 committed by GitHub
parent 2340a45353
commit f819ee3507
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 150 additions and 117 deletions

View file

@ -104,7 +104,7 @@ pub async fn chat_completions(
debug!("usage preferences from request: {:?}", usage_preferences);
let mut determined_route = match router_service
let determined_model = match router_service
.determine_route(
&chat_completion_request.messages,
trace_parent.clone(),
@ -121,14 +121,17 @@ pub async fn chat_completions(
}
};
if determined_route.is_none() {
debug!("No LLM model selected, using default from request");
determined_route = Some(chat_completion_request.model.clone());
}
info!(
"sending request to llm provider: {} with llm model: {:?}",
llm_provider_endpoint, determined_route
"sending request to llm provider: {} determined_model: {:?}, model from request: {}",
llm_provider_endpoint, determined_model, chat_completion_request.model
);
request_headers.insert(
ARCH_PROVIDER_HINT_HEADER,
header::HeaderValue::from_str(
&determined_model.unwrap_or(chat_completion_request.model.clone()),
)
.unwrap(),
);
if let Some(trace_parent) = trace_parent {
@ -138,13 +141,6 @@ pub async fn chat_completions(
);
}
if let Some(selected_route) = determined_route {
request_headers.insert(
ARCH_PROVIDER_HINT_HEADER,
header::HeaderValue::from_str(&selected_route).unwrap(),
);
}
let chat_request_parsed_bytes =
serde_json::to_string(&chat_request_user_preferences_removed).unwrap();