add top-level routing_preferences with selection_policy and model metrics fetch

Adil Hafeez 2026-03-26 17:35:39 -07:00
parent 406fa92802
commit 2ef938ac5f
9 changed files with 568 additions and 49 deletions
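
The TopLevelRoutingPreference, SelectionPolicy, SelectionPreference, and ModelMetricsSources types referenced throughout this diff live in common::configuration and are not part of the change. Below is a minimal sketch of plausible shapes for them, reconstructed only from the fields this commit accesses (name, description, models, selection_policy, prefer, url, refresh_interval); the derives and exact field types are assumptions, not the real definitions.

// Hypothetical stand-ins for the common::configuration types used in this
// commit, inferred from field accesses only -- not the actual definitions.
use serde::Deserialize;

#[derive(Clone, Debug, Deserialize)]
pub struct TopLevelRoutingPreference {
    pub name: String,                      // route name the router model answers with
    pub description: String,               // natural-language hint used in the routing prompt
    pub models: Vec<String>,               // candidate models for this route
    pub selection_policy: SelectionPolicy, // how to pick among the candidates
}

#[derive(Clone, Debug, Deserialize)]
pub struct SelectionPolicy {
    pub prefer: SelectionPreference,
}

#[derive(Clone, Copy, Debug, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum SelectionPreference {
    Cheapest, // lowest cost metric wins
    Fastest,  // lowest latency metric wins
    Random,   // pseudo-random pick via rand_index
}

#[derive(Clone, Debug, Deserialize)]
pub struct ModelMetricsSources {
    pub url: String,                   // endpoint serving {"cost": {...}, "latency": {...}}
    pub refresh_interval: Option<u64>, // seconds between background refreshes, if set
}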


@@ -1,7 +1,9 @@
use std::{collections::HashMap, sync::Arc};
use common::{
- configuration::{LlmProvider, ModelUsagePreference, RoutingPreference},
configuration::{
LlmProvider, ModelUsagePreference, RoutingPreference, TopLevelRoutingPreference,
},
consts::{ARCH_PROVIDER_HINT_HEADER, REQUEST_ID_HEADER, TRACE_PARENT_HEADER},
};
use hermesllm::apis::openai::Message;
@@ -10,6 +12,7 @@ use thiserror::Error;
use tracing::{debug, info};
use super::http::{self, post_and_extract_content};
use super::model_metrics::ModelMetricsService;
use super::router_model::RouterModel;
use crate::router::router_model_v1;
@@ -20,6 +23,8 @@ pub struct RouterService {
router_model: Arc<dyn RouterModel>,
routing_provider_name: String,
llm_usage_defined: bool,
top_level_preferences: HashMap<String, TopLevelRoutingPreference>,
metrics_service: Option<Arc<ModelMetricsService>>,
}
#[derive(Debug, Error)]
@@ -36,25 +41,58 @@ pub type Result<T> = std::result::Result<T, RoutingError>;
impl RouterService {
pub fn new(
providers: Vec<LlmProvider>,
top_level_prefs: Option<Vec<TopLevelRoutingPreference>>,
metrics_service: Option<Arc<ModelMetricsService>>,
router_url: String,
routing_model_name: String,
routing_provider_name: String,
) -> Self {
- let providers_with_usage = providers
- .iter()
- .filter(|provider| provider.routing_preferences.is_some())
- .cloned()
- .collect::<Vec<LlmProvider>>();
// Build top-level preference map and sentinel llm_routes when v0.4.0 format is used.
let (top_level_preferences, llm_routes, llm_usage_defined) =
if let Some(top_prefs) = top_level_prefs {
let top_level_map: HashMap<String, TopLevelRoutingPreference> = top_prefs
.into_iter()
.map(|p| (p.name.clone(), p))
.collect();
// Build sentinel routes: route_name → first model (RouterModelV1 needs a model
// mapping, but RouterService overrides the selection via metrics_service).
let sentinel_routes: HashMap<String, Vec<RoutingPreference>> = top_level_map
.iter()
.filter_map(|(name, pref)| {
pref.models.first().map(|first_model| {
(
first_model.clone(),
vec![RoutingPreference {
name: name.clone(),
description: pref.description.clone(),
}],
)
})
})
.collect();
let defined = !top_level_map.is_empty();
(top_level_map, sentinel_routes, defined)
} else {
// Legacy per-provider format.
let providers_with_usage = providers
.iter()
.filter(|provider| provider.routing_preferences.is_some())
.cloned()
.collect::<Vec<LlmProvider>>();
- let llm_routes: HashMap<String, Vec<RoutingPreference>> = providers_with_usage
- .iter()
- .filter_map(|provider| {
- provider
- .routing_preferences
- .as_ref()
- .map(|prefs| (provider.name.clone(), prefs.clone()))
- })
- .collect();
let routes: HashMap<String, Vec<RoutingPreference>> = providers_with_usage
.iter()
.filter_map(|provider| {
provider
.routing_preferences
.as_ref()
.map(|prefs| (provider.name.clone(), prefs.clone()))
})
.collect();
let defined = !providers_with_usage.is_empty();
(HashMap::new(), routes, defined)
};
let router_model = Arc::new(router_model_v1::RouterModelV1::new(
llm_routes,
@@ -67,7 +105,9 @@ impl RouterService {
client: reqwest::Client::new(),
router_model,
routing_provider_name,
- llm_usage_defined: !providers_with_usage.is_empty(),
llm_usage_defined,
top_level_preferences,
metrics_service,
}
}
@@ -76,23 +116,58 @@ impl RouterService {
messages: &[Message],
traceparent: &str,
usage_preferences: Option<Vec<ModelUsagePreference>>,
inline_routing_preferences: Option<Vec<TopLevelRoutingPreference>>,
request_id: &str,
) -> Result<Option<(String, String)>> {
if messages.is_empty() {
return Ok(None);
}
// Build inline top-level map from request if present (inline overrides config).
let inline_top_map: Option<HashMap<String, TopLevelRoutingPreference>> =
inline_routing_preferences.map(|prefs| {
prefs.into_iter().map(|p| (p.name.clone(), p)).collect()
});
// Determine whether any routing is defined.
let has_top_level = inline_top_map.is_some() || !self.top_level_preferences.is_empty();
if usage_preferences
.as_ref()
.is_none_or(|prefs| prefs.len() < 2)
&& !self.llm_usage_defined
&& !has_top_level
{
return Ok(None);
}
// For top-level format, build a synthetic ModelUsagePreference list so RouterModelV1
// generates the correct prompt (route name + description pairs).
let effective_usage_preferences: Option<Vec<ModelUsagePreference>> =
if let Some(ref inline_map) = inline_top_map {
Some(
inline_map
.values()
.map(|p| ModelUsagePreference {
model: p.models.first().cloned().unwrap_or_default(),
routing_preferences: vec![RoutingPreference {
name: p.name.clone(),
description: p.description.clone(),
}],
})
.collect(),
)
} else if !self.top_level_preferences.is_empty() {
// Config top-level prefs: already encoded as sentinel routes in RouterModelV1,
// pass None so it uses the pre-built llm_route_json_str.
None
} else {
usage_preferences.clone()
};
let router_request = self
.router_model
- .generate_request(messages, &usage_preferences);
.generate_request(messages, &effective_usage_preferences);
debug!(
model = %self.router_model.get_model_name(),
@@ -132,17 +207,40 @@ impl RouterService {
return Ok(None);
};
// Parse the route name from the router response.
let parsed = self
.router_model
- .parse_response(&content, &usage_preferences)?;
.parse_response(&content, &effective_usage_preferences)?;
let result = if let Some((route_name, sentinel_model)) = parsed {
// Check if this route belongs to the top-level preference format.
let top_pref = inline_top_map
.as_ref()
.and_then(|m| m.get(&route_name))
.or_else(|| self.top_level_preferences.get(&route_name));
if let Some(pref) = top_pref {
let selected_model = match &self.metrics_service {
Some(svc) => {
svc.select_model(&pref.models, &pref.selection_policy).await
}
None => pref.models.first().cloned().unwrap_or_default(),
};
Some((route_name, selected_model))
} else {
Some((route_name, sentinel_model))
}
} else {
None
};
info!(
content = %content.replace("\n", "\\n"),
- selected_model = ?parsed,
selected_model = ?result,
response_time_ms = elapsed.as_millis(),
"arch-router determined route"
);
- Ok(parsed)
Ok(result)
}
}
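
To make the sentinel-route construction above concrete, here is a small self-contained sketch of the map RouterService::new hands to RouterModelV1. It uses local stand-in structs rather than the real common::configuration types, and the route and model names are made up: each route's first model becomes the key, and the route name plus description become the preference entry the routing prompt is generated from.

use std::collections::HashMap;

// Local stand-ins; the real types live in common::configuration.
#[derive(Debug)]
struct RoutingPreference {
    name: String,
    description: String,
}

struct TopLevelRoutingPreference {
    name: String,
    description: String,
    models: Vec<String>,
}

fn main() {
    // Example preferences; route names and models are illustrative only.
    let prefs = vec![
        TopLevelRoutingPreference {
            name: "code".into(),
            description: "programming and debugging questions".into(),
            models: vec!["gpt-4o".into(), "claude-sonnet".into()],
        },
        TopLevelRoutingPreference {
            name: "chitchat".into(),
            description: "casual conversation".into(),
            models: vec!["gpt-4o-mini".into()],
        },
    ];

    // Mirrors the sentinel map in RouterService::new: first model -> route entry.
    // Routes with no models are skipped, exactly like the filter_map above.
    let sentinel_routes: HashMap<String, Vec<RoutingPreference>> = prefs
        .iter()
        .filter_map(|p| {
            p.models.first().map(|first_model| {
                (
                    first_model.clone(),
                    vec![RoutingPreference {
                        name: p.name.clone(),
                        description: p.description.clone(),
                    }],
                )
            })
        })
        .collect();

    // Prints: "gpt-4o" -> the code route, "gpt-4o-mini" -> the chitchat route.
    println!("{sentinel_routes:#?}");
}

The model actually served for a matched route is re-resolved afterwards: by ModelMetricsService::select_model when a metrics service is configured, otherwise by falling back to the route's first listed model.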


@@ -1,5 +1,6 @@
pub(crate) mod http;
pub mod llm;
pub mod model_metrics;
pub mod orchestrator;
pub mod orchestrator_model;
pub mod orchestrator_model_v1;


@@ -0,0 +1,209 @@
use std::collections::HashMap;
use std::sync::Arc;
use std::time::Duration;
use common::configuration::{ModelMetricsSources, SelectionPolicy, SelectionPreference};
use serde::Deserialize;
use tokio::sync::RwLock;
use tracing::{info, warn};
#[derive(Deserialize)]
struct MetricsResponse {
#[serde(default)]
cost: HashMap<String, f64>,
#[serde(default)]
latency: HashMap<String, f64>,
}
pub struct ModelMetricsService {
cost: Arc<RwLock<HashMap<String, f64>>>,
latency: Arc<RwLock<HashMap<String, f64>>>,
}
impl ModelMetricsService {
pub async fn new(sources: &ModelMetricsSources, client: reqwest::Client) -> Self {
let cost_data = Arc::new(RwLock::new(HashMap::new()));
let latency_data = Arc::new(RwLock::new(HashMap::new()));
let metrics = fetch_metrics(&sources.url, &client).await;
info!(
cost_models = metrics.cost.len(),
latency_models = metrics.latency.len(),
url = %sources.url,
"fetched model metrics"
);
*cost_data.write().await = metrics.cost;
*latency_data.write().await = metrics.latency;
if let Some(interval_secs) = sources.refresh_interval {
let cost_clone = Arc::clone(&cost_data);
let latency_clone = Arc::clone(&latency_data);
let client_clone = client.clone();
let url = sources.url.clone();
tokio::spawn(async move {
let interval = Duration::from_secs(interval_secs);
loop {
tokio::time::sleep(interval).await;
let metrics = fetch_metrics(&url, &client_clone).await;
info!(
cost_models = metrics.cost.len(),
latency_models = metrics.latency.len(),
url = %url,
"refreshed model metrics"
);
*cost_clone.write().await = metrics.cost;
*latency_clone.write().await = metrics.latency;
}
});
}
ModelMetricsService {
cost: cost_data,
latency: latency_data,
}
}
/// Select the best model from `models` according to `policy`.
/// Falls back to `models[0]` if metric data is unavailable for all candidates.
pub async fn select_model(&self, models: &[String], policy: &SelectionPolicy) -> String {
// Guard: an empty candidate list would otherwise panic in rand_index
// (modulo by zero) or in the models[0] fallback.
if models.is_empty() {
return String::new();
}
match policy.prefer {
SelectionPreference::Cheapest => {
let data = self.cost.read().await;
select_by_ascending_metric(models, &data)
}
SelectionPreference::Fastest => {
let data = self.latency.read().await;
select_by_ascending_metric(models, &data)
}
SelectionPreference::Random => {
let idx = rand_index(models.len());
models[idx].clone()
}
}
}
}
fn select_by_ascending_metric(models: &[String], data: &HashMap<String, f64>) -> String {
models
.iter()
.filter_map(|m| data.get(m.as_str()).map(|v| (m, *v)))
.min_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
.map(|(m, _)| m.clone())
.unwrap_or_else(|| models[0].clone())
}
/// Simple non-crypto random index using system time nanoseconds.
fn rand_index(len: usize) -> usize {
use std::time::{SystemTime, UNIX_EPOCH};
let nanos = SystemTime::now()
.duration_since(UNIX_EPOCH)
.map(|d| d.subsec_nanos() as usize)
.unwrap_or(0);
nanos % len
}
async fn fetch_metrics(url: &str, client: &reqwest::Client) -> MetricsResponse {
match client.get(url).send().await {
Ok(resp) => match resp.json::<MetricsResponse>().await {
Ok(data) => data,
Err(err) => {
warn!(error = %err, url = %url, "failed to parse metrics response");
MetricsResponse {
cost: HashMap::new(),
latency: HashMap::new(),
}
}
},
Err(err) => {
warn!(error = %err, url = %url, "failed to fetch metrics");
MetricsResponse {
cost: HashMap::new(),
latency: HashMap::new(),
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use common::configuration::SelectionPreference;
fn make_policy(prefer: SelectionPreference) -> SelectionPolicy {
SelectionPolicy { prefer }
}
#[test]
fn test_select_by_ascending_metric_picks_lowest() {
let models = vec!["a".to_string(), "b".to_string(), "c".to_string()];
let mut data = HashMap::new();
data.insert("a".to_string(), 0.01);
data.insert("b".to_string(), 0.005);
data.insert("c".to_string(), 0.02);
assert_eq!(select_by_ascending_metric(&models, &data), "b");
}
#[test]
fn test_select_by_ascending_metric_fallback_to_first() {
let models = vec!["x".to_string(), "y".to_string()];
let data = HashMap::new();
assert_eq!(select_by_ascending_metric(&models, &data), "x");
}
#[test]
fn test_select_by_ascending_metric_partial_data() {
let models = vec!["a".to_string(), "b".to_string()];
let mut data = HashMap::new();
data.insert("b".to_string(), 100.0);
assert_eq!(select_by_ascending_metric(&models, &data), "b");
}
#[tokio::test]
async fn test_select_model_cheapest() {
let service = ModelMetricsService {
cost: Arc::new(RwLock::new({
let mut m = HashMap::new();
m.insert("gpt-4o".to_string(), 0.005);
m.insert("gpt-4o-mini".to_string(), 0.0001);
m
})),
latency: Arc::new(RwLock::new(HashMap::new())),
};
let models = vec!["gpt-4o".to_string(), "gpt-4o-mini".to_string()];
let result = service
.select_model(&models, &make_policy(SelectionPreference::Cheapest))
.await;
assert_eq!(result, "gpt-4o-mini");
}
#[tokio::test]
async fn test_select_model_fastest() {
let service = ModelMetricsService {
cost: Arc::new(RwLock::new(HashMap::new())),
latency: Arc::new(RwLock::new({
let mut m = HashMap::new();
m.insert("gpt-4o".to_string(), 200.0);
m.insert("claude-sonnet".to_string(), 120.0);
m
})),
};
let models = vec!["gpt-4o".to_string(), "claude-sonnet".to_string()];
let result = service
.select_model(&models, &make_policy(SelectionPreference::Fastest))
.await;
assert_eq!(result, "claude-sonnet");
}
#[tokio::test]
async fn test_select_model_fallback_no_metrics() {
let service = ModelMetricsService {
cost: Arc::new(RwLock::new(HashMap::new())),
latency: Arc::new(RwLock::new(HashMap::new())),
};
let models = vec!["model-a".to_string(), "model-b".to_string()];
let result = service
.select_model(&models, &make_policy(SelectionPreference::Cheapest))
.await;
assert_eq!(result, "model-a");
}
}
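
To close the loop, a hedged wiring sketch: it assumes crate-internal visibility, a hypothetical metrics endpoint URL, and that ModelMetricsSources has exactly the url and refresh_interval fields the code above reads. The JSON shape in the comment is what MetricsResponse deserializes; both maps default to empty when absent or on fetch/parse failure.

// Sketch: wiring ModelMetricsService into a caller such as RouterService.
// The endpoint URL is hypothetical; it is expected to return, e.g.:
//   { "cost":    { "gpt-4o": 0.005, "gpt-4o-mini": 0.0001 },
//     "latency": { "gpt-4o": 200.0, "gpt-4o-mini": 90.0 } }
use std::sync::Arc;

use common::configuration::{ModelMetricsSources, SelectionPolicy, SelectionPreference};

use crate::router::model_metrics::ModelMetricsService;

async fn build_and_select() -> String {
    let sources = ModelMetricsSources {
        url: "http://metrics.internal/models".to_string(), // hypothetical
        refresh_interval: Some(300), // background refresh every 5 minutes
    };
    // new() performs an initial fetch and, because refresh_interval is set,
    // spawns the periodic refresh loop on the current tokio runtime.
    let svc = Arc::new(ModelMetricsService::new(&sources, reqwest::Client::new()).await);

    let models = vec!["gpt-4o".to_string(), "gpt-4o-mini".to_string()];
    // With the payload above, Cheapest resolves to "gpt-4o-mini"; with no
    // cost data at all it falls back to models[0] ("gpt-4o").
    svc.select_model(
        &models,
        &SelectionPolicy { prefer: SelectionPreference::Cheapest },
    )
    .await
}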