diff --git a/web/src/routes/server-admin/ai/+page.svelte b/web/src/routes/server-admin/ai/+page.svelte
index fcab35b..bcd0153 100644
--- a/web/src/routes/server-admin/ai/+page.svelte
+++ b/web/src/routes/server-admin/ai/+page.svelte
@@ -2040,7 +2040,7 @@
 
 	/* Usage/tokenforbruk */
 	.usage-row {
-		grid-template-columns: auto auto auto auto auto auto auto;
+		grid-template-columns: 1fr 2fr 70px 80px 80px 80px 70px;
 	}
 
 	.col-model-actual {
diff --git a/worker/src/handlers/ai_text_process.rs b/worker/src/handlers/ai_text_process.rs
index e1e0090..36a18f2 100644
--- a/worker/src/handlers/ai_text_process.rs
+++ b/worker/src/handlers/ai_text_process.rs
@@ -80,9 +80,16 @@ impl JobHandler for AiTextProcessHandler {
 
         let prompt_override = payload.get("prompt_override").and_then(|v| v.as_str());
 
-        // Slå opp modellalias fra jobbrutingen, fall tilbake til payload eller default
-        let model_from_routing: Option<String> = sqlx::query_scalar(
-            "SELECT a.alias FROM ai_job_routing r JOIN ai_model_aliases a ON a.id = r.alias_id WHERE r.job_type = 'ai_text_process'"
+        // Slå opp modellalias og faktisk modellnavn fra jobbrutingen
+        let routing_info: Option<(String, String)> = sqlx::query_as(
+            r#"SELECT a.alias, p.litellm_model
+               FROM ai_job_routing r
+               JOIN ai_model_aliases a ON a.id = r.alias_id
+               JOIN ai_model_providers p ON p.alias_id = a.id
+               JOIN ai_api_keys k ON k.env_name = p.api_key_env
+               WHERE r.job_type = 'ai_text_process'
+                 AND p.is_active = true AND k.is_enabled = true
+               ORDER BY p.priority ASC LIMIT 1"#
         )
         .fetch_optional(pool)
         .await
@@ -93,9 +100,11 @@ impl JobHandler for AiTextProcessHandler {
             .get("model")
             .and_then(|v| v.as_str())
             .map(|s| s.to_string())
-            .or(model_from_routing)
+            .or_else(|| routing_info.as_ref().map(|(alias, _)| alias.clone()))
             .unwrap_or_else(|| "sidelinja/rutine".to_string());
 
+        let expected_model = routing_info.map(|(_, m)| m);
+
         info!(
             message_id = %message_id,
             action = action,
@@ -192,6 +201,14 @@
         .context("Feil ved oppdatering av metadata")?;
 
         // 8. Logg tokenforbruk til ai_usage_log
+        // LiteLLM returnerer alias-navnet i model-feltet — bruk expected_model fra DB
+        let actual_model = match &ai_resp.model_actual {
+            Some(m) if m != &model => Some(m.clone()), // Gateway returnerte faktisk modellnavn
+            _ => expected_model, // Bruk oppslaget fra providers-tabellen
+        };
+        // Strip openrouter/-prefiks for lesbarhet
+        let actual_model_clean = actual_model.map(|m| m.replace("openrouter/", "").replace("gemini/", "google/"));
+
         sqlx::query(
             r#"
             INSERT INTO ai_usage_log (workspace_id, job_id, job_type, model_alias, model_actual, prompt_tokens, completion_tokens, total_tokens)
@@ -200,8 +217,8 @@
         )
         .bind(workspace_id)
         .bind(job_id)
-        .bind(model)
-        .bind(&ai_resp.model_actual)
+        .bind(&model)
+        .bind(&actual_model_clean)
         .bind(ai_resp.prompt_tokens)
         .bind(ai_resp.completion_tokens)
         .bind(ai_resp.total_tokens)