AI-admin: multi-provider katalog, prompt label/icon, usage action-kolonne

- Modellkatalog henter fra xAI, Gemini, OpenAI og OpenRouter (ikke bare OR)
- Hver katalogmodell bærer litellm_prefix og api_key_env — forenkler add-flow
- Prompts har nå label, icon og sort_order — vises i chat og admin
- ai_usage_log får action-kolonne for å spore hvilken prompt som ble brukt
- Worker skriver PG-metadata FØR SpacetimeDB slik at frontend finner fersk data
- getUserWorkspaces returnerer rolle

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
vegard 2026-03-16 09:18:24 +01:00
parent 7899b2f224
commit a1e6fa1c6b
9 changed files with 422 additions and 123 deletions

View file

@ -0,0 +1,8 @@
-- 0015_usage_action_column.sql
-- Add an action column to ai_usage_log to record which prompt/action produced
-- each usage row (consumed by the admin usage table's new action column).
-- IF NOT EXISTS makes this migration safe to re-run.
BEGIN;
ALTER TABLE ai_usage_log ADD COLUMN IF NOT EXISTS action TEXT;
COMMIT;

View file

@ -15,10 +15,10 @@ export interface Workspace {
settings: Record<string, unknown>; settings: Record<string, unknown>;
} }
/** Hent alle workspaces brukeren er medlem av */ /** Hent alle workspaces brukeren er medlem av, med rolle */
export async function getUserWorkspaces(userId: string): Promise<Workspace[]> { export async function getUserWorkspaces(userId: string): Promise<(Workspace & { role: string })[]> {
return sql<Workspace[]>` return sql<(Workspace & { role: string })[]>`
SELECT w.id, w.name, w.slug, w.domain, w.settings SELECT w.id, w.name, w.slug, w.domain, w.settings, wm.role::text AS role
FROM workspaces w FROM workspaces w
JOIN workspace_members wm ON wm.workspace_id = w.id JOIN workspace_members wm ON wm.workspace_id = w.id
WHERE wm.user_id = ${userId} WHERE wm.user_id = ${userId}

View file

@ -1,20 +1,13 @@
import { json, error } from '@sveltejs/kit'; import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types'; import type { RequestHandler } from './$types';
import { env } from '$env/dynamic/private'; import { sql } from '$lib/server/db';
interface OpenRouterModel {
id: string;
name: string;
context_length: number;
pricing: { prompt: string; completion: string };
top_provider?: { max_completion_tokens?: number };
architecture?: { modality?: string };
}
export interface CatalogModel { export interface CatalogModel {
id: string; id: string;
name: string; name: string;
provider: string; provider: string;
litellm_prefix: string;
api_key_env: string;
context_length: number; context_length: number;
prompt_price_per_m: number; prompt_price_per_m: number;
completion_price_per_m: number; completion_price_per_m: number;
@ -22,46 +15,157 @@ export interface CatalogModel {
max_completion: number | null; max_completion: number | null;
} }
interface ProviderFetcher {
keyEnv: string;
label: string;
litellmPrefix: string;
fetch: (apiKey: string) => Promise<CatalogModel[]>;
}
let cache: { models: CatalogModel[]; fetched_at: number } | null = null; let cache: { models: CatalogModel[]; fetched_at: number } | null = null;
const CACHE_TTL = 60 * 60 * 1000; // 1 time const CACHE_TTL = 60 * 60 * 1000; // 1 time
function toPerMillion(pricePerToken: string): number { function toPerMillion(pricePerToken: string | number): number {
const n = parseFloat(pricePerToken); const n = typeof pricePerToken === 'string' ? parseFloat(pricePerToken) : pricePerToken;
if (isNaN(n)) return 0; if (isNaN(n)) return 0;
return Math.round(n * 1_000_000 * 100) / 100; return Math.round(n * 1_000_000 * 100) / 100;
} }
export const GET: RequestHandler = async ({ locals }) => { // --- Provider-spesifikke hentere ---
if (!locals.workspace || !locals.user) error(401);
const apiKey = env.OPENROUTER_API_KEY;
if (!apiKey) {
error(500, 'OPENROUTER_API_KEY er ikke konfigurert');
}
if (cache && Date.now() - cache.fetched_at < CACHE_TTL) {
return json(cache.models);
}
async function fetchOpenRouter(apiKey: string): Promise<CatalogModel[]> {
const res = await fetch('https://openrouter.ai/api/v1/models', { const res = await fetch('https://openrouter.ai/api/v1/models', {
headers: { Authorization: `Bearer ${apiKey}` } headers: { Authorization: `Bearer ${apiKey}` }
}); });
if (!res.ok) return [];
if (!res.ok) {
error(502, `OpenRouter returnerte ${res.status}`);
}
const body = await res.json(); const body = await res.json();
const models: CatalogModel[] = (body.data as OpenRouterModel[]).map((m) => ({ return (body.data ?? []).map((m: any) => ({
id: m.id, id: m.id,
name: m.name, name: m.name ?? m.id,
provider: m.id.split('/')[0], provider: m.id.split('/')[0],
context_length: m.context_length, litellm_prefix: 'openrouter/',
api_key_env: 'OPENROUTER_API_KEY',
context_length: m.context_length ?? 0,
prompt_price_per_m: toPerMillion(m.pricing?.prompt ?? '0'), prompt_price_per_m: toPerMillion(m.pricing?.prompt ?? '0'),
completion_price_per_m: toPerMillion(m.pricing?.completion ?? '0'), completion_price_per_m: toPerMillion(m.pricing?.completion ?? '0'),
modality: m.architecture?.modality ?? 'text', modality: m.architecture?.modality ?? 'text',
max_completion: m.top_provider?.max_completion_tokens ?? null max_completion: m.top_provider?.max_completion_tokens ?? null
})); }));
}
async function fetchXai(_apiKey: string): Promise<CatalogModel[]> {
// xAI /v1/models krever betalt konto — hardkod kjente modeller
// Kilde: https://docs.x.ai/docs/models
const models = [
{ id: 'grok-4.20-multi-agent-beta-0309', name: 'Grok 4.20 Multi-Agent (beta)', ctx: 131072 },
{ id: 'grok-4.20-beta-0309-reasoning', name: 'Grok 4.20 (reasoning, beta)', ctx: 131072 },
{ id: 'grok-4.20-beta-0309-non-reasoning', name: 'Grok 4.20 (beta)', ctx: 131072 },
{ id: 'grok-4-0709', name: 'Grok 4', ctx: 131072 },
{ id: 'grok-4-fast-reasoning', name: 'Grok 4 Fast (reasoning)', ctx: 131072 },
{ id: 'grok-4-fast-non-reasoning', name: 'Grok 4 Fast', ctx: 131072 },
{ id: 'grok-4-1-fast-reasoning', name: 'Grok 4.1 Fast (reasoning)', ctx: 131072 },
{ id: 'grok-4-1-fast-non-reasoning', name: 'Grok 4.1 Fast', ctx: 131072 },
{ id: 'grok-3', name: 'Grok 3', ctx: 131072 },
{ id: 'grok-3-mini', name: 'Grok 3 Mini', ctx: 131072 },
{ id: 'grok-code-fast-1', name: 'Grok Code Fast', ctx: 131072 },
];
return models.map(m => ({
id: m.id,
name: m.name,
provider: 'xai',
litellm_prefix: 'xai/',
api_key_env: 'XAI_API_KEY',
context_length: m.ctx,
prompt_price_per_m: -1,
completion_price_per_m: -1,
modality: 'text',
max_completion: null
}));
}
async function fetchGemini(apiKey: string): Promise<CatalogModel[]> {
const res = await fetch(
`https://generativelanguage.googleapis.com/v1beta/models?key=${apiKey}`
);
if (!res.ok) return [];
const body = await res.json();
return (body.models ?? [])
.filter((m: any) => m.supportedGenerationMethods?.includes('generateContent'))
.map((m: any) => {
// models/gemini-2.5-flash → gemini-2.5-flash
const shortName = (m.name as string).replace('models/', '');
return {
id: shortName,
name: m.displayName ?? shortName,
provider: 'google',
litellm_prefix: 'gemini/',
api_key_env: 'GEMINI_API_KEY',
context_length: m.inputTokenLimit ?? 0,
prompt_price_per_m: -1,
completion_price_per_m: -1,
modality: 'text',
max_completion: m.outputTokenLimit ?? null
};
});
}
async function fetchOpenAI(apiKey: string): Promise<CatalogModel[]> {
const res = await fetch('https://api.openai.com/v1/models', {
headers: { Authorization: `Bearer ${apiKey}` }
});
if (!res.ok) return [];
const body = await res.json();
return (body.data ?? [])
.filter((m: any) => m.id.startsWith('gpt-') || m.id.startsWith('o') || m.id.startsWith('chatgpt-'))
.map((m: any) => ({
id: m.id,
name: m.id,
provider: 'openai',
litellm_prefix: 'openai/',
api_key_env: 'OPENAI_API_KEY',
context_length: 128000,
prompt_price_per_m: 0,
completion_price_per_m: 0,
modality: 'text',
max_completion: null
}));
}
const PROVIDERS: ProviderFetcher[] = [
{ keyEnv: 'XAI_API_KEY', label: 'xAI', litellmPrefix: 'xai/', fetch: fetchXai },
{ keyEnv: 'GEMINI_API_KEY', label: 'Google', litellmPrefix: 'gemini/', fetch: fetchGemini },
{ keyEnv: 'OPENAI_API_KEY', label: 'OpenAI', litellmPrefix: 'openai/', fetch: fetchOpenAI },
{ keyEnv: 'OPENROUTER_API_KEY', label: 'OpenRouter', litellmPrefix: 'openrouter/', fetch: fetchOpenRouter }
];
export const GET: RequestHandler = async ({ locals, url }) => {
if (!locals.workspace || !locals.user) error(401);
const forceRefresh = url.searchParams.get('refresh') === '1';
if (!forceRefresh && cache && Date.now() - cache.fetched_at < CACHE_TTL) {
return json(cache.models);
}
// Hent aktive nøkler med verdier fra DB
const keys = await sql`
SELECT env_name, key_value FROM ai_api_keys WHERE is_enabled = true AND key_value IS NOT NULL
`;
const keyMap = new Map(keys.map((k: any) => [k.env_name, k.key_value as string]));
// Hent fra alle aktive leverandører parallelt
const promises = PROVIDERS
.filter(p => keyMap.has(p.keyEnv))
.map(async (p) => {
try {
return await p.fetch(keyMap.get(p.keyEnv)!);
} catch {
return [];
}
});
const results = await Promise.all(promises);
const models = results.flat();
cache = { models, fetched_at: Date.now() }; cache = { models, fetched_at: Date.now() };
return json(models); return json(models);

View file

@ -0,0 +1,16 @@
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { sql } from '$lib/server/db';
/**
 * GET — list all AI prompts.
 *
 * Returns the same column set the admin page load uses (label, icon,
 * sort_order included) and the same ordering (sort_order first, then action),
 * so API consumers and the server-rendered page see identical data.
 * Responds 401 when the request has no workspace/user in locals.
 */
export const GET: RequestHandler = async ({ locals }) => {
	if (!locals.workspace || !locals.user) error(401);

	const rows = await sql`
		SELECT action, system_prompt, description, label, icon, sort_order, updated_at
		FROM ai_prompts
		ORDER BY sort_order, action
	`;
	return json(rows);
};

View file

@ -0,0 +1,44 @@
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { sql } from '$lib/server/db';
/**
 * PATCH — partially update system_prompt, description, label and/or icon.
 *
 * Omitted fields keep their stored value (COALESCE). NOTE: this also means a
 * field cannot be cleared back to NULL through this endpoint — sending null is
 * indistinguishable from omitting the field.
 * Responds 401 without auth, 400 on an empty patch, 404 for unknown actions.
 */
export const PATCH: RequestHandler = async ({ params, request, locals }) => {
	if (!locals.workspace || !locals.user) error(401);
	const body = await request.json();

	// Reject an empty patch: it would be a no-op UPDATE that still bumps
	// updated_at, which misleads the "Oppdatert" column in the admin UI.
	if (
		body.system_prompt == null &&
		body.description == null &&
		body.label == null &&
		body.icon == null
	) {
		error(400, 'Ingen felter å oppdatere');
	}

	const [row] = await sql`
		UPDATE ai_prompts SET
			system_prompt = COALESCE(${body.system_prompt ?? null}, system_prompt),
			description = COALESCE(${body.description ?? null}, description),
			label = COALESCE(${body.label ?? null}, label),
			icon = COALESCE(${body.icon ?? null}, icon),
			updated_at = now()
		WHERE action = ${params.action}
		RETURNING action, system_prompt, description, label, icon, sort_order, updated_at
	`;
	if (!row) error(404, 'Prompt ikke funnet');
	return json(row);
};
/**
 * PUT — create or replace (upsert) the prompt for an action.
 *
 * On conflict the system_prompt is always replaced; description is kept when
 * the request omits it. RETURNING mirrors the PATCH handler's column set
 * (label, icon, sort_order included) so both endpoints yield the same shape
 * as the client-side Prompt interface.
 * Responds 401 without auth, 400 when system_prompt is missing.
 */
export const PUT: RequestHandler = async ({ params, request, locals }) => {
	if (!locals.workspace || !locals.user) error(401);
	const body = await request.json();
	if (!body.system_prompt) error(400, 'system_prompt er påkrevd');

	const [row] = await sql`
		INSERT INTO ai_prompts (action, system_prompt, description)
		VALUES (${params.action}, ${body.system_prompt}, ${body.description ?? null})
		ON CONFLICT (action) DO UPDATE SET
			system_prompt = EXCLUDED.system_prompt,
			description = COALESCE(EXCLUDED.description, ai_prompts.description),
			updated_at = now()
		RETURNING action, system_prompt, description, label, icon, sort_order, updated_at
	`;
	return json(row);
};

View file

@ -0,0 +1,19 @@
import { json, error } from '@sveltejs/kit';
import type { RequestHandler } from './$types';
import { sql } from '$lib/server/db';
/**
 * POST — renumber priorities for a list of providers.
 *
 * Accepts a JSON array of { id, priority } pairs and applies one UPDATE per
 * entry. Responds 401 without auth, 400 on an empty/non-array payload or on
 * any malformed entry.
 */
export const POST: RequestHandler = async ({ request, locals }) => {
	if (!locals.workspace || !locals.user) error(401);
	const items: { id: string; priority: number }[] = await request.json();
	if (!Array.isArray(items) || items.length === 0) error(400);

	// Validate every entry before touching the database, so a bad payload
	// never results in a half-applied batch.
	for (const item of items) {
		if (typeof item?.id !== 'string' || !Number.isInteger(item?.priority)) {
			error(400, 'Hvert element må ha id (string) og priority (heltall)');
		}
	}

	// NOTE(review): the updates run sequentially outside a transaction; a
	// mid-loop DB failure still leaves priorities partially renumbered.
	// Confirm whether the sql helper exposes transactions if atomicity matters.
	for (const item of items) {
		await sql`
			UPDATE ai_model_providers SET priority = ${item.priority} WHERE id = ${item.id}::uuid
		`;
	}
	return json({ ok: true });
};

View file

@ -22,22 +22,23 @@ export const load: PageServerLoad = async () => {
`; `;
const prompts = await sql` const prompts = await sql`
SELECT action, system_prompt, description, updated_at SELECT action, system_prompt, description, label, icon, sort_order, updated_at
FROM ai_prompts FROM ai_prompts
ORDER BY action ORDER BY sort_order, action
`; `;
const usage = await sql` const usage = await sql`
SELECT SELECT
model_alias, model_alias,
model_actual, model_actual,
action,
count(*)::int AS call_count, count(*)::int AS call_count,
sum(prompt_tokens)::int AS prompt_tokens, sum(prompt_tokens)::int AS prompt_tokens,
sum(completion_tokens)::int AS completion_tokens, sum(completion_tokens)::int AS completion_tokens,
sum(total_tokens)::int AS total_tokens sum(total_tokens)::int AS total_tokens
FROM ai_usage_log FROM ai_usage_log
WHERE created_at > now() - interval '30 days' WHERE created_at > now() - interval '30 days'
GROUP BY model_alias, model_actual GROUP BY model_alias, model_actual, action
ORDER BY total_tokens DESC ORDER BY total_tokens DESC
`; `;

View file

@ -31,12 +31,16 @@
action: string; action: string;
system_prompt: string; system_prompt: string;
description: string | null; description: string | null;
label: string | null;
icon: string | null;
sort_order: number;
updated_at: string; updated_at: string;
} }
interface UsageRow { interface UsageRow {
model_alias: string; model_alias: string;
model_actual: string | null; model_actual: string | null;
action: string | null;
call_count: number; call_count: number;
prompt_tokens: number; prompt_tokens: number;
completion_tokens: number; completion_tokens: number;
@ -47,6 +51,8 @@
id: string; id: string;
name: string; name: string;
provider: string; provider: string;
litellm_prefix: string;
api_key_env: string;
context_length: number; context_length: number;
prompt_price_per_m: number; prompt_price_per_m: number;
completion_price_per_m: number; completion_price_per_m: number;
@ -74,6 +80,8 @@
let configMsg = $state(''); let configMsg = $state('');
let editingPrompt = $state<string | null>(null); let editingPrompt = $state<string | null>(null);
let editPromptText = $state(''); let editPromptText = $state('');
let editPromptLabel = $state('');
let editPromptIcon = $state('');
let expandedAlias = $state<string | null>(null); let expandedAlias = $state<string | null>(null);
// Alias-redigering // Alias-redigering
@ -135,8 +143,9 @@
return String(n); return String(n);
} }
function formatPrice(n: number): string { function formatPrice(n: number | null | undefined): string {
if (n === 0) return 'Gratis'; if (n == null || n < 0) return '\u2014';
if (n === 0) return '\u2014';
return `$${n.toFixed(2)}`; return `$${n.toFixed(2)}`;
} }
@ -263,6 +272,7 @@
} }
} }
// Grupper etter api_key_env + provider (f.eks. "google via GEMINI_API_KEY" vs "google via OPENROUTER_API_KEY")
let groupedByProvider = $derived.by(() => { let groupedByProvider = $derived.by(() => {
const search = catalogSearch.toLowerCase(); const search = catalogSearch.toLowerCase();
const filtered = search const filtered = search
@ -270,23 +280,42 @@
(m) => (m) =>
m.name.toLowerCase().includes(search) || m.name.toLowerCase().includes(search) ||
m.id.toLowerCase().includes(search) || m.id.toLowerCase().includes(search) ||
m.provider.toLowerCase().includes(search) m.provider.toLowerCase().includes(search) ||
m.api_key_env.toLowerCase().includes(search)
) )
: catalogModels; : catalogModels;
const map = new Map<string, CatalogModel[]>(); const map = new Map<string, CatalogModel[]>();
for (const m of filtered) { for (const m of filtered) {
const list = map.get(m.provider) ?? []; // Grupper per API-nøkkel, med provider som undergruppe
const groupKey = m.api_key_env === 'OPENROUTER_API_KEY'
? `${m.provider} (OpenRouter)`
: m.provider;
const list = map.get(groupKey) ?? [];
list.push(m); list.push(m);
map.set(m.provider, list); map.set(groupKey, list);
} }
// Sorter modeller synkende etter pris innen hver provider // Sorter: pris synkende først (ukjent/-1 sist), deretter navn synkende
for (const [, models] of map) { for (const [, models] of map) {
models.sort((a, b) => b.completion_price_per_m - a.completion_price_per_m); models.sort((a, b) => {
const aPrice = a.completion_price_per_m;
const bPrice = b.completion_price_per_m;
const aHasPrice = aPrice > 0;
const bHasPrice = bPrice > 0;
if (aHasPrice !== bHasPrice) return aHasPrice ? -1 : 1;
if (aPrice !== bPrice) return bPrice - aPrice;
return b.name.localeCompare(a.name);
});
} }
return [...map.entries()].sort(([a], [b]) => a.localeCompare(b)); // Direkte API-nøkler først, deretter OpenRouter-grupper
return [...map.entries()].sort(([a], [b]) => {
const aOr = a.includes('OpenRouter');
const bOr = b.includes('OpenRouter');
if (aOr !== bOr) return aOr ? 1 : -1;
return a.localeCompare(b);
});
}); });
let catalogPickerFiltered = $derived.by(() => { let catalogPickerFiltered = $derived.by(() => {
@ -310,43 +339,11 @@
expandedProviders = new Set(expandedProviders); expandedProviders = new Set(expandedProviders);
} }
// Mapping fra OpenRouter-provider til LiteLLM direkte-prefiks + nøkkel function litellmModelId(model: CatalogModel): string {
const directKeyMap: Record<string, { prefix: string; key: string }> = { return `${model.litellm_prefix}${model.id}`;
google: { prefix: 'gemini/', key: 'GEMINI_API_KEY' },
anthropic: { prefix: 'anthropic/', key: 'ANTHROPIC_API_KEY' },
openai: { prefix: 'openai/', key: 'OPENAI_API_KEY' },
'x-ai': { prefix: 'xai/', key: 'XAI_API_KEY' },
};
function modelForKey(model: CatalogModel, keyEnv: string): string {
if (keyEnv === 'OPENROUTER_API_KEY') return `openrouter/${model.id}`;
// Direkte: strip provider-prefix fra model.id, legg til LiteLLM-prefiks
const mapping = directKeyMap[model.provider];
if (mapping) {
const modelName = model.id.replace(`${model.provider}/`, '');
return `${mapping.prefix}${modelName}`;
}
return `openrouter/${model.id}`;
} }
function availableKeysForModel(model: CatalogModel): ApiKey[] { async function addFromCatalog(model: CatalogModel, aliasId: string) {
const keys: ApiKey[] = [];
// Direkte nøkkel for denne leverandøren
const mapping = directKeyMap[model.provider];
if (mapping) {
const directKey = apiKeys.find(k => k.name === mapping.key);
if (directKey) keys.push(directKey);
}
// OpenRouter alltid tilgjengelig
const orKey = apiKeys.find(k => k.name === 'OPENROUTER_API_KEY');
if (orKey) keys.push(orKey);
return keys;
}
// Catalog add — steg 1: velg alias, steg 2: velg nøkkel
let catalogAddKey = $state('');
async function addFromCatalog(model: CatalogModel, aliasId: string, keyEnv: string) {
errorMsg = ''; errorMsg = '';
const maxPri = Math.max(0, ...providersForAlias(aliasId).map((p) => p.priority)); const maxPri = Math.max(0, ...providersForAlias(aliasId).map((p) => p.priority));
try { try {
@ -356,8 +353,8 @@
body: JSON.stringify({ body: JSON.stringify({
alias_id: aliasId, alias_id: aliasId,
priority: maxPri + 1, priority: maxPri + 1,
litellm_model: modelForKey(model, keyEnv), litellm_model: litellmModelId(model),
api_key_env: keyEnv api_key_env: model.api_key_env
}) })
}); });
if (!res.ok) throw new Error('Feil ved opprettelse'); if (!res.ok) throw new Error('Feil ved opprettelse');
@ -365,14 +362,14 @@
providers = [...providers, row]; providers = [...providers, row];
addingFromCatalog = null; addingFromCatalog = null;
catalogAddAlias = ''; catalogAddAlias = '';
catalogAddKey = '';
} catch { } catch {
errorMsg = 'Kunne ikke legge til provider fra katalog'; errorMsg = 'Kunne ikke legge til provider fra katalog';
} }
} }
function selectFromPicker(model: CatalogModel) { function selectFromPicker(model: CatalogModel) {
newProvider.litellm_model = modelForKey(model, newProvider.api_key_env); newProvider.litellm_model = litellmModelId(model);
newProvider.api_key_env = model.api_key_env;
showCatalogPicker = false; showCatalogPicker = false;
catalogPickerSearch = ''; catalogPickerSearch = '';
} }
@ -433,8 +430,12 @@
function estimateCost(row: UsageRow): number | null { function estimateCost(row: UsageRow): number | null {
if (!catalogLoaded || !row.model_actual) return null; if (!catalogLoaded || !row.model_actual) return null;
const model = catalogModels.find((m) => m.id === row.model_actual); // model_actual kan være "xai/grok-..." eller "google/gemma-..." — match mot id eller litellm_prefix+id
if (!model) return null; const actual = row.model_actual;
const model = catalogModels.find((m) =>
m.id === actual || `${m.litellm_prefix}${m.id}` === actual || `${m.provider}/${m.id}` === actual
);
if (!model || model.prompt_price_per_m < 0 || model.completion_price_per_m < 0) return null;
return ( return (
(row.prompt_tokens / 1_000_000) * model.prompt_price_per_m + (row.prompt_tokens / 1_000_000) * model.prompt_price_per_m +
(row.completion_tokens / 1_000_000) * model.completion_price_per_m (row.completion_tokens / 1_000_000) * model.completion_price_per_m
@ -589,11 +590,33 @@
const res = await fetch(`/api/admin/ai/providers/${provider.id}`, { method: 'DELETE' }); const res = await fetch(`/api/admin/ai/providers/${provider.id}`, { method: 'DELETE' });
if (!res.ok) throw new Error('Feil'); if (!res.ok) throw new Error('Feil');
providers = providers.filter((p) => p.id !== provider.id); providers = providers.filter((p) => p.id !== provider.id);
// Renummerer prioriteter for gjenværende providers under samme alias
await renumberPriorities(provider.alias_id);
} catch { } catch {
errorMsg = 'Kunne ikke slette provider'; errorMsg = 'Kunne ikke slette provider';
} }
} }
async function renumberPriorities(aliasId: string) {
const ap = providersForAlias(aliasId);
let changed = false;
for (let i = 0; i < ap.length; i++) {
if (ap[i].priority !== i + 1) {
ap[i].priority = i + 1;
changed = true;
}
}
if (!changed) return;
try {
await fetch('/api/admin/ai/providers/renumber', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(ap.map(p => ({ id: p.id, priority: p.priority })))
});
providers = [...providers];
} catch { /* stille */ }
}
async function addAlias() { async function addAlias() {
errorMsg = ''; errorMsg = '';
if (!newAlias.alias) return; if (!newAlias.alias) return;
@ -655,11 +678,15 @@
function startEditPrompt(prompt: Prompt) { function startEditPrompt(prompt: Prompt) {
editingPrompt = prompt.action; editingPrompt = prompt.action;
editPromptText = prompt.system_prompt; editPromptText = prompt.system_prompt;
editPromptLabel = prompt.label ?? '';
editPromptIcon = prompt.icon ?? '';
} }
function cancelEditPrompt() { function cancelEditPrompt() {
editingPrompt = null; editingPrompt = null;
editPromptText = ''; editPromptText = '';
editPromptLabel = '';
editPromptIcon = '';
} }
async function savePrompt(prompt: Prompt) { async function savePrompt(prompt: Prompt) {
@ -669,14 +696,22 @@
const res = await fetch(`/api/admin/ai/prompts/${prompt.action}`, { const res = await fetch(`/api/admin/ai/prompts/${prompt.action}`, {
method: 'PATCH', method: 'PATCH',
headers: { 'Content-Type': 'application/json' }, headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ system_prompt: editPromptText }) body: JSON.stringify({
system_prompt: editPromptText,
label: editPromptLabel || null,
icon: editPromptIcon || null
})
}); });
if (!res.ok) throw new Error('Feil ved lagring'); if (!res.ok) throw new Error('Feil ved lagring');
const updated = await res.json(); const updated = await res.json();
prompt.system_prompt = updated.system_prompt; prompt.system_prompt = updated.system_prompt;
prompt.label = updated.label;
prompt.icon = updated.icon;
prompt.updated_at = updated.updated_at; prompt.updated_at = updated.updated_at;
editingPrompt = null; editingPrompt = null;
editPromptText = ''; editPromptText = '';
editPromptLabel = '';
editPromptIcon = '';
markSaved(prompt.action); markSaved(prompt.action);
} catch { } catch {
errorMsg = 'Kunne ikke lagre prompt'; errorMsg = 'Kunne ikke lagre prompt';
@ -816,7 +851,7 @@
<!-- Seksjon 1: Modellkatalog --> <!-- Seksjon 1: Modellkatalog -->
<section> <section>
<div class="catalog-header"> <div class="catalog-header">
<h3>Modellkatalog (OpenRouter)</h3> <h3>Modellkatalog</h3>
<div class="catalog-actions"> <div class="catalog-actions">
{#if catalogLoaded} {#if catalogLoaded}
<input <input
@ -825,6 +860,7 @@
placeholder="Søk modeller..." placeholder="Søk modeller..."
bind:value={catalogSearch} bind:value={catalogSearch}
/> />
<button class="toggle-btn" onclick={() => { catalogLoaded = false; catalogModels = []; }}>Skjul</button>
{/if} {/if}
<button <button
class="toggle-btn" class="toggle-btn"
@ -867,30 +903,23 @@
<span class="cat-col-add"> <span class="cat-col-add">
<button <button
class="toggle-btn" class="toggle-btn"
onclick={() => { addingFromCatalog = addingFromCatalog === model.id ? null : model.id; catalogAddAlias = ''; catalogAddKey = ''; }} onclick={() => { addingFromCatalog = addingFromCatalog === model.id ? null : model.id; catalogAddAlias = ''; }}
>{addingFromCatalog === model.id ? '&#x2717;' : 'Legg til &rarr;'}</button> >{addingFromCatalog === model.id ? '&#x2717;' : 'Legg til &rarr;'}</button>
</span> </span>
</div> </div>
{#if addingFromCatalog === model.id} {#if addingFromCatalog === model.id}
{@const modelKeys = availableKeysForModel(model)}
<div class="catalog-add-row"> <div class="catalog-add-row">
<span class="catalog-add-label">Legg til <strong>{model.name}</strong>:</span> <span class="catalog-add-label">Legg til <strong>{model.name}</strong> via {model.api_key_env}:</span>
<select bind:value={catalogAddAlias}> <select bind:value={catalogAddAlias}>
<option value="">Velg alias...</option> <option value="">Velg alias...</option>
{#each aliases as a} {#each aliases as a}
<option value={a.id}>{a.alias}</option> <option value={a.id}>{a.alias}</option>
{/each} {/each}
</select> </select>
<select bind:value={catalogAddKey}>
<option value="">Velg nøkkel...</option>
{#each modelKeys as k}
<option value={k.name}>{k.label}{k.is_enabled ? '' : ' (av)'}</option>
{/each}
</select>
<button <button
class="add-btn" class="add-btn"
disabled={!catalogAddAlias || !catalogAddKey} disabled={!catalogAddAlias}
onclick={() => addFromCatalog(model, catalogAddAlias, catalogAddKey)} onclick={() => addFromCatalog(model, catalogAddAlias)}
>Legg til</button> >Legg til</button>
</div> </div>
{/if} {/if}
@ -1139,6 +1168,7 @@
<div class="table-list"> <div class="table-list">
<div class="table-row table-row--header prompt-row"> <div class="table-row table-row--header prompt-row">
<span class="col-action">Action</span> <span class="col-action">Action</span>
<span class="col-label">Visningsnavn</span>
<span class="col-desc">Beskrivelse</span> <span class="col-desc">Beskrivelse</span>
<span class="col-chars">Tegn</span> <span class="col-chars">Tegn</span>
<span class="col-updated">Oppdatert</span> <span class="col-updated">Oppdatert</span>
@ -1148,6 +1178,7 @@
{#each prompts as prompt (prompt.action)} {#each prompts as prompt (prompt.action)}
<div class="table-row prompt-row"> <div class="table-row prompt-row">
<span class="col-action">{prompt.action}</span> <span class="col-action">{prompt.action}</span>
<span class="col-label">{prompt.icon ?? ''} {prompt.label ?? '\u2014'}</span>
<span class="col-desc">{prompt.description ?? '\u2014'}</span> <span class="col-desc">{prompt.description ?? '\u2014'}</span>
<span class="col-chars">{prompt.system_prompt.length}</span> <span class="col-chars">{prompt.system_prompt.length}</span>
<span class="col-updated">{new Date(prompt.updated_at).toLocaleDateString('nb-NO')}</span> <span class="col-updated">{new Date(prompt.updated_at).toLocaleDateString('nb-NO')}</span>
@ -1164,6 +1195,16 @@
{#if editingPrompt === prompt.action} {#if editingPrompt === prompt.action}
<div class="prompt-editor"> <div class="prompt-editor">
<div class="prompt-editor-meta">
<label>
<span class="prompt-meta-label">Ikon</span>
<input type="text" bind:value={editPromptIcon} placeholder="🧹" class="prompt-meta-input prompt-meta-input--icon" />
</label>
<label>
<span class="prompt-meta-label">Visningsnavn</span>
<input type="text" bind:value={editPromptLabel} placeholder="Vask tekst" class="prompt-meta-input" />
</label>
</div>
<textarea <textarea
bind:value={editPromptText} bind:value={editPromptText}
rows="12" rows="12"
@ -1196,8 +1237,9 @@
<div class="table-row table-row--header usage-row"> <div class="table-row table-row--header usage-row">
<span>Alias</span> <span>Alias</span>
<span>Modell</span> <span>Modell</span>
<span>Prompt</span>
<span class="col-num">Kall</span> <span class="col-num">Kall</span>
<span class="col-num">Prompt</span> <span class="col-num">Prompt-tok.</span>
<span class="col-num">Kompl.</span> <span class="col-num">Kompl.</span>
<span class="col-num">Totalt</span> <span class="col-num">Totalt</span>
<span class="col-num">Est. $</span> <span class="col-num">Est. $</span>
@ -1208,6 +1250,7 @@
<div class="table-row usage-row"> <div class="table-row usage-row">
<span class="col-alias">{row.model_alias}</span> <span class="col-alias">{row.model_alias}</span>
<span class="col-model-actual">{row.model_actual ?? '\u2014'}</span> <span class="col-model-actual">{row.model_actual ?? '\u2014'}</span>
<span class="col-action-usage">{row.action ?? '\u2014'}</span>
<span class="col-num">{row.call_count}</span> <span class="col-num">{row.call_count}</span>
<span class="col-num">{row.prompt_tokens.toLocaleString('nb-NO')}</span> <span class="col-num">{row.prompt_tokens.toLocaleString('nb-NO')}</span>
<span class="col-num">{row.completion_tokens.toLocaleString('nb-NO')}</span> <span class="col-num">{row.completion_tokens.toLocaleString('nb-NO')}</span>
@ -2040,7 +2083,15 @@
/* Usage/tokenforbruk */ /* Usage/tokenforbruk */
.usage-row { .usage-row {
grid-template-columns: 1fr 2fr 70px 80px 80px 80px 70px; grid-template-columns: 1fr 2fr 1fr 60px 80px 80px 80px 60px;
}
.col-action-usage {
font-size: 0.75rem;
color: #8b92a5;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
} }
.col-model-actual { .col-model-actual {
@ -2060,7 +2111,11 @@
/* Prompt-seksjon */ /* Prompt-seksjon */
.prompt-row { .prompt-row {
grid-template-columns: 1.5fr 2.5fr 60px 90px 70px; grid-template-columns: 1.2fr 1.2fr 2fr 60px 80px 70px;
}
.col-label {
font-size: 0.85rem;
} }
.col-action { .col-action {
@ -2089,6 +2144,43 @@
border-bottom: 1px solid #2d3148; border-bottom: 1px solid #2d3148;
} }
.prompt-editor-meta {
display: flex;
gap: 0.75rem;
margin-bottom: 0.5rem;
}
.prompt-editor-meta label {
display: flex;
align-items: center;
gap: 0.35rem;
}
.prompt-meta-label {
font-size: 0.75rem;
color: #8b92a5;
}
.prompt-meta-input {
background: #161822;
border: 1px solid #2d3148;
border-radius: 4px;
color: #e1e4e8;
padding: 0.25rem 0.4rem;
font-size: 0.8rem;
width: 160px;
}
.prompt-meta-input--icon {
width: 50px;
text-align: center;
}
.prompt-meta-input:focus {
outline: none;
border-color: #3b82f6;
}
.prompt-editor textarea { .prompt-editor textarea {
width: 100%; width: 100%;
background: #161822; background: #161822;

View file

@ -163,28 +163,44 @@ impl JobHandler for AiTextProcessHandler {
.await .await
.context("Feil ved lagring av revisjon")?; .context("Feil ved lagring av revisjon")?;
// 4. Bygg system-prompt basert på action // 4. Hent prompt-label fra DB (for metadata-stempel i chat)
let prompt_label: Option<String> = sqlx::query_scalar(
"SELECT label FROM ai_prompts WHERE action = $1"
)
.bind(action)
.fetch_optional(pool)
.await
.ok()
.flatten();
// 5. Bygg system-prompt basert på action
let system_prompt = match prompt_override { let system_prompt = match prompt_override {
Some(custom) => custom.to_string(), Some(custom) => custom.to_string(),
None => get_system_prompt_from_db(pool, action).await, None => get_system_prompt_from_db(pool, action).await,
}; };
// 5. Send til AI Gateway // 6. Send til AI Gateway
let ai_resp = self let ai_resp = self
.call_ai_gateway(&system_prompt, &plain_text, &model) .call_ai_gateway(&system_prompt, &plain_text, &model)
.await .await
.context("AI Gateway-kall feilet")?; .context("AI Gateway-kall feilet")?;
// 6. Oppdater SpacetimeDB — dette er primær-kanalen til frontend. // 7. Beregn faktisk modellnavn
// sync.rs synker body-endringen til PG automatisk. // LiteLLM returnerer alias-navnet i model-feltet — bruk expected_model fra DB
self.update_spacetimedb(&message_id, workspace_id, &ai_resp.content) let actual_model = match &ai_resp.model_actual {
.await Some(m) if m != &model => Some(m.clone()), // Gateway returnerte faktisk modellnavn
.context("Kunne ikke oppdatere SpacetimeDB med AI-resultat")?; _ => expected_model, // Bruk oppslaget fra providers-tabellen
};
// Strip openrouter/-prefiks for lesbarhet
let actual_model_clean = actual_model.map(|m| m.replace("openrouter/", "").replace("gemini/", "google/"));
// 7. Skriv PG-only data (metadata, revisjon er allerede lagret, tokenforbruk) // 8. Skriv PG metadata FØR SpacetimeDB-oppdatering
// (frontend henter metadata fra PG når SpacetimeDB-update trigger onUpdate)
let metadata = json!({ let metadata = json!({
"ai_processed": true, "ai_processed": true,
"ai_action": action "ai_action": action,
"ai_label": prompt_label.as_deref().unwrap_or(action),
"ai_model": actual_model_clean.as_deref().unwrap_or(&model)
}); });
sqlx::query( sqlx::query(
@ -194,31 +210,30 @@ impl JobHandler for AiTextProcessHandler {
WHERE id = $2 WHERE id = $2
"#, "#,
) )
.bind(metadata) .bind(&metadata)
.bind(message_id) .bind(message_id)
.execute(pool) .execute(pool)
.await .await
.context("Feil ved oppdatering av metadata")?; .context("Feil ved oppdatering av metadata")?;
// 8. Logg tokenforbruk til ai_usage_log // 9. Oppdater SpacetimeDB — dette er primær-kanalen til frontend.
// LiteLLM returnerer alias-navnet i model-feltet — bruk expected_model fra DB // Gjøres ETTER PG-metadata slik at enrichMessageFromPg finner fersk data.
let actual_model = match &ai_resp.model_actual { self.update_spacetimedb(&message_id, workspace_id, &ai_resp.content)
Some(m) if m != &model => Some(m.clone()), // Gateway returnerte faktisk modellnavn .await
_ => expected_model, // Bruk oppslaget fra providers-tabellen .context("Kunne ikke oppdatere SpacetimeDB med AI-resultat")?;
};
// Strip openrouter/-prefiks for lesbarhet
let actual_model_clean = actual_model.map(|m| m.replace("openrouter/", "").replace("gemini/", "google/"));
// 10. Logg tokenforbruk til ai_usage_log
sqlx::query( sqlx::query(
r#" r#"
INSERT INTO ai_usage_log (workspace_id, job_id, job_type, model_alias, model_actual, prompt_tokens, completion_tokens, total_tokens) INSERT INTO ai_usage_log (workspace_id, job_id, job_type, model_alias, model_actual, action, prompt_tokens, completion_tokens, total_tokens)
VALUES ($1, $2, 'ai_text_process', $3, $4, $5, $6, $7) VALUES ($1, $2, 'ai_text_process', $3, $4, $5, $6, $7, $8)
"#, "#,
) )
.bind(workspace_id) .bind(workspace_id)
.bind(job_id) .bind(job_id)
.bind(&model) .bind(&model)
.bind(&actual_model_clean) .bind(&actual_model_clean)
.bind(action)
.bind(ai_resp.prompt_tokens) .bind(ai_resp.prompt_tokens)
.bind(ai_resp.completion_tokens) .bind(ai_resp.completion_tokens)
.bind(ai_resp.total_tokens) .bind(ai_resp.total_tokens)