AI-admin: multi-provider katalog, prompt label/icon, usage action-kolonne
- Modellkatalog henter fra xAI, Gemini, OpenAI og OpenRouter (ikke bare OR)
- Hver katalogmodell bærer litellm_prefix og api_key_env — forenkler add-flow
- Prompts har nå label, icon og sort_order — vises i chat og admin
- ai_usage_log får action-kolonne for å spore hvilken prompt som ble brukt
- Worker skriver PG-metadata FØR SpacetimeDB slik at frontend finner fersk data
- getUserWorkspaces returnerer rolle

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
7899b2f224
commit
a1e6fa1c6b
9 changed files with 422 additions and 123 deletions
8
migrations/0015_usage_action_column.sql
Normal file
8
migrations/0015_usage_action_column.sql
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
-- 0015_usage_action_column.sql
-- Add an "action" column to ai_usage_log so each usage row can be traced
-- back to the prompt/action that produced it.

BEGIN;

ALTER TABLE ai_usage_log ADD COLUMN IF NOT EXISTS action TEXT;

COMMIT;
|
||||
|
|
@ -15,10 +15,10 @@ export interface Workspace {
|
|||
settings: Record<string, unknown>;
|
||||
}
|
||||
|
||||
/** Hent alle workspaces brukeren er medlem av */
|
||||
export async function getUserWorkspaces(userId: string): Promise<Workspace[]> {
|
||||
return sql<Workspace[]>`
|
||||
SELECT w.id, w.name, w.slug, w.domain, w.settings
|
||||
/** Hent alle workspaces brukeren er medlem av, med rolle */
|
||||
export async function getUserWorkspaces(userId: string): Promise<(Workspace & { role: string })[]> {
|
||||
return sql<(Workspace & { role: string })[]>`
|
||||
SELECT w.id, w.name, w.slug, w.domain, w.settings, wm.role::text AS role
|
||||
FROM workspaces w
|
||||
JOIN workspace_members wm ON wm.workspace_id = w.id
|
||||
WHERE wm.user_id = ${userId}
|
||||
|
|
|
|||
|
|
@ -1,20 +1,13 @@
|
|||
import { json, error } from '@sveltejs/kit';
|
||||
import type { RequestHandler } from './$types';
|
||||
import { env } from '$env/dynamic/private';
|
||||
|
||||
interface OpenRouterModel {
|
||||
id: string;
|
||||
name: string;
|
||||
context_length: number;
|
||||
pricing: { prompt: string; completion: string };
|
||||
top_provider?: { max_completion_tokens?: number };
|
||||
architecture?: { modality?: string };
|
||||
}
|
||||
import { sql } from '$lib/server/db';
|
||||
|
||||
export interface CatalogModel {
|
||||
id: string;
|
||||
name: string;
|
||||
provider: string;
|
||||
litellm_prefix: string;
|
||||
api_key_env: string;
|
||||
context_length: number;
|
||||
prompt_price_per_m: number;
|
||||
completion_price_per_m: number;
|
||||
|
|
@ -22,46 +15,157 @@ export interface CatalogModel {
|
|||
max_completion: number | null;
|
||||
}
|
||||
|
||||
interface ProviderFetcher {
|
||||
keyEnv: string;
|
||||
label: string;
|
||||
litellmPrefix: string;
|
||||
fetch: (apiKey: string) => Promise<CatalogModel[]>;
|
||||
}
|
||||
|
||||
let cache: { models: CatalogModel[]; fetched_at: number } | null = null;
|
||||
const CACHE_TTL = 60 * 60 * 1000; // 1 time
|
||||
|
||||
function toPerMillion(pricePerToken: string): number {
|
||||
const n = parseFloat(pricePerToken);
|
||||
function toPerMillion(pricePerToken: string | number): number {
|
||||
const n = typeof pricePerToken === 'string' ? parseFloat(pricePerToken) : pricePerToken;
|
||||
if (isNaN(n)) return 0;
|
||||
return Math.round(n * 1_000_000 * 100) / 100;
|
||||
}
|
||||
|
||||
export const GET: RequestHandler = async ({ locals }) => {
|
||||
if (!locals.workspace || !locals.user) error(401);
|
||||
|
||||
const apiKey = env.OPENROUTER_API_KEY;
|
||||
if (!apiKey) {
|
||||
error(500, 'OPENROUTER_API_KEY er ikke konfigurert');
|
||||
}
|
||||
|
||||
if (cache && Date.now() - cache.fetched_at < CACHE_TTL) {
|
||||
return json(cache.models);
|
||||
}
|
||||
// --- Provider-spesifikke hentere ---
|
||||
|
||||
async function fetchOpenRouter(apiKey: string): Promise<CatalogModel[]> {
|
||||
const res = await fetch('https://openrouter.ai/api/v1/models', {
|
||||
headers: { Authorization: `Bearer ${apiKey}` }
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
error(502, `OpenRouter returnerte ${res.status}`);
|
||||
}
|
||||
|
||||
if (!res.ok) return [];
|
||||
const body = await res.json();
|
||||
const models: CatalogModel[] = (body.data as OpenRouterModel[]).map((m) => ({
|
||||
return (body.data ?? []).map((m: any) => ({
|
||||
id: m.id,
|
||||
name: m.name,
|
||||
name: m.name ?? m.id,
|
||||
provider: m.id.split('/')[0],
|
||||
context_length: m.context_length,
|
||||
litellm_prefix: 'openrouter/',
|
||||
api_key_env: 'OPENROUTER_API_KEY',
|
||||
context_length: m.context_length ?? 0,
|
||||
prompt_price_per_m: toPerMillion(m.pricing?.prompt ?? '0'),
|
||||
completion_price_per_m: toPerMillion(m.pricing?.completion ?? '0'),
|
||||
modality: m.architecture?.modality ?? 'text',
|
||||
max_completion: m.top_provider?.max_completion_tokens ?? null
|
||||
}));
|
||||
}
|
||||
|
||||
async function fetchXai(_apiKey: string): Promise<CatalogModel[]> {
|
||||
// xAI /v1/models krever betalt konto — hardkod kjente modeller
|
||||
// Kilde: https://docs.x.ai/docs/models
|
||||
const models = [
|
||||
{ id: 'grok-4.20-multi-agent-beta-0309', name: 'Grok 4.20 Multi-Agent (beta)', ctx: 131072 },
|
||||
{ id: 'grok-4.20-beta-0309-reasoning', name: 'Grok 4.20 (reasoning, beta)', ctx: 131072 },
|
||||
{ id: 'grok-4.20-beta-0309-non-reasoning', name: 'Grok 4.20 (beta)', ctx: 131072 },
|
||||
{ id: 'grok-4-0709', name: 'Grok 4', ctx: 131072 },
|
||||
{ id: 'grok-4-fast-reasoning', name: 'Grok 4 Fast (reasoning)', ctx: 131072 },
|
||||
{ id: 'grok-4-fast-non-reasoning', name: 'Grok 4 Fast', ctx: 131072 },
|
||||
{ id: 'grok-4-1-fast-reasoning', name: 'Grok 4.1 Fast (reasoning)', ctx: 131072 },
|
||||
{ id: 'grok-4-1-fast-non-reasoning', name: 'Grok 4.1 Fast', ctx: 131072 },
|
||||
{ id: 'grok-3', name: 'Grok 3', ctx: 131072 },
|
||||
{ id: 'grok-3-mini', name: 'Grok 3 Mini', ctx: 131072 },
|
||||
{ id: 'grok-code-fast-1', name: 'Grok Code Fast', ctx: 131072 },
|
||||
];
|
||||
return models.map(m => ({
|
||||
id: m.id,
|
||||
name: m.name,
|
||||
provider: 'xai',
|
||||
litellm_prefix: 'xai/',
|
||||
api_key_env: 'XAI_API_KEY',
|
||||
context_length: m.ctx,
|
||||
prompt_price_per_m: -1,
|
||||
completion_price_per_m: -1,
|
||||
modality: 'text',
|
||||
max_completion: null
|
||||
}));
|
||||
}
|
||||
|
||||
async function fetchGemini(apiKey: string): Promise<CatalogModel[]> {
|
||||
const res = await fetch(
|
||||
`https://generativelanguage.googleapis.com/v1beta/models?key=${apiKey}`
|
||||
);
|
||||
if (!res.ok) return [];
|
||||
const body = await res.json();
|
||||
return (body.models ?? [])
|
||||
.filter((m: any) => m.supportedGenerationMethods?.includes('generateContent'))
|
||||
.map((m: any) => {
|
||||
// models/gemini-2.5-flash → gemini-2.5-flash
|
||||
const shortName = (m.name as string).replace('models/', '');
|
||||
return {
|
||||
id: shortName,
|
||||
name: m.displayName ?? shortName,
|
||||
provider: 'google',
|
||||
litellm_prefix: 'gemini/',
|
||||
api_key_env: 'GEMINI_API_KEY',
|
||||
context_length: m.inputTokenLimit ?? 0,
|
||||
prompt_price_per_m: -1,
|
||||
completion_price_per_m: -1,
|
||||
modality: 'text',
|
||||
max_completion: m.outputTokenLimit ?? null
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
async function fetchOpenAI(apiKey: string): Promise<CatalogModel[]> {
|
||||
const res = await fetch('https://api.openai.com/v1/models', {
|
||||
headers: { Authorization: `Bearer ${apiKey}` }
|
||||
});
|
||||
if (!res.ok) return [];
|
||||
const body = await res.json();
|
||||
return (body.data ?? [])
|
||||
.filter((m: any) => m.id.startsWith('gpt-') || m.id.startsWith('o') || m.id.startsWith('chatgpt-'))
|
||||
.map((m: any) => ({
|
||||
id: m.id,
|
||||
name: m.id,
|
||||
provider: 'openai',
|
||||
litellm_prefix: 'openai/',
|
||||
api_key_env: 'OPENAI_API_KEY',
|
||||
context_length: 128000,
|
||||
prompt_price_per_m: 0,
|
||||
completion_price_per_m: 0,
|
||||
modality: 'text',
|
||||
max_completion: null
|
||||
}));
|
||||
}
|
||||
|
||||
const PROVIDERS: ProviderFetcher[] = [
|
||||
{ keyEnv: 'XAI_API_KEY', label: 'xAI', litellmPrefix: 'xai/', fetch: fetchXai },
|
||||
{ keyEnv: 'GEMINI_API_KEY', label: 'Google', litellmPrefix: 'gemini/', fetch: fetchGemini },
|
||||
{ keyEnv: 'OPENAI_API_KEY', label: 'OpenAI', litellmPrefix: 'openai/', fetch: fetchOpenAI },
|
||||
{ keyEnv: 'OPENROUTER_API_KEY', label: 'OpenRouter', litellmPrefix: 'openrouter/', fetch: fetchOpenRouter }
|
||||
];
|
||||
|
||||
export const GET: RequestHandler = async ({ locals, url }) => {
|
||||
if (!locals.workspace || !locals.user) error(401);
|
||||
|
||||
const forceRefresh = url.searchParams.get('refresh') === '1';
|
||||
|
||||
if (!forceRefresh && cache && Date.now() - cache.fetched_at < CACHE_TTL) {
|
||||
return json(cache.models);
|
||||
}
|
||||
|
||||
// Hent aktive nøkler med verdier fra DB
|
||||
const keys = await sql`
|
||||
SELECT env_name, key_value FROM ai_api_keys WHERE is_enabled = true AND key_value IS NOT NULL
|
||||
`;
|
||||
const keyMap = new Map(keys.map((k: any) => [k.env_name, k.key_value as string]));
|
||||
|
||||
// Hent fra alle aktive leverandører parallelt
|
||||
const promises = PROVIDERS
|
||||
.filter(p => keyMap.has(p.keyEnv))
|
||||
.map(async (p) => {
|
||||
try {
|
||||
return await p.fetch(keyMap.get(p.keyEnv)!);
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
});
|
||||
|
||||
const results = await Promise.all(promises);
|
||||
const models = results.flat();
|
||||
|
||||
cache = { models, fetched_at: Date.now() };
|
||||
return json(models);
|
||||
|
|
|
|||
16
web/src/routes/api/admin/ai/prompts/+server.ts
Normal file
16
web/src/routes/api/admin/ai/prompts/+server.ts
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
import { json, error } from '@sveltejs/kit';
|
||||
import type { RequestHandler } from './$types';
|
||||
import { sql } from '$lib/server/db';
|
||||
|
||||
/** GET — list alle AI-prompts */
|
||||
export const GET: RequestHandler = async ({ locals }) => {
|
||||
if (!locals.workspace || !locals.user) error(401);
|
||||
|
||||
const rows = await sql`
|
||||
SELECT action, system_prompt, description, updated_at
|
||||
FROM ai_prompts
|
||||
ORDER BY action
|
||||
`;
|
||||
|
||||
return json(rows);
|
||||
};
|
||||
44
web/src/routes/api/admin/ai/prompts/[action]/+server.ts
Normal file
44
web/src/routes/api/admin/ai/prompts/[action]/+server.ts
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
import { json, error } from '@sveltejs/kit';
|
||||
import type { RequestHandler } from './$types';
|
||||
import { sql } from '$lib/server/db';
|
||||
|
||||
/** PATCH — oppdater system_prompt, description, label og/eller icon */
|
||||
export const PATCH: RequestHandler = async ({ params, request, locals }) => {
|
||||
if (!locals.workspace || !locals.user) error(401);
|
||||
|
||||
const body = await request.json();
|
||||
|
||||
const [row] = await sql`
|
||||
UPDATE ai_prompts SET
|
||||
system_prompt = COALESCE(${body.system_prompt ?? null}, system_prompt),
|
||||
description = COALESCE(${body.description ?? null}, description),
|
||||
label = COALESCE(${body.label ?? null}, label),
|
||||
icon = COALESCE(${body.icon ?? null}, icon),
|
||||
updated_at = now()
|
||||
WHERE action = ${params.action}
|
||||
RETURNING action, system_prompt, description, label, icon, sort_order, updated_at
|
||||
`;
|
||||
|
||||
if (!row) error(404, 'Prompt ikke funnet');
|
||||
return json(row);
|
||||
};
|
||||
|
||||
/** PUT — opprett eller erstatt prompt for en action */
|
||||
export const PUT: RequestHandler = async ({ params, request, locals }) => {
|
||||
if (!locals.workspace || !locals.user) error(401);
|
||||
|
||||
const body = await request.json();
|
||||
if (!body.system_prompt) error(400, 'system_prompt er påkrevd');
|
||||
|
||||
const [row] = await sql`
|
||||
INSERT INTO ai_prompts (action, system_prompt, description)
|
||||
VALUES (${params.action}, ${body.system_prompt}, ${body.description ?? null})
|
||||
ON CONFLICT (action) DO UPDATE SET
|
||||
system_prompt = EXCLUDED.system_prompt,
|
||||
description = COALESCE(EXCLUDED.description, ai_prompts.description),
|
||||
updated_at = now()
|
||||
RETURNING action, system_prompt, description, updated_at
|
||||
`;
|
||||
|
||||
return json(row);
|
||||
};
|
||||
19
web/src/routes/api/admin/ai/providers/renumber/+server.ts
Normal file
19
web/src/routes/api/admin/ai/providers/renumber/+server.ts
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
import { json, error } from '@sveltejs/kit';
|
||||
import type { RequestHandler } from './$types';
|
||||
import { sql } from '$lib/server/db';
|
||||
|
||||
/** POST — renummerer prioriteter for en liste providers */
|
||||
export const POST: RequestHandler = async ({ request, locals }) => {
|
||||
if (!locals.workspace || !locals.user) error(401);
|
||||
|
||||
const items: { id: string; priority: number }[] = await request.json();
|
||||
if (!Array.isArray(items) || items.length === 0) error(400);
|
||||
|
||||
for (const item of items) {
|
||||
await sql`
|
||||
UPDATE ai_model_providers SET priority = ${item.priority} WHERE id = ${item.id}::uuid
|
||||
`;
|
||||
}
|
||||
|
||||
return json({ ok: true });
|
||||
};
|
||||
|
|
@ -22,22 +22,23 @@ export const load: PageServerLoad = async () => {
|
|||
`;
|
||||
|
||||
const prompts = await sql`
|
||||
SELECT action, system_prompt, description, updated_at
|
||||
SELECT action, system_prompt, description, label, icon, sort_order, updated_at
|
||||
FROM ai_prompts
|
||||
ORDER BY action
|
||||
ORDER BY sort_order, action
|
||||
`;
|
||||
|
||||
const usage = await sql`
|
||||
SELECT
|
||||
model_alias,
|
||||
model_actual,
|
||||
action,
|
||||
count(*)::int AS call_count,
|
||||
sum(prompt_tokens)::int AS prompt_tokens,
|
||||
sum(completion_tokens)::int AS completion_tokens,
|
||||
sum(total_tokens)::int AS total_tokens
|
||||
FROM ai_usage_log
|
||||
WHERE created_at > now() - interval '30 days'
|
||||
GROUP BY model_alias, model_actual
|
||||
GROUP BY model_alias, model_actual, action
|
||||
ORDER BY total_tokens DESC
|
||||
`;
|
||||
|
||||
|
|
|
|||
|
|
@ -31,12 +31,16 @@
|
|||
action: string;
|
||||
system_prompt: string;
|
||||
description: string | null;
|
||||
label: string | null;
|
||||
icon: string | null;
|
||||
sort_order: number;
|
||||
updated_at: string;
|
||||
}
|
||||
|
||||
interface UsageRow {
|
||||
model_alias: string;
|
||||
model_actual: string | null;
|
||||
action: string | null;
|
||||
call_count: number;
|
||||
prompt_tokens: number;
|
||||
completion_tokens: number;
|
||||
|
|
@ -47,6 +51,8 @@
|
|||
id: string;
|
||||
name: string;
|
||||
provider: string;
|
||||
litellm_prefix: string;
|
||||
api_key_env: string;
|
||||
context_length: number;
|
||||
prompt_price_per_m: number;
|
||||
completion_price_per_m: number;
|
||||
|
|
@ -74,6 +80,8 @@
|
|||
let configMsg = $state('');
|
||||
let editingPrompt = $state<string | null>(null);
|
||||
let editPromptText = $state('');
|
||||
let editPromptLabel = $state('');
|
||||
let editPromptIcon = $state('');
|
||||
let expandedAlias = $state<string | null>(null);
|
||||
|
||||
// Alias-redigering
|
||||
|
|
@ -135,8 +143,9 @@
|
|||
return String(n);
|
||||
}
|
||||
|
||||
function formatPrice(n: number): string {
|
||||
if (n === 0) return 'Gratis';
|
||||
function formatPrice(n: number | null | undefined): string {
|
||||
if (n == null || n < 0) return '\u2014';
|
||||
if (n === 0) return '\u2014';
|
||||
return `$${n.toFixed(2)}`;
|
||||
}
|
||||
|
||||
|
|
@ -263,6 +272,7 @@
|
|||
}
|
||||
}
|
||||
|
||||
// Grupper etter api_key_env + provider (f.eks. "google via GEMINI_API_KEY" vs "google via OPENROUTER_API_KEY")
|
||||
let groupedByProvider = $derived.by(() => {
|
||||
const search = catalogSearch.toLowerCase();
|
||||
const filtered = search
|
||||
|
|
@ -270,23 +280,42 @@
|
|||
(m) =>
|
||||
m.name.toLowerCase().includes(search) ||
|
||||
m.id.toLowerCase().includes(search) ||
|
||||
m.provider.toLowerCase().includes(search)
|
||||
m.provider.toLowerCase().includes(search) ||
|
||||
m.api_key_env.toLowerCase().includes(search)
|
||||
)
|
||||
: catalogModels;
|
||||
|
||||
const map = new Map<string, CatalogModel[]>();
|
||||
for (const m of filtered) {
|
||||
const list = map.get(m.provider) ?? [];
|
||||
// Grupper per API-nøkkel, med provider som undergruppe
|
||||
const groupKey = m.api_key_env === 'OPENROUTER_API_KEY'
|
||||
? `${m.provider} (OpenRouter)`
|
||||
: m.provider;
|
||||
const list = map.get(groupKey) ?? [];
|
||||
list.push(m);
|
||||
map.set(m.provider, list);
|
||||
map.set(groupKey, list);
|
||||
}
|
||||
|
||||
// Sorter modeller synkende etter pris innen hver provider
|
||||
// Sorter: pris synkende først (ukjent/-1 sist), deretter navn synkende
|
||||
for (const [, models] of map) {
|
||||
models.sort((a, b) => b.completion_price_per_m - a.completion_price_per_m);
|
||||
models.sort((a, b) => {
|
||||
const aPrice = a.completion_price_per_m;
|
||||
const bPrice = b.completion_price_per_m;
|
||||
const aHasPrice = aPrice > 0;
|
||||
const bHasPrice = bPrice > 0;
|
||||
if (aHasPrice !== bHasPrice) return aHasPrice ? -1 : 1;
|
||||
if (aPrice !== bPrice) return bPrice - aPrice;
|
||||
return b.name.localeCompare(a.name);
|
||||
});
|
||||
}
|
||||
|
||||
return [...map.entries()].sort(([a], [b]) => a.localeCompare(b));
|
||||
// Direkte API-nøkler først, deretter OpenRouter-grupper
|
||||
return [...map.entries()].sort(([a], [b]) => {
|
||||
const aOr = a.includes('OpenRouter');
|
||||
const bOr = b.includes('OpenRouter');
|
||||
if (aOr !== bOr) return aOr ? 1 : -1;
|
||||
return a.localeCompare(b);
|
||||
});
|
||||
});
|
||||
|
||||
let catalogPickerFiltered = $derived.by(() => {
|
||||
|
|
@ -310,43 +339,11 @@
|
|||
expandedProviders = new Set(expandedProviders);
|
||||
}
|
||||
|
||||
// Mapping fra OpenRouter-provider til LiteLLM direkte-prefiks + nøkkel
|
||||
const directKeyMap: Record<string, { prefix: string; key: string }> = {
|
||||
google: { prefix: 'gemini/', key: 'GEMINI_API_KEY' },
|
||||
anthropic: { prefix: 'anthropic/', key: 'ANTHROPIC_API_KEY' },
|
||||
openai: { prefix: 'openai/', key: 'OPENAI_API_KEY' },
|
||||
'x-ai': { prefix: 'xai/', key: 'XAI_API_KEY' },
|
||||
};
|
||||
|
||||
function modelForKey(model: CatalogModel, keyEnv: string): string {
|
||||
if (keyEnv === 'OPENROUTER_API_KEY') return `openrouter/${model.id}`;
|
||||
// Direkte: strip provider-prefix fra model.id, legg til LiteLLM-prefiks
|
||||
const mapping = directKeyMap[model.provider];
|
||||
if (mapping) {
|
||||
const modelName = model.id.replace(`${model.provider}/`, '');
|
||||
return `${mapping.prefix}${modelName}`;
|
||||
}
|
||||
return `openrouter/${model.id}`;
|
||||
function litellmModelId(model: CatalogModel): string {
|
||||
return `${model.litellm_prefix}${model.id}`;
|
||||
}
|
||||
|
||||
function availableKeysForModel(model: CatalogModel): ApiKey[] {
|
||||
const keys: ApiKey[] = [];
|
||||
// Direkte nøkkel for denne leverandøren
|
||||
const mapping = directKeyMap[model.provider];
|
||||
if (mapping) {
|
||||
const directKey = apiKeys.find(k => k.name === mapping.key);
|
||||
if (directKey) keys.push(directKey);
|
||||
}
|
||||
// OpenRouter alltid tilgjengelig
|
||||
const orKey = apiKeys.find(k => k.name === 'OPENROUTER_API_KEY');
|
||||
if (orKey) keys.push(orKey);
|
||||
return keys;
|
||||
}
|
||||
|
||||
// Catalog add — steg 1: velg alias, steg 2: velg nøkkel
|
||||
let catalogAddKey = $state('');
|
||||
|
||||
async function addFromCatalog(model: CatalogModel, aliasId: string, keyEnv: string) {
|
||||
async function addFromCatalog(model: CatalogModel, aliasId: string) {
|
||||
errorMsg = '';
|
||||
const maxPri = Math.max(0, ...providersForAlias(aliasId).map((p) => p.priority));
|
||||
try {
|
||||
|
|
@ -356,8 +353,8 @@
|
|||
body: JSON.stringify({
|
||||
alias_id: aliasId,
|
||||
priority: maxPri + 1,
|
||||
litellm_model: modelForKey(model, keyEnv),
|
||||
api_key_env: keyEnv
|
||||
litellm_model: litellmModelId(model),
|
||||
api_key_env: model.api_key_env
|
||||
})
|
||||
});
|
||||
if (!res.ok) throw new Error('Feil ved opprettelse');
|
||||
|
|
@ -365,14 +362,14 @@
|
|||
providers = [...providers, row];
|
||||
addingFromCatalog = null;
|
||||
catalogAddAlias = '';
|
||||
catalogAddKey = '';
|
||||
} catch {
|
||||
errorMsg = 'Kunne ikke legge til provider fra katalog';
|
||||
}
|
||||
}
|
||||
|
||||
function selectFromPicker(model: CatalogModel) {
|
||||
newProvider.litellm_model = modelForKey(model, newProvider.api_key_env);
|
||||
newProvider.litellm_model = litellmModelId(model);
|
||||
newProvider.api_key_env = model.api_key_env;
|
||||
showCatalogPicker = false;
|
||||
catalogPickerSearch = '';
|
||||
}
|
||||
|
|
@ -433,8 +430,12 @@
|
|||
|
||||
function estimateCost(row: UsageRow): number | null {
|
||||
if (!catalogLoaded || !row.model_actual) return null;
|
||||
const model = catalogModels.find((m) => m.id === row.model_actual);
|
||||
if (!model) return null;
|
||||
// model_actual kan være "xai/grok-..." eller "google/gemma-..." — match mot id eller litellm_prefix+id
|
||||
const actual = row.model_actual;
|
||||
const model = catalogModels.find((m) =>
|
||||
m.id === actual || `${m.litellm_prefix}${m.id}` === actual || `${m.provider}/${m.id}` === actual
|
||||
);
|
||||
if (!model || model.prompt_price_per_m < 0 || model.completion_price_per_m < 0) return null;
|
||||
return (
|
||||
(row.prompt_tokens / 1_000_000) * model.prompt_price_per_m +
|
||||
(row.completion_tokens / 1_000_000) * model.completion_price_per_m
|
||||
|
|
@ -589,11 +590,33 @@
|
|||
const res = await fetch(`/api/admin/ai/providers/${provider.id}`, { method: 'DELETE' });
|
||||
if (!res.ok) throw new Error('Feil');
|
||||
providers = providers.filter((p) => p.id !== provider.id);
|
||||
// Renummerer prioriteter for gjenværende providers under samme alias
|
||||
await renumberPriorities(provider.alias_id);
|
||||
} catch {
|
||||
errorMsg = 'Kunne ikke slette provider';
|
||||
}
|
||||
}
|
||||
|
||||
async function renumberPriorities(aliasId: string) {
|
||||
const ap = providersForAlias(aliasId);
|
||||
let changed = false;
|
||||
for (let i = 0; i < ap.length; i++) {
|
||||
if (ap[i].priority !== i + 1) {
|
||||
ap[i].priority = i + 1;
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
if (!changed) return;
|
||||
try {
|
||||
await fetch('/api/admin/ai/providers/renumber', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(ap.map(p => ({ id: p.id, priority: p.priority })))
|
||||
});
|
||||
providers = [...providers];
|
||||
} catch { /* stille */ }
|
||||
}
|
||||
|
||||
async function addAlias() {
|
||||
errorMsg = '';
|
||||
if (!newAlias.alias) return;
|
||||
|
|
@ -655,11 +678,15 @@
|
|||
function startEditPrompt(prompt: Prompt) {
|
||||
editingPrompt = prompt.action;
|
||||
editPromptText = prompt.system_prompt;
|
||||
editPromptLabel = prompt.label ?? '';
|
||||
editPromptIcon = prompt.icon ?? '';
|
||||
}
|
||||
|
||||
function cancelEditPrompt() {
|
||||
editingPrompt = null;
|
||||
editPromptText = '';
|
||||
editPromptLabel = '';
|
||||
editPromptIcon = '';
|
||||
}
|
||||
|
||||
async function savePrompt(prompt: Prompt) {
|
||||
|
|
@ -669,14 +696,22 @@
|
|||
const res = await fetch(`/api/admin/ai/prompts/${prompt.action}`, {
|
||||
method: 'PATCH',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ system_prompt: editPromptText })
|
||||
body: JSON.stringify({
|
||||
system_prompt: editPromptText,
|
||||
label: editPromptLabel || null,
|
||||
icon: editPromptIcon || null
|
||||
})
|
||||
});
|
||||
if (!res.ok) throw new Error('Feil ved lagring');
|
||||
const updated = await res.json();
|
||||
prompt.system_prompt = updated.system_prompt;
|
||||
prompt.label = updated.label;
|
||||
prompt.icon = updated.icon;
|
||||
prompt.updated_at = updated.updated_at;
|
||||
editingPrompt = null;
|
||||
editPromptText = '';
|
||||
editPromptLabel = '';
|
||||
editPromptIcon = '';
|
||||
markSaved(prompt.action);
|
||||
} catch {
|
||||
errorMsg = 'Kunne ikke lagre prompt';
|
||||
|
|
@ -816,7 +851,7 @@
|
|||
<!-- Seksjon 1: Modellkatalog -->
|
||||
<section>
|
||||
<div class="catalog-header">
|
||||
<h3>Modellkatalog (OpenRouter)</h3>
|
||||
<h3>Modellkatalog</h3>
|
||||
<div class="catalog-actions">
|
||||
{#if catalogLoaded}
|
||||
<input
|
||||
|
|
@ -825,6 +860,7 @@
|
|||
placeholder="Søk modeller..."
|
||||
bind:value={catalogSearch}
|
||||
/>
|
||||
<button class="toggle-btn" onclick={() => { catalogLoaded = false; catalogModels = []; }}>Skjul</button>
|
||||
{/if}
|
||||
<button
|
||||
class="toggle-btn"
|
||||
|
|
@ -867,30 +903,23 @@
|
|||
<span class="cat-col-add">
|
||||
<button
|
||||
class="toggle-btn"
|
||||
onclick={() => { addingFromCatalog = addingFromCatalog === model.id ? null : model.id; catalogAddAlias = ''; catalogAddKey = ''; }}
|
||||
onclick={() => { addingFromCatalog = addingFromCatalog === model.id ? null : model.id; catalogAddAlias = ''; }}
|
||||
>{addingFromCatalog === model.id ? '✗' : 'Legg til →'}</button>
|
||||
</span>
|
||||
</div>
|
||||
{#if addingFromCatalog === model.id}
|
||||
{@const modelKeys = availableKeysForModel(model)}
|
||||
<div class="catalog-add-row">
|
||||
<span class="catalog-add-label">Legg til <strong>{model.name}</strong>:</span>
|
||||
<span class="catalog-add-label">Legg til <strong>{model.name}</strong> via {model.api_key_env}:</span>
|
||||
<select bind:value={catalogAddAlias}>
|
||||
<option value="">Velg alias...</option>
|
||||
{#each aliases as a}
|
||||
<option value={a.id}>{a.alias}</option>
|
||||
{/each}
|
||||
</select>
|
||||
<select bind:value={catalogAddKey}>
|
||||
<option value="">Velg nøkkel...</option>
|
||||
{#each modelKeys as k}
|
||||
<option value={k.name}>{k.label}{k.is_enabled ? '' : ' (av)'}</option>
|
||||
{/each}
|
||||
</select>
|
||||
<button
|
||||
class="add-btn"
|
||||
disabled={!catalogAddAlias || !catalogAddKey}
|
||||
onclick={() => addFromCatalog(model, catalogAddAlias, catalogAddKey)}
|
||||
disabled={!catalogAddAlias}
|
||||
onclick={() => addFromCatalog(model, catalogAddAlias)}
|
||||
>Legg til</button>
|
||||
</div>
|
||||
{/if}
|
||||
|
|
@ -1139,6 +1168,7 @@
|
|||
<div class="table-list">
|
||||
<div class="table-row table-row--header prompt-row">
|
||||
<span class="col-action">Action</span>
|
||||
<span class="col-label">Visningsnavn</span>
|
||||
<span class="col-desc">Beskrivelse</span>
|
||||
<span class="col-chars">Tegn</span>
|
||||
<span class="col-updated">Oppdatert</span>
|
||||
|
|
@ -1148,6 +1178,7 @@
|
|||
{#each prompts as prompt (prompt.action)}
|
||||
<div class="table-row prompt-row">
|
||||
<span class="col-action">{prompt.action}</span>
|
||||
<span class="col-label">{prompt.icon ?? ''} {prompt.label ?? '\u2014'}</span>
|
||||
<span class="col-desc">{prompt.description ?? '\u2014'}</span>
|
||||
<span class="col-chars">{prompt.system_prompt.length}</span>
|
||||
<span class="col-updated">{new Date(prompt.updated_at).toLocaleDateString('nb-NO')}</span>
|
||||
|
|
@ -1164,6 +1195,16 @@
|
|||
|
||||
{#if editingPrompt === prompt.action}
|
||||
<div class="prompt-editor">
|
||||
<div class="prompt-editor-meta">
|
||||
<label>
|
||||
<span class="prompt-meta-label">Ikon</span>
|
||||
<input type="text" bind:value={editPromptIcon} placeholder="🧹" class="prompt-meta-input prompt-meta-input--icon" />
|
||||
</label>
|
||||
<label>
|
||||
<span class="prompt-meta-label">Visningsnavn</span>
|
||||
<input type="text" bind:value={editPromptLabel} placeholder="Vask tekst" class="prompt-meta-input" />
|
||||
</label>
|
||||
</div>
|
||||
<textarea
|
||||
bind:value={editPromptText}
|
||||
rows="12"
|
||||
|
|
@ -1196,8 +1237,9 @@
|
|||
<div class="table-row table-row--header usage-row">
|
||||
<span>Alias</span>
|
||||
<span>Modell</span>
|
||||
<span>Prompt</span>
|
||||
<span class="col-num">Kall</span>
|
||||
<span class="col-num">Prompt</span>
|
||||
<span class="col-num">Prompt-tok.</span>
|
||||
<span class="col-num">Kompl.</span>
|
||||
<span class="col-num">Totalt</span>
|
||||
<span class="col-num">Est. $</span>
|
||||
|
|
@ -1208,6 +1250,7 @@
|
|||
<div class="table-row usage-row">
|
||||
<span class="col-alias">{row.model_alias}</span>
|
||||
<span class="col-model-actual">{row.model_actual ?? '\u2014'}</span>
|
||||
<span class="col-action-usage">{row.action ?? '\u2014'}</span>
|
||||
<span class="col-num">{row.call_count}</span>
|
||||
<span class="col-num">{row.prompt_tokens.toLocaleString('nb-NO')}</span>
|
||||
<span class="col-num">{row.completion_tokens.toLocaleString('nb-NO')}</span>
|
||||
|
|
@ -2040,7 +2083,15 @@
|
|||
|
||||
/* Usage/tokenforbruk */
|
||||
.usage-row {
|
||||
grid-template-columns: 1fr 2fr 70px 80px 80px 80px 70px;
|
||||
grid-template-columns: 1fr 2fr 1fr 60px 80px 80px 80px 60px;
|
||||
}
|
||||
|
||||
.col-action-usage {
|
||||
font-size: 0.75rem;
|
||||
color: #8b92a5;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.col-model-actual {
|
||||
|
|
@ -2060,7 +2111,11 @@
|
|||
|
||||
/* Prompt-seksjon */
|
||||
.prompt-row {
|
||||
grid-template-columns: 1.5fr 2.5fr 60px 90px 70px;
|
||||
grid-template-columns: 1.2fr 1.2fr 2fr 60px 80px 70px;
|
||||
}
|
||||
|
||||
.col-label {
|
||||
font-size: 0.85rem;
|
||||
}
|
||||
|
||||
.col-action {
|
||||
|
|
@ -2089,6 +2144,43 @@
|
|||
border-bottom: 1px solid #2d3148;
|
||||
}
|
||||
|
||||
.prompt-editor-meta {
|
||||
display: flex;
|
||||
gap: 0.75rem;
|
||||
margin-bottom: 0.5rem;
|
||||
}
|
||||
|
||||
.prompt-editor-meta label {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 0.35rem;
|
||||
}
|
||||
|
||||
.prompt-meta-label {
|
||||
font-size: 0.75rem;
|
||||
color: #8b92a5;
|
||||
}
|
||||
|
||||
.prompt-meta-input {
|
||||
background: #161822;
|
||||
border: 1px solid #2d3148;
|
||||
border-radius: 4px;
|
||||
color: #e1e4e8;
|
||||
padding: 0.25rem 0.4rem;
|
||||
font-size: 0.8rem;
|
||||
width: 160px;
|
||||
}
|
||||
|
||||
.prompt-meta-input--icon {
|
||||
width: 50px;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.prompt-meta-input:focus {
|
||||
outline: none;
|
||||
border-color: #3b82f6;
|
||||
}
|
||||
|
||||
.prompt-editor textarea {
|
||||
width: 100%;
|
||||
background: #161822;
|
||||
|
|
|
|||
|
|
@ -163,28 +163,44 @@ impl JobHandler for AiTextProcessHandler {
|
|||
.await
|
||||
.context("Feil ved lagring av revisjon")?;
|
||||
|
||||
// 4. Bygg system-prompt basert på action
|
||||
// 4. Hent prompt-label fra DB (for metadata-stempel i chat)
|
||||
let prompt_label: Option<String> = sqlx::query_scalar(
|
||||
"SELECT label FROM ai_prompts WHERE action = $1"
|
||||
)
|
||||
.bind(action)
|
||||
.fetch_optional(pool)
|
||||
.await
|
||||
.ok()
|
||||
.flatten();
|
||||
|
||||
// 5. Bygg system-prompt basert på action
|
||||
let system_prompt = match prompt_override {
|
||||
Some(custom) => custom.to_string(),
|
||||
None => get_system_prompt_from_db(pool, action).await,
|
||||
};
|
||||
|
||||
// 5. Send til AI Gateway
|
||||
// 6. Send til AI Gateway
|
||||
let ai_resp = self
|
||||
.call_ai_gateway(&system_prompt, &plain_text, &model)
|
||||
.await
|
||||
.context("AI Gateway-kall feilet")?;
|
||||
|
||||
// 6. Oppdater SpacetimeDB — dette er primær-kanalen til frontend.
|
||||
// sync.rs synker body-endringen til PG automatisk.
|
||||
self.update_spacetimedb(&message_id, workspace_id, &ai_resp.content)
|
||||
.await
|
||||
.context("Kunne ikke oppdatere SpacetimeDB med AI-resultat")?;
|
||||
// 7. Beregn faktisk modellnavn
|
||||
// LiteLLM returnerer alias-navnet i model-feltet — bruk expected_model fra DB
|
||||
let actual_model = match &ai_resp.model_actual {
|
||||
Some(m) if m != &model => Some(m.clone()), // Gateway returnerte faktisk modellnavn
|
||||
_ => expected_model, // Bruk oppslaget fra providers-tabellen
|
||||
};
|
||||
// Strip openrouter/-prefiks for lesbarhet
|
||||
let actual_model_clean = actual_model.map(|m| m.replace("openrouter/", "").replace("gemini/", "google/"));
|
||||
|
||||
// 7. Skriv PG-only data (metadata, revisjon er allerede lagret, tokenforbruk)
|
||||
// 8. Skriv PG metadata FØR SpacetimeDB-oppdatering
|
||||
// (frontend henter metadata fra PG når SpacetimeDB-update trigger onUpdate)
|
||||
let metadata = json!({
|
||||
"ai_processed": true,
|
||||
"ai_action": action
|
||||
"ai_action": action,
|
||||
"ai_label": prompt_label.as_deref().unwrap_or(action),
|
||||
"ai_model": actual_model_clean.as_deref().unwrap_or(&model)
|
||||
});
|
||||
|
||||
sqlx::query(
|
||||
|
|
@ -194,31 +210,30 @@ impl JobHandler for AiTextProcessHandler {
|
|||
WHERE id = $2
|
||||
"#,
|
||||
)
|
||||
.bind(metadata)
|
||||
.bind(&metadata)
|
||||
.bind(message_id)
|
||||
.execute(pool)
|
||||
.await
|
||||
.context("Feil ved oppdatering av metadata")?;
|
||||
|
||||
// 8. Logg tokenforbruk til ai_usage_log
|
||||
// LiteLLM returnerer alias-navnet i model-feltet — bruk expected_model fra DB
|
||||
let actual_model = match &ai_resp.model_actual {
|
||||
Some(m) if m != &model => Some(m.clone()), // Gateway returnerte faktisk modellnavn
|
||||
_ => expected_model, // Bruk oppslaget fra providers-tabellen
|
||||
};
|
||||
// Strip openrouter/-prefiks for lesbarhet
|
||||
let actual_model_clean = actual_model.map(|m| m.replace("openrouter/", "").replace("gemini/", "google/"));
|
||||
// 9. Oppdater SpacetimeDB — dette er primær-kanalen til frontend.
|
||||
// Gjøres ETTER PG-metadata slik at enrichMessageFromPg finner fersk data.
|
||||
self.update_spacetimedb(&message_id, workspace_id, &ai_resp.content)
|
||||
.await
|
||||
.context("Kunne ikke oppdatere SpacetimeDB med AI-resultat")?;
|
||||
|
||||
// 10. Logg tokenforbruk til ai_usage_log
|
||||
sqlx::query(
|
||||
r#"
|
||||
INSERT INTO ai_usage_log (workspace_id, job_id, job_type, model_alias, model_actual, prompt_tokens, completion_tokens, total_tokens)
|
||||
VALUES ($1, $2, 'ai_text_process', $3, $4, $5, $6, $7)
|
||||
INSERT INTO ai_usage_log (workspace_id, job_id, job_type, model_alias, model_actual, action, prompt_tokens, completion_tokens, total_tokens)
|
||||
VALUES ($1, $2, 'ai_text_process', $3, $4, $5, $6, $7, $8)
|
||||
"#,
|
||||
)
|
||||
.bind(workspace_id)
|
||||
.bind(job_id)
|
||||
.bind(&model)
|
||||
.bind(&actual_model_clean)
|
||||
.bind(action)
|
||||
.bind(ai_resp.prompt_tokens)
|
||||
.bind(ai_resp.completion_tokens)
|
||||
.bind(ai_resp.total_tokens)
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue