synops/maskinrommet/src/publishing.rs
vegard 1425a82cdd Fullfører oppgave 14.17: A/B-testing for presentasjonselementer
Implementerer automatisk A/B-testing for forside-varianter:

- PG-migrasjon 012: ab_events-tabell for impression/klikk-logging
  med hour_of_week (0-167) for tidspunkt-normalisering
- Variant-rotasjon: ab_select() velger tilfeldig blant testing-varianter
  ved forside-rendering, winner prioriteres, retired filtreres bort
- Impression-logging: asynkron fire-and-forget ved forside-serve
  (både cache-hit og -miss), lagres i ab_events
- Klikk-attribusjon: artikkelbesøk sjekker forside-cache for aktive
  AB-varianter og logger klikk. Eksplisitt tracking via
  GET /pub/{slug}/t/{article_id}?v={edge_id}
- Periodisk evaluator (300s intervall): z-test for proporsjoner
  (p < 0.05), minimum 100 impressions per variant, oppdaterer
  edge-metadata (ab_status, impressions, clicks, ctr)
- Redaktør-overstyring: POST /intentions/ab_override markerer
  valgt variant som winner, andre som retired (krever owner/admin)
- Auto-initialisering: maybe_start_ab_test() setter ab_status=testing
  automatisk når >1 variant av samme type opprettes

Alle 42 tester passerer inkludert 3 nye z-test-tester.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-18 03:13:39 +00:00

3395 lines
117 KiB
Rust

//! Publiseringsmotor: Tera-templates med innebygde temaer.
//!
//! Fire temaer: avis, magasin, blogg, tidsskrift.
//! Hvert tema har artikkelmal + forside-mal.
//! CSS-variabler for theme_config-overstyring.
//!
//! Artikler rendres til HTML via Tera, lagres i CAS med SEO-metadata
//! (OG-tags, canonical, JSON-LD). Noden oppdateres med
//! `metadata.rendered.html_hash` + `renderer_version`.
//!
//! Ref: docs/concepts/publisering.md § "Temaer", "HTML-rendering og CAS"
use std::collections::HashMap;
use std::sync::Arc;
use axum::{
extract::{Path, Query, State},
http::{header, StatusCode},
response::Response,
};
use chrono::{DateTime, Datelike, Utc};
use rand::seq::SliceRandom;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use tera::{Context, Tera};
use tokio::sync::RwLock;
use uuid::Uuid;
use crate::cas::CasStore;
use crate::jobs;
use crate::tiptap;
use crate::AppState;
/// Renderer version. Bump on any template/markup change.
/// Used to identify articles that need re-rendering (task 14.14):
/// nodes whose `metadata.rendered.renderer_version` is older get re-queued.
pub const RENDERER_VERSION: i64 = 2;
// =============================================================================
// Tema-konfigurasjon fra publishing-trait
// =============================================================================
/// Per-collection publishing configuration, deserialized from
/// `metadata.traits.publishing` on a collection node.
/// All fields are optional/defaulted so partial configs deserialize cleanly.
#[derive(Deserialize, Default, Debug)]
pub struct PublishingConfig {
    /// Public URL slug; pages are served under `/pub/{slug}`.
    pub slug: Option<String>,
    /// Theme name: "avis", "magasin", "blogg" or "tidsskrift" (see `theme_defaults`;
    /// unknown values fall back to blogg defaults).
    pub theme: Option<String>,
    /// Per-collection CSS-variable overrides layered over theme defaults.
    #[serde(default)]
    pub theme_config: ThemeConfig,
    /// When set, canonical/base URLs use `https://{custom_domain}` instead of `/pub/{slug}`.
    pub custom_domain: Option<String>,
    /// Front-page rendering mode — NOTE(review): allowed values not visible in
    /// this chunk; confirm against the serve path.
    pub index_mode: Option<String>,
    /// TTL for the in-memory front-page cache (dynamic mode), presumably seconds — TODO confirm.
    pub index_cache_ttl: Option<u64>,
    /// Max articles in the "featured" slot on the front page (default 4).
    pub featured_max: Option<i64>,
    /// Max articles in the stream section on the front page (default 20).
    pub stream_page_size: Option<i64>,
    /// Requires editorial approval before publication.
    /// When true: members use the submitted_to flow, only owner/admin may create belongs_to.
    #[serde(default)]
    pub require_approval: bool,
    /// Roles that may create submitted_to edges to the collection.
    /// Values: "owner", "admin", "member", "reader". Default: ["member"].
    #[serde(default = "default_submission_roles")]
    pub submission_roles: Vec<String>,
}
/// Serde default for `PublishingConfig::submission_roles`: members may submit.
fn default_submission_roles() -> Vec<String> {
    vec![String::from("member")]
}
/// Visual overrides for a theme; every group defaults to empty so the
/// theme's built-in values apply (see `build_css_variables`).
#[derive(Deserialize, Default, Debug, Clone, Serialize)]
pub struct ThemeConfig {
    #[serde(default)]
    pub colors: ColorConfig,
    #[serde(default)]
    pub typography: TypographyConfig,
    #[serde(default)]
    pub layout: LayoutConfig,
    /// CAS hash of the publication logo, if uploaded; passed to templates as `logo_hash`.
    pub logo_hash: Option<String>,
}
/// Color overrides; each maps to a `--color-*` CSS variable. CSS color strings,
/// e.g. "#1a1a2e".
#[derive(Deserialize, Default, Debug, Clone, Serialize)]
pub struct ColorConfig {
    pub primary: Option<String>,
    pub accent: Option<String>,
    pub background: Option<String>,
    pub text: Option<String>,
    pub muted: Option<String>,
}
/// Font overrides; each maps to a `--font-*` CSS variable (CSS font-family lists).
#[derive(Deserialize, Default, Debug, Clone, Serialize)]
pub struct TypographyConfig {
    pub heading_font: Option<String>,
    pub body_font: Option<String>,
}
/// Layout overrides; `max_width` maps to `--layout-max-width` (CSS length, e.g. "720px").
#[derive(Deserialize, Default, Debug, Clone, Serialize)]
pub struct LayoutConfig {
    pub max_width: Option<String>,
}
// =============================================================================
// SEO-data
// =============================================================================
/// SEO metadata for article rendering (OG tags, canonical URL, JSON-LD).
#[derive(Serialize, Clone)]
pub struct SeoData {
    pub og_title: String,
    /// Meta description; article summary or empty string.
    pub description: String,
    pub canonical_url: String,
    /// CAS URL ("/cas/{hash}") for the OG image, when the article has one.
    pub og_image: Option<String>,
    /// Pre-serialized Schema.org Article JSON-LD, embedded verbatim by templates.
    pub json_ld: String,
}
/// Assemble the SEO payload for one article: OG title/description, canonical
/// URL, optional CAS-backed OG image, and serialized JSON-LD.
fn build_seo_data(
    article: &ArticleData,
    collection_title: &str,
    canonical_url: &str,
) -> SeoData {
    SeoData {
        og_title: article.title.clone(),
        // Missing summary becomes an empty description rather than being omitted.
        description: article.summary.clone().unwrap_or_default(),
        canonical_url: canonical_url.to_owned(),
        // OG image URL is derived from the CAS hash when one exists.
        og_image: article
            .og_image
            .as_deref()
            .map(|hash| format!("/cas/{hash}")),
        json_ld: build_json_ld(article, collection_title, canonical_url),
    }
}
/// Build Schema.org Article JSON-LD for embedding inside a `<script>` tag.
///
/// Returns a JSON string. `serde_json` escapes quotes and control characters,
/// but NOT `<` — so article text containing `</script>` could previously
/// terminate the embedding script element early (an XSS vector). We therefore
/// escape `<` as `\u003c`, which is equivalent JSON and safe in HTML.
fn build_json_ld(
    article: &ArticleData,
    publisher_name: &str,
    canonical_url: &str,
) -> String {
    let ld = serde_json::json!({
        "@context": "https://schema.org",
        "@type": "Article",
        "headline": article.title,
        "datePublished": article.published_at,
        "url": canonical_url,
        "publisher": {
            "@type": "Organization",
            "name": publisher_name
        },
        "description": article.summary.as_deref().unwrap_or("")
    });
    // Escape for safe embedding in a <script> tag: `<` → `\u003c`.
    ld.to_string().replace('<', "\\u003c")
}
// =============================================================================
// Innebygde temaer — Tera-templates
// =============================================================================
/// Per-theme default values for the CSS variables emitted by
/// `build_css_variables`; static since all themes are built in.
struct ThemeDefaults {
    primary: &'static str,
    accent: &'static str,
    background: &'static str,
    text: &'static str,
    muted: &'static str,
    heading_font: &'static str,
    body_font: &'static str,
    max_width: &'static str,
}
/// Built-in CSS-variable defaults per theme.
/// Unknown theme names receive the "blogg" defaults.
fn theme_defaults(theme: &str) -> ThemeDefaults {
    match theme {
        "avis" => ThemeDefaults {
            primary: "#1a1a2e",
            accent: "#e94560",
            background: "#ffffff",
            text: "#1a1a2e",
            muted: "#6b7280",
            heading_font: "'Georgia', 'Times New Roman', serif",
            body_font: "'Charter', 'Georgia', serif",
            max_width: "1200px",
        },
        "magasin" => ThemeDefaults {
            primary: "#2d3436",
            accent: "#0984e3",
            background: "#fafafa",
            text: "#2d3436",
            muted: "#636e72",
            heading_font: "'Playfair Display', 'Georgia', serif",
            body_font: "system-ui, -apple-system, sans-serif",
            max_width: "1100px",
        },
        "tidsskrift" => ThemeDefaults {
            primary: "#1a1a1a",
            accent: "#8b0000",
            background: "#fffff8",
            text: "#1a1a1a",
            muted: "#555555",
            heading_font: "'Georgia', 'Times New Roman', serif",
            body_font: "'Georgia', 'Times New Roman', serif",
            max_width: "680px",
        },
        // "blogg" and the unknown-theme fallback share one arm; this replaces
        // the recursive `theme_defaults("blogg")` call without changing results.
        _ => ThemeDefaults {
            primary: "#2c3e50",
            accent: "#3498db",
            background: "#ffffff",
            text: "#333333",
            muted: "#7f8c8d",
            heading_font: "system-ui, -apple-system, sans-serif",
            body_font: "system-ui, -apple-system, sans-serif",
            max_width: "720px",
        },
    }
}
/// Build the `:root` CSS custom-property block from `theme_config`,
/// falling back to the theme's built-in default for each property.
fn build_css_variables(theme: &str, config: &ThemeConfig) -> String {
    let d = theme_defaults(theme);
    // Resolve every variable up front: override if configured, else theme default.
    let primary = config.colors.primary.as_deref().unwrap_or(d.primary);
    let accent = config.colors.accent.as_deref().unwrap_or(d.accent);
    let background = config.colors.background.as_deref().unwrap_or(d.background);
    let text = config.colors.text.as_deref().unwrap_or(d.text);
    let muted = config.colors.muted.as_deref().unwrap_or(d.muted);
    let heading_font = config.typography.heading_font.as_deref().unwrap_or(d.heading_font);
    let body_font = config.typography.body_font.as_deref().unwrap_or(d.body_font);
    let max_width = config.layout.max_width.as_deref().unwrap_or(d.max_width);
    format!(
        r#":root {{
    --color-primary: {primary};
    --color-accent: {accent};
    --color-background: {background};
    --color-text: {text};
    --color-muted: {muted};
    --font-heading: {heading_font};
    --font-body: {body_font};
    --layout-max-width: {max_width};
}}"#
    )
}
// =============================================================================
// Tera engine med innebygde templates
// =============================================================================
/// Build a Tera instance preloaded with every built-in template:
/// the shared base, four themes (article + index each), and the shared
/// dynamic pages (category/archive/search/about, themed via CSS variables).
///
/// Panics at startup if any embedded template fails to parse — these are
/// compile-time-embedded assets, so failure is a build defect, not runtime input.
pub fn build_tera() -> Tera {
    // (template name, embedded source, panic message on parse failure)
    let templates: [(&str, &str, &str); 13] = [
        ("base.html", include_str!("templates/base.html"), "Feil i base.html template"),
        ("avis/article.html", include_str!("templates/avis/article.html"), "Feil i avis/article.html"),
        ("avis/index.html", include_str!("templates/avis/index.html"), "Feil i avis/index.html"),
        ("magasin/article.html", include_str!("templates/magasin/article.html"), "Feil i magasin/article.html"),
        ("magasin/index.html", include_str!("templates/magasin/index.html"), "Feil i magasin/index.html"),
        ("blogg/article.html", include_str!("templates/blogg/article.html"), "Feil i blogg/article.html"),
        ("blogg/index.html", include_str!("templates/blogg/index.html"), "Feil i blogg/index.html"),
        ("tidsskrift/article.html", include_str!("templates/tidsskrift/article.html"), "Feil i tidsskrift/article.html"),
        ("tidsskrift/index.html", include_str!("templates/tidsskrift/index.html"), "Feil i tidsskrift/index.html"),
        ("category.html", include_str!("templates/category.html"), "Feil i category.html"),
        ("archive.html", include_str!("templates/archive.html"), "Feil i archive.html"),
        ("search.html", include_str!("templates/search.html"), "Feil i search.html"),
        ("about.html", include_str!("templates/about.html"), "Feil i about.html"),
    ];
    let mut tera = Tera::default();
    for (name, source, err) in templates {
        tera.add_raw_template(name, source).expect(err);
    }
    tera
}
// =============================================================================
// Datamodeller for rendering
// =============================================================================
/// Fully resolved article data handed to Tera templates.
#[derive(Serialize, Clone)]
pub struct ArticleData {
    /// Full node UUID as a string.
    pub id: String,
    /// First 8 characters of the UUID; used in public article URLs.
    pub short_id: String,
    pub title: String,
    pub subtitle: Option<String>,
    /// Article body HTML (TipTap conversion, or the raw content field as fallback).
    pub content: String,
    pub summary: Option<String>,
    /// CAS hash of the OG image variant, if one exists.
    pub og_image: Option<String>,
    /// Publication timestamp, RFC 3339.
    pub published_at: String,
    /// Human-readable date formatted with `%e. %B %Y`.
    pub published_at_short: String,
}
/// Front-page data: hero + featured + stream sections for the index template.
#[derive(Serialize)]
pub struct IndexData {
    pub title: String,
    /// Optional blurb; always `None` from the call sites in this module.
    pub description: Option<String>,
    pub hero: Option<ArticleData>,
    pub featured: Vec<ArticleData>,
    pub stream: Vec<ArticleData>,
}
// =============================================================================
// In-memory index-cache (dynamisk modus)
// =============================================================================
/// Cached front-page HTML with an expiry time.
pub struct CachedIndex {
    // Fully rendered front-page HTML, served as-is on cache hit.
    html: String,
    // Expiry time; presumably checked by the serve path (not visible in this chunk) — TODO confirm.
    expires_at: DateTime<Utc>,
    /// Active A/B variant edge IDs shown in this cached front page.
    /// Used for impression logging on serve.
    /// Tuple: (edge_id, article_id)
    active_ab_variants: Vec<(Uuid, Uuid)>,
    /// Map from article_id to active variant edge_ids (for click attribution).
    ab_article_variants: HashMap<Uuid, Vec<Uuid>>,
}
/// Thread-safe cache for front-page rendering (dynamic mode).
/// Key: collection UUID. Value: rendered HTML with TTL.
pub type IndexCache = Arc<RwLock<HashMap<Uuid, CachedIndex>>>;
/// Create a new, empty IndexCache.
pub fn new_index_cache() -> IndexCache {
    Arc::new(RwLock::new(HashMap::new()))
}
/// Drop the cached front page for the given collection, if present.
/// Logs only when an entry was actually removed; the write lock is released
/// before logging.
pub async fn invalidate_index_cache(cache: &IndexCache, collection_id: Uuid) {
    let removed = cache.write().await.remove(&collection_id).is_some();
    if removed {
        tracing::info!(collection_id = %collection_id, "Forside-cache invalidert");
    }
}
// =============================================================================
// Render-funksjoner
// =============================================================================
/// Render a full article page with the given theme and SEO metadata.
///
/// Selects the `{theme}/article.html` template and exposes the CSS variables,
/// article data, collection title, base URL, logo hash, SEO payload and RSS
/// flag to the template context.
pub fn render_article(
    tera: &Tera,
    theme: &str,
    config: &ThemeConfig,
    article: &ArticleData,
    collection_title: &str,
    base_url: &str,
    seo: &SeoData,
    has_rss: bool,
) -> Result<String, tera::Error> {
    let mut context = Context::new();
    context.insert("css_variables", &build_css_variables(theme, config));
    context.insert("theme", theme);
    context.insert("article", article);
    context.insert("collection_title", collection_title);
    context.insert("base_url", base_url);
    context.insert("logo_hash", &config.logo_hash);
    context.insert("seo", seo);
    context.insert("has_rss", &has_rss);
    tera.render(&format!("{theme}/article.html"), &context)
}
/// Render the front page with the given theme.
///
/// Selects the `{theme}/index.html` template and exposes the CSS variables,
/// index data, base URL, logo hash and RSS flag to the template context.
pub fn render_index(
    tera: &Tera,
    theme: &str,
    config: &ThemeConfig,
    index: &IndexData,
    base_url: &str,
    has_rss: bool,
) -> Result<String, tera::Error> {
    let mut context = Context::new();
    context.insert("css_variables", &build_css_variables(theme, config));
    context.insert("theme", theme);
    context.insert("index", index);
    context.insert("base_url", base_url);
    context.insert("logo_hash", &config.logo_hash);
    context.insert("has_rss", &has_rss);
    tera.render(&format!("{theme}/index.html"), &context)
}
// =============================================================================
// CAS-rendering: render artikkel → lagre i CAS → oppdater node metadata
// =============================================================================
/// Render an article to HTML, store it in CAS, and update the node's metadata.
///
/// Called from the job queue (`render_article` job) when a `belongs_to` edge
/// is created to a collection carrying the `publishing` trait.
///
/// Steps:
/// 1. Fetch the collection's publishing config (theme, slug, custom_domain)
/// 2. Fetch the article's metadata.document (TipTap JSON)
/// 3. Convert document → HTML via tiptap::document_to_html()
/// 4. Render the full article page with Tera template + SEO
/// 5. Store the HTML in CAS
/// 6. Update the node's metadata.rendered (html_hash, rendered_at, renderer_version)
///
/// Returns a JSON summary (html_hash, size, renderer_version), or a
/// human-readable error string (error text is user-facing Norwegian).
pub async fn render_article_to_cas(
    db: &PgPool,
    cas: &CasStore,
    node_id: Uuid,
    collection_id: Uuid,
) -> Result<serde_json::Value, String> {
    // 1. Fetch the collection's publishing config.
    let collection_row: Option<(Option<String>, serde_json::Value)> = sqlx::query_as(
        r#"
        SELECT title, metadata
        FROM nodes
        WHERE id = $1 AND node_kind = 'collection'
        "#,
    )
    .bind(collection_id)
    .fetch_optional(db)
    .await
    .map_err(|e| format!("Feil ved henting av samling: {e}"))?;
    let Some((collection_title_opt, collection_metadata)) = collection_row else {
        return Err(format!("Samling {collection_id} finnes ikke"));
    };
    // A missing or malformed publishing trait degrades to defaults instead of failing.
    let coll_traits = collection_metadata.get("traits");
    let publishing_config: PublishingConfig = coll_traits
        .and_then(|t| t.get("publishing"))
        .cloned()
        .map(|v| serde_json::from_value(v).unwrap_or_default())
        .unwrap_or_default();
    let has_rss = coll_traits.and_then(|t| t.get("rss")).is_some();
    let slug = publishing_config.slug.as_deref().unwrap_or("unknown");
    let theme = publishing_config.theme.as_deref().unwrap_or("blogg");
    let config = &publishing_config.theme_config;
    let collection_title = collection_title_opt.unwrap_or_else(|| slug.to_string());
    // 2. Fetch the article's data + metadata.document + edge metadata.
    let article_row: Option<(Uuid, Option<String>, Option<String>, serde_json::Value, DateTime<Utc>)> = sqlx::query_as(
        r#"
        SELECT n.id, n.title, n.content, n.metadata, n.created_at
        FROM nodes n
        WHERE n.id = $1
        "#,
    )
    .bind(node_id)
    .fetch_optional(db)
    .await
    .map_err(|e| format!("Feil ved henting av artikkel: {e}"))?;
    let Some((id, title, content, metadata, created_at)) = article_row else {
        return Err(format!("Artikkel {node_id} finnes ikke"));
    };
    // publish_at lives in the belongs_to edge metadata; fall back to the
    // node's created_at when absent or unparseable.
    let edge_meta: Option<(Option<serde_json::Value>,)> = sqlx::query_as(
        r#"
        SELECT metadata
        FROM edges
        WHERE source_id = $1 AND target_id = $2 AND edge_type = 'belongs_to'
        LIMIT 1
        "#,
    )
    .bind(node_id)
    .bind(collection_id)
    .fetch_optional(db)
    .await
    .map_err(|e| format!("Feil ved henting av edge: {e}"))?;
    let publish_at = edge_meta
        .as_ref()
        .and_then(|(m,)| m.as_ref())
        .and_then(|m| m.get("publish_at"))
        .and_then(|v| v.as_str())
        .and_then(|s| s.parse::<DateTime<Utc>>().ok())
        .unwrap_or(created_at);
    // 3. Convert metadata.document to HTML (or fall back to the content field).
    let article_html = if let Some(doc) = metadata.get("document") {
        let html = tiptap::document_to_html(doc);
        if html.is_empty() {
            // Conversion produced nothing — fall back to the content field.
            content.unwrap_or_default()
        } else {
            html
        }
    } else {
        // No document — use content directly.
        content.unwrap_or_default()
    };
    // The public short ID is the first 8 characters of the hyphenated UUID string.
    let short_id = id.to_string()[..8].to_string();
    // 3b. Fetch presentation elements (title/subtitle/summary/og_image/og_description).
    let pres = fetch_presentation_elements(db, node_id).await
        .map_err(|e| format!("Feil ved henting av presentasjonselementer: {e}"))?;
    // Prefer the presentation-element title; otherwise the article's internal title.
    let article_title = pres.best_title()
        .unwrap_or_else(|| title.unwrap_or_else(|| "Uten tittel".to_string()));
    // Prefer the presentation-element summary; otherwise auto-generate a crude
    // 200-char excerpt by stripping <p> tags and newlines from the body HTML.
    let summary_text = pres.best_summary()
        .unwrap_or_else(|| truncate(&article_html.replace("<p>", "").replace("</p>", " ").replace('\n', " "), 200));
    let article_data = ArticleData {
        id: id.to_string(),
        short_id: short_id.clone(),
        title: article_title,
        subtitle: pres.best_subtitle(),
        content: article_html,
        summary: Some(summary_text),
        og_image: pres.best_og_image(),
        published_at: publish_at.to_rfc3339(),
        published_at_short: publish_at.format("%e. %B %Y").to_string(),
    };
    // 4. Build SEO data and render with Tera.
    let base_url = publishing_config
        .custom_domain
        .as_deref()
        .map(|d| format!("https://{d}"))
        .unwrap_or_else(|| format!("/pub/{slug}"));
    let canonical_url = format!("{base_url}/{short_id}");
    let seo = build_seo_data(&article_data, &collection_title, &canonical_url);
    // NOTE(review): build_tera() re-parses every embedded template per call;
    // acceptable for job-queue rendering, but worth confirming under load.
    let tera = build_tera();
    let html = render_article(&tera, theme, config, &article_data, &collection_title, &base_url, &seo, has_rss)
        .map_err(|e| format!("Tera render-feil: {e}"))?;
    // 5. Store in CAS (content-addressed, so identical HTML deduplicates).
    let store_result = cas
        .store(html.as_bytes())
        .await
        .map_err(|e| format!("CAS-lagring feilet: {e}"))?;
    tracing::info!(
        node_id = %node_id,
        hash = %store_result.hash,
        size = store_result.size,
        deduplicated = store_result.already_existed,
        "Artikkel rendret og lagret i CAS"
    );
    // 6. Update the node's metadata.rendered. The CASE guarantees the
    // `rendered` object exists before the nested jsonb_set calls write into it.
    let now = Utc::now();
    sqlx::query(
        r#"
        UPDATE nodes
        SET metadata = jsonb_set(
            jsonb_set(
                jsonb_set(
                    CASE WHEN metadata ? 'rendered'
                        THEN metadata
                        ELSE jsonb_set(metadata, '{rendered}', '{}'::jsonb)
                    END,
                    '{rendered,html_hash}',
                    to_jsonb($2::text)
                ),
                '{rendered,rendered_at}',
                to_jsonb($3::text)
            ),
            '{rendered,renderer_version}',
            to_jsonb($4::bigint)
        )
        WHERE id = $1
        "#,
    )
    .bind(node_id)
    .bind(&store_result.hash)
    .bind(now.to_rfc3339())
    .bind(RENDERER_VERSION)
    .execute(db)
    .await
    .map_err(|e| format!("Feil ved oppdatering av metadata.rendered: {e}"))?;
    tracing::info!(
        node_id = %node_id,
        html_hash = %store_result.hash,
        renderer_version = RENDERER_VERSION,
        "metadata.rendered oppdatert"
    );
    Ok(serde_json::json!({
        "html_hash": store_result.hash,
        "size": store_result.size,
        "renderer_version": RENDERER_VERSION
    }))
}
/// Render the front page to CAS (static mode).
///
/// Fetches hero/featured/stream with three indexed queries,
/// renders via Tera template, stores the HTML in CAS, and updates
/// the collection's metadata.rendered_index with index_hash.
pub async fn render_index_to_cas(
    db: &PgPool,
    cas: &CasStore,
    collection_id: Uuid,
) -> Result<serde_json::Value, String> {
    // Fetch the collection's config.
    let collection_row: Option<(Option<String>, serde_json::Value)> = sqlx::query_as(
        r#"
        SELECT title, metadata
        FROM nodes
        WHERE id = $1 AND node_kind = 'collection'
        "#,
    )
    .bind(collection_id)
    .fetch_optional(db)
    .await
    .map_err(|e| format!("Feil ved henting av samling: {e}"))?;
    let Some((collection_title_opt, collection_metadata)) = collection_row else {
        return Err(format!("Samling {collection_id} finnes ikke"));
    };
    // Malformed publishing trait degrades to defaults instead of failing.
    let idx_traits = collection_metadata.get("traits");
    let publishing_config: PublishingConfig = idx_traits
        .and_then(|t| t.get("publishing"))
        .cloned()
        .map(|v| serde_json::from_value(v).unwrap_or_default())
        .unwrap_or_default();
    let has_rss = idx_traits.and_then(|t| t.get("rss")).is_some();
    let slug = publishing_config.slug.as_deref().unwrap_or("unknown");
    let theme = publishing_config.theme.as_deref().unwrap_or("blogg");
    let config = &publishing_config.theme_config;
    let collection_title = collection_title_opt.unwrap_or_else(|| slug.to_string());
    // Front-page sizing defaults: 4 featured, 20 stream articles.
    let featured_max = publishing_config.featured_max.unwrap_or(4);
    let stream_page_size = publishing_config.stream_page_size.unwrap_or(20);
    let base_url = publishing_config
        .custom_domain
        .as_deref()
        .map(|d| format!("https://{d}"))
        .unwrap_or_else(|| format!("/pub/{slug}"));
    // Fetch articles with three indexed queries (A/B rotation included).
    // The A/B variant IDs are discarded here: static mode has no per-request
    // serve step from which to log impressions.
    let (hero, featured, stream, _ab_variants) =
        fetch_index_articles_optimized(db, collection_id, featured_max, stream_page_size).await
            .map_err(|e| format!("Feil ved henting av forsideartikler: {e}"))?;
    let index_data = IndexData {
        title: collection_title,
        description: None,
        hero,
        featured,
        stream,
    };
    // Render with Tera.
    let tera = build_tera();
    let html = render_index(&tera, theme, config, &index_data, &base_url, has_rss)
        .map_err(|e| format!("Tera render-feil (index): {e}"))?;
    // Store in CAS.
    let store_result = cas
        .store(html.as_bytes())
        .await
        .map_err(|e| format!("CAS-lagring feilet: {e}"))?;
    tracing::info!(
        collection_id = %collection_id,
        hash = %store_result.hash,
        size = store_result.size,
        deduplicated = store_result.already_existed,
        "Forside rendret og lagret i CAS"
    );
    // Update the collection's metadata.rendered_index. The CASE guarantees
    // the `rendered_index` object exists before the nested writes.
    let now = Utc::now();
    sqlx::query(
        r#"
        UPDATE nodes
        SET metadata = jsonb_set(
            jsonb_set(
                jsonb_set(
                    CASE WHEN metadata ? 'rendered_index'
                        THEN metadata
                        ELSE jsonb_set(metadata, '{rendered_index}', '{}'::jsonb)
                    END,
                    '{rendered_index,index_hash}',
                    to_jsonb($2::text)
                ),
                '{rendered_index,rendered_at}',
                to_jsonb($3::text)
            ),
            '{rendered_index,renderer_version}',
            to_jsonb($4::bigint)
        )
        WHERE id = $1
        "#,
    )
    .bind(collection_id)
    .bind(&store_result.hash)
    .bind(now.to_rfc3339())
    .bind(RENDERER_VERSION)
    .execute(db)
    .await
    .map_err(|e| format!("Feil ved oppdatering av metadata.rendered_index: {e}"))?;
    tracing::info!(
        collection_id = %collection_id,
        index_hash = %store_result.hash,
        "metadata.rendered_index oppdatert"
    );
    Ok(serde_json::json!({
        "index_hash": store_result.hash,
        "size": store_result.size,
        "renderer_version": RENDERER_VERSION
    }))
}
// =============================================================================
// Database-spørringer
// =============================================================================
/// A resolved publishing collection (see `find_publishing_collection`).
struct CollectionRow {
    id: Uuid,
    title: Option<String>,
    /// Parsed `metadata.traits.publishing` (defaults if malformed).
    publishing_config: PublishingConfig,
    /// True when the collection also carries the `rss` trait.
    has_rss: bool,
}
/// Find a collection carrying the publishing trait, looked up by slug.
///
/// Returns `Ok(None)` when no collection matches. NOTE(review): slugs are
/// assumed unique; with duplicates, LIMIT 1 picks one arbitrarily — confirm
/// a uniqueness guarantee exists elsewhere.
async fn find_publishing_collection(
    db: &PgPool,
    slug: &str,
) -> Result<Option<CollectionRow>, sqlx::Error> {
    let row: Option<(Uuid, Option<String>, serde_json::Value)> = sqlx::query_as(
        r#"
        SELECT id, title, metadata
        FROM nodes
        WHERE node_kind = 'collection'
          AND metadata->'traits'->'publishing'->>'slug' = $1
        LIMIT 1
        "#,
    )
    .bind(slug)
    .fetch_optional(db)
    .await?;
    let Some((id, title, metadata)) = row else {
        return Ok(None);
    };
    let traits = metadata.get("traits");
    // Malformed publishing config degrades to defaults instead of erroring.
    let publishing_config: PublishingConfig = traits
        .and_then(|t| t.get("publishing"))
        .cloned()
        .map(|v| serde_json::from_value(v).unwrap_or_default())
        .unwrap_or_default();
    let has_rss = traits
        .and_then(|t| t.get("rss"))
        .is_some();
    Ok(Some(CollectionRow {
        id,
        title,
        publishing_config,
        has_rss,
    }))
}
/// Find a publishing collection by collection ID.
/// Returns `Ok(None)` when the node does not exist, is not a collection,
/// or lacks the publishing trait. A trait that fails to deserialize yields
/// the default config rather than an error.
pub async fn find_publishing_collection_by_id(
    db: &PgPool,
    collection_id: Uuid,
) -> Result<Option<PublishingConfig>, sqlx::Error> {
    // The `?` JSONB operator filters to nodes that actually carry the trait.
    let row: Option<(serde_json::Value,)> = sqlx::query_as(
        r#"
        SELECT metadata
        FROM nodes
        WHERE id = $1
          AND node_kind = 'collection'
          AND metadata->'traits' ? 'publishing'
        "#,
    )
    .bind(collection_id)
    .fetch_optional(db)
    .await?;
    Ok(row.map(|(metadata,)| {
        metadata
            .get("traits")
            .and_then(|t| t.get("publishing"))
            .cloned()
            .and_then(|v| serde_json::from_value(v).ok())
            .unwrap_or_default()
    }))
}
/// Fetch article data for a single node (belongs_to the collection).
/// Also returns the node's metadata (to check rendered.html_hash).
async fn fetch_article(
    db: &PgPool,
    collection_id: Uuid,
    article_short_id: &str,
) -> Result<Option<FetchedArticle>, sqlx::Error> {
    // short_id is the first 8 characters of the UUID; match by text prefix.
    // NOTE(review): the bound value is used as a LIKE pattern, so '%'/'_' in
    // the input would act as wildcards — confirm callers validate the short id.
    let pattern = format!("{article_short_id}%");
    let row: Option<(Uuid, Option<String>, Option<String>, serde_json::Value, DateTime<Utc>, Option<serde_json::Value>)> = sqlx::query_as(
        r#"
        SELECT n.id, n.title, n.content, n.metadata, n.created_at, e.metadata
        FROM edges e
        JOIN nodes n ON n.id = e.source_id
        WHERE e.target_id = $1
          AND e.edge_type = 'belongs_to'
          AND n.id::text LIKE $2
        LIMIT 1
        "#,
    )
    .bind(collection_id)
    .bind(&pattern)
    .fetch_optional(db)
    .await?;
    let Some((id, title, content, node_metadata, created_at, edge_meta)) = row else {
        return Ok(None);
    };
    // Publication timestamp from edge metadata, else node creation time.
    let publish_at = edge_meta
        .as_ref()
        .and_then(|m| m.get("publish_at"))
        .and_then(|v| v.as_str())
        .and_then(|s| s.parse::<DateTime<Utc>>().ok())
        .unwrap_or(created_at);
    // Check whether pre-rendered HTML exists in CAS.
    let html_hash = node_metadata
        .get("rendered")
        .and_then(|r| r.get("html_hash"))
        .and_then(|h| h.as_str())
        .map(|s| s.to_string());
    // Convert metadata.document to HTML, or fall back to the content field
    // (also when conversion yields an empty string).
    let article_html = if let Some(doc) = node_metadata.get("document") {
        let html = tiptap::document_to_html(doc);
        if html.is_empty() {
            content.unwrap_or_default()
        } else {
            html
        }
    } else {
        content.unwrap_or_default()
    };
    // Fetch presentation-element variants (title/subtitle/summary/og_image).
    let pres = fetch_presentation_elements(db, id).await?;
    let article_title = pres.best_title()
        .unwrap_or_else(|| title.unwrap_or_else(|| "Uten tittel".to_string()));
    // Fallback summary: crude 200-char excerpt with <p> tags and newlines stripped.
    let summary_text = pres.best_summary()
        .unwrap_or_else(|| truncate(
            &article_html.replace("<p>", "").replace("</p>", " ").replace('\n', " "),
            200,
        ));
    let article = ArticleData {
        id: id.to_string(),
        short_id: id.to_string()[..8].to_string(),
        title: article_title,
        subtitle: pres.best_subtitle(),
        content: article_html,
        summary: Some(summary_text),
        og_image: pres.best_og_image(),
        published_at: publish_at.to_rfc3339(),
        published_at_short: publish_at.format("%e. %B %Y").to_string(),
    };
    Ok(Some(FetchedArticle {
        article,
        html_hash,
        edge_meta,
    }))
}
/// Result of `fetch_article`: render-ready article data plus cache hints.
struct FetchedArticle {
    article: ArticleData,
    /// CAS hash from metadata.rendered.html_hash when pre-rendered HTML exists.
    html_hash: Option<String>,
    #[allow(dead_code)]
    edge_meta: Option<serde_json::Value>,
}
/// Fetch front-page articles with three separate, indexed queries.
///
/// Each query filters on the slot in edge.metadata and uses the
/// GIN index on edges.metadata. More efficient than fetching everything
/// and filtering in Rust, especially with many articles.
///
/// Returns (hero, featured, stream, active A/B variants), where the last
/// element is (edge_id, article_id) pairs used for impression logging.
async fn fetch_index_articles_optimized(
    db: &PgPool,
    collection_id: Uuid,
    featured_max: i64,
    stream_page_size: i64,
) -> Result<(Option<ArticleData>, Vec<ArticleData>, Vec<ArticleData>, Vec<(Uuid, Uuid)>), sqlx::Error> {
    // Helper to convert raw rows to ArticleData (presentation elements applied later).
    fn row_to_article(
        id: Uuid,
        title: Option<String>,
        content: Option<String>,
        created_at: DateTime<Utc>,
        edge_meta: Option<serde_json::Value>,
    ) -> ArticleData {
        // Publication timestamp from edge metadata, else node creation time.
        let publish_at = edge_meta
            .as_ref()
            .and_then(|m| m.get("publish_at"))
            .and_then(|v| v.as_str())
            .and_then(|s| s.parse::<DateTime<Utc>>().ok())
            .unwrap_or(created_at);
        let summary = content.as_deref().map(|c| truncate(c, 200));
        ArticleData {
            id: id.to_string(),
            short_id: id.to_string()[..8].to_string(),
            title: title.unwrap_or_else(|| "Uten tittel".to_string()),
            subtitle: None,
            content: content.unwrap_or_default(),
            summary,
            og_image: None,
            published_at: publish_at.to_rfc3339(),
            published_at_short: publish_at.format("%e. %B %Y").to_string(),
        }
    }
    type Row = (Uuid, Option<String>, Option<String>, DateTime<Utc>, Option<serde_json::Value>);
    // 1. Hero: slot = "hero", at most 1.
    let hero_row: Option<Row> = sqlx::query_as(
        r#"
        SELECT n.id, n.title, n.content, n.created_at, e.metadata
        FROM edges e
        JOIN nodes n ON n.id = e.source_id
        WHERE e.target_id = $1
          AND e.edge_type = 'belongs_to'
          AND e.metadata->>'slot' = 'hero'
        LIMIT 1
        "#,
    )
    .bind(collection_id)
    .fetch_optional(db)
    .await?;
    let hero = hero_row.map(|(id, title, content, created_at, edge_meta)| {
        row_to_article(id, title, content, created_at, edge_meta)
    });
    // 2. Featured: slot = "featured", ordered by slot_order.
    let featured_rows: Vec<Row> = sqlx::query_as(
        r#"
        SELECT n.id, n.title, n.content, n.created_at, e.metadata
        FROM edges e
        JOIN nodes n ON n.id = e.source_id
        WHERE e.target_id = $1
          AND e.edge_type = 'belongs_to'
          AND e.metadata->>'slot' = 'featured'
        ORDER BY (e.metadata->>'slot_order')::int ASC NULLS LAST
        LIMIT $2
        "#,
    )
    .bind(collection_id)
    .bind(featured_max)
    .fetch_all(db)
    .await?;
    let featured: Vec<ArticleData> = featured_rows
        .into_iter()
        .map(|(id, title, content, created_at, edge_meta)| {
            row_to_article(id, title, content, created_at, edge_meta)
        })
        .collect();
    // 3. Stream: slot IS NULL (or missing), ordered by publish_at (newest first).
    let stream_rows: Vec<Row> = sqlx::query_as(
        r#"
        SELECT n.id, n.title, n.content, n.created_at, e.metadata
        FROM edges e
        JOIN nodes n ON n.id = e.source_id
        WHERE e.target_id = $1
          AND e.edge_type = 'belongs_to'
          AND (e.metadata->>'slot' IS NULL OR e.metadata->>'slot' = '')
        ORDER BY COALESCE(
            (e.metadata->>'publish_at')::timestamptz,
            n.created_at
        ) DESC
        LIMIT $2
        "#,
    )
    .bind(collection_id)
    .bind(stream_page_size)
    .fetch_all(db)
    .await?;
    let stream: Vec<ArticleData> = stream_rows
        .into_iter()
        .map(|(id, title, content, created_at, edge_meta)| {
            row_to_article(id, title, content, created_at, edge_meta)
        })
        .collect();
    // Enrich with presentation elements (batch: one query for all article IDs).
    let mut all_ids: Vec<Uuid> = vec![];
    if let Some(ref h) = hero {
        if let Ok(uid) = h.id.parse::<Uuid>() { all_ids.push(uid); }
    }
    for a in &featured { if let Ok(uid) = a.id.parse::<Uuid>() { all_ids.push(uid); } }
    for a in &stream { if let Ok(uid) = a.id.parse::<Uuid>() { all_ids.push(uid); } }
    // NOTE(review): batch-fetch errors are swallowed into an empty map, so the
    // front page then renders without presentation elements.
    let pres_map = fetch_presentation_elements_batch(db, &all_ids).await
        .unwrap_or_default();
    // Collect active A/B variant edge IDs for impression logging.
    let mut active_ab_variants: Vec<(Uuid, Uuid)> = vec![];
    /// Enrich an article with presentation elements via A/B rotation.
    /// Returns edge IDs for variants that are part of active A/B tests.
    fn enrich_with_ab(article: &mut ArticleData, pres: &PresentationElements) -> Vec<(Uuid, Uuid)> {
        let mut variants = vec![];
        let article_id = article.id.parse::<Uuid>().unwrap_or_default();
        // Title: A/B rotation.
        if let Some((text, ab_edge)) = pres.ab_title() {
            article.title = text;
            if let Some(eid) = ab_edge { variants.push((eid, article_id)); }
        }
        // Subtitle: use best (rarely A/B-tested).
        if let Some(s) = pres.best_subtitle() { article.subtitle = Some(s); }
        // Summary: A/B rotation.
        if let Some((text, ab_edge)) = pres.ab_summary() {
            article.summary = Some(text);
            if let Some(eid) = ab_edge { variants.push((eid, article_id)); }
        }
        // OG image: A/B rotation.
        if let Some((hash, ab_edge)) = pres.ab_og_image() {
            article.og_image = Some(hash);
            if let Some(eid) = ab_edge { variants.push((eid, article_id)); }
        }
        variants
    }
    // Re-bind as mutable so enrichment can overwrite fields in place.
    let mut hero = hero;
    if let Some(ref mut h) = hero {
        if let Ok(uid) = h.id.parse::<Uuid>() {
            if let Some(pres) = pres_map.get(&uid) {
                active_ab_variants.extend(enrich_with_ab(h, pres));
            }
        }
    }
    let mut featured = featured;
    for a in &mut featured {
        if let Ok(uid) = a.id.parse::<Uuid>() {
            if let Some(pres) = pres_map.get(&uid) {
                active_ab_variants.extend(enrich_with_ab(a, pres));
            }
        }
    }
    let mut stream = stream;
    for a in &mut stream {
        if let Ok(uid) = a.id.parse::<Uuid>() {
            if let Some(pres) = pres_map.get(&uid) {
                active_ab_variants.extend(enrich_with_ab(a, pres));
            }
        }
    }
    Ok((hero, featured, stream, active_ab_variants))
}
// =============================================================================
// Presentasjonselementer: hent title/subtitle/summary/og_image fra edge-koblede noder
// =============================================================================
/// Presentation elements for an article.
/// Holds one list per element type — multiple variants per type are
/// supported for A/B testing.
struct PresentationElements {
    titles: Vec<PresEl>,
    subtitles: Vec<PresEl>,
    summaries: Vec<PresEl>,
    og_images: Vec<PresEl>,
    og_descriptions: Vec<PresEl>,
}
/// A single presentation-element variant: a node linked to an article via a
/// title/subtitle/summary/og_image/og_description edge.
struct PresEl {
    /// ID of the linking edge — used as the variant key for A/B event logging.
    edge_id: Uuid,
    title: Option<String>,
    content: Option<String>,
    #[allow(dead_code)]
    node_kind: String,
    /// Node metadata (e.g. `cas_hash` for og_image nodes).
    metadata: serde_json::Value,
    /// Edge metadata; carries `ab_status` ("winner" / "testing" / "retired").
    edge_metadata: serde_json::Value,
}
impl PresEl {
    /// The variant's A/B status from edge metadata ("winner", "testing",
    /// "retired"), or the empty string when no status is set.
    fn ab_status(&self) -> &str {
        match self.edge_metadata.get("ab_status").and_then(|v| v.as_str()) {
            Some(status) => status,
            None => "",
        }
    }
}
impl PresentationElements {
    /// Pick the best variant: a "winner" if one exists, otherwise the first
    /// non-retired element. Used for single-article rendering, where we
    /// always show the "best" variant.
    fn best_of(elements: &[PresEl]) -> Option<&PresEl> {
        elements
            .iter()
            .find(|e| e.ab_status() == "winner")
            .or_else(|| elements.iter().find(|e| e.ab_status() != "retired"))
    }

    /// Pick a variant for front-page display with A/B rotation.
    /// A "winner" is always returned when present. With several non-retired
    /// candidates, one is chosen at random; the caller logs an impression
    /// against the returned element's edge_id.
    fn ab_select(elements: &[PresEl]) -> Option<&PresEl> {
        if let Some(winner) = elements.iter().find(|e| e.ab_status() == "winner") {
            return Some(winner);
        }
        // Candidate pool: everything that is not retired (includes elements
        // without any ab_status, i.e. single-variant setups).
        let pool: Vec<&PresEl> = elements
            .iter()
            .filter(|e| e.ab_status() != "retired")
            .collect();
        match pool.len() {
            0 => None,
            1 => Some(pool[0]),
            // Random rotation between testing variants.
            _ => pool.choose(&mut rand::thread_rng()).copied(),
        }
    }

    /// True when an active A/B test exists (>1 non-retired variant).
    fn has_active_ab_test(elements: &[PresEl]) -> bool {
        let live = elements.iter().filter(|e| e.ab_status() != "retired").count();
        live > 1
    }

    fn best_title(&self) -> Option<String> {
        let el = Self::best_of(&self.titles)?;
        el.title.clone().or_else(|| el.content.clone())
    }

    fn best_subtitle(&self) -> Option<String> {
        let el = Self::best_of(&self.subtitles)?;
        el.title.clone().or_else(|| el.content.clone())
    }

    fn best_summary(&self) -> Option<String> {
        let el = Self::best_of(&self.summaries)?;
        el.content.clone().or_else(|| el.title.clone())
    }

    fn best_og_image(&self) -> Option<String> {
        let el = Self::best_of(&self.og_images)?;
        Some(el.metadata.get("cas_hash")?.as_str()?.to_string())
    }

    #[allow(dead_code)]
    fn best_og_description(&self) -> Option<String> {
        let el = Self::best_of(&self.og_descriptions)?;
        el.content.clone().or_else(|| el.title.clone())
    }

    /// A/B selection for the front page: pick a title with rotation.
    /// The edge_id is only returned when an active test exists.
    fn ab_title(&self) -> Option<(String, Option<Uuid>)> {
        let el = Self::ab_select(&self.titles)?;
        let text = el.title.clone().or_else(|| el.content.clone())?;
        let edge = Self::has_active_ab_test(&self.titles).then_some(el.edge_id);
        Some((text, edge))
    }

    /// A/B selection for the front page: pick a summary with rotation.
    fn ab_summary(&self) -> Option<(String, Option<Uuid>)> {
        let el = Self::ab_select(&self.summaries)?;
        let text = el.content.clone().or_else(|| el.title.clone())?;
        let edge = Self::has_active_ab_test(&self.summaries).then_some(el.edge_id);
        Some((text, edge))
    }

    /// A/B selection for the front page: pick an OG image with rotation.
    fn ab_og_image(&self) -> Option<(String, Option<Uuid>)> {
        let el = Self::ab_select(&self.og_images)?;
        let hash = el.metadata.get("cas_hash")?.as_str()?.to_string();
        let edge = Self::has_active_ab_test(&self.og_images).then_some(el.edge_id);
        Some((hash, edge))
    }
}
/// Fetch all presentation elements for one article.
/// Elements are nodes linked via title/subtitle/summary/og_image/
/// og_description edges where source_id = element node, target_id = article.
async fn fetch_presentation_elements(
    db: &PgPool,
    article_id: Uuid,
) -> Result<PresentationElements, sqlx::Error> {
    let rows: Vec<(Uuid, String, Option<String>, Option<String>, String, serde_json::Value, serde_json::Value)> = sqlx::query_as(
        r#"
        SELECT e.id AS edge_id, e.edge_type, n.title, n.content, n.node_kind, n.metadata, e.metadata AS edge_metadata
        FROM edges e
        JOIN nodes n ON n.id = e.source_id
        WHERE e.target_id = $1
          AND e.edge_type IN ('title', 'subtitle', 'summary', 'og_image', 'og_description')
        ORDER BY e.created_at
        "#,
    )
    .bind(article_id)
    .fetch_all(db)
    .await?;
    let mut result = PresentationElements {
        titles: vec![],
        subtitles: vec![],
        summaries: vec![],
        og_images: vec![],
        og_descriptions: vec![],
    };
    for (edge_id, edge_type, title, content, node_kind, metadata, edge_metadata) in rows {
        let el = PresEl { edge_id, title, content, node_kind, metadata, edge_metadata };
        // Route each element into its bucket by edge type; unknown types are dropped.
        let bucket = match edge_type.as_str() {
            "title" => &mut result.titles,
            "subtitle" => &mut result.subtitles,
            "summary" => &mut result.summaries,
            "og_image" => &mut result.og_images,
            "og_description" => &mut result.og_descriptions,
            _ => continue,
        };
        bucket.push(el);
    }
    Ok(result)
}
/// Fetch presentation elements for several articles in a single query.
/// Returns a map from article ID to its PresentationElements.
async fn fetch_presentation_elements_batch(
    db: &PgPool,
    article_ids: &[Uuid],
) -> Result<HashMap<Uuid, PresentationElements>, sqlx::Error> {
    // Avoid a round-trip for the empty case.
    if article_ids.is_empty() {
        return Ok(HashMap::new());
    }
    let rows: Vec<(Uuid, Uuid, String, Option<String>, Option<String>, String, serde_json::Value, serde_json::Value)> = sqlx::query_as(
        r#"
        SELECT e.target_id AS article_id, e.id AS edge_id, e.edge_type, n.title, n.content, n.node_kind, n.metadata, e.metadata AS edge_metadata
        FROM edges e
        JOIN nodes n ON n.id = e.source_id
        WHERE e.target_id = ANY($1)
          AND e.edge_type IN ('title', 'subtitle', 'summary', 'og_image', 'og_description')
        ORDER BY e.created_at
        "#,
    )
    .bind(article_ids)
    .fetch_all(db)
    .await?;
    let mut map: HashMap<Uuid, PresentationElements> = HashMap::new();
    for (article_id, edge_id, edge_type, title, content, node_kind, metadata, edge_metadata) in rows {
        let entry = map.entry(article_id).or_insert_with(|| PresentationElements {
            titles: vec![],
            subtitles: vec![],
            summaries: vec![],
            og_images: vec![],
            og_descriptions: vec![],
        });
        let el = PresEl { edge_id, title, content, node_kind, metadata, edge_metadata };
        // Same bucket dispatch as the single-article variant.
        let bucket = match edge_type.as_str() {
            "title" => &mut entry.titles,
            "subtitle" => &mut entry.subtitles,
            "summary" => &mut entry.summaries,
            "og_image" => &mut entry.og_images,
            "og_description" => &mut entry.og_descriptions,
            _ => continue,
        };
        bucket.push(el);
    }
    Ok(map)
}
/// Truncate `s` to at most `max` bytes, preferring to break at the last space
/// so words are not cut in half.
///
/// Fix: the previous implementation sliced `&s[..max]` directly, which panics
/// when `max` lands inside a multi-byte UTF-8 sequence (e.g. æ/ø/å in
/// Norwegian text). We now floor the cut point to a char boundary first.
fn truncate(s: &str, max: usize) -> String {
    if s.len() <= max {
        return s.to_string();
    }
    // Floor the cut point to a char boundary so slicing cannot panic.
    let mut cut = max;
    while cut > 0 && !s.is_char_boundary(cut) {
        cut -= 1;
    }
    let head = &s[..cut];
    // Prefer breaking at the last space within the window.
    match head.rfind(' ') {
        Some(pos) => head[..pos].to_string(),
        None => head.to_string(),
    }
}
// =============================================================================
// HTTP handlers
// =============================================================================
/// GET /pub/{slug} — front page for a publication.
///
/// Supports two modes controlled by `index_mode` in the trait config:
/// - **static**: serves pre-rendered HTML from CAS (immutable cache).
///   The front page is rendered into CAS by the `render_index` job at publish time.
/// - **dynamic** (default): renders on-the-fly with an in-memory cache.
///   TTL is controlled by `index_cache_ttl` (default 300s).
pub async fn serve_index(
    State(state): State<AppState>,
    Path(slug): Path<String>,
) -> Result<Response, StatusCode> {
    let collection = find_publishing_collection(&state.db, &slug)
        .await
        .map_err(|e| {
            tracing::error!(slug = %slug, error = %e, "Feil ved oppslag av samling");
            StatusCode::INTERNAL_SERVER_ERROR
        })?
        .ok_or(StatusCode::NOT_FOUND)?;
    let index_mode = collection.publishing_config.index_mode.as_deref().unwrap_or("dynamic");
    let cache_ttl = collection.publishing_config.index_cache_ttl.unwrap_or(300);
    // --- Static mode: serve from CAS ---
    if index_mode == "static" {
        // Look up metadata.rendered_index.index_hash on the collection node.
        let hash_row: Option<(serde_json::Value,)> = sqlx::query_as(
            "SELECT metadata FROM nodes WHERE id = $1",
        )
        .bind(collection.id)
        .fetch_optional(&state.db)
        .await
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
        if let Some((metadata,)) = hash_row {
            if let Some(index_hash) = metadata
                .get("rendered_index")
                .and_then(|r| r.get("index_hash"))
                .and_then(|h| h.as_str())
            {
                let cas_path = state.cas.path_for(index_hash);
                if cas_path.exists() {
                    let html_bytes = tokio::fs::read(&cas_path).await.map_err(|e| {
                        tracing::error!(hash = %index_hash, error = %e, "Kunne ikke lese CAS-fil for index");
                        StatusCode::INTERNAL_SERVER_ERROR
                    })?;
                    // Content-addressed bytes never change → cache forever.
                    return Ok(Response::builder()
                        .header(header::CONTENT_TYPE, "text/html; charset=utf-8")
                        .header(
                            header::CACHE_CONTROL,
                            "public, max-age=31536000, immutable",
                        )
                        .body(html_bytes.into())
                        .unwrap());
                }
            }
        }
        // Fallthrough: no CAS version — render on-the-fly as a fallback.
        tracing::warn!(slug = %slug, "Statisk index mangler i CAS, faller tilbake til dynamisk rendering");
    }
    // --- Dynamic mode: in-memory cache with TTL ---
    {
        let cache = state.index_cache.read().await;
        if let Some(cached) = cache.get(&collection.id) {
            if cached.expires_at > Utc::now() {
                // Log A/B impressions asynchronously on cache hit too, so
                // impression counts reflect every front-page serve.
                if !cached.active_ab_variants.is_empty() {
                    let db_clone = state.db.clone();
                    let cid = collection.id;
                    let variants = cached.active_ab_variants.clone();
                    tokio::spawn(async move {
                        log_ab_impressions(&db_clone, cid, &variants).await;
                    });
                }
                // Shrink the client-side max-age to the remaining cache lifetime.
                let max_age = (cached.expires_at - Utc::now()).num_seconds().max(0);
                return Ok(Response::builder()
                    .header(header::CONTENT_TYPE, "text/html; charset=utf-8")
                    .header(
                        header::CACHE_CONTROL,
                        format!("public, max-age={max_age}"),
                    )
                    .body(cached.html.clone().into())
                    .unwrap());
            }
        }
    }
    // Cache miss or expired — render and fill the cache.
    let theme = collection.publishing_config.theme.as_deref().unwrap_or("blogg");
    let config = collection.publishing_config.theme_config.clone();
    let featured_max = collection.publishing_config.featured_max.unwrap_or(4);
    let stream_page_size = collection.publishing_config.stream_page_size.unwrap_or(20);
    let (hero, featured, stream, ab_variants) = fetch_index_articles_optimized(
        &state.db,
        collection.id,
        featured_max,
        stream_page_size,
    )
    .await
    .map_err(|e| {
        tracing::error!(slug = %slug, error = %e, "Feil ved henting av forsideartikler");
        StatusCode::INTERNAL_SERVER_ERROR
    })?;
    let collection_title = collection.title.unwrap_or_else(|| slug.clone());
    let base_url = collection
        .publishing_config
        .custom_domain
        .as_deref()
        .map(|d| format!("https://{d}"))
        .unwrap_or_else(|| format!("/pub/{slug}"));
    let index_data = IndexData {
        title: collection_title,
        description: None,
        hero,
        featured,
        stream,
    };
    let tera = build_tera();
    let html = render_index(&tera, theme, &config, &index_data, &base_url, collection.has_rss).map_err(|e| {
        tracing::error!(slug = %slug, theme = %theme, error = %e, "Tera render-feil (index)");
        StatusCode::INTERNAL_SERVER_ERROR
    })?;
    // Log A/B impressions asynchronously (fire-and-forget).
    if !ab_variants.is_empty() {
        let db_clone = state.db.clone();
        let cid = collection.id;
        let variants = ab_variants.clone();
        tokio::spawn(async move {
            log_ab_impressions(&db_clone, cid, &variants).await;
        });
    }
    // Build an article_id → variant edge_ids map for click attribution
    // (used by serve_article when the reader follows a front-page link).
    let mut ab_article_variants: HashMap<Uuid, Vec<Uuid>> = HashMap::new();
    for &(edge_id, article_id) in &ab_variants {
        ab_article_variants.entry(article_id).or_default().push(edge_id);
    }
    // Store in the cache together with the active A/B variants.
    let expires_at = Utc::now() + chrono::Duration::seconds(cache_ttl as i64);
    {
        let mut cache = state.index_cache.write().await;
        cache.insert(collection.id, CachedIndex {
            html: html.clone(),
            expires_at,
            active_ab_variants: ab_variants,
            ab_article_variants,
        });
    }
    Ok(Response::builder()
        .header(header::CONTENT_TYPE, "text/html; charset=utf-8")
        .header(
            header::CACHE_CONTROL,
            format!("public, max-age={cache_ttl}"),
        )
        .body(html.into())
        .unwrap())
}
/// GET /pub/{slug}/{article_id} — single article.
///
/// Served from CAS when the article is pre-rendered (metadata.rendered.html_hash).
/// Falls back to on-the-fly rendering otherwise.
pub async fn serve_article(
    State(state): State<AppState>,
    Path((slug, article_id)): Path<(String, String)>,
) -> Result<Response, StatusCode> {
    let collection = find_publishing_collection(&state.db, &slug)
        .await
        .map_err(|e| {
            tracing::error!(slug = %slug, error = %e, "Feil ved oppslag av samling");
            StatusCode::INTERNAL_SERVER_ERROR
        })?
        .ok_or(StatusCode::NOT_FOUND)?;
    let theme = collection.publishing_config.theme.as_deref().unwrap_or("blogg");
    let config = &collection.publishing_config.theme_config;
    let fetched = fetch_article(&state.db, collection.id, &article_id)
        .await
        .map_err(|e| {
            tracing::error!(slug = %slug, article = %article_id, error = %e, "Feil ved henting av artikkel");
            StatusCode::INTERNAL_SERVER_ERROR
        })?
        .ok_or(StatusCode::NOT_FOUND)?;
    // A/B click attribution: if this article has active variants in the cached
    // front page, log a click for each of them (fire-and-forget).
    if let Ok(uid) = fetched.article.id.parse::<Uuid>() {
        let cache = state.index_cache.read().await;
        if let Some(cached) = cache.get(&collection.id) {
            if let Some(variant_edges) = cached.ab_article_variants.get(&uid) {
                let db = state.db.clone();
                let cid = collection.id;
                let edges = variant_edges.clone();
                let aid = uid;
                drop(cache); // Release read lock before spawning
                tokio::spawn(async move {
                    for edge_id in edges {
                        log_ab_click(&db, edge_id, aid, cid).await;
                    }
                });
            }
        }
    }
    // Check whether pre-rendered HTML exists in CAS.
    if let Some(ref hash) = fetched.html_hash {
        let cas_path = state.cas.path_for(hash);
        if cas_path.exists() {
            let html_bytes = tokio::fs::read(&cas_path).await.map_err(|e| {
                tracing::error!(hash = %hash, error = %e, "Kunne ikke lese CAS-fil");
                StatusCode::INTERNAL_SERVER_ERROR
            })?;
            // Content-addressed bytes never change → cache forever.
            return Ok(Response::builder()
                .header(header::CONTENT_TYPE, "text/html; charset=utf-8")
                .header(
                    header::CACHE_CONTROL,
                    "public, max-age=31536000, immutable",
                )
                .body(html_bytes.into())
                .unwrap());
        }
    }
    // Fallback: render on-the-fly with a short-lived cache header.
    let collection_title = collection.title.unwrap_or_else(|| slug.clone());
    let base_url = collection
        .publishing_config
        .custom_domain
        .as_deref()
        .map(|d| format!("https://{d}"))
        .unwrap_or_else(|| format!("/pub/{slug}"));
    let canonical_url = format!("{base_url}/{}", fetched.article.short_id);
    let seo = build_seo_data(&fetched.article, &collection_title, &canonical_url);
    let tera = build_tera();
    let html = render_article(&tera, theme, config, &fetched.article, &collection_title, &base_url, &seo, collection.has_rss)
        .map_err(|e| {
            tracing::error!(slug = %slug, article = %article_id, theme = %theme, error = %e, "Tera render-feil (artikkel)");
            StatusCode::INTERNAL_SERVER_ERROR
        })?;
    Ok(Response::builder()
        .header(header::CONTENT_TYPE, "text/html; charset=utf-8")
        .header(header::CACHE_CONTROL, "public, max-age=300")
        .body(html.into())
        .unwrap())
}
/// GET /pub/{slug}/preview/{theme} — theme preview rendered with sample data.
/// Useful for seeing what a theme looks like without real content.
pub async fn preview_theme(
    Path((slug, theme)): Path<(String, String)>,
) -> Result<Response, StatusCode> {
    const VALID_THEMES: [&str; 4] = ["avis", "magasin", "blogg", "tidsskrift"];
    if !VALID_THEMES.contains(&theme.as_str()) {
        return Err(StatusCode::NOT_FOUND);
    }
    let config = ThemeConfig::default();
    // Fabricate a deterministic sample article for index position `i`.
    let make_sample = |i: i32| ArticleData {
        id: format!("00000000-0000-0000-0000-00000000000{i}"),
        short_id: format!("0000000{i}"),
        title: format!("Eksempelartikkel {i}"),
        subtitle: None,
        content: "Lorem ipsum dolor sit amet, consectetur adipiscing elit. \
            Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. \
            Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris."
            .to_string(),
        summary: Some("Lorem ipsum dolor sit amet, consectetur adipiscing elit.".to_string()),
        og_image: None,
        published_at: "2026-03-18T12:00:00Z".to_string(),
        published_at_short: "18. mars 2026".to_string(),
    };
    let sample_articles: Vec<ArticleData> = (1..=6).map(make_sample).collect();
    // Slot the six samples into hero / featured / stream positions.
    let index_data = IndexData {
        title: format!("Forhåndsvisning — {theme}"),
        description: Some("Eksempeldata for temavisning".to_string()),
        hero: Some(sample_articles[0].clone()),
        featured: sample_articles[1..4].to_vec(),
        stream: sample_articles[4..].to_vec(),
    };
    let base_url = format!("/pub/{slug}");
    let tera = build_tera();
    let html = render_index(&tera, &theme, &config, &index_data, &base_url, false).map_err(|e| {
        tracing::error!(theme = %theme, error = %e, "Tera render-feil (preview)");
        StatusCode::INTERNAL_SERVER_ERROR
    })?;
    Ok(Response::builder()
        .header(header::CONTENT_TYPE, "text/html; charset=utf-8")
        .header(header::CACHE_CONTROL, "no-cache")
        .body(html.into())
        .unwrap())
}
// =============================================================================
// Scheduled publishing — periodic scheduler
// =============================================================================
/// Row from the query for scheduled articles that are ready for publishing.
#[derive(sqlx::FromRow, Debug)]
struct ScheduledArticle {
    /// The article node to render.
    node_id: Uuid,
    /// The publication collection the article belongs to.
    collection_id: Uuid,
}
/// Find belongs_to edges with publish_at in the past where the article has
/// not been rendered yet. Returns (node_id, collection_id) pairs.
async fn find_due_articles(db: &PgPool) -> Result<Vec<ScheduledArticle>, sqlx::Error> {
    // An article is "due" when:
    // 1. its belongs_to edge has publish_at <= now()
    // 2. the node lacks metadata.rendered.html_hash (not rendered yet)
    //
    // We also check that there is no pending/running render_article job
    // for this node already, to avoid duplicates.
    sqlx::query_as::<_, ScheduledArticle>(
        r#"
        SELECT
            e.source_id AS node_id,
            e.target_id AS collection_id
        FROM edges e
        JOIN nodes n ON n.id = e.source_id
        WHERE e.edge_type = 'belongs_to'
          AND (e.metadata->>'publish_at')::timestamptz <= now()
          AND (
              n.metadata->'rendered'->>'html_hash' IS NULL
          )
          AND NOT EXISTS (
              SELECT 1 FROM job_queue jq
              WHERE jq.job_type = 'render_article'
                AND jq.status IN ('pending', 'running', 'retry')
                AND jq.payload->>'node_id' = n.id::text
          )
        "#,
    )
    .fetch_all(db)
    .await
}
/// Run one round of scheduled publishing.
/// Returns the number of articles that were put in the render queue.
async fn run_publish_scheduler(db: &PgPool) -> Result<usize, String> {
    let due = find_due_articles(db)
        .await
        .map_err(|e| format!("Spørring for planlagte artikler feilet: {e}"))?;
    if due.is_empty() {
        return Ok(0);
    }
    let count = due.len();
    tracing::info!(count = due.len(), "Fant planlagte artikler klare for publisering");
    // Track the distinct collections touched so their front pages get re-rendered.
    let mut collections_to_reindex: std::collections::HashSet<Uuid> = std::collections::HashSet::new();
    for article in &due {
        let payload = serde_json::json!({
            "node_id": article.node_id.to_string(),
            "collection_id": article.collection_id.to_string(),
        });
        match jobs::enqueue(db, "render_article", payload, Some(article.collection_id), 5).await {
            Err(e) => {
                tracing::error!(
                    node_id = %article.node_id,
                    error = %e,
                    "Kunne ikke opprette render-jobb for planlagt artikkel"
                );
            }
            Ok(job_id) => {
                tracing::info!(
                    job_id = %job_id,
                    node_id = %article.node_id,
                    collection_id = %article.collection_id,
                    "Render-jobb opprettet for planlagt artikkel"
                );
                collections_to_reindex.insert(article.collection_id);
            }
        }
    }
    // Queue a render_index job for every affected collection (lower priority).
    for collection_id in &collections_to_reindex {
        let payload = serde_json::json!({
            "collection_id": collection_id.to_string(),
        });
        if let Err(e) = jobs::enqueue(db, "render_index", payload, Some(*collection_id), 3).await {
            tracing::error!(
                collection_id = %collection_id,
                error = %e,
                "Kunne ikke opprette render_index-jobb"
            );
        }
    }
    tracing::info!(
        articles = count,
        collections = collections_to_reindex.len(),
        "Planlagt publisering: jobber lagt i kø"
    );
    Ok(count)
}
/// Start the periodic scheduler for planned publishing.
/// Checks every 60 seconds for articles whose publish_at is in the past.
pub fn start_publish_scheduler(db: PgPool) {
    tokio::spawn(async move {
        let startup_delay = std::time::Duration::from_secs(30);
        let interval = std::time::Duration::from_secs(60);
        // Give the rest of the system time to come up before the first check.
        tokio::time::sleep(startup_delay).await;
        tracing::info!("Planlagt publisering-scheduler startet (intervall: 60s)");
        loop {
            match run_publish_scheduler(&db).await {
                Ok(0) => {}
                Ok(count) => {
                    tracing::info!(
                        articles = count,
                        "Planlagt publisering: {} artikler lagt i render-kø",
                        count,
                    );
                }
                Err(e) => {
                    tracing::error!(error = %e, "Planlagt publisering-scheduler feilet");
                }
            }
            tokio::time::sleep(interval).await;
        }
    });
}
// =============================================================================
// Bulk re-rendering on theme change (task 14.14)
// =============================================================================
/// Paginated batch job: find articles that need re-rendering and enqueue
/// render_article jobs in groups of 100. Articles are served with the old
/// theme until they are re-rendered — renderer_version in metadata identifies
/// which ones remain.
///
/// Called when theme or theme_config changes on a collection.
///
/// Fixes: the returned total now counts only *successfully* enqueued jobs
/// (failed enqueues used to be counted too), and the loop terminates when a
/// batch makes no progress — previously, a full batch of persistently failing
/// enqueues would be re-fetched forever, since the NOT EXISTS filter only
/// excludes articles whose job was actually created.
pub async fn trigger_bulk_rerender(
    db: &PgPool,
    collection_id: Uuid,
) -> Result<usize, String> {
    let batch_size: i64 = 100;
    let mut total_enqueued: usize = 0;
    loop {
        // Find the next batch of articles that need re-rendering.
        // Articles that already have a pending/running render job are skipped.
        let article_ids: Vec<(Uuid,)> = sqlx::query_as(
            r#"
            SELECT e.source_id
            FROM edges e
            JOIN nodes n ON n.id = e.source_id
            WHERE e.target_id = $1
              AND e.edge_type = 'belongs_to'
              AND (
                  n.metadata->'rendered'->>'renderer_version' IS NULL
                  OR (n.metadata->'rendered'->>'renderer_version')::bigint < $2
              )
              AND NOT EXISTS (
                  SELECT 1 FROM job_queue jq
                  WHERE jq.job_type = 'render_article'
                    AND jq.status IN ('pending', 'running', 'retry')
                    AND jq.payload->>'node_id' = n.id::text
              )
            LIMIT $3
            "#,
        )
        .bind(collection_id)
        .bind(RENDERER_VERSION)
        .bind(batch_size)
        .fetch_all(db)
        .await
        .map_err(|e| format!("Feil ved henting av artikler for bulk rerender: {e}"))?;
        if article_ids.is_empty() {
            break;
        }
        let batch_count = article_ids.len();
        // Count successful enqueues separately: articles whose enqueue fails
        // stay visible to the next batch query and will be retried there.
        let mut batch_enqueued: usize = 0;
        for (article_id,) in &article_ids {
            let payload = serde_json::json!({
                "node_id": article_id.to_string(),
                "collection_id": collection_id.to_string(),
            });
            match jobs::enqueue(db, "render_article", payload, Some(collection_id), 3).await {
                Ok(_) => batch_enqueued += 1,
                Err(e) => {
                    tracing::error!(
                        article_id = %article_id,
                        collection_id = %collection_id,
                        error = %e,
                        "Kunne ikke enqueue render_article ved temaendring"
                    );
                }
            }
        }
        total_enqueued += batch_enqueued;
        tracing::info!(
            collection_id = %collection_id,
            batch = batch_count,
            total = total_enqueued,
            "Bulk rerender batch enqueued"
        );
        // No successful enqueue in this batch: we are not making progress,
        // so stop instead of hot-looping over the same failing articles.
        if batch_enqueued == 0 {
            break;
        }
        // A batch smaller than batch_size means we have seen everything.
        if (batch_count as i64) < batch_size {
            break;
        }
    }
    // Finally enqueue a render of the front page (lower priority).
    let index_payload = serde_json::json!({
        "collection_id": collection_id.to_string(),
    });
    if let Err(e) = jobs::enqueue(db, "render_index", index_payload, Some(collection_id), 4).await {
        tracing::error!(
            collection_id = %collection_id,
            error = %e,
            "Kunne ikke enqueue render_index ved temaendring"
        );
    }
    tracing::info!(
        collection_id = %collection_id,
        total_articles = total_enqueued,
        "Bulk re-rendering enqueued ved temaendring"
    );
    Ok(total_enqueued)
}
// =============================================================================
// Dynamic pages: category, archive, search, about page (task 14.15)
// =============================================================================
/// Cached dynamic page with an expiry time.
/// The cache key is (collection_id, page type + params).
pub struct CachedDynamicPage {
    /// Fully rendered HTML for the page.
    html: String,
    /// The entry is considered stale once this instant has passed.
    expires_at: DateTime<Utc>,
}
/// Thread-safe cache for dynamic pages (category, archive, search).
pub type DynamicPageCache = Arc<RwLock<HashMap<String, CachedDynamicPage>>>;

/// Create a new, empty dynamic-page cache.
pub fn new_dynamic_page_cache() -> DynamicPageCache {
    Arc::default()
}
/// Return the cached HTML for `key` if present and not yet expired.
async fn check_dynamic_cache(cache: &DynamicPageCache, key: &str) -> Option<String> {
    let now = Utc::now();
    let map = cache.read().await;
    map.get(key)
        .filter(|cached| cached.expires_at > now)
        .map(|cached| cached.html.clone())
}
/// Store rendered HTML in the dynamic cache under `key` for `ttl_secs` seconds.
async fn insert_dynamic_cache(cache: &DynamicPageCache, key: String, html: String, ttl_secs: u64) {
    let entry = CachedDynamicPage {
        html,
        expires_at: Utc::now() + chrono::Duration::seconds(ttl_secs as i64),
    };
    cache.write().await.insert(key, entry);
}
/// Number of articles per page on dynamic pages (category/archive/search).
const DYNAMIC_PAGE_SIZE: i64 = 20;
/// Compute the pagination window: at most 7 page numbers centred on
/// `current`, clamped to [1, total].
fn page_range(current: i64, total: i64) -> Vec<i64> {
    let first = (current - 3).max(1);
    let last = (current + 3).min(total);
    let mut pages = Vec::new();
    let mut p = first;
    while p <= last {
        pages.push(p);
        p += 1;
    }
    pages
}
/// Norwegian month name for a 1-based month number; "ukjent" for out of range.
fn norwegian_month(month: u32) -> &'static str {
    const NAMES: [&str; 12] = [
        "januar", "februar", "mars", "april", "mai", "juni",
        "juli", "august", "september", "oktober", "november", "desember",
    ];
    match month {
        1..=12 => NAMES[(month - 1) as usize],
        _ => "ukjent",
    }
}
/// Query parameters for paginated pages (`?side=N`).
#[derive(Deserialize)]
pub struct PageQuery {
    /// Requested 1-based page number; handlers default to 1 when absent.
    pub side: Option<i64>,
}
/// Query parameters for the search page (`?q=…&side=N`).
#[derive(Deserialize)]
pub struct SearchQuery {
    /// Free-text search query.
    pub q: Option<String>,
    /// Requested 1-based page number.
    pub side: Option<i64>,
}
// --- Category page ---
/// GET /pub/{slug}/kategori/{tag} — articles filtered on a tag edge.
///
/// Tag edges: edge_type = 'tagged' from article → tag node.
/// Tag nodes have node_kind = 'tag'. Slug matching is done against the tag
/// node's title (lowercased).
pub async fn serve_category(
    State(state): State<AppState>,
    Path((slug, tag_slug)): Path<(String, String)>,
    Query(query): Query<PageQuery>,
) -> Result<Response, StatusCode> {
    let collection = find_publishing_collection(&state.db, &slug)
        .await
        .map_err(|e| {
            tracing::error!(slug = %slug, error = %e, "Feil ved oppslag av samling (kategori)");
            StatusCode::INTERNAL_SERVER_ERROR
        })?
        .ok_or(StatusCode::NOT_FOUND)?;
    let page = query.side.unwrap_or(1).max(1);
    let cache_ttl = collection.publishing_config.index_cache_ttl.unwrap_or(300);
    // Cache key: collection + tag + page number.
    let cache_key = format!("cat:{}:{}:{}", collection.id, tag_slug, page);
    if let Some(html) = check_dynamic_cache(&state.dynamic_page_cache, &cache_key).await {
        return Ok(Response::builder()
            .header(header::CONTENT_TYPE, "text/html; charset=utf-8")
            .header(header::CACHE_CONTROL, format!("public, max-age={cache_ttl}"))
            .body(html.into())
            .unwrap());
    }
    // Find the tag node via title match (case-insensitive).
    let tag_row: Option<(Uuid, Option<String>)> = sqlx::query_as(
        r#"
        SELECT id, title FROM nodes
        WHERE node_kind = 'tag' AND LOWER(title) = LOWER($1)
        LIMIT 1
        "#,
    )
    .bind(&tag_slug)
    .fetch_optional(&state.db)
    .await
    .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    let Some((tag_id, tag_title)) = tag_row else {
        return Err(StatusCode::NOT_FOUND);
    };
    let tag_name = tag_title.unwrap_or_else(|| tag_slug.clone());
    // Count the total number of articles in the collection with this tag.
    let (total_count,): (i64,) = sqlx::query_as(
        r#"
        SELECT COUNT(*)
        FROM edges e_belongs
        JOIN edges e_tag ON e_tag.source_id = e_belongs.source_id
        WHERE e_belongs.target_id = $1
          AND e_belongs.edge_type = 'belongs_to'
          AND e_tag.target_id = $2
          AND e_tag.edge_type = 'tagged'
        "#,
    )
    .bind(collection.id)
    .bind(tag_id)
    .fetch_one(&state.db)
    .await
    .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    let total_pages = ((total_count as f64) / DYNAMIC_PAGE_SIZE as f64).ceil() as i64;
    let offset = (page - 1) * DYNAMIC_PAGE_SIZE;
    type Row = (Uuid, Option<String>, Option<String>, DateTime<Utc>, Option<serde_json::Value>);
    let rows: Vec<Row> = sqlx::query_as(
        r#"
        SELECT n.id, n.title, n.content, n.created_at, e_belongs.metadata
        FROM edges e_belongs
        JOIN edges e_tag ON e_tag.source_id = e_belongs.source_id
        JOIN nodes n ON n.id = e_belongs.source_id
        WHERE e_belongs.target_id = $1
          AND e_belongs.edge_type = 'belongs_to'
          AND e_tag.target_id = $2
          AND e_tag.edge_type = 'tagged'
        ORDER BY COALESCE(
            (e_belongs.metadata->>'publish_at')::timestamptz,
            n.created_at
        ) DESC
        LIMIT $3 OFFSET $4
        "#,
    )
    .bind(collection.id)
    .bind(tag_id)
    .bind(DYNAMIC_PAGE_SIZE)
    .bind(offset)
    .fetch_all(&state.db)
    .await
    .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    let articles: Vec<ArticleData> = rows.into_iter().map(|(id, title, content, created_at, edge_meta)| {
        // Effective publish time: explicit publish_at on the edge, else node creation time.
        let publish_at = edge_meta.as_ref()
            .and_then(|m| m.get("publish_at"))
            .and_then(|v| v.as_str())
            .and_then(|s| s.parse::<DateTime<Utc>>().ok())
            .unwrap_or(created_at);
        let summary = content.as_deref().map(|c| truncate(c, 200));
        ArticleData {
            id: id.to_string(),
            short_id: id.to_string()[..8].to_string(),
            title: title.unwrap_or_else(|| "Uten tittel".to_string()),
            subtitle: None,
            content: content.unwrap_or_default(),
            summary,
            og_image: None,
            published_at: publish_at.to_rfc3339(),
            published_at_short: publish_at.format("%e. %B %Y").to_string(),
        }
    }).collect();
    // Render the page.
    let theme = collection.publishing_config.theme.as_deref().unwrap_or("blogg");
    let config = &collection.publishing_config.theme_config;
    let css_vars = build_css_variables(theme, config);
    let collection_title = collection.title.unwrap_or_else(|| slug.clone());
    let base_url = collection.publishing_config.custom_domain.as_deref()
        .map(|d| format!("https://{d}"))
        .unwrap_or_else(|| format!("/pub/{slug}"));
    let tera = build_tera();
    let mut ctx = Context::new();
    ctx.insert("css_variables", &css_vars);
    ctx.insert("theme", theme);
    ctx.insert("collection_title", &collection_title);
    ctx.insert("base_url", &base_url);
    ctx.insert("logo_hash", &config.logo_hash);
    ctx.insert("has_rss", &collection.has_rss);
    ctx.insert("tag_name", &tag_name);
    ctx.insert("tag_slug", &tag_slug);
    ctx.insert("articles", &articles);
    ctx.insert("article_count", &total_count);
    ctx.insert("current_page", &page);
    ctx.insert("total_pages", &total_pages);
    ctx.insert("page_range", &page_range(page, total_pages));
    let html = tera.render("category.html", &ctx).map_err(|e| {
        tracing::error!(error = %e, "Tera render-feil (category)");
        StatusCode::INTERNAL_SERVER_ERROR
    })?;
    insert_dynamic_cache(&state.dynamic_page_cache, cache_key, html.clone(), cache_ttl).await;
    Ok(Response::builder()
        .header(header::CONTENT_TYPE, "text/html; charset=utf-8")
        .header(header::CACHE_CONTROL, format!("public, max-age={cache_ttl}"))
        .body(html.into())
        .unwrap())
}
// --- Archive page ---
/// One month's worth of articles in the archive view.
#[derive(Serialize)]
struct MonthGroup {
    /// Display label, e.g. "mars 2026" (Norwegian month name + year).
    label: String,
    articles: Vec<ArticleData>,
}
/// GET /pub/{slug}/arkiv — chronological archive with month grouping.
///
/// Flow:
/// 1. Resolve the publishing collection from the slug (404 if unknown).
/// 2. Serve from the dynamic page cache when a rendered page exists.
/// 3. Otherwise page through all `belongs_to` articles, newest first
///    (edge `publish_at` when present, else node `created_at`), group
///    consecutive rows by month, render `archive.html` and cache the result.
///
/// Errors: 404 for unknown slug; 500 on DB or template failure.
pub async fn serve_archive(
    State(state): State<AppState>,
    Path(slug): Path<String>,
    Query(query): Query<PageQuery>,
) -> Result<Response, StatusCode> {
    let collection = find_publishing_collection(&state.db, &slug)
        .await
        .map_err(|e| {
            tracing::error!(slug = %slug, error = %e, "Feil ved oppslag av samling (arkiv)");
            StatusCode::INTERNAL_SERVER_ERROR
        })?
        .ok_or(StatusCode::NOT_FOUND)?;
    // 1-based page number; anything below 1 is clamped up.
    let page = query.side.unwrap_or(1).max(1);
    let cache_ttl = collection.publishing_config.index_cache_ttl.unwrap_or(300);
    // Cache key is per-collection and per-page.
    let cache_key = format!("archive:{}:{}", collection.id, page);
    if let Some(html) = check_dynamic_cache(&state.dynamic_page_cache, &cache_key).await {
        return Ok(Response::builder()
            .header(header::CONTENT_TYPE, "text/html; charset=utf-8")
            .header(header::CACHE_CONTROL, format!("public, max-age={cache_ttl}"))
            .body(html.into())
            .unwrap());
    }
    // Total article count, for pagination.
    let (total_count,): (i64,) = sqlx::query_as(
        r#"
SELECT COUNT(*)
FROM edges e
WHERE e.target_id = $1 AND e.edge_type = 'belongs_to'
"#,
    )
    .bind(collection.id)
    .fetch_one(&state.db)
    .await
    .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    let total_pages = ((total_count as f64) / DYNAMIC_PAGE_SIZE as f64).ceil() as i64;
    let offset = (page - 1) * DYNAMIC_PAGE_SIZE;
    type Row = (Uuid, Option<String>, Option<String>, DateTime<Utc>, Option<serde_json::Value>);
    let rows: Vec<Row> = sqlx::query_as(
        r#"
SELECT n.id, n.title, n.content, n.created_at, e.metadata
FROM edges e
JOIN nodes n ON n.id = e.source_id
WHERE e.target_id = $1
AND e.edge_type = 'belongs_to'
ORDER BY COALESCE(
(e.metadata->>'publish_at')::timestamptz,
n.created_at
) DESC
LIMIT $2 OFFSET $3
"#,
    )
    .bind(collection.id)
    .bind(DYNAMIC_PAGE_SIZE)
    .bind(offset)
    .fetch_all(&state.db)
    .await
    .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    // Group articles by month. Rows arrive sorted by publish time, so equal
    // months are adjacent and one pass comparing against the last group's
    // label is enough.
    let mut month_groups: Vec<MonthGroup> = Vec::new();
    for (id, title, content, created_at, edge_meta) in rows {
        // Effective publish time: edge metadata `publish_at` wins over created_at.
        let publish_at = edge_meta.as_ref()
            .and_then(|m| m.get("publish_at"))
            .and_then(|v| v.as_str())
            .and_then(|s| s.parse::<DateTime<Utc>>().ok())
            .unwrap_or(created_at);
        let label = format!("{} {}", norwegian_month(publish_at.month()), publish_at.year());
        let summary = content.as_deref().map(|c| truncate(c, 200));
        let article = ArticleData {
            id: id.to_string(),
            // First 8 hex chars of the UUID act as the public short id.
            short_id: id.to_string()[..8].to_string(),
            title: title.unwrap_or_else(|| "Uten tittel".to_string()),
            subtitle: None,
            content: content.unwrap_or_default(),
            summary,
            og_image: None,
            published_at: publish_at.to_rfc3339(),
            published_at_short: publish_at.format("%e. %B %Y").to_string(),
        };
        // Append to the current group when the month label matches...
        if let Some(last) = month_groups.last_mut() {
            if last.label == label {
                last.articles.push(article);
                continue;
            }
        }
        // ...otherwise start a new month group.
        month_groups.push(MonthGroup {
            label,
            articles: vec![article],
        });
    }
    // Render the archive template with theme CSS variables and pagination data.
    let theme = collection.publishing_config.theme.as_deref().unwrap_or("blogg");
    let config = &collection.publishing_config.theme_config;
    let css_vars = build_css_variables(theme, config);
    let collection_title = collection.title.unwrap_or_else(|| slug.clone());
    // Absolute base URL on a custom domain, otherwise path-relative /pub/{slug}.
    let base_url = collection.publishing_config.custom_domain.as_deref()
        .map(|d| format!("https://{d}"))
        .unwrap_or_else(|| format!("/pub/{slug}"));
    let tera = build_tera();
    let mut ctx = Context::new();
    ctx.insert("css_variables", &css_vars);
    ctx.insert("theme", theme);
    ctx.insert("collection_title", &collection_title);
    ctx.insert("base_url", &base_url);
    ctx.insert("logo_hash", &config.logo_hash);
    ctx.insert("has_rss", &collection.has_rss);
    ctx.insert("month_groups", &month_groups);
    ctx.insert("total_articles", &total_count);
    ctx.insert("current_page", &page);
    ctx.insert("total_pages", &total_pages);
    ctx.insert("page_range", &page_range(page, total_pages));
    let html = tera.render("archive.html", &ctx).map_err(|e| {
        tracing::error!(error = %e, "Tera render-feil (archive)");
        StatusCode::INTERNAL_SERVER_ERROR
    })?;
    insert_dynamic_cache(&state.dynamic_page_cache, cache_key, html.clone(), cache_ttl).await;
    Ok(Response::builder()
        .header(header::CONTENT_TYPE, "text/html; charset=utf-8")
        .header(header::CACHE_CONTROL, format!("public, max-age={cache_ttl}"))
        .body(html.into())
        .unwrap())
}
// --- Søk-side ---
/// GET /pub/{slug}/sok?q=...&side=1 — full-text search over published articles.
///
/// Uses PostgreSQL tsvector + ts_rank (the 'norwegian' configuration) for
/// relevance ordering. Only articles with a `belongs_to` edge to this
/// collection are searched.
///
/// Control flow: an empty/whitespace-only query renders a blank search page,
/// uncached; a non-empty query is answered from the dynamic cache when
/// possible, otherwise queried, rendered and cached (max 60 s TTL).
pub async fn serve_search(
    State(state): State<AppState>,
    Path(slug): Path<String>,
    Query(query): Query<SearchQuery>,
) -> Result<Response, StatusCode> {
    let collection = find_publishing_collection(&state.db, &slug)
        .await
        .map_err(|e| {
            tracing::error!(slug = %slug, error = %e, "Feil ved oppslag av samling (søk)");
            StatusCode::INTERNAL_SERVER_ERROR
        })?
        .ok_or(StatusCode::NOT_FOUND)?;
    let search_query = query.q.unwrap_or_default().trim().to_string();
    let page = query.side.unwrap_or(1).max(1);
    // Search results go stale faster than other dynamic pages: cap TTL at 60 s.
    let cache_ttl = collection.publishing_config.index_cache_ttl.unwrap_or(300).min(60);
    let theme = collection.publishing_config.theme.as_deref().unwrap_or("blogg");
    let config = &collection.publishing_config.theme_config;
    let css_vars = build_css_variables(theme, config);
    let collection_title = collection.title.unwrap_or_else(|| slug.clone());
    let base_url = collection.publishing_config.custom_domain.as_deref()
        .map(|d| format!("https://{d}"))
        .unwrap_or_else(|| format!("/pub/{slug}"));
    let tera = build_tera();
    let mut ctx = Context::new();
    ctx.insert("css_variables", &css_vars);
    ctx.insert("theme", theme);
    ctx.insert("collection_title", &collection_title);
    ctx.insert("base_url", &base_url);
    ctx.insert("logo_hash", &config.logo_hash);
    ctx.insert("has_rss", &collection.has_rss);
    ctx.insert("query", &search_query);
    if search_query.is_empty() {
        // Empty query: render a blank search page (no DB work, no caching).
        ctx.insert("articles", &Vec::<ArticleData>::new());
        ctx.insert("result_count", &0i64);
        ctx.insert("current_page", &1i64);
        ctx.insert("total_pages", &0i64);
        ctx.insert("page_range", &Vec::<i64>::new());
    } else {
        // Cache lookup (non-empty queries only).
        // NOTE(review): the cache key embeds the raw query string — verify the
        // dynamic cache bounds its size so arbitrary queries can't bloat it.
        let cache_key = format!("search:{}:{}:{}", collection.id, search_query, page);
        if let Some(html) = check_dynamic_cache(&state.dynamic_page_cache, &cache_key).await {
            return Ok(Response::builder()
                .header(header::CONTENT_TYPE, "text/html; charset=utf-8")
                .header(header::CACHE_CONTROL, format!("public, max-age={cache_ttl}"))
                .body(html.into())
                .unwrap());
        }
        // plainto_tsquery parses the user's words safely (no raw tsquery syntax),
        // so the query string can be bound directly.
        let (total_count,): (i64,) = sqlx::query_as(
            r#"
SELECT COUNT(*)
FROM edges e
JOIN nodes n ON n.id = e.source_id
WHERE e.target_id = $1
AND e.edge_type = 'belongs_to'
AND n.search_vector @@ plainto_tsquery('norwegian', $2)
"#,
        )
        .bind(collection.id)
        .bind(&search_query)
        .fetch_one(&state.db)
        .await
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
        let total_pages = ((total_count as f64) / DYNAMIC_PAGE_SIZE as f64).ceil() as i64;
        let offset = (page - 1) * DYNAMIC_PAGE_SIZE;
        type Row = (Uuid, Option<String>, Option<String>, DateTime<Utc>, Option<serde_json::Value>);
        let rows: Vec<Row> = sqlx::query_as(
            r#"
SELECT n.id, n.title, n.content, n.created_at, e.metadata
FROM edges e
JOIN nodes n ON n.id = e.source_id
WHERE e.target_id = $1
AND e.edge_type = 'belongs_to'
AND n.search_vector @@ plainto_tsquery('norwegian', $2)
ORDER BY ts_rank(n.search_vector, plainto_tsquery('norwegian', $2)) DESC
LIMIT $3 OFFSET $4
"#,
        )
        .bind(collection.id)
        .bind(&search_query)
        .bind(DYNAMIC_PAGE_SIZE)
        .bind(offset)
        .fetch_all(&state.db)
        .await
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
        let articles: Vec<ArticleData> = rows.into_iter().map(|(id, title, content, created_at, edge_meta)| {
            // Effective publish time: edge metadata `publish_at` wins over created_at.
            let publish_at = edge_meta.as_ref()
                .and_then(|m| m.get("publish_at"))
                .and_then(|v| v.as_str())
                .and_then(|s| s.parse::<DateTime<Utc>>().ok())
                .unwrap_or(created_at);
            let summary = content.as_deref().map(|c| truncate(c, 200));
            ArticleData {
                id: id.to_string(),
                short_id: id.to_string()[..8].to_string(),
                title: title.unwrap_or_else(|| "Uten tittel".to_string()),
                subtitle: None,
                content: content.unwrap_or_default(),
                summary,
                og_image: None,
                published_at: publish_at.to_rfc3339(),
                published_at_short: publish_at.format("%e. %B %Y").to_string(),
            }
        }).collect();
        ctx.insert("articles", &articles);
        ctx.insert("result_count", &total_count);
        ctx.insert("current_page", &page);
        ctx.insert("total_pages", &total_pages);
        ctx.insert("page_range", &page_range(page, total_pages));
        let html = tera.render("search.html", &ctx).map_err(|e| {
            tracing::error!(error = %e, "Tera render-feil (search)");
            StatusCode::INTERNAL_SERVER_ERROR
        })?;
        insert_dynamic_cache(&state.dynamic_page_cache, cache_key, html.clone(), cache_ttl).await;
        // Non-empty queries return here; the code below only handles the
        // empty-query branch.
        return Ok(Response::builder()
            .header(header::CONTENT_TYPE, "text/html; charset=utf-8")
            .header(header::CACHE_CONTROL, format!("public, max-age={cache_ttl}"))
            .body(html.into())
            .unwrap());
    }
    // Empty-query fall-through: render the blank page, explicitly uncached.
    let html = tera.render("search.html", &ctx).map_err(|e| {
        tracing::error!(error = %e, "Tera render-feil (search)");
        StatusCode::INTERNAL_SERVER_ERROR
    })?;
    Ok(Response::builder()
        .header(header::CONTENT_TYPE, "text/html; charset=utf-8")
        .header(header::CACHE_CONTROL, "no-cache")
        .body(html.into())
        .unwrap())
}
// --- Om-side ---
/// GET /pub/{slug}/om — static about page.
///
/// The about page is a node with `page_role: "about"` in its metadata,
/// linked to the collection via a `belongs_to` edge.
///
/// Fast path: when `metadata.rendered.html_hash` points at an existing CAS
/// file, that pre-rendered HTML is served with an immutable cache header.
/// Fallback: render on-the-fly from the node's Tiptap document (or raw
/// content) through the theme's `about.html` template, cached for 1 h.
pub async fn serve_about(
    State(state): State<AppState>,
    Path(slug): Path<String>,
) -> Result<Response, StatusCode> {
    let collection = find_publishing_collection(&state.db, &slug)
        .await
        .map_err(|e| {
            tracing::error!(slug = %slug, error = %e, "Feil ved oppslag av samling (om)");
            StatusCode::INTERNAL_SERVER_ERROR
        })?
        .ok_or(StatusCode::NOT_FOUND)?;
    // Find the about node: belongs_to the collection with page_role = "about".
    let about_row: Option<(Uuid, Option<String>, Option<String>, serde_json::Value)> = sqlx::query_as(
        r#"
SELECT n.id, n.title, n.content, n.metadata
FROM edges e
JOIN nodes n ON n.id = e.source_id
WHERE e.target_id = $1
AND e.edge_type = 'belongs_to'
AND n.metadata->>'page_role' = 'about'
LIMIT 1
"#,
    )
    .bind(collection.id)
    .fetch_optional(&state.db)
    .await
    .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    let Some((_id, _title, content, metadata)) = about_row else {
        return Err(StatusCode::NOT_FOUND);
    };
    // Fast path: serve pre-rendered HTML from CAS when present on disk.
    if let Some(html_hash) = metadata
        .get("rendered")
        .and_then(|r| r.get("html_hash"))
        .and_then(|h| h.as_str())
    {
        let cas_path = state.cas.path_for(html_hash);
        if cas_path.exists() {
            let html_bytes = tokio::fs::read(&cas_path).await.map_err(|e| {
                tracing::error!(hash = %html_hash, error = %e, "Kunne ikke lese CAS-fil (about)");
                StatusCode::INTERNAL_SERVER_ERROR
            })?;
            // Content-addressed, so the body can never change: cache forever.
            return Ok(Response::builder()
                .header(header::CONTENT_TYPE, "text/html; charset=utf-8")
                .header(header::CACHE_CONTROL, "public, max-age=31536000, immutable")
                .body(html_bytes.into())
                .unwrap());
        }
    }
    // Fallback: render on-the-fly. Prefer the Tiptap document; use raw
    // content when the document is missing or converts to empty HTML.
    let about_html = if let Some(doc) = metadata.get("document") {
        let html = tiptap::document_to_html(doc);
        if html.is_empty() {
            content.unwrap_or_default()
        } else {
            html
        }
    } else {
        content.unwrap_or_default()
    };
    let theme = collection.publishing_config.theme.as_deref().unwrap_or("blogg");
    let config = &collection.publishing_config.theme_config;
    let css_vars = build_css_variables(theme, config);
    let collection_title = collection.title.unwrap_or_else(|| slug.clone());
    let base_url = collection.publishing_config.custom_domain.as_deref()
        .map(|d| format!("https://{d}"))
        .unwrap_or_else(|| format!("/pub/{slug}"));
    let tera = build_tera();
    let mut ctx = Context::new();
    ctx.insert("css_variables", &css_vars);
    ctx.insert("theme", theme);
    ctx.insert("collection_title", &collection_title);
    ctx.insert("base_url", &base_url);
    ctx.insert("logo_hash", &config.logo_hash);
    ctx.insert("has_rss", &collection.has_rss);
    ctx.insert("about_html", &about_html);
    let html = tera.render("about.html", &ctx).map_err(|e| {
        tracing::error!(error = %e, "Tera render-feil (about)");
        StatusCode::INTERNAL_SERVER_ERROR
    })?;
    Ok(Response::builder()
        .header(header::CONTENT_TYPE, "text/html; charset=utf-8")
        .header(header::CACHE_CONTROL, "public, max-age=3600")
        .body(html.into())
        .unwrap())
}
/// Render the about page to CAS. Called when the about node is created or
/// updated.
///
/// Loads the collection's publishing config and the node's content, renders
/// `about.html`, stores the HTML in content-addressed storage, and records
/// `html_hash`, `rendered_at` and `renderer_version` under the node's
/// `metadata.rendered`.
///
/// Returns a JSON summary `{html_hash, size, renderer_version}`, or a
/// human-readable error string.
pub async fn render_about_to_cas(
    db: &PgPool,
    cas: &CasStore,
    node_id: Uuid,
    collection_id: Uuid,
) -> Result<serde_json::Value, String> {
    // Load the collection's config (title + metadata.traits.publishing).
    let collection_row: Option<(Option<String>, serde_json::Value)> = sqlx::query_as(
        r#"SELECT title, metadata FROM nodes WHERE id = $1 AND node_kind = 'collection'"#,
    )
    .bind(collection_id)
    .fetch_optional(db)
    .await
    .map_err(|e| format!("Feil ved henting av samling: {e}"))?;
    let Some((collection_title_opt, collection_metadata)) = collection_row else {
        return Err(format!("Samling {collection_id} finnes ikke"));
    };
    let traits = collection_metadata.get("traits");
    // Malformed publishing config silently degrades to defaults here.
    let publishing_config: PublishingConfig = traits
        .and_then(|t| t.get("publishing"))
        .cloned()
        .map(|v| serde_json::from_value(v).unwrap_or_default())
        .unwrap_or_default();
    // RSS link is shown if the collection carries an `rss` trait at all.
    let has_rss = traits.and_then(|t| t.get("rss")).is_some();
    let slug = publishing_config.slug.as_deref().unwrap_or("unknown");
    let theme = publishing_config.theme.as_deref().unwrap_or("blogg");
    let config = &publishing_config.theme_config;
    let collection_title = collection_title_opt.unwrap_or_else(|| slug.to_string());
    // Load the about node's content.
    let node_row: Option<(Option<String>, serde_json::Value)> = sqlx::query_as(
        "SELECT content, metadata FROM nodes WHERE id = $1",
    )
    .bind(node_id)
    .fetch_optional(db)
    .await
    .map_err(|e| format!("Feil ved henting av om-node: {e}"))?;
    let Some((content, metadata)) = node_row else {
        return Err(format!("Node {node_id} finnes ikke"));
    };
    // Prefer the Tiptap document; fall back to raw content when missing/empty.
    let about_html = if let Some(doc) = metadata.get("document") {
        let html = tiptap::document_to_html(doc);
        if html.is_empty() { content.unwrap_or_default() } else { html }
    } else {
        content.unwrap_or_default()
    };
    let css_vars = build_css_variables(theme, config);
    let base_url = publishing_config.custom_domain.as_deref()
        .map(|d| format!("https://{d}"))
        .unwrap_or_else(|| format!("/pub/{slug}"));
    let tera = build_tera();
    let mut ctx = Context::new();
    ctx.insert("css_variables", &css_vars);
    ctx.insert("theme", theme);
    ctx.insert("collection_title", &collection_title);
    ctx.insert("base_url", &base_url);
    ctx.insert("logo_hash", &config.logo_hash);
    ctx.insert("has_rss", &has_rss);
    ctx.insert("about_html", &about_html);
    let html = tera.render("about.html", &ctx)
        .map_err(|e| format!("Tera render-feil (about): {e}"))?;
    let store_result = cas.store(html.as_bytes()).await
        .map_err(|e| format!("CAS-lagring feilet: {e}"))?;
    // Update metadata.rendered in place. The nested jsonb_set calls first
    // ensure a `rendered` object exists, then write html_hash, rendered_at
    // and renderer_version inside it without touching sibling keys.
    let now = Utc::now();
    sqlx::query(
        r#"
UPDATE nodes
SET metadata = jsonb_set(
jsonb_set(
jsonb_set(
CASE WHEN metadata ? 'rendered'
THEN metadata
ELSE jsonb_set(metadata, '{rendered}', '{}'::jsonb)
END,
'{rendered,html_hash}',
to_jsonb($2::text)
),
'{rendered,rendered_at}',
to_jsonb($3::text)
),
'{rendered,renderer_version}',
to_jsonb($4::bigint)
)
WHERE id = $1
"#,
    )
    .bind(node_id)
    .bind(&store_result.hash)
    .bind(now.to_rfc3339())
    .bind(RENDERER_VERSION)
    .execute(db)
    .await
    .map_err(|e| format!("Feil ved oppdatering av metadata.rendered (about): {e}"))?;
    Ok(serde_json::json!({
        "html_hash": store_result.hash,
        "size": store_result.size,
        "renderer_version": RENDERER_VERSION
    }))
}
// =============================================================================
// =============================================================================
// A/B-testing: impression-logging, klikk-sporing, evaluering (oppgave 14.17)
// =============================================================================
/// Record one "impression" event per A/B variant shown on the front page.
///
/// `variants` holds `(edge_id, article_id)` pairs. This is fire-and-forget
/// telemetry: a failed insert is logged as a warning and otherwise ignored,
/// so one bad row never aborts the remaining inserts.
async fn log_ab_impressions(db: &PgPool, collection_id: Uuid, variants: &[(Uuid, Uuid)]) {
    for (edge_id, article_id) in variants.iter().copied() {
        let outcome = sqlx::query(
            r#"
INSERT INTO ab_events (edge_id, article_id, collection_id, event_type)
VALUES ($1, $2, $3, 'impression')
"#,
        )
        .bind(edge_id)
        .bind(article_id)
        .bind(collection_id)
        .execute(db)
        .await;
        if let Err(e) = outcome {
            tracing::warn!(
                edge_id = %edge_id,
                error = %e,
                "Kunne ikke logge A/B-impression"
            );
        }
    }
}
/// Record a single "click" event for an A/B variant.
///
/// Fire-and-forget: a failed insert is logged as a warning and dropped.
async fn log_ab_click(db: &PgPool, edge_id: Uuid, article_id: Uuid, collection_id: Uuid) {
    let inserted = sqlx::query(
        r#"
INSERT INTO ab_events (edge_id, article_id, collection_id, event_type)
VALUES ($1, $2, $3, 'click')
"#,
    )
    .bind(edge_id)
    .bind(article_id)
    .bind(collection_id)
    .execute(db)
    .await;
    match inserted {
        Ok(_) => {}
        Err(e) => tracing::warn!(
            edge_id = %edge_id,
            error = %e,
            "Kunne ikke logge A/B-klikk"
        ),
    }
}
/// GET /pub/{slug}/t/{article_short_id} — click tracking for A/B tests.
///
/// Takes the variant edge id in query parameter `v`. When the edge id parses
/// as a UUID and the short id resolves to an article in this collection, a
/// click event is logged fire-and-forget. The response is always a 302
/// redirect to the real article page, so tracking problems never block the
/// reader.
pub async fn track_click(
    State(state): State<AppState>,
    Path((slug, article_short_id)): Path<(String, String)>,
    Query(params): Query<HashMap<String, String>>,
) -> Result<Response, StatusCode> {
    // Resolve the collection; 404 for an unknown slug.
    let collection = find_publishing_collection(&state.db, &slug)
        .await
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
        .ok_or(StatusCode::NOT_FOUND)?;
    // The variant edge id arrives in query parameter `v`; a missing or
    // unparseable value simply skips the logging, not the redirect.
    if let Some(edge_id_str) = params.get("v") {
        if let Ok(edge_id) = edge_id_str.parse::<Uuid>() {
            // Resolve the full article id from its short-id prefix, scoped to
            // this collection so foreign article ids can't be attributed here.
            let pattern = format!("{article_short_id}%");
            let article_id: Option<(Uuid,)> = sqlx::query_as(
                r#"
SELECT n.id
FROM edges e
JOIN nodes n ON n.id = e.source_id
WHERE e.target_id = $1
AND e.edge_type = 'belongs_to'
AND n.id::text LIKE $2
LIMIT 1
"#,
            )
            .bind(collection.id)
            .bind(&pattern)
            .fetch_optional(&state.db)
            .await
            .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
            if let Some((aid,)) = article_id {
                // Log the click off the request path (fire-and-forget task).
                let db = state.db.clone();
                let cid = collection.id;
                tokio::spawn(async move {
                    log_ab_click(&db, edge_id, aid, cid).await;
                });
            }
        }
    }
    // Always redirect to the article page, uncached so every click reaches us.
    let redirect_url = format!("/pub/{slug}/{article_short_id}");
    Ok(Response::builder()
        .status(StatusCode::FOUND)
        .header(header::LOCATION, redirect_url)
        .header(header::CACHE_CONTROL, "no-cache, no-store")
        .body(Default::default())
        .unwrap())
}
// =============================================================================
// A/B-evaluering: periodisk CTR-beregning og signifikans-testing
// =============================================================================
/// Find every active A/B test: articles with more than one non-retired
/// variant of the same presentation-element type.
///
/// Returns one `AbTestGroup` per (article, element type) pair that has at
/// least two live variant edges.
async fn find_active_ab_tests(db: &PgPool) -> Result<Vec<AbTestGroup>, sqlx::Error> {
    // All presentation-element edges still eligible for testing: either no
    // ab_status yet, or explicitly 'testing'.
    let rows: Vec<(Uuid, Uuid, String, serde_json::Value)> = sqlx::query_as(
        r#"
SELECT e.target_id AS article_id, e.id AS edge_id, e.edge_type, e.metadata
FROM edges e
WHERE e.edge_type IN ('title', 'subtitle', 'summary', 'og_image')
AND (e.metadata->>'ab_status' IS NULL
OR e.metadata->>'ab_status' = 'testing')
ORDER BY e.target_id, e.edge_type
"#,
    )
    .fetch_all(db)
    .await?;
    // Bucket edge ids by (article, element type).
    let mut buckets: HashMap<(Uuid, String), Vec<Uuid>> = HashMap::new();
    for (article_id, edge_id, edge_type, _meta) in &rows {
        let key = (*article_id, edge_type.clone());
        buckets.entry(key).or_default().push(*edge_id);
    }
    // Only buckets with at least two live variants constitute a test.
    let groups = buckets
        .into_iter()
        .filter(|(_, ids)| ids.len() > 1)
        .map(|((article_id, edge_type), edge_ids)| AbTestGroup {
            article_id,
            edge_type,
            edge_ids,
        })
        .collect();
    Ok(groups)
}
/// One active A/B test: all live variant edges of a single presentation
/// element type attached to one article.
struct AbTestGroup {
    // The article the variants point at (edge target_id).
    article_id: Uuid,
    // Which presentation element is tested ('title', 'subtitle', 'summary' or 'og_image').
    edge_type: String,
    // Edge ids of the competing variants; always more than one by construction.
    edge_ids: Vec<Uuid>,
}
/// Aggregate event counts for the given edges.
///
/// Returns `edge_id -> (impressions, clicks)`; edges without events are
/// absent from the map. GROUP BY guarantees at most one row per
/// (edge, event type), so plain assignment is safe.
async fn fetch_ab_stats(
    db: &PgPool,
    edge_ids: &[Uuid],
) -> Result<HashMap<Uuid, (i64, i64)>, sqlx::Error> {
    let counts: Vec<(Uuid, String, i64)> = sqlx::query_as(
        r#"
SELECT edge_id, event_type, COUNT(*) as cnt
FROM ab_events
WHERE edge_id = ANY($1)
GROUP BY edge_id, event_type
"#,
    )
    .bind(edge_ids)
    .fetch_all(db)
    .await?;
    let mut per_edge: HashMap<Uuid, (i64, i64)> = HashMap::new();
    for (edge_id, event_type, cnt) in counts {
        // (impressions, clicks); tuple slots default to (0, 0).
        let slot = per_edge.entry(edge_id).or_default();
        if event_type == "impression" {
            slot.0 = cnt;
        } else if event_type == "click" {
            slot.1 = cnt;
        }
    }
    Ok(per_edge)
}
/// Two-proportion z-test; returns the two-sided p-value.
///
/// Proportions are CTRs: p1 = click1/imp1, p2 = click2/imp2. When either
/// sample has fewer than 30 impressions the test is meaningless, so 1.0
/// ("no significance") is returned; likewise when the pooled standard error
/// degenerates to ~0 (all clicks or no clicks on both sides).
fn z_test_proportions(imp1: i64, click1: i64, imp2: i64, click2: i64) -> f64 {
    if imp1 < 30 || imp2 < 30 {
        // Too little data for a meaningful test.
        return 1.0;
    }
    let n1 = imp1 as f64;
    let n2 = imp2 as f64;
    let p1 = click1 as f64 / n1;
    let p2 = click2 as f64 / n2;
    // Pooled proportion under H0 (equal click-through rates).
    let p = (click1 + click2) as f64 / (n1 + n2);
    let q = 1.0 - p;
    let se = (p * q * (1.0 / n1 + 1.0 / n2)).sqrt();
    if se < 1e-12 {
        return 1.0;
    }
    let z = (p1 - p2).abs() / se;
    // Two-sided p-value: 2 * P(Z > |z|).
    2.0 * normal_cdf_complement(z)
}
/// Complementary cumulative normal distribution, P(Z > z).
///
/// Abramowitz & Stegun 26.2.17 polynomial approximation (|error| < 7.5e-8).
/// The polynomial is only valid for non-negative arguments, so negative z is
/// handled via the symmetry P(Z > z) = 1 - P(Z > -z). (The z-test above only
/// ever passes |z|, so this generalization is backward compatible.)
fn normal_cdf_complement(z: f64) -> f64 {
    if z < 0.0 {
        // Standard-normal symmetry.
        return 1.0 - normal_cdf_complement(-z);
    }
    let t = 1.0 / (1.0 + 0.2316419 * z);
    let d = 0.3989423 * (-z * z / 2.0).exp();
    let p = d * t * (0.3193815 + t * (-0.3565638 + t * (1.781478 + t * (-1.821256 + t * 1.330274))));
    p.clamp(0.0, 1.0)
}
/// Minimum impressions per variant before a test is considered for evaluation.
const MIN_IMPRESSIONS_FOR_EVAL: i64 = 100;
/// Two-sided significance level required to declare a winner.
const SIGNIFICANCE_LEVEL: f64 = 0.05;
/// Evaluate every active A/B test and mark winners/losers.
///
/// For each test group (article × element type with >1 live variant):
/// 1. Skip the group until every variant has at least
///    `MIN_IMPRESSIONS_FOR_EVAL` impressions.
/// 2. Persist impressions/clicks/ctr onto each variant edge's metadata.
/// 3. Two variants: z-test A vs B; when p < `SIGNIFICANCE_LEVEL`, the higher
///    CTR wins and the other is retired.
/// 4. More than two: rank by CTR and z-test best vs second-best; on
///    significance the best wins and every other variant is retired.
///    NOTE(review): significance against variants beyond second-best is
///    assumed from that single comparison — confirm this heuristic (and the
///    lack of multiple-comparison correction) is acceptable.
///
/// Metadata writes for individual edges are best-effort (warn-and-continue).
/// Returns the number of tests resolved this round.
pub async fn evaluate_ab_tests(db: &PgPool) -> Result<usize, String> {
    let groups = find_active_ab_tests(db)
        .await
        .map_err(|e| format!("Feil ved henting av A/B-tester: {e}"))?;
    if groups.is_empty() {
        return Ok(0);
    }
    let mut resolved = 0;
    for group in &groups {
        let stats = fetch_ab_stats(db, &group.edge_ids)
            .await
            .map_err(|e| format!("Feil ved henting av A/B-stats: {e}"))?;
        // Require enough data on every variant before judging the group.
        let all_ready = group.edge_ids.iter().all(|eid| {
            stats.get(eid).map(|(imp, _)| *imp >= MIN_IMPRESSIONS_FOR_EVAL).unwrap_or(false)
        });
        if !all_ready {
            continue;
        }
        // Persist impressions, clicks and CTR on each variant edge.
        for &edge_id in &group.edge_ids {
            let (imp, clicks) = stats.get(&edge_id).copied().unwrap_or((0, 0));
            let ctr = if imp > 0 { clicks as f64 / imp as f64 } else { 0.0 };
            if let Err(e) = sqlx::query(
                r#"
UPDATE edges
SET metadata = metadata
|| jsonb_build_object(
'impressions', $2::bigint,
'clicks', $3::bigint,
'ctr', $4::float8
)
WHERE id = $1
"#,
            )
            .bind(edge_id)
            .bind(imp)
            .bind(clicks)
            .bind(ctr)
            .execute(db)
            .await
            {
                tracing::warn!(edge_id = %edge_id, error = %e, "Kunne ikke oppdatere edge AB-metadata");
            }
        }
        // Pairwise comparison when exactly two variants compete.
        if group.edge_ids.len() == 2 {
            let (imp_a, click_a) = stats.get(&group.edge_ids[0]).copied().unwrap_or((0, 0));
            let (imp_b, click_b) = stats.get(&group.edge_ids[1]).copied().unwrap_or((0, 0));
            let p_value = z_test_proportions(imp_a, click_a, imp_b, click_b);
            if p_value < SIGNIFICANCE_LEVEL {
                let ctr_a = if imp_a > 0 { click_a as f64 / imp_a as f64 } else { 0.0 };
                let ctr_b = if imp_b > 0 { click_b as f64 / imp_b as f64 } else { 0.0 };
                // Higher CTR wins; ties go to the first variant.
                let (winner_id, loser_id) = if ctr_a >= ctr_b {
                    (group.edge_ids[0], group.edge_ids[1])
                } else {
                    (group.edge_ids[1], group.edge_ids[0])
                };
                // Mark the winner (best-effort).
                let _ = sqlx::query(
                    "UPDATE edges SET metadata = metadata || '{\"ab_status\": \"winner\"}' WHERE id = $1",
                )
                .bind(winner_id)
                .execute(db)
                .await;
                // Retire the loser (best-effort).
                let _ = sqlx::query(
                    "UPDATE edges SET metadata = metadata || '{\"ab_status\": \"retired\"}' WHERE id = $1",
                )
                .bind(loser_id)
                .execute(db)
                .await;
                tracing::info!(
                    article_id = %group.article_id,
                    edge_type = %group.edge_type,
                    winner = %winner_id,
                    loser = %loser_id,
                    p_value = p_value,
                    "A/B-test avgjort: vinner markert"
                );
                resolved += 1;
            }
        } else {
            // More than two variants: rank by CTR and compare best vs second-best.
            let mut ranked: Vec<(Uuid, f64, i64, i64)> = group.edge_ids.iter().map(|&eid| {
                let (imp, clicks) = stats.get(&eid).copied().unwrap_or((0, 0));
                let ctr = if imp > 0 { clicks as f64 / imp as f64 } else { 0.0 };
                (eid, ctr, imp, clicks)
            }).collect();
            // Descending by CTR (NaN-safe via partial_cmp fallback).
            ranked.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
            if ranked.len() >= 2 {
                let (best_id, _, best_imp, best_clicks) = ranked[0];
                let (_second_id, _, second_imp, second_clicks) = ranked[1];
                let p_value = z_test_proportions(best_imp, best_clicks, second_imp, second_clicks);
                if p_value < SIGNIFICANCE_LEVEL {
                    // Best beat second-best significantly; declare it the winner.
                    let _ = sqlx::query(
                        "UPDATE edges SET metadata = metadata || '{\"ab_status\": \"winner\"}' WHERE id = $1",
                    )
                    .bind(best_id)
                    .execute(db)
                    .await;
                    // Retire every other variant (best-effort).
                    for &(eid, _, _, _) in &ranked[1..] {
                        let _ = sqlx::query(
                            "UPDATE edges SET metadata = metadata || '{\"ab_status\": \"retired\"}' WHERE id = $1",
                        )
                        .bind(eid)
                        .execute(db)
                        .await;
                    }
                    tracing::info!(
                        article_id = %group.article_id,
                        edge_type = %group.edge_type,
                        winner = %best_id,
                        variants = ranked.len(),
                        p_value = p_value,
                        "A/B-test avgjort (multi-variant): vinner markert"
                    );
                    resolved += 1;
                }
            }
        }
    }
    Ok(resolved)
}
/// Spawn the periodic A/B evaluator task.
///
/// After a 60 s startup grace period it runs `evaluate_ab_tests` every
/// 300 s, logging resolved tests and logging (but surviving) errors.
pub fn start_ab_evaluator(db: PgPool) {
    tokio::spawn(async move {
        // Let the rest of the service finish booting first.
        tokio::time::sleep(std::time::Duration::from_secs(60)).await;
        tracing::info!("A/B-evaluator startet (intervall: 300s)");
        loop {
            let outcome = evaluate_ab_tests(&db).await;
            match outcome {
                // Nothing resolved this round: stay quiet.
                Ok(0) => {}
                Ok(count) => {
                    tracing::info!(resolved = count, "A/B-evaluator: {} tester avgjort", count);
                }
                Err(e) => {
                    tracing::error!(error = %e, "A/B-evaluator feilet");
                }
            }
            tokio::time::sleep(std::time::Duration::from_secs(300)).await;
        }
    });
}
// POST /intentions/ab_override — editor override of an A/B test.
//
// Marks the chosen variant as "winner" and all sibling variants as
// "retired". Requires owner/admin access to the collection (enforced by
// the handler below).
/// Request body for the `ab_override` endpoint.
#[derive(Deserialize)]
pub struct AbOverrideRequest {
    /// The variant edge to promote to winner.
    pub edge_id: Uuid,
    /// The article the edge must target (sanity-checked against the DB).
    pub article_id: Uuid,
}
/// Handler for POST /intentions/ab_override — editor override of a running
/// A/B test.
///
/// Marks the requested variant edge as "winner" and retires every other
/// still-testing variant of the same element type on the same article.
///
/// Errors: 404 when the edge does not exist, 400 when the edge does not
/// target the claimed article, 403 when the caller lacks owner/admin access,
/// 500 on DB failure.
pub async fn ab_override(
    State(state): State<AppState>,
    user: crate::auth::AuthUser,
    axum::Json(req): axum::Json<AbOverrideRequest>,
) -> Result<axum::Json<serde_json::Value>, StatusCode> {
    // Load the edge to learn its element type and target article.
    let edge_row: Option<(String, Uuid)> = sqlx::query_as(
        r#"
SELECT e.edge_type, e.target_id
FROM edges e
WHERE e.id = $1
"#,
    )
    .bind(req.edge_id)
    .fetch_optional(&state.db)
    .await
    .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    let Some((edge_type, article_id)) = edge_row else {
        return Err(StatusCode::NOT_FOUND);
    };
    // Reject mismatched edge/article pairs to prevent confused overrides.
    if article_id != req.article_id {
        return Err(StatusCode::BAD_REQUEST);
    }
    // Access check: the user must hold an owner/admin edge to a collection
    // the article belongs to.
    // NOTE(review): this accepts owner/admin of ANY collection the article
    // belongs to, not a specific one — confirm that is intended for
    // multi-collection articles.
    let has_access: bool = sqlx::query_scalar(
        r#"
SELECT EXISTS(
SELECT 1
FROM edges e_belongs
JOIN edges e_role ON e_role.source_id = $2 AND e_role.target_id = e_belongs.target_id
AND e_role.edge_type IN ('owner', 'admin')
WHERE e_belongs.source_id = $1 AND e_belongs.edge_type = 'belongs_to'
)
"#,
    )
    .bind(article_id)
    .bind(user.node_id)
    .fetch_one(&state.db)
    .await
    .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    if !has_access {
        return Err(StatusCode::FORBIDDEN);
    }
    // Promote the chosen variant to winner.
    sqlx::query(
        "UPDATE edges SET metadata = metadata || '{\"ab_status\": \"winner\"}' WHERE id = $1",
    )
    .bind(req.edge_id)
    .execute(&state.db)
    .await
    .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    // Retire every other still-live variant of the same type on this article
    // (already-retired or already-winner edges are left untouched).
    sqlx::query(
        r#"
UPDATE edges
SET metadata = metadata || '{"ab_status": "retired"}'
WHERE target_id = $1
AND edge_type = $2
AND id != $3
AND (metadata->>'ab_status' IS NULL OR metadata->>'ab_status' = 'testing')
"#,
    )
    .bind(article_id)
    .bind(&edge_type)
    .bind(req.edge_id)
    .execute(&state.db)
    .await
    .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    tracing::info!(
        edge_id = %req.edge_id,
        article_id = %article_id,
        edge_type = %edge_type,
        user = %user.node_id,
        "A/B-test overstyrt av redaktør"
    );
    Ok(axum::Json(serde_json::json!({
        "status": "ok",
        "winner_edge_id": req.edge_id,
        "edge_type": edge_type
    })))
}
// =============================================================================
// A/B initialisering: sett testing-status på nye varianter automatisk
// =============================================================================
/// Flip eligible presentation-element edges into an A/B test.
///
/// Counts the non-retired variants of `edge_type` pointing at `article_id`;
/// when more than one exists, every variant that has no `ab_status` yet is
/// marked `testing` with a `started_at` timestamp.
///
/// Best-effort by design (called from write paths that must not fail), but
/// unlike the earlier version DB errors are now logged instead of being
/// silently treated as "zero variants", and the "test started" log line is
/// only emitted when the UPDATE actually succeeded.
pub async fn maybe_start_ab_test(db: &PgPool, article_id: Uuid, edge_type: &str) {
    // Count live (non-retired) variants of this element type.
    // COUNT(*) always yields exactly one row, so fetch_one + query_scalar
    // replaces the old fetch_optional/unwrap_or(None) dance.
    let count: i64 = match sqlx::query_scalar(
        r#"
SELECT COUNT(*) FROM edges
WHERE target_id = $1
AND edge_type = $2
AND (metadata->>'ab_status' IS NULL OR metadata->>'ab_status' != 'retired')
"#,
    )
    .bind(article_id)
    .bind(edge_type)
    .fetch_one(db)
    .await
    {
        Ok(c) => c,
        Err(e) => {
            // Surface the failure so missed test starts are diagnosable.
            tracing::warn!(
                article_id = %article_id,
                edge_type = %edge_type,
                error = %e,
                "Kunne ikke telle A/B-varianter"
            );
            return;
        }
    };
    if count > 1 {
        // More than one live variant: start the test on all untagged edges.
        let now = Utc::now().to_rfc3339();
        let updated = sqlx::query(
            r#"
UPDATE edges
SET metadata = metadata
|| jsonb_build_object('ab_status', 'testing', 'started_at', $3::text)
WHERE target_id = $1
AND edge_type = $2
AND metadata->>'ab_status' IS NULL
"#,
        )
        .bind(article_id)
        .bind(edge_type)
        .bind(&now)
        .execute(db)
        .await;
        match updated {
            Ok(_) => tracing::info!(
                article_id = %article_id,
                edge_type = %edge_type,
                variants = count,
                "A/B-test startet automatisk"
            ),
            Err(e) => tracing::warn!(
                article_id = %article_id,
                edge_type = %edge_type,
                error = %e,
                "Kunne ikke markere A/B-varianter som testing"
            ),
        }
    }
}
// =============================================================================
// Tester
// =============================================================================
#[cfg(test)]
mod tests {
use super::*;
/// Shared fixture: minimal SEO metadata used by the render tests.
fn default_seo() -> SeoData {
    SeoData {
        og_title: "Test".to_string(),
        description: "Beskrivelse".to_string(),
        canonical_url: "https://example.com/test".to_string(),
        og_image: None,
        json_ld: "{}".to_string(),
    }
}
// A default config must expose the avis theme's built-in CSS variables.
#[test]
fn css_variables_use_defaults() {
    let config = ThemeConfig::default();
    let css = build_css_variables("avis", &config);
    assert!(css.contains("--color-primary: #1a1a2e"));
    assert!(css.contains("--font-heading: 'Georgia'"));
    assert!(css.contains("--layout-max-width: 1200px"));
}
// An explicit color override wins; untouched values keep theme defaults.
#[test]
fn css_variables_override() {
    let config = ThemeConfig {
        colors: ColorConfig {
            primary: Some("#ff0000".to_string()),
            ..Default::default()
        },
        ..Default::default()
    };
    let css = build_css_variables("blogg", &config);
    assert!(css.contains("--color-primary: #ff0000"));
    // Other values should still use the blogg defaults.
    assert!(css.contains("--color-accent: #3498db"));
}
// The embedded Tera instance must contain base + all 8 theme templates.
#[test]
fn tera_builds_successfully() {
    let tera = build_tera();
    // All 8 theme templates + base must exist.
    let templates: Vec<&str> = tera.get_template_names().collect();
    assert!(templates.contains(&"base.html"));
    assert!(templates.contains(&"avis/article.html"));
    assert!(templates.contains(&"avis/index.html"));
    assert!(templates.contains(&"magasin/article.html"));
    assert!(templates.contains(&"magasin/index.html"));
    assert!(templates.contains(&"blogg/article.html"));
    assert!(templates.contains(&"blogg/index.html"));
    assert!(templates.contains(&"tidsskrift/article.html"));
    assert!(templates.contains(&"tidsskrift/index.html"));
}
// Every theme must render an article with title, content and CSS variables.
#[test]
fn render_article_all_themes() {
    let tera = build_tera();
    let config = ThemeConfig::default();
    let article = ArticleData {
        id: "test-id".to_string(),
        short_id: "test-sho".to_string(),
        title: "Testittel".to_string(),
        content: "<p>Testinnhold</p>".to_string(),
        subtitle: None,
        summary: Some("Kort oppsummering".to_string()),
        og_image: None,
        published_at: "2026-03-18T12:00:00Z".to_string(),
        published_at_short: "18. mars 2026".to_string(),
    };
    let seo = default_seo();
    for theme in &["avis", "magasin", "blogg", "tidsskrift"] {
        let html = render_article(&tera, theme, &config, &article, "Testsamling", "/pub/test", &seo, false)
            .unwrap_or_else(|e| panic!("Render feilet for {theme}: {e}"));
        assert!(html.contains("Testittel"), "Tittel mangler i {theme}");
        assert!(html.contains("Testinnhold"), "Innhold mangler i {theme}");
        assert!(html.contains("--color-primary"), "CSS-variabler mangler i {theme}");
    }
}
// Rendered articles must carry OG tags, canonical link and JSON-LD.
#[test]
fn render_article_includes_seo() {
    let tera = build_tera();
    let config = ThemeConfig::default();
    let article = ArticleData {
        id: "seo-test".to_string(),
        short_id: "seo-test".to_string(),
        title: "SEO-tittel".to_string(),
        content: "<p>Innhold</p>".to_string(),
        subtitle: None,
        summary: Some("SEO-beskrivelse her".to_string()),
        og_image: None,
        published_at: "2026-03-18T12:00:00Z".to_string(),
        published_at_short: "18. mars 2026".to_string(),
    };
    let seo = SeoData {
        og_title: "SEO-tittel".to_string(),
        description: "SEO-beskrivelse her".to_string(),
        canonical_url: "https://example.com/seo-test".to_string(),
        og_image: None,
        json_ld: r#"{"@type":"Article"}"#.to_string(),
    };
    let html = render_article(&tera, "blogg", &config, &article, "Testpub", "/pub/test", &seo, false)
        .expect("Render feilet");
    assert!(html.contains("og:title"), "OG-tittel mangler");
    assert!(html.contains("og:description"), "OG-beskrivelse mangler");
    assert!(html.contains("canonical"), "Canonical URL mangler");
    assert!(html.contains("application/ld+json"), "JSON-LD mangler");
    assert!(html.contains("SEO-beskrivelse her"), "Beskrivelse mangler");
}
// Every theme's index template must render title and stream articles.
#[test]
fn render_index_all_themes() {
    let tera = build_tera();
    let config = ThemeConfig::default();
    let index = IndexData {
        title: "Testforside".to_string(),
        description: None,
        hero: None,
        featured: vec![],
        stream: vec![ArticleData {
            id: "s1".to_string(),
            short_id: "s1000000".to_string(),
            title: "Strøm-artikkel".to_string(),
            subtitle: None,
            content: "Innhold".to_string(),
            summary: Some("Sammendrag".to_string()),
            og_image: None,
            published_at: "2026-03-18T12:00:00Z".to_string(),
            published_at_short: "18. mars 2026".to_string(),
        }],
    };
    for theme in &["avis", "magasin", "blogg", "tidsskrift"] {
        let html = render_index(&tera, theme, &config, &index, "/pub/test", false)
            .unwrap_or_else(|e| panic!("Render feilet for {theme}: {e}"));
        assert!(html.contains("Testforside"), "Tittel mangler i {theme}");
        assert!(html.contains("Strøm-artikkel"), "Strøm-artikkel mangler i {theme}");
    }
}
// Unknown theme names must fall back to the blogg defaults.
#[test]
fn unknown_theme_falls_back_to_blogg() {
    let config = ThemeConfig::default();
    let css = build_css_variables("nonexistent", &config);
    let css_blogg = build_css_variables("blogg", &config);
    assert_eq!(css, css_blogg);
}
#[test]
fn z_test_insufficient_data_returns_1() {
    // Too little data: returns p=1.0 (no significance claimed).
    assert_eq!(z_test_proportions(10, 5, 10, 3), 1.0);
}
#[test]
fn z_test_significant_difference() {
    // Variant A: 1000 impressions, 100 clicks (10% CTR)
    // Variant B: 1000 impressions, 50 clicks (5% CTR)
    let p = z_test_proportions(1000, 100, 1000, 50);
    assert!(p < 0.05, "Forventet signifikant forskjell, fikk p={p}");
}
#[test]
fn z_test_no_significant_difference() {
// Variant A: 100 imp, 10 klikk (10% CTR)
// Variant B: 100 imp, 9 klikk (9% CTR)
let p = z_test_proportions(100, 10, 100, 9);
assert!(p > 0.05, "Forventet ingen signifikant forskjell, fikk p={p}");
}
#[test]
fn json_ld_contains_required_fields() {
let article = ArticleData {
id: "test".to_string(),
short_id: "test1234".to_string(),
title: "Test-artikkel".to_string(),
subtitle: None,
content: "Innhold".to_string(),
summary: Some("Oppsummering".to_string()),
og_image: None,
published_at: "2026-03-18T12:00:00Z".to_string(),
published_at_short: "18. mars 2026".to_string(),
};
let ld = build_json_ld(&article, "Testpub", "https://example.com/test");
assert!(ld.contains("\"@type\":\"Article\""));
assert!(ld.contains("\"headline\":\"Test-artikkel\""));
assert!(ld.contains("\"datePublished\""));
assert!(ld.contains("\"publisher\""));
}
}