synops/maskinrommet/src/podcast_stats.rs
vegard 3e57adce46 Podcast-statistikk dashboard i admin-panelet (oppgave 30.4)
Nytt dashboard under /admin/podcast-stats som viser:
- Nøkkeltall: totale nedlastinger, unike lyttere, antall episoder
- Daglig trend med horisontale bar charts
- Topp-episoder rangert etter nedlastinger
- Klientfordeling (Apple Podcasts, Spotify, etc.) med stacked bar

Backend: GET /admin/podcast/stats spør podcast_download_stats-tabellen
(fylt av synops-stats CLI fra oppgave 30.3) og aggregerer per episode,
per dag, og per klient via jsonb_each_text.

Filtrering på tidsperiode (7/30/90/365 dager) og enkelt-episode.
2026-03-18 23:42:23 +00:00

206 lines
6.1 KiB
Rust

// Podcast-statistikk dashboard API (oppgave 30.4)
//
// Leser fra podcast_download_stats-tabellen (skrevet av synops-stats CLI)
// og returnerer aggregert data for admin-dashboardet:
// - Nedlastinger per episode (totalt + trend)
// - Topp-episoder
// - Klientfordeling (Apple Podcasts, Spotify, etc.)
// - Daglig tidsserie
//
// Ref: docs/features/podcast_statistikk.md
use axum::extract::State;
use axum::http::StatusCode;
use axum::Json;
use chrono::NaiveDate;
use serde::{Deserialize, Serialize};
use sqlx::PgPool;
use uuid::Uuid;
use crate::auth::AdminUser;
use crate::AppState;
// =============================================================================
// Datatyper
// =============================================================================
/// Aggregated download totals for one episode over the selected period.
///
/// Produced row-by-row from the `fetch_episode_totals` query; serialized
/// into the `episodes` list of [`PodcastStatsResponse`].
#[derive(Debug, Serialize, sqlx::FromRow)]
pub struct EpisodeTotal {
    /// Episode id; `None` when a stats row is not linked to an episode.
    pub episode_id: Option<Uuid>,
    /// Title from the joined `nodes` row; `None` if the LEFT JOIN found no match.
    pub episode_title: Option<String>,
    /// Sum of downloads over the period.
    pub total_downloads: i64,
    /// Sum of per-day unique-listener counts over the period.
    pub total_unique_listeners: i64,
    /// Earliest date with data in the period, if any.
    pub first_date: Option<NaiveDate>,
    /// Latest date with data in the period, if any.
    pub last_date: Option<NaiveDate>,
    /// Number of distinct dates that have stats rows.
    pub days_with_data: i64,
}
/// One point in the daily time series (all episodes aggregated, unless
/// an episode filter was applied in the query).
#[derive(Debug, Serialize, sqlx::FromRow)]
pub struct DailyDownloads {
    /// Calendar date of the aggregate.
    pub date: NaiveDate,
    /// Total downloads on that date.
    pub downloads: i64,
    /// Sum of unique-listener counts on that date.
    pub unique_listeners: i64,
}
/// Download count for a single podcast client (e.g. "Apple Podcasts"),
/// aggregated over the whole period.
#[derive(Debug, Serialize)]
pub struct ClientBreakdown {
    /// Client name as stored in the JSONB `clients` column.
    pub client: String,
    /// Total downloads attributed to this client.
    pub count: i64,
}
/// Response body for `GET /admin/podcast/stats`.
#[derive(Debug, Serialize)]
pub struct PodcastStatsResponse {
    /// Total downloads over the whole period.
    pub total_downloads: i64,
    /// Sum of per-episode unique-listener totals over the period.
    pub total_unique_listeners: i64,
    /// Per-episode totals, sorted by downloads (top first).
    pub episodes: Vec<EpisodeTotal>,
    /// Daily time series (all episodes aggregated).
    pub daily: Vec<DailyDownloads>,
    /// Client breakdown (aggregated over the period).
    pub clients: Vec<ClientBreakdown>,
}
/// Query-string parameters for `GET /admin/podcast/stats`.
#[derive(Debug, Deserialize)]
pub struct PodcastStatsParams {
    /// Period length in days; defaults to 30, clamped to 1..=365 by the handler.
    pub days: Option<i32>,
    /// Optional filter: restrict all aggregates to a single episode.
    pub episode_id: Option<Uuid>,
}
/// JSON error body returned on failure (`{"error": "..."}`).
#[derive(Debug, Serialize)]
pub struct ErrorResponse {
    /// Human-readable error message.
    pub error: String,
}
/// Wrap `msg` in a `500 Internal Server Error` response pair suitable for
/// returning from the handler's `Err` arm.
fn internal_error(msg: &str) -> (StatusCode, Json<ErrorResponse>) {
    let body = ErrorResponse {
        error: msg.to_string(),
    };
    (StatusCode::INTERNAL_SERVER_ERROR, Json(body))
}
// =============================================================================
// GET /admin/podcast/stats
// =============================================================================
/// `GET /admin/podcast/stats` — aggregated podcast download statistics for
/// the admin dashboard.
///
/// Runs three aggregate queries (per episode, per day, per client) over
/// `podcast_download_stats`, restricted to the last `days` days (default 30,
/// clamped to 1..=365) and optionally to a single `episode_id`. Grand totals
/// are derived from the per-episode rows. Requires an authenticated admin;
/// any query failure maps to a 500 with a localized message.
pub async fn podcast_stats(
    State(state): State<AppState>,
    _admin: AdminUser,
    axum::extract::Query(params): axum::extract::Query<PodcastStatsParams>,
) -> Result<Json<PodcastStatsResponse>, (StatusCode, Json<ErrorResponse>)> {
    // Normalize the period: default 30 days, never outside 1..=365.
    let days = params.days.unwrap_or(30).clamp(1, 365);
    let episode_filter = params.episode_id;

    let episodes = fetch_episode_totals(&state.db, days, episode_filter)
        .await
        .map_err(|e| internal_error(&format!("Feil i episodeoversikt: {e}")))?;
    let daily = fetch_daily(&state.db, days, episode_filter)
        .await
        .map_err(|e| internal_error(&format!("Feil i daglig oversikt: {e}")))?;
    let clients = fetch_clients(&state.db, days, episode_filter)
        .await
        .map_err(|e| internal_error(&format!("Feil i klientoversikt: {e}")))?;

    // Grand totals in a single pass over the per-episode rows.
    let (total_downloads, total_unique_listeners) =
        episodes.iter().fold((0i64, 0i64), |(dl, ul), ep| {
            (dl + ep.total_downloads, ul + ep.total_unique_listeners)
        });

    Ok(Json(PodcastStatsResponse {
        total_downloads,
        total_unique_listeners,
        episodes,
        daily,
        clients,
    }))
}
// =============================================================================
// Spørringer
// =============================================================================
/// Per-episode download totals for the last `days` days, optionally limited
/// to one episode. Titles come from a LEFT JOIN against `nodes`, so rows
/// without a matching node keep a `None` title. Sorted by downloads, top first.
async fn fetch_episode_totals(
    db: &PgPool,
    days: i32,
    episode_filter: Option<Uuid>,
) -> Result<Vec<EpisodeTotal>, sqlx::Error> {
    // $1 = period length in days, $2 = optional episode filter
    // (NULL disables the filter via the `$2 IS NULL OR ...` guard).
    let sql = r#"
SELECT
s.episode_id,
n.title AS episode_title,
COALESCE(SUM(s.downloads), 0)::BIGINT AS total_downloads,
COALESCE(SUM(s.unique_listeners), 0)::BIGINT AS total_unique_listeners,
MIN(s.date) AS first_date,
MAX(s.date) AS last_date,
COUNT(DISTINCT s.date)::BIGINT AS days_with_data
FROM podcast_download_stats s
LEFT JOIN nodes n ON n.id = s.episode_id
WHERE s.date >= (CURRENT_DATE - make_interval(days := $1))
AND ($2::UUID IS NULL OR s.episode_id = $2)
GROUP BY s.episode_id, n.title
ORDER BY total_downloads DESC
"#;
    sqlx::query_as::<_, EpisodeTotal>(sql)
        .bind(days)
        .bind(episode_filter)
        .fetch_all(db)
        .await
}
/// Daily time series (downloads and unique listeners per date) for the last
/// `days` days, optionally limited to one episode. Ordered oldest first.
async fn fetch_daily(
    db: &PgPool,
    days: i32,
    episode_filter: Option<Uuid>,
) -> Result<Vec<DailyDownloads>, sqlx::Error> {
    // $1 = period length in days, $2 = optional episode filter.
    let sql = r#"
SELECT
s.date,
COALESCE(SUM(s.downloads), 0)::BIGINT AS downloads,
COALESCE(SUM(s.unique_listeners), 0)::BIGINT AS unique_listeners
FROM podcast_download_stats s
WHERE s.date >= (CURRENT_DATE - make_interval(days := $1))
AND ($2::UUID IS NULL OR s.episode_id = $2)
GROUP BY s.date
ORDER BY s.date ASC
"#;
    sqlx::query_as::<_, DailyDownloads>(sql)
        .bind(days)
        .bind(episode_filter)
        .fetch_all(db)
        .await
}
/// Aggregate the client breakdown from the JSONB `clients` column.
///
/// `clients` holds a map like `{ "Apple Podcasts": 18, "Spotify": 15, ... }`;
/// `jsonb_each_text` expands each row's map into (key, value) pairs which are
/// then summed per client over the period, most-downloaded client first.
async fn fetch_clients(
    db: &PgPool,
    days: i32,
    episode_filter: Option<Uuid>,
) -> Result<Vec<ClientBreakdown>, sqlx::Error> {
    // $1 = period length in days, $2 = optional episode filter.
    // NOTE(review): `kv.value::BIGINT` assumes every JSONB value is numeric —
    // a non-numeric value would fail the whole query; verify the writer (CLI).
    let sql = r#"
SELECT
kv.key AS client,
COALESCE(SUM(kv.value::BIGINT), 0)::BIGINT AS count
FROM podcast_download_stats s,
jsonb_each_text(s.clients) AS kv(key, value)
WHERE s.date >= (CURRENT_DATE - make_interval(days := $1))
AND ($2::UUID IS NULL OR s.episode_id = $2)
GROUP BY kv.key
ORDER BY count DESC
"#;
    let raw: Vec<(String, i64)> = sqlx::query_as(sql)
        .bind(days)
        .bind(episode_filter)
        .fetch_all(db)
        .await?;

    // Re-shape the (client, count) tuples into the serializable struct.
    let mut breakdown = Vec::with_capacity(raw.len());
    for (client, count) in raw {
        breakdown.push(ClientBreakdown { client, count });
    }
    Ok(breakdown)
}