synops/tools/synops-audio/src/main.rs
vegard 6496434bd3 synops-common: delt lib for alle CLI-verktøy (oppgave 21.16)
Ny crate `tools/synops-common` samler duplisert kode som var
spredt over 13 CLI-verktøy:

- db::connect() — PG-pool fra DATABASE_URL (erstatter 10+ identiske blokker)
- cas::path() — CAS-stioppslag med to-nivå hash-katalog
- cas::root() — CAS_ROOT env med default
- cas::hash_bytes() / hash_file() / store() — SHA-256 hashing og lagring
- cas::mime_to_extension() — MIME → filendelse
- logging::init() — tracing til stderr med env-filter
- types::{NodeRow, EdgeRow, NodeSummary} — delte FromRow-structs

Alle verktøy (unntatt synops-tasks som ikke bruker DB) er refaktorert
til å bruke synops-common. Alle kompilerer og tester passerer.
2026-03-18 10:51:40 +00:00

1084 lines
36 KiB
Rust

// synops-audio — FFmpeg lydprosessering via EDL (Edit Decision List).
//
// Input: CAS-hash til kildefil + EDL som JSON-streng.
// Output: JSON med ny CAS-hash til stdout.
// Med --write: oppretter prosessert medienode og derived_from-edge i PG.
//
// Miljøvariabler:
// DATABASE_URL — PostgreSQL-tilkobling (påkrevd med --write)
// CAS_ROOT — Rot for content-addressable store (default: /srv/synops/media/cas)
//
// Ref: docs/retninger/unix_filosofi.md, docs/features/lydstudio.md
use std::path::{Path, PathBuf};
use std::process;

use clap::Parser;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Prosesser lydfil fra CAS via FFmpeg med EDL-operasjoner.
#[derive(Parser)]
#[command(name = "synops-audio", about = "FFmpeg lydprosessering med EDL")]
struct Cli {
// NOTE: the `///` doc comments below double as clap --help text (user-facing
// runtime output), so they are kept verbatim in Norwegian.
/// SHA-256 CAS-hash til kildefilen
#[arg(long)]
cas_hash: String,
/// EDL (Edit Decision List) som JSON-streng
#[arg(long)]
edl: String,
// Validated in run(): must be one of mp3, wav, flac, ogg.
/// Utdataformat: mp3, wav, flac, ogg
#[arg(long, default_value = "mp3")]
output_format: String,
// Enforced in main(): --write without --node-id exits with an error.
/// Medienode-ID (original node, påkrevd med --write)
#[arg(long)]
node_id: Option<Uuid>,
// Enforced in run(): --write without --requested-by is an error.
/// Bruker-ID som utløste prosesseringen (for ressurslogging)
#[arg(long)]
requested_by: Option<Uuid>,
/// Skriv resultater til database (uten dette flagget: kun stdout)
#[arg(long)]
write: bool,
}
// ─── EDL-datastrukturer ───────────────────────────────────────────
/// Edit Decision List document: a source-file reference plus an ordered
/// list of edit operations to apply to that file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EdlDocument {
/// SHA-256 CAS hash of the source audio; must equal the `--cas-hash`
/// CLI argument (checked in `run`).
pub source_hash: String,
/// Operations to apply; validated by `validate_operations`.
pub operations: Vec<EdlOperation>,
}
/// A single audio edit operation.
///
/// Serialized with an internal `"type"` tag in snake_case, e.g.
/// `{"type": "fade_in", "duration_ms": 1000}`. The numeric ranges noted
/// below are the ones enforced by `validate_operations`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum EdlOperation {
/// Remove the region from `start_ms` to `end_ms` (via ffmpeg `aselect`).
/// Requires `start_ms >= 0` and `end_ms > start_ms`.
Cut {
start_ms: i64,
end_ms: i64,
},
/// Loudness normalization (ffmpeg `loudnorm`); target in -70.0..=0.0 LUFS.
Normalize {
target_lufs: f64,
},
/// Remove silent regions; resolved into `Cut` ops via `silencedetect`.
/// `threshold_db` in -96.0..=0.0; `min_duration_ms` in 1..=60000.
TrimSilence {
threshold_db: f32,
min_duration_ms: u32,
},
/// Fade in from the start; duration in 1..=300000 ms.
FadeIn {
duration_ms: u32,
},
/// Fade out at the end; duration in 1..=300000 ms.
FadeOut {
duration_ms: u32,
},
/// FFT denoising (ffmpeg `afftdn`); noise floor in -80.0..=0.0 dB.
NoiseReduction {
strength_db: f32,
},
/// Three-band EQ centered at 100/1000/8000 Hz; gains in -30.0..=30.0 dB.
Equalizer {
low_gain: f32,
mid_gain: f32,
high_gain: f32,
},
/// Dynamic range compression (ffmpeg `acompressor`);
/// threshold in -60.0..=0.0 dB, ratio in 1.0..=20.0.
Compressor {
threshold_db: f32,
ratio: f32,
},
}
// ─── Hjelpedatastrukturer ─────────────────────────────────────────
/// Loudness measurements parsed from ffmpeg's `loudnorm` JSON report
/// (pass 1 of two-pass normalization); see `parse_loudnorm_json`.
/// The report encodes each value as a string; they are parsed to f64.
#[derive(Debug, Serialize, Deserialize)]
struct LoudnessInfo {
/// Measured integrated loudness (`input_i`).
input_i: f64,
/// Measured true peak (`input_tp`).
input_tp: f64,
/// Measured loudness range (`input_lra`).
input_lra: f64,
/// Measured threshold (`input_thresh`).
input_thresh: f64,
}
/// A silent span found by ffmpeg `silencedetect`, in milliseconds from
/// the start of the file.
#[derive(Debug, Serialize, Deserialize)]
struct SilenceRegion {
start_ms: i64,
end_ms: i64,
}
/// Basic audio metadata probed via ffprobe (see `get_audio_info`).
#[derive(Debug, Serialize, Deserialize)]
struct AudioInfo {
// Total duration in milliseconds; 0 when ffprobe reports no duration.
duration_ms: i64,
}
/// The JSON document printed to stdout on success.
#[derive(Serialize)]
struct AudioProcessResult {
/// CAS hash of the processed output file.
cas_hash: String,
/// Size of the output in bytes.
size_bytes: u64,
/// Chosen output format (mp3/wav/flac/ogg).
output_format: String,
/// CAS hash of the source file.
source_hash: String,
/// Number of operations in the input EDL.
operations_applied: usize,
/// Id of the created database node; present only with --write.
#[serde(skip_serializing_if = "Option::is_none")]
processed_node_id: Option<String>,
}
// ─── Entrypoint ───────────────────────────────────────────────────
/// Entrypoint: set up logging, parse the CLI, enforce the `--write` ⇒
/// `--node-id` flag dependency, then delegate all work to `run`.
/// Any error is printed to stderr and mapped to exit code 1.
#[tokio::main]
async fn main() {
    synops_common::logging::init("synops_audio");
    let cli = Cli::parse();
    // Writing to the database requires knowing which original node to link to.
    if cli.node_id.is_none() && cli.write {
        eprintln!("Feil: --node-id er påkrevd sammen med --write");
        process::exit(1);
    }
    match run(cli).await {
        Ok(()) => {}
        Err(e) => {
            eprintln!("Feil: {e}");
            process::exit(1);
        }
    }
}
/// Orchestrate the whole pipeline: parse and validate the EDL, resolve
/// silence trimming into concrete cuts, optionally measure loudness for
/// two-pass normalization, run FFmpeg, store the result in the CAS,
/// optionally record it in the database, and print a JSON summary to stdout.
///
/// Returns a human-readable error string; `main` prints it and exits non-zero.
async fn run(cli: Cli) -> Result<(), String> {
let cas_root = synops_common::cas::root();
// 1. Parse the EDL JSON passed on the command line.
let edl: EdlDocument = serde_json::from_str(&cli.edl)
.map_err(|e| format!("Ugyldig EDL JSON: {e}"))?;
// The EDL embeds its own source hash; require it to agree with --cas-hash
// so an EDL cannot silently be applied to the wrong file.
if edl.source_hash != cli.cas_hash {
return Err(format!(
"source_hash i EDL ({}) matcher ikke --cas-hash ({})",
edl.source_hash, cli.cas_hash
));
}
// 2. Validate every numeric parameter before anything is interpolated
// into an FFmpeg filter string.
validate_operations(&edl.operations)?;
// Validate the requested output format against the supported set.
match cli.output_format.as_str() {
"mp3" | "wav" | "flac" | "ogg" => {}
other => return Err(format!("Ugyldig output-format: '{other}'. Gyldige: mp3, wav, flac, ogg")),
}
// 3. The source file must already exist in the CAS.
let source_path = synops_common::cas::path(&cas_root, &cli.cas_hash);
if !source_path.exists() {
return Err(format!("Kildefil finnes ikke i CAS: {}", cli.cas_hash));
}
if edl.operations.is_empty() {
return Err("Ingen operasjoner å utføre".to_string());
}
tracing::info!(
cas_hash = %cli.cas_hash,
operations = edl.operations.len(),
output_format = %cli.output_format,
"Starter lydprosessering"
);
// 4. Probe the audio duration (needed to validate and position fades).
let info = get_audio_info(&source_path).await?;
validate_fade_durations(&edl.operations, info.duration_ms)?;
// 5. Resolve trim_silence operations into concrete cuts via silencedetect.
let silence_cuts = resolve_silence_cuts(&cas_root, &edl).await?;
// 6. Two-pass loudnorm when normalize is requested: pass 1 measures
// loudness with every other filter applied, but without normalize itself.
let has_normalize = edl.operations.iter().any(|op| matches!(op, EdlOperation::Normalize { .. }));
let loudness_measured = if has_normalize {
// Pass 1: measure loudness after the other filters (without normalize).
let mut pass1_ops: Vec<EdlOperation> = edl.operations.clone();
pass1_ops.retain(|op| !matches!(op, EdlOperation::Normalize { .. }));
pass1_ops.extend(silence_cuts.iter().cloned());
let pass1_filter = build_filter_chain(&pass1_ops, info.duration_ms, None);
let measured = if pass1_filter.is_empty() {
analyze_loudness(&source_path).await?
} else {
analyze_with_filter(&source_path, &pass1_filter).await?
};
Some(measured)
} else {
None
};
// 7. Build the final filter graph. TrimSilence is dropped here because it
// was already expanded into the cut list in step 5.
let mut all_ops = edl.operations.clone();
all_ops.retain(|op| !matches!(op, EdlOperation::TrimSilence { .. }));
all_ops.extend(silence_cuts);
let filter = build_filter_chain(&all_ops, info.duration_ms, loudness_measured.as_ref());
if filter.is_empty() {
return Err("Ingen filtre generert fra operasjoner".to_string());
}
// 8. Run FFmpeg with the generated -af filter chain.
let codec_args = match cli.output_format.as_str() {
"mp3" => vec!["-codec:a", "libmp3lame", "-q:a", "2"],
"wav" => vec!["-codec:a", "pcm_s16le"],
"flac" => vec!["-codec:a", "flac"],
"ogg" => vec!["-codec:a", "libvorbis", "-q:a", "6"],
_ => vec!["-codec:a", "libmp3lame", "-q:a", "2"],
};
let ext = match cli.output_format.as_str() {
"wav" => "wav",
"flac" => "flac",
"ogg" => "ogg",
_ => "mp3",
};
let tmp_dir = PathBuf::from(&cas_root).join("tmp");
tokio::fs::create_dir_all(&tmp_dir)
.await
.map_err(|e| format!("Kunne ikke opprette tmp-katalog: {e}"))?;
// Uuid::now_v7() gives a unique temp name so parallel runs cannot collide.
let tmp_output = tmp_dir.join(format!("audio_process_{}.{ext}", Uuid::now_v7()));
let mut cmd = tokio::process::Command::new("ffmpeg");
cmd.args(["-i"])
.arg(&source_path)
.args(["-af", &filter])
.args(&codec_args)
.args(["-y"])
.arg(&tmp_output);
tracing::info!(filter = %filter, "Kjører ffmpeg");
let output = cmd
.output()
.await
.map_err(|e| format!("Kunne ikke kjøre ffmpeg: {e}"))?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
// Best-effort cleanup of the partial output; the error matters more.
let _ = tokio::fs::remove_file(&tmp_output).await;
return Err(format!("ffmpeg feilet: {stderr}"));
}
// 9. Read the result, hash it, and store it in the CAS.
let result_bytes = tokio::fs::read(&tmp_output)
.await
.map_err(|e| format!("Kunne ikke lese ffmpeg-output: {e}"))?;
let _ = tokio::fs::remove_file(&tmp_output).await;
let (result_hash, result_size) = store_in_cas(&cas_root, &result_bytes).await?;
tracing::info!(
source = %cli.cas_hash,
result = %result_hash,
size = result_size,
"Lydprosessering fullført"
);
// 10. Optional: record the processed file as a new node in the database.
let mut processed_node_id = None;
if cli.write {
// Safe: main has already verified node_id is present when --write is set.
let node_id = cli.node_id.unwrap();
let requested_by = cli.requested_by
.ok_or("--requested-by er påkrevd sammen med --write")?;
let db = synops_common::db::connect().await?;
let pnode_id = write_to_db(
&db,
node_id,
requested_by,
&result_hash,
result_size,
&cli.output_format,
&edl,
)
.await?;
processed_node_id = Some(pnode_id.to_string());
tracing::info!(node_id = %node_id, processed = %pnode_id, "Database oppdatert");
}
// 11. Print the JSON result to stdout (the tool's machine-readable output).
let result = AudioProcessResult {
cas_hash: result_hash,
size_bytes: result_size,
output_format: cli.output_format,
source_hash: cli.cas_hash,
operations_applied: edl.operations.len(),
processed_node_id,
};
println!(
"{}",
serde_json::to_string_pretty(&result)
.map_err(|e| format!("JSON-serialisering feilet: {e}"))?
);
Ok(())
}
// ─── Parametervalidering ─────────────────────────────────────────
/// Validate every numeric value in the EDL operations against safe bounds
/// before any of them is interpolated into an FFmpeg filter string.
/// All problems are collected and reported together rather than failing
/// at the first one.
fn validate_operations(ops: &[EdlOperation]) -> Result<(), String> {
    let mut errors: Vec<String> = Vec::new();
    for (i, op) in ops.iter().enumerate() {
        collect_op_errors(i + 1, op, &mut errors);
    }
    if errors.is_empty() {
        Ok(())
    } else {
        Err(format!("Ugyldig EDL:\n- {}", errors.join("\n- ")))
    }
}

/// Append all validation errors for one operation (1-based `idx`) to `out`.
fn collect_op_errors(idx: usize, op: &EdlOperation, out: &mut Vec<String>) {
    match op {
        EdlOperation::Cut { start_ms, end_ms } => {
            if *start_ms < 0 {
                out.push(format!("Operasjon {idx} (cut): start_ms ({start_ms}) kan ikke være negativ"));
            }
            if *end_ms < 0 {
                out.push(format!("Operasjon {idx} (cut): end_ms ({end_ms}) kan ikke være negativ"));
            }
            if *end_ms <= *start_ms {
                out.push(format!("Operasjon {idx} (cut): end_ms ({end_ms}) må være større enn start_ms ({start_ms})"));
            }
        }
        EdlOperation::Normalize { target_lufs } => {
            let v = *target_lufs;
            if !v.is_finite() {
                out.push(format!("Operasjon {idx} (normalize): target_lufs er ikke et gyldig tall"));
            } else if !(-70.0..=0.0).contains(&v) {
                out.push(format!("Operasjon {idx} (normalize): target_lufs ({v}) må være mellom -70.0 og 0.0"));
            }
        }
        EdlOperation::TrimSilence { threshold_db, min_duration_ms } => {
            bounded_f32(out, idx, "trim_silence", "threshold_db", *threshold_db, -96.0, 0.0, "-96.0 og 0.0");
            if *min_duration_ms == 0 {
                out.push(format!("Operasjon {idx} (trim_silence): min_duration_ms må være større enn 0"));
            }
            if *min_duration_ms > 60_000 {
                out.push(format!("Operasjon {idx} (trim_silence): min_duration_ms ({min_duration_ms}) kan ikke overstige 60000 (60 sekunder)"));
            }
        }
        EdlOperation::FadeIn { duration_ms } => fade_errors(out, idx, "fade_in", *duration_ms),
        EdlOperation::FadeOut { duration_ms } => fade_errors(out, idx, "fade_out", *duration_ms),
        EdlOperation::NoiseReduction { strength_db } => {
            bounded_f32(out, idx, "noise_reduction", "strength_db", *strength_db, -80.0, 0.0, "-80.0 og 0.0");
        }
        EdlOperation::Equalizer { low_gain, mid_gain, high_gain } => {
            for (name, val) in [("low_gain", *low_gain), ("mid_gain", *mid_gain), ("high_gain", *high_gain)] {
                bounded_f32(out, idx, "equalizer", name, val, -30.0, 30.0, "-30.0 og 30.0");
            }
        }
        EdlOperation::Compressor { threshold_db, ratio } => {
            bounded_f32(out, idx, "compressor", "threshold_db", *threshold_db, -60.0, 0.0, "-60.0 og 0.0");
            bounded_f32(out, idx, "compressor", "ratio", *ratio, 1.0, 20.0, "1.0 og 20.0");
        }
    }
}

/// Shared check for f32 fields: the value must be finite and inside
/// `[lo, hi]`. `bounds` is the human-readable range used in the message
/// (kept as a string so "-96.0" does not render as "-96").
fn bounded_f32(out: &mut Vec<String>, idx: usize, op: &str, field: &str, val: f32, lo: f32, hi: f32, bounds: &str) {
    if !val.is_finite() {
        out.push(format!("Operasjon {idx} ({op}): {field} er ikke et gyldig tall"));
    } else if !(lo..=hi).contains(&val) {
        out.push(format!("Operasjon {idx} ({op}): {field} ({val}) må være mellom {bounds}"));
    }
}

/// Shared check for fade durations: non-zero and at most 5 minutes.
fn fade_errors(out: &mut Vec<String>, idx: usize, op: &str, duration_ms: u32) {
    if duration_ms == 0 {
        out.push(format!("Operasjon {idx} ({op}): duration_ms må være større enn 0"));
    }
    if duration_ms > 300_000 {
        out.push(format!("Operasjon {idx} ({op}): duration_ms ({duration_ms}) kan ikke overstige 300000 (5 minutter)"));
    }
}
/// Check fade lengths against the actual audio duration (only known after
/// ffprobe has run). Non-fade operations are ignored; all violations are
/// collected into one error message.
fn validate_fade_durations(ops: &[EdlOperation], duration_ms: i64) -> Result<(), String> {
    let errors: Vec<String> = ops
        .iter()
        .enumerate()
        .filter_map(|(i, op)| {
            let idx = i + 1;
            // Pick out the fades; everything else yields no error.
            let (label, fade_ms) = match op {
                EdlOperation::FadeIn { duration_ms: d } => ("fade_in", *d),
                EdlOperation::FadeOut { duration_ms: d } => ("fade_out", *d),
                _ => return None,
            };
            if i64::from(fade_ms) > duration_ms {
                Some(format!(
                    "Operasjon {idx} ({label}): varighet ({fade_ms} ms) overstiger lydens varighet ({duration_ms} ms)"
                ))
            } else {
                None
            }
        })
        .collect();
    if errors.is_empty() {
        Ok(())
    } else {
        Err(format!("Ugyldig fade-varighet:\n- {}", errors.join("\n- ")))
    }
}
// ─── FFmpeg-kommandoer ────────────────────────────────────────────
/// Hent lydvarighet via ffprobe.
async fn get_audio_info(path: &PathBuf) -> Result<AudioInfo, String> {
let output = tokio::process::Command::new("ffprobe")
.args([
"-v", "quiet",
"-print_format", "json",
"-show_format",
])
.arg(path)
.output()
.await
.map_err(|e| format!("Kunne ikke kjøre ffprobe: {e}"))?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
return Err(format!("ffprobe feilet: {stderr}"));
}
let json: serde_json::Value = serde_json::from_slice(&output.stdout)
.map_err(|e| format!("Kunne ikke parse ffprobe-output: {e}"))?;
let duration_secs: f64 = json["format"]["duration"]
.as_str()
.and_then(|s| s.parse().ok())
.unwrap_or(0.0);
Ok(AudioInfo {
duration_ms: (duration_secs * 1000.0) as i64,
})
}
/// Analyser loudness (EBU R128) via ffmpeg loudnorm.
async fn analyze_loudness(path: &PathBuf) -> Result<LoudnessInfo, String> {
let output = tokio::process::Command::new("ffmpeg")
.args(["-i"])
.arg(path)
.args(["-af", "loudnorm=print_format=json", "-f", "null", "-"])
.output()
.await
.map_err(|e| format!("Kunne ikke kjøre ffmpeg loudnorm: {e}"))?;
let stderr = String::from_utf8_lossy(&output.stderr);
parse_loudnorm_json(&stderr)
}
/// Kjør loudnorm-analyse med et forhåndsfilter (for to-pass normalisering).
async fn analyze_with_filter(path: &PathBuf, pre_filter: &str) -> Result<LoudnessInfo, String> {
let filter = format!("{pre_filter},loudnorm=print_format=json");
let output = tokio::process::Command::new("ffmpeg")
.args(["-i"])
.arg(path)
.args(["-af", &filter, "-f", "null", "-"])
.output()
.await
.map_err(|e| format!("Kunne ikke kjøre ffmpeg loudnorm pass 1: {e}"))?;
let stderr = String::from_utf8_lossy(&output.stderr);
parse_loudnorm_json(&stderr)
}
/// Extract the loudnorm JSON report from ffmpeg's stderr text.
///
/// The report is the first `{`-on-its-own-line ... `}` block; each
/// measurement inside it is a string-encoded float.
fn parse_loudnorm_json(stderr: &str) -> Result<LoudnessInfo, String> {
    let start = stderr
        .find("{\n")
        .ok_or("Fant ikke loudnorm JSON i ffmpeg-output")?;
    let tail = &stderr[start..];
    // +2 keeps the closing "\n}" inside the slice we hand to serde.
    let end = tail.find("\n}").ok_or("Ufullstendig loudnorm JSON")? + 2;
    let json: serde_json::Value = serde_json::from_str(&tail[..end])
        .map_err(|e| format!("Kunne ikke parse loudnorm JSON: {e}"))?;
    // Every measurement is a string in the report; parse it to f64.
    let field = |name: &str| -> Result<f64, String> {
        json[name]
            .as_str()
            .and_then(|s| s.parse::<f64>().ok())
            .ok_or_else(|| format!("Mangler felt '{name}' i loudnorm-output"))
    };
    Ok(LoudnessInfo {
        input_i: field("input_i")?,
        input_tp: field("input_tp")?,
        input_lra: field("input_lra")?,
        input_thresh: field("input_thresh")?,
    })
}
/// Detekter stille regioner i en lydfil.
async fn detect_silence(
path: &PathBuf,
threshold_db: f32,
min_duration_ms: u32,
) -> Result<Vec<SilenceRegion>, String> {
let min_duration_secs = min_duration_ms as f64 / 1000.0;
let filter = format!("silencedetect=noise={threshold_db}dB:d={min_duration_secs}");
let output = tokio::process::Command::new("ffmpeg")
.args(["-i"])
.arg(path)
.args(["-af", &filter, "-f", "null", "-"])
.output()
.await
.map_err(|e| format!("Kunne ikke kjøre ffmpeg silencedetect: {e}"))?;
let stderr = String::from_utf8_lossy(&output.stderr);
let mut regions = Vec::new();
let mut current_start: Option<f64> = None;
for line in stderr.lines() {
if let Some(pos) = line.find("silence_start: ") {
let val_str = &line[pos + 15..];
if let Some(secs) = val_str.split_whitespace().next().and_then(|s| s.parse::<f64>().ok()) {
current_start = Some(secs);
}
}
if let Some(pos) = line.find("silence_end: ") {
let val_str = &line[pos + 13..];
if let Some(end_secs) = val_str.split_whitespace().next().and_then(|s| s.parse::<f64>().ok()) {
if let Some(start_secs) = current_start.take() {
regions.push(SilenceRegion {
start_ms: (start_secs * 1000.0) as i64,
end_ms: (end_secs * 1000.0) as i64,
});
}
}
}
}
Ok(regions)
}
/// Expand each TrimSilence operation into concrete Cut operations by running
/// silence detection on the source file.
///
/// A margin of up to 200 ms (never more than half the region) is kept on
/// both sides of every silent region so the result does not sound clipped.
async fn resolve_silence_cuts(
    cas_root: &str,
    edl: &EdlDocument,
) -> Result<Vec<EdlOperation>, String> {
    // The source path is the same for every operation — resolve it once
    // instead of once per TrimSilence inside the loop.
    let source_path = synops_common::cas::path(cas_root, &edl.source_hash);
    let mut cuts = Vec::new();
    for op in &edl.operations {
        if let EdlOperation::TrimSilence { threshold_db, min_duration_ms } = op {
            let regions = detect_silence(&source_path, *threshold_db, *min_duration_ms).await?;
            for region in regions {
                // Keep up to 200 ms of silence on each side for natural sound,
                // but never more than half the region's duration.
                let region_duration = region.end_ms - region.start_ms;
                let margin_ms = 200i64.min(region_duration / 2);
                let start = region.start_ms + margin_ms;
                let end = region.end_ms - margin_ms;
                if end > start {
                    cuts.push(EdlOperation::Cut {
                        start_ms: start,
                        end_ms: end,
                    });
                }
            }
        }
    }
    Ok(cuts)
}
// ─── EDL → FFmpeg filtergraf ──────────────────────────────────────
/// Build the ffmpeg `-af` filter graph string from EDL operations.
///
/// Filter order is fixed regardless of the order in the EDL:
/// 1. Cuts (aselect) — removes regions
/// 2. Noise reduction (afftdn)
/// 3. EQ (equalizer)
/// 4. Compressor (acompressor)
/// 5. Normalize (loudnorm) — always next to last
/// 6. Fades (afade) — always last
///
/// `duration_ms` is the source duration and is used to position the
/// fade-out. When `loudness_measured` is present, loudnorm runs in linear
/// (two-pass) mode with those measured values. Returns an empty string when
/// no operation produced a filter.
fn build_filter_chain(
ops: &[EdlOperation],
duration_ms: i64,
loudness_measured: Option<&LoudnessInfo>,
) -> String {
let mut filters: Vec<String> = Vec::new();
// Collect all cut regions (including cuts resolved from trim_silence).
let mut cuts: Vec<(i64, i64)> = Vec::new();
for op in ops {
if let EdlOperation::Cut { start_ms, end_ms } = op {
cuts.push((*start_ms, *end_ms));
}
}
// One aselect filter drops every cut region; asetpts then rewrites
// timestamps so the output stream is gapless.
if !cuts.is_empty() {
cuts.sort_by_key(|c| c.0);
let conditions: Vec<String> = cuts
.iter()
.map(|(s, e)| {
format!(
"between(t,{:.3},{:.3})",
*s as f64 / 1000.0,
*e as f64 / 1000.0
)
})
.collect();
filters.push(format!(
"aselect='not({})',asetpts=N/SR/TB",
conditions.join("+")
));
}
// Noise reduction via afftdn with the requested noise floor.
for op in ops {
if let EdlOperation::NoiseReduction { strength_db } = op {
filters.push(format!("afftdn=nf={strength_db}"));
}
}
// Three-band EQ; bands with zero gain emit no filter at all.
for op in ops {
if let EdlOperation::Equalizer { low_gain, mid_gain, high_gain } = op {
let mut eq_parts = Vec::new();
if *low_gain != 0.0 {
eq_parts.push(format!("equalizer=f=100:t=h:w=200:g={low_gain}"));
}
if *mid_gain != 0.0 {
eq_parts.push(format!("equalizer=f=1000:t=h:w=1000:g={mid_gain}"));
}
if *high_gain != 0.0 {
eq_parts.push(format!("equalizer=f=8000:t=h:w=4000:g={high_gain}"));
}
filters.extend(eq_parts);
}
}
// Compressor with fixed attack (5) and release (50) times.
for op in ops {
if let EdlOperation::Compressor { threshold_db, ratio } = op {
filters.push(format!(
"acompressor=threshold={threshold_db}dB:ratio={ratio}:attack=5:release=50"
));
}
}
// Normalize (loudnorm): linear two-pass mode with measured values when
// available, otherwise single-pass dynamic mode.
for op in ops {
if let EdlOperation::Normalize { target_lufs } = op {
if let Some(measured) = loudness_measured {
filters.push(format!(
"loudnorm=I={target_lufs}:TP=-1.5:LRA=11:\
measured_I={:.1}:measured_TP={:.1}:measured_LRA={:.1}:\
measured_thresh={:.1}:linear=true",
measured.input_i,
measured.input_tp,
measured.input_lra,
measured.input_thresh,
));
} else {
filters.push(format!("loudnorm=I={target_lufs}:TP=-1.5:LRA=11"));
}
}
}
// The fade-out start position must account for material removed by cuts.
// NOTE(review): overlapping cut regions would be double-counted here —
// assumed non-overlapping; confirm with EDL producers.
let total_cut_ms: i64 = cuts.iter().map(|(s, e)| e - s).sum();
let effective_duration_ms = duration_ms - total_cut_ms;
// Fades — always last in the chain.
for op in ops {
match op {
EdlOperation::FadeIn { duration_ms } => {
let d = *duration_ms as f64 / 1000.0;
filters.push(format!("afade=t=in:d={d:.3}"));
}
EdlOperation::FadeOut { duration_ms: dur } => {
let d = *dur as f64 / 1000.0;
let start = ((effective_duration_ms as f64 / 1000.0) - d).max(0.0);
filters.push(format!("afade=t=out:st={start:.3}:d={d:.3}"));
}
_ => {}
}
}
filters.join(",")
}
// ─── CAS-operasjoner ─────────────────────────────────────────────
/// Beregn SHA-256, lagre i CAS med atomisk rename.
async fn store_in_cas(cas_root: &str, data: &[u8]) -> Result<(String, u64), String> {
let hash = synops_common::cas::hash_bytes(data);
let dest = synops_common::cas::path(cas_root, &hash);
// Allerede lagret?
if dest.exists() {
return Ok((hash, data.len() as u64));
}
// Opprett mappen
if let Some(parent) = dest.parent() {
tokio::fs::create_dir_all(parent)
.await
.map_err(|e| format!("Kunne ikke opprette CAS-katalog: {e}"))?;
}
// Skriv til temp-fil, deretter atomisk rename
let tmp_path = PathBuf::from(cas_root)
.join("tmp")
.join(format!("{}.tmp", hash));
tokio::fs::write(&tmp_path, data)
.await
.map_err(|e| format!("Kunne ikke skrive CAS temp-fil: {e}"))?;
tokio::fs::rename(&tmp_path, &dest)
.await
.map_err(|e| format!("Kunne ikke flytte til CAS: {e}"))?;
Ok((hash, data.len() as u64))
}
// ─── Database-operasjoner (kun med --write) ───────────────────────
/// Record the processed audio in the database:
/// 1. insert a new hidden `media` node carrying the CAS hash and the EDL,
/// 2. link it to the original via a system `derived_from` edge,
/// 3. best-effort log resource usage (a failure there only warns).
///
/// Returns the id of the newly created processed node.
async fn write_to_db(
db: &sqlx::PgPool,
media_node_id: Uuid,
requested_by: Uuid,
result_hash: &str,
result_size: u64,
output_format: &str,
edl: &EdlDocument,
) -> Result<Uuid, String> {
let mime = match output_format {
"mp3" => "audio/mpeg",
"wav" => "audio/wav",
"flac" => "audio/flac",
"ogg" => "audio/ogg",
_ => "audio/mpeg",
};
let processed_node_id = Uuid::now_v7();
// The full EDL is embedded in metadata so the processing is reproducible.
let metadata = serde_json::json!({
"cas_hash": result_hash,
"mime": mime,
"size_bytes": result_size,
"source_hash": edl.source_hash,
"edl": edl,
});
// Derive the new node's title from the original node when available.
let original_title: Option<String> = sqlx::query_scalar(
"SELECT title FROM nodes WHERE id = $1"
)
.bind(media_node_id)
.fetch_optional(db)
.await
.map_err(|e| format!("DB-feil: {e}"))?
.flatten();
let title = original_title
.map(|t| format!("{t} (prosessert)"))
.unwrap_or_else(|| "Prosessert lyd".to_string());
// Insert the processed media node with 'hidden' visibility.
sqlx::query(
"INSERT INTO nodes (id, node_kind, title, visibility, metadata, created_by)
VALUES ($1, 'media', $2, 'hidden', $3, $4)",
)
.bind(processed_node_id)
.bind(&title)
.bind(&metadata)
.bind(requested_by)
.execute(db)
.await
.map_err(|e| format!("Kunne ikke opprette prosessert node: {e}"))?;
// derived_from edge: processed → original, marked as system-created.
let edge_id = Uuid::now_v7();
sqlx::query(
"INSERT INTO edges (id, source_id, target_id, edge_type, system, created_by)
VALUES ($1, $2, $3, 'derived_from', true, $4)",
)
.bind(edge_id)
.bind(processed_node_id)
.bind(media_node_id)
.bind(requested_by)
.execute(db)
.await
.map_err(|e| format!("Kunne ikke opprette derived_from edge: {e}"))?;
// Resource accounting: find the collection the original belongs to, if any.
// Lookup errors are deliberately swallowed (.ok()) — accounting must not
// fail the processing run.
let collection_id: Option<Uuid> = sqlx::query_scalar(
"SELECT e.target_id FROM edges e
JOIN nodes n ON n.id = e.target_id
WHERE e.source_id = $1 AND e.edge_type = 'belongs_to' AND n.node_kind = 'collection'
LIMIT 1",
)
.bind(media_node_id)
.fetch_optional(db)
.await
.ok()
.flatten();
let detail = serde_json::json!({
"output_format": output_format,
"operations": edl.operations.len(),
"result_size_bytes": result_size,
});
// Usage logging is best-effort: a failure is logged, never propagated.
if let Err(e) = sqlx::query(
"INSERT INTO resource_usage_log (target_node_id, triggered_by, collection_id, resource_type, detail)
VALUES ($1, $2, $3, $4, $5)",
)
.bind(media_node_id)
.bind(requested_by)
.bind(collection_id)
.bind("ffmpeg_audio")
.bind(&detail)
.execute(db)
.await
{
tracing::warn!(error = %e, "Kunne ikke logge ressursforbruk");
}
Ok(processed_node_id)
}
// ─── Tester ──────────────────────────────────────────────────────
// Unit tests cover parameter validation, filter-graph construction, CAS path
// layout and EDL serde round-tripping. The FFmpeg and database paths are not
// exercised here (they require external binaries / a live database).
#[cfg(test)]
mod tests {
use super::*;
// ── validate_operations ──
#[test]
fn valid_operations_pass() {
let ops = vec![
EdlOperation::Cut { start_ms: 0, end_ms: 1000 },
EdlOperation::Normalize { target_lufs: -16.0 },
EdlOperation::TrimSilence { threshold_db: -30.0, min_duration_ms: 500 },
EdlOperation::FadeIn { duration_ms: 1000 },
EdlOperation::FadeOut { duration_ms: 2000 },
EdlOperation::NoiseReduction { strength_db: -25.0 },
EdlOperation::Equalizer { low_gain: 2.0, mid_gain: 0.0, high_gain: -1.0 },
EdlOperation::Compressor { threshold_db: -20.0, ratio: 4.0 },
];
assert!(validate_operations(&ops).is_ok());
}
#[test]
fn cut_end_before_start_rejected() {
let ops = vec![EdlOperation::Cut { start_ms: 5000, end_ms: 3000 }];
let err = validate_operations(&ops).unwrap_err();
assert!(err.contains("end_ms"));
}
#[test]
fn cut_negative_start_rejected() {
let ops = vec![EdlOperation::Cut { start_ms: -100, end_ms: 1000 }];
let err = validate_operations(&ops).unwrap_err();
assert!(err.contains("negativ"));
}
#[test]
fn normalize_out_of_range_rejected() {
// Both above (5.0) and below (-80.0) the allowed -70.0..=0.0 range.
let ops = vec![EdlOperation::Normalize { target_lufs: 5.0 }];
assert!(validate_operations(&ops).is_err());
let ops = vec![EdlOperation::Normalize { target_lufs: -80.0 }];
assert!(validate_operations(&ops).is_err());
}
#[test]
fn compressor_ratio_out_of_range_rejected() {
let ops = vec![EdlOperation::Compressor { threshold_db: -20.0, ratio: 0.5 }];
let err = validate_operations(&ops).unwrap_err();
assert!(err.contains("ratio"));
let ops = vec![EdlOperation::Compressor { threshold_db: -20.0, ratio: 25.0 }];
assert!(validate_operations(&ops).is_err());
}
#[test]
fn eq_gain_out_of_range_rejected() {
let ops = vec![EdlOperation::Equalizer { low_gain: 50.0, mid_gain: 0.0, high_gain: 0.0 }];
let err = validate_operations(&ops).unwrap_err();
assert!(err.contains("low_gain"));
}
#[test]
fn noise_reduction_out_of_range_rejected() {
let ops = vec![EdlOperation::NoiseReduction { strength_db: 10.0 }];
assert!(validate_operations(&ops).is_err());
}
#[test]
fn fade_zero_duration_rejected() {
let ops = vec![EdlOperation::FadeIn { duration_ms: 0 }];
assert!(validate_operations(&ops).is_err());
}
#[test]
fn trim_silence_zero_duration_rejected() {
let ops = vec![EdlOperation::TrimSilence { threshold_db: -30.0, min_duration_ms: 0 }];
assert!(validate_operations(&ops).is_err());
}
// Non-finite floats must never reach FFmpeg filter strings.
#[test]
fn nan_values_rejected() {
let ops = vec![EdlOperation::Normalize { target_lufs: f64::NAN }];
assert!(validate_operations(&ops).is_err());
let ops = vec![EdlOperation::Compressor { threshold_db: f32::NAN, ratio: 4.0 }];
assert!(validate_operations(&ops).is_err());
}
#[test]
fn infinity_values_rejected() {
let ops = vec![EdlOperation::Normalize { target_lufs: f64::INFINITY }];
assert!(validate_operations(&ops).is_err());
let ops = vec![EdlOperation::Equalizer { low_gain: f32::INFINITY, mid_gain: 0.0, high_gain: 0.0 }];
assert!(validate_operations(&ops).is_err());
}
// Validation collects every problem into one message, not just the first.
#[test]
fn multiple_errors_collected() {
let ops = vec![
EdlOperation::Normalize { target_lufs: 100.0 },
EdlOperation::Compressor { threshold_db: 50.0, ratio: 0.0 },
];
let err = validate_operations(&ops).unwrap_err();
assert!(err.contains("normalize"));
assert!(err.contains("compressor"));
}
#[test]
fn empty_operations_valid() {
assert!(validate_operations(&[]).is_ok());
}
// ── build_filter_chain ──
#[test]
fn filter_chain_cuts() {
let ops = vec![
EdlOperation::Cut { start_ms: 5000, end_ms: 8000 },
EdlOperation::Cut { start_ms: 15000, end_ms: 17000 },
];
let chain = build_filter_chain(&ops, 60000, None);
assert!(chain.contains("aselect"));
assert!(chain.contains("between"));
assert!(chain.contains("asetpts"));
}
#[test]
fn filter_chain_order() {
// The chain order is fixed regardless of EDL order:
// noise reduction < normalize < fade.
let ops = vec![
EdlOperation::FadeIn { duration_ms: 1000 },
EdlOperation::NoiseReduction { strength_db: -25.0 },
EdlOperation::Normalize { target_lufs: -16.0 },
];
let chain = build_filter_chain(&ops, 60000, None);
let nr_pos = chain.find("afftdn").unwrap();
let norm_pos = chain.find("loudnorm").unwrap();
let fade_pos = chain.find("afade").unwrap();
// noise reduction < normalize < fade
assert!(nr_pos < norm_pos);
assert!(norm_pos < fade_pos);
}
#[test]
fn filter_chain_two_pass_loudnorm() {
let ops = vec![EdlOperation::Normalize { target_lufs: -16.0 }];
let measured = LoudnessInfo {
input_i: -24.0,
input_tp: -1.0,
input_lra: 7.0,
input_thresh: -34.0,
};
let chain = build_filter_chain(&ops, 60000, Some(&measured));
assert!(chain.contains("measured_I=-24.0"));
assert!(chain.contains("linear=true"));
}
#[test]
fn filter_chain_fade_out_positioning() {
let ops = vec![EdlOperation::FadeOut { duration_ms: 2000 }];
let chain = build_filter_chain(&ops, 10000, None);
// 10s - 2s = 8s start
assert!(chain.contains("st=8.000"));
}
#[test]
fn filter_chain_fade_out_after_cuts() {
// Fade-out start must be computed from the post-cut duration.
let ops = vec![
EdlOperation::Cut { start_ms: 2000, end_ms: 5000 },
EdlOperation::FadeOut { duration_ms: 1000 },
];
// 10s - 3s cut = 7s effective. 7s - 1s fade = 6s start
let chain = build_filter_chain(&ops, 10000, None);
assert!(chain.contains("st=6.000"));
}
// ── shared helpers / serde ──
#[test]
fn cas_path_format() {
// Two-level hash directory layout from synops-common.
let p = synops_common::cas::path("/srv/synops/media/cas", "b94d27b9934d3e08");
assert_eq!(
p,
PathBuf::from("/srv/synops/media/cas/b9/4d/b94d27b9934d3e08")
);
}
#[test]
fn edl_json_roundtrip() {
let edl = EdlDocument {
source_hash: "abc123".to_string(),
operations: vec![
EdlOperation::Cut { start_ms: 0, end_ms: 1000 },
EdlOperation::Normalize { target_lufs: -16.0 },
],
};
let json = serde_json::to_string(&edl).unwrap();
let parsed: EdlDocument = serde_json::from_str(&json).unwrap();
assert_eq!(parsed.source_hash, "abc123");
assert_eq!(parsed.operations.len(), 2);
}
#[test]
fn validate_fade_duration_exceeds_audio() {
// A fade exactly equal to the duration is allowed; longer is not.
let ops = vec![EdlOperation::FadeIn { duration_ms: 5000 }];
assert!(validate_fade_durations(&ops, 3000).is_err());
assert!(validate_fade_durations(&ops, 5000).is_ok());
assert!(validate_fade_durations(&ops, 10000).is_ok());
}
}