Utvider synops-calendar CLI med --url for å hente ICS fra eksterne URLer (Google Calendar, Outlook, etc). Ny calendar_poller i maskinrommet poller samlingers calendar_subscriptions[] med konfigurerbart intervall, etter samme mønster som feed_poller for RSS-feeds. Endringer: - synops-calendar: ny --url parameter + reqwest for HTTP-henting - calendar_poller.rs: bakgrunnsloop som finner forfalne abonnementer - calendar_poll jobbtype i dispatcher med CLI-dispatch til synops-calendar - API: configure_calendar_subscription + remove_calendar_subscription - Migrasjon 031: indeks + prioritetsregel for calendar_poll-jobber
456 lines
14 KiB
Rust
456 lines
14 KiB
Rust
// synops-calendar — Importer ICS-filer til kalendernoder i Synops.
|
|
//
|
|
// Parser en ICS-fil (RFC 5545) og oppretter content-noder med
|
|
// scheduled-edges for hver VEVENT. Duplikatdeteksjon via ICS UID:
|
|
// ved re-import oppdateres eksisterende noder.
|
|
//
|
|
// Bruk:
|
|
// synops-calendar --file kalender.ics --collection-id <uuid>
|
|
// synops-calendar --url https://calendar.google.com/...ical --collection-id <uuid>
|
|
// synops-calendar --payload-json '{"url":"...","collection_id":"..."}'
|
|
//
|
|
// Output: JSON til stdout med antall opprettet/oppdatert/feilet.
|
|
// Feil: stderr + exit code != 0.
|
|
//
|
|
// Ref: docs/retninger/unix_filosofi.md, docs/primitiver/edges.md
|
|
|
|
use chrono::{DateTime, NaiveDate, NaiveDateTime, Utc};
|
|
use clap::Parser;
|
|
use ical::parser::ical::component::IcalEvent;
|
|
use serde::{Deserialize, Serialize};
|
|
use sqlx::PgPool;
|
|
use std::io::BufReader;
|
|
use uuid::Uuid;
|
|
|
|
/// Import an ICS file or URL into calendar nodes.
///
/// Exactly one input source is expected: `--file`, `--url`, or a
/// `--payload-json` blob (job-queue dispatch) that overrides the other flags.
#[derive(Parser)]
#[command(name = "synops-calendar", about = "Importer ICS-fil eller CalDAV-URL til kalendernoder i Synops")]
struct Cli {
    /// Path to an ICS file on disk
    #[arg(long)]
    file: Option<String>,

    /// URL to an ICS/CalDAV calendar (fetched via HTTP GET)
    #[arg(long)]
    url: Option<String>,

    /// Collection ID (the node the events belong to)
    #[arg(long)]
    collection_id: Option<Uuid>,

    /// Payload from the job queue (JSON). Overrides the other arguments.
    #[arg(long)]
    payload_json: Option<String>,
}
|
|
|
|
/// Deserialized `--payload-json` body from the job queue.
///
/// Mirrors the CLI flags; `url` takes precedence over `file` when both are
/// present (see source resolution in `main`).
#[derive(Deserialize)]
struct JobPayload {
    // Optional path to a local ICS file.
    #[serde(default)]
    file: Option<String>,
    // Optional ICS/CalDAV URL; preferred over `file`.
    #[serde(default)]
    url: Option<String>,
    // UUID (as a string) of the collection node; parsed and validated in `main`.
    collection_id: String,
}
|
|
|
|
/// A calendar event extracted from a VEVENT.
///
/// All date/time fields are kept as raw ICS strings here; they are parsed
/// into `DateTime<Utc>` later by `parse_ics_datetime`.
struct CalendarEvent {
    // ICS UID — required; used as the duplicate-detection key.
    uid: String,
    // SUMMARY → node title.
    summary: Option<String>,
    // DESCRIPTION → node content.
    description: Option<String>,
    // DTSTART, raw ICS value (e.g. "20260320T140000Z").
    dtstart: Option<String>,
    // DTEND, raw ICS value; stored in node metadata when parseable.
    dtend: Option<String>,
    // LOCATION → node metadata.
    location: Option<String>,
}
|
|
|
|
/// Import summary emitted as pretty-printed JSON on stdout.
///
/// Field names are part of the tool's output contract — do not rename.
#[derive(Serialize)]
struct ImportResult {
    // True when no per-event errors occurred.
    ok: bool,
    // Number of newly created event nodes.
    created: usize,
    // Number of existing nodes updated (matched on ICS UID).
    updated: usize,
    // Human-readable per-event error messages.
    errors: Vec<String>,
}
|
|
|
|
/// Where the ICS data comes from.
enum IcsSource {
    // Local filesystem path.
    File(String),
    // HTTP(S) URL, fetched via `fetch_ics`.
    Url(String),
}
|
|
|
|
#[tokio::main]
|
|
async fn main() {
|
|
synops_common::logging::init("synops_calendar");
|
|
|
|
let cli = Cli::parse();
|
|
|
|
let (source, collection_id) = if let Some(ref json_str) = cli.payload_json {
|
|
let payload: JobPayload = serde_json::from_str(json_str).unwrap_or_else(|e| {
|
|
eprintln!("Ugyldig --payload-json: {e}");
|
|
std::process::exit(1);
|
|
});
|
|
let cid = payload.collection_id.parse::<Uuid>().unwrap_or_else(|e| {
|
|
eprintln!("Ugyldig collection_id i payload: {e}");
|
|
std::process::exit(1);
|
|
});
|
|
let source = if let Some(url) = payload.url {
|
|
IcsSource::Url(url)
|
|
} else if let Some(file) = payload.file {
|
|
IcsSource::File(file)
|
|
} else {
|
|
eprintln!("Payload må inneholde 'url' eller 'file'");
|
|
std::process::exit(1);
|
|
};
|
|
(source, cid)
|
|
} else if let Some(url) = cli.url {
|
|
let cid = cli.collection_id.unwrap_or_else(|| {
|
|
eprintln!("Mangler --collection-id");
|
|
std::process::exit(1);
|
|
});
|
|
(IcsSource::Url(url), cid)
|
|
} else {
|
|
let file = cli.file.unwrap_or_else(|| {
|
|
eprintln!("Mangler --file eller --url");
|
|
std::process::exit(1);
|
|
});
|
|
let cid = cli.collection_id.unwrap_or_else(|| {
|
|
eprintln!("Mangler --collection-id");
|
|
std::process::exit(1);
|
|
});
|
|
(IcsSource::File(file), cid)
|
|
};
|
|
|
|
// Hent ICS-data
|
|
let ics_data = match &source {
|
|
IcsSource::File(path) => {
|
|
std::fs::read_to_string(path).unwrap_or_else(|e| {
|
|
eprintln!("Kunne ikke lese {path}: {e}");
|
|
std::process::exit(1);
|
|
})
|
|
}
|
|
IcsSource::Url(url) => {
|
|
tracing::info!(url = %url, "Henter ICS fra URL");
|
|
fetch_ics(url).await.unwrap_or_else(|e| {
|
|
eprintln!("Kunne ikke hente {url}: {e}");
|
|
std::process::exit(1);
|
|
})
|
|
}
|
|
};
|
|
|
|
// Parse ICS-data
|
|
let events = parse_ics_data(&ics_data);
|
|
|
|
if events.is_empty() {
|
|
let output = ImportResult {
|
|
ok: true,
|
|
created: 0,
|
|
updated: 0,
|
|
errors: vec!["Ingen VEVENT funnet i ICS-data".to_string()],
|
|
};
|
|
println!("{}", serde_json::to_string_pretty(&output).unwrap());
|
|
return;
|
|
}
|
|
|
|
let source_label = match &source {
|
|
IcsSource::File(path) => path.clone(),
|
|
IcsSource::Url(url) => url.clone(),
|
|
};
|
|
tracing::info!(events = events.len(), source = %source_label, "Parsed ICS-data");
|
|
|
|
// Koble til database
|
|
let db = synops_common::db::connect().await.unwrap_or_else(|e| {
|
|
eprintln!("{e}");
|
|
std::process::exit(1);
|
|
});
|
|
|
|
// Verifiser at collection-noden eksisterer
|
|
let collection_exists: bool = sqlx::query_scalar(
|
|
"SELECT EXISTS(SELECT 1 FROM nodes WHERE id = $1)",
|
|
)
|
|
.bind(collection_id)
|
|
.fetch_one(&db)
|
|
.await
|
|
.unwrap_or_else(|e| {
|
|
eprintln!("DB-feil ved oppslag av samling: {e}");
|
|
std::process::exit(1);
|
|
});
|
|
|
|
if !collection_exists {
|
|
eprintln!("Samlings-node {collection_id} finnes ikke");
|
|
std::process::exit(1);
|
|
}
|
|
|
|
// Importer hendelser
|
|
let mut created = 0usize;
|
|
let mut updated = 0usize;
|
|
let mut errors = Vec::new();
|
|
|
|
for event in &events {
|
|
match import_event(&db, event, collection_id).await {
|
|
Ok(EventAction::Created) => created += 1,
|
|
Ok(EventAction::Updated) => updated += 1,
|
|
Err(e) => {
|
|
let uid = &event.uid;
|
|
let msg = format!("Feil ved import av {uid}: {e}");
|
|
tracing::warn!("{}", msg);
|
|
errors.push(msg);
|
|
}
|
|
}
|
|
}
|
|
|
|
let all_ok = errors.is_empty();
|
|
let output = ImportResult {
|
|
ok: all_ok,
|
|
created,
|
|
updated,
|
|
errors,
|
|
};
|
|
|
|
tracing::info!(created, updated, errors = output.errors.len(), "Import fullført");
|
|
println!("{}", serde_json::to_string_pretty(&output).unwrap());
|
|
|
|
if !all_ok {
|
|
std::process::exit(1);
|
|
}
|
|
}
|
|
|
|
/// Hent ICS-data fra en URL via HTTP GET.
|
|
async fn fetch_ics(url: &str) -> Result<String, String> {
|
|
let client = reqwest::Client::builder()
|
|
.timeout(std::time::Duration::from_secs(30))
|
|
.build()
|
|
.map_err(|e| format!("Kunne ikke opprette HTTP-klient: {e}"))?;
|
|
|
|
let response = client
|
|
.get(url)
|
|
.header("User-Agent", "synops-calendar/0.1")
|
|
.send()
|
|
.await
|
|
.map_err(|e| format!("HTTP-forespørsel feilet: {e}"))?;
|
|
|
|
if !response.status().is_success() {
|
|
return Err(format!("HTTP {}: {}", response.status(), url));
|
|
}
|
|
|
|
let body = response
|
|
.text()
|
|
.await
|
|
.map_err(|e| format!("Kunne ikke lese respons: {e}"))?;
|
|
|
|
// Enkel validering: sjekk at det ser ut som ICS
|
|
if !body.contains("BEGIN:VCALENDAR") {
|
|
return Err(format!("Responsen ser ikke ut som ICS-data (mangler BEGIN:VCALENDAR)"));
|
|
}
|
|
|
|
Ok(body)
|
|
}
|
|
|
|
/// Outcome of importing a single event: a new node was created, or an
/// existing node (matched on ICS UID) was updated.
enum EventAction {
    Created,
    Updated,
}
|
|
|
|
/// Import a single calendar event. Duplicate detection via UID in metadata.
///
/// An existing node is matched on `metadata->>'ics_uid'` among nodes that
/// have a `belongs_to` edge into the collection. On a match, the node and its
/// `scheduled` edge are updated; otherwise a new content node plus
/// `belongs_to` and `scheduled` edges are created. Each path runs in a single
/// transaction. Errors are returned as human-readable strings so the caller
/// can collect them into the import report.
async fn import_event(
    db: &PgPool,
    event: &CalendarEvent,
    collection_id: Uuid,
) -> Result<EventAction, String> {
    // DTSTART is mandatory: without a start time there is no scheduled edge.
    let dtstart = event.dtstart.as_deref().ok_or("Mangler DTSTART")?;
    let at = parse_ics_datetime(dtstart).ok_or_else(|| format!("Kunne ikke parse DTSTART: {dtstart}"))?;
    let at_str = at.to_rfc3339();

    let title = event.summary.clone().unwrap_or_default();

    // Build node metadata: ICS UID (dedup key) plus optional location/dtend.
    let mut meta = serde_json::json!({
        "ics_uid": event.uid,
    });
    if let Some(ref loc) = event.location {
        meta["location"] = serde_json::Value::String(loc.clone());
    }
    if let Some(ref dtend) = event.dtend {
        // An unparsable DTEND is silently dropped rather than failing the event.
        if let Some(end) = parse_ics_datetime(dtend) {
            meta["dtend"] = serde_json::Value::String(end.to_rfc3339());
        }
    }

    // Duplicate check: node with this ICS UID already linked to the collection?
    let existing: Option<Uuid> = sqlx::query_scalar(
        r#"SELECT n.id FROM nodes n
        JOIN edges e ON e.source_id = n.id
        WHERE e.target_id = $1
        AND e.edge_type = 'belongs_to'
        AND n.metadata->>'ics_uid' = $2"#,
    )
    .bind(collection_id)
    .bind(&event.uid)
    .fetch_optional(db)
    .await
    .map_err(|e| format!("DB-feil ved duplikatsjekk: {e}"))?;

    if let Some(node_id) = existing {
        // Re-import: refresh title/content/metadata on the existing node.
        let mut tx = db.begin().await.map_err(|e| format!("Transaksjon feilet: {e}"))?;

        sqlx::query(
            "UPDATE nodes SET title = $1, content = $2, metadata = $3 WHERE id = $4",
        )
        .bind(&title)
        .bind(event.description.as_deref())
        .bind(&meta)
        .bind(node_id)
        .execute(&mut *tx)
        .await
        .map_err(|e| format!("Kunne ikke oppdatere node: {e}"))?;

        // Refresh the start time carried on the scheduled edge.
        // NOTE(review): if the scheduled edge is missing, this UPDATE matches
        // zero rows and succeeds silently — confirm whether an upsert is wanted.
        let sched_meta = serde_json::json!({ "at": at_str });
        sqlx::query(
            r#"UPDATE edges SET metadata = $1
            WHERE source_id = $2 AND target_id = $3 AND edge_type = 'scheduled'"#,
        )
        .bind(&sched_meta)
        .bind(node_id)
        .bind(collection_id)
        .execute(&mut *tx)
        .await
        .map_err(|e| format!("Kunne ikke oppdatere scheduled-edge: {e}"))?;

        tx.commit().await.map_err(|e| format!("Commit feilet: {e}"))?;

        tracing::debug!(node_id = %node_id, uid = %event.uid, "Oppdatert eksisterende hendelse");
        Ok(EventAction::Updated)
    } else {
        // First import: create the node and both edges atomically.
        let node_id = Uuid::now_v7();
        let mut tx = db.begin().await.map_err(|e| format!("Transaksjon feilet: {e}"))?;

        sqlx::query(
            r#"INSERT INTO nodes (id, node_kind, title, content, visibility, metadata)
            VALUES ($1, 'content', $2, $3, 'hidden', $4)"#,
        )
        .bind(node_id)
        .bind(&title)
        .bind(event.description.as_deref())
        .bind(&meta)
        .execute(&mut *tx)
        .await
        .map_err(|e| format!("Kunne ikke opprette node: {e}"))?;

        // belongs_to edge: event → collection (membership).
        sqlx::query(
            r#"INSERT INTO edges (id, source_id, target_id, edge_type, metadata)
            VALUES ($1, $2, $3, 'belongs_to', '{}')"#,
        )
        .bind(Uuid::now_v7())
        .bind(node_id)
        .bind(collection_id)
        .execute(&mut *tx)
        .await
        .map_err(|e| format!("Kunne ikke opprette belongs_to-edge: {e}"))?;

        // scheduled edge: event → collection, carrying the start time in metadata.
        let sched_meta = serde_json::json!({ "at": at_str });
        sqlx::query(
            r#"INSERT INTO edges (id, source_id, target_id, edge_type, metadata)
            VALUES ($1, $2, $3, 'scheduled', $4)"#,
        )
        .bind(Uuid::now_v7())
        .bind(node_id)
        .bind(collection_id)
        .bind(&sched_meta)
        .execute(&mut *tx)
        .await
        .map_err(|e| format!("Kunne ikke opprette scheduled-edge: {e}"))?;

        tx.commit().await.map_err(|e| format!("Commit feilet: {e}"))?;

        tracing::debug!(node_id = %node_id, uid = %event.uid, "Opprettet ny hendelse");
        Ok(EventAction::Created)
    }
}
|
|
|
|
/// Parse ICS-data fra en streng og returner liste med hendelser.
|
|
fn parse_ics_data(data: &str) -> Vec<CalendarEvent> {
|
|
let reader = BufReader::new(data.as_bytes());
|
|
let parser = ical::IcalParser::new(reader);
|
|
|
|
let mut events = Vec::new();
|
|
|
|
for calendar in parser {
|
|
let calendar = match calendar {
|
|
Ok(c) => c,
|
|
Err(e) => {
|
|
eprintln!("Feil ved parsing av ICS: {e}");
|
|
continue;
|
|
}
|
|
};
|
|
|
|
for vevent in calendar.events {
|
|
if let Some(event) = extract_event(&vevent) {
|
|
events.push(event);
|
|
}
|
|
}
|
|
}
|
|
|
|
events
|
|
}
|
|
|
|
/// Ekstraher relevant data fra en VEVENT.
|
|
fn extract_event(vevent: &IcalEvent) -> Option<CalendarEvent> {
|
|
let uid = get_property(vevent, "UID")?;
|
|
|
|
Some(CalendarEvent {
|
|
uid,
|
|
summary: get_property(vevent, "SUMMARY"),
|
|
description: get_property(vevent, "DESCRIPTION"),
|
|
dtstart: get_property(vevent, "DTSTART"),
|
|
dtend: get_property(vevent, "DTEND"),
|
|
location: get_property(vevent, "LOCATION"),
|
|
})
|
|
}
|
|
|
|
/// Hent en property-verdi fra en VEVENT.
|
|
fn get_property(vevent: &IcalEvent, name: &str) -> Option<String> {
|
|
vevent
|
|
.properties
|
|
.iter()
|
|
.find(|p| p.name == name)
|
|
.and_then(|p| p.value.clone())
|
|
}
|
|
|
|
/// Parse ICS datetime-streng til UTC DateTime.
|
|
///
|
|
/// Støtter formater:
|
|
/// - 20260320T140000Z (UTC)
|
|
/// - 20260320T140000 (lokal, tolkes som UTC)
|
|
/// - 20260320 (heldagshendelse)
|
|
fn parse_ics_datetime(s: &str) -> Option<DateTime<Utc>> {
|
|
let s = s.trim();
|
|
|
|
// Fjern TZID-prefix om det finnes (noen ICS-filer har dette i verdien)
|
|
let s = if s.contains(':') {
|
|
s.rsplit(':').next().unwrap_or(s)
|
|
} else {
|
|
s
|
|
};
|
|
|
|
// 20260320T140000Z
|
|
if s.ends_with('Z') {
|
|
let s = &s[..s.len() - 1];
|
|
let naive = NaiveDateTime::parse_from_str(s, "%Y%m%dT%H%M%S").ok()?;
|
|
return Some(naive.and_utc());
|
|
}
|
|
|
|
// 20260320T140000
|
|
if s.contains('T') {
|
|
let naive = NaiveDateTime::parse_from_str(s, "%Y%m%dT%H%M%S").ok()?;
|
|
return Some(naive.and_utc());
|
|
}
|
|
|
|
// 20260320 (heldagshendelse → 00:00 UTC)
|
|
let date = NaiveDate::parse_from_str(s, "%Y%m%d").ok()?;
|
|
Some(
|
|
date.and_hms_opt(0, 0, 0)?
|
|
.and_utc(),
|
|
)
|
|
}
|