Nytt CLI-verktøy som henter og parser RSS/Atom-feeds (via feed-rs), oppretter content-noder for nye entries med: - metadata.source_url, feed_entry_id, original_author, published_at - tagged-edge "feed" (selv-edge) - belongs_to-edge til angitt samling - Paywall-deteksjon gjenbrukt fra synops-clip Dedupliserer på source_url — kjøring nummer to oppretter ingen duplikater. Designet for å kjøres periodisk av maskinrommet/jobbkø (én gang per poll). --interval lagres som metadata for orkestreringsoppsettet. Støtter --payload-json for jobbkø-dispatch.
487 lines
14 KiB
Rust
487 lines
14 KiB
Rust
// synops-feed — Abonner på RSS/Atom-feed og opprett content-noder.
|
|
//
|
|
// Input: --url <feed-url> --collection-id <uuid> --created-by <uuid>
|
|
// [--interval 30m] [--payload-json <json>]
|
|
//
|
|
// Henter feed, parser med feed-rs, oppretter content-node for nye entries
|
|
// med metadata.source_url, tagged-edge "feed", og belongs_to-edge til samling.
|
|
// Dedupliserer på entry-URL (sjekker metadata.source_url i eksisterende noder).
|
|
// Paywall-deteksjon gjenbrukt fra synops-clip.
|
|
//
|
|
// Designvalg: Verktøyet kjøres én gang per poll. Maskinrommet orkestrerer
|
|
// periodisk kjøring via jobbkø. --interval lagres som metadata for
|
|
// orkestreringsoppsettet, men selve pollinga skjer ikke i denne prosessen.
|
|
//
|
|
// Miljøvariabler:
|
|
// DATABASE_URL — PostgreSQL-tilkobling (påkrevd)
|
|
// RUST_LOG — Loggnivå (default: synops_feed=info)
|
|
//
|
|
// Ref: docs/retninger/unix_filosofi.md
|
|
|
|
use std::collections::HashSet;
use std::process;

use chrono::{DateTime, Utc};
use clap::Parser;
use feed_rs::parser;
use serde::{Deserialize, Serialize};
use tracing::{debug, info, warn};
use uuid::Uuid;
|
|
|
|
// =============================================================================
|
|
// CLI
|
|
// =============================================================================
|
|
|
|
/// Abonner på RSS/Atom-feed og opprett content-noder.
//
// Command-line surface. The identifying flags are Option<_> rather than
// required, because --payload-json may supply them instead; presence is
// enforced in main().
// NOTE: the `///` field docs double as clap's --help text shown to users,
// so they are intentionally left untranslated — changing them changes the
// program's help output.
#[derive(Parser)]
#[command(name = "synops-feed", about = "Abonner på RSS/Atom-feed")]
struct Cli {
    /// Feed-URL (RSS eller Atom)
    #[arg(long)]
    url: Option<String>,

    /// Samlings-ID å legge nye entries i
    #[arg(long)]
    collection_id: Option<Uuid>,

    /// Bruker-ID som oppretter nodene
    #[arg(long)]
    created_by: Option<Uuid>,

    /// Poll-intervall (f.eks. "30m", "1h") — lagres som metadata, brukes av orkestrator
    // Not used by this process itself (see file header): polling cadence is
    // handled externally by the orchestrator/job queue.
    #[arg(long, default_value = "30m")]
    interval: String,

    /// Jobb-payload som JSON (for maskinrommet/jobbkø)
    // When present, takes precedence over the individual flags above.
    #[arg(long)]
    payload_json: Option<String>,
}
|
|
|
|
/// Payload from the job queue.
//
// Job-queue variant of the CLI arguments, deserialized from --payload-json.
// Unlike `Cli`, `url`, `collection_id` and `created_by` are required here —
// a malformed payload aborts the run in main() with exit code 1.
#[derive(Deserialize)]
struct JobPayload {
    // Feed URL (RSS or Atom).
    url: String,
    // Collection the new content nodes are attached to (belongs_to edge).
    collection_id: Uuid,
    // User recorded as creator of the nodes and edges.
    created_by: Uuid,
    // Poll-interval hint for the orchestrator; defaults to "30m" when absent.
    #[serde(default = "default_interval")]
    interval: String,
}
|
|
|
|
/// Default poll interval used when a job payload omits `interval`.
fn default_interval() -> String {
    String::from("30m")
}
|
|
|
|
/// Result output, serialized as JSON to stdout.
#[derive(Serialize)]
struct FeedResult {
    // "completed" when every entry was processed, "partial" when some failed.
    status: String,
    // The feed URL that was polled.
    feed_url: String,
    // Feed title, if the feed declares one.
    feed_title: Option<String>,
    // Total number of entries present in the fetched feed.
    items_found: usize,
    // New content nodes created in this run.
    nodes_created: usize,
    // Entries skipped (missing URL, or already imported).
    items_skipped: usize,
    // Per-entry error messages, formatted "<url>: <error>".
    errors: Vec<String>,
}
|
|
|
|
// =============================================================================
|
|
// Main
|
|
// =============================================================================
|
|
|
|
#[tokio::main]
|
|
async fn main() {
|
|
synops_common::logging::init("synops_feed");
|
|
|
|
let cli = Cli::parse();
|
|
|
|
// Løs opp parametre: payload-json overskriver CLI-args
|
|
let (url, collection_id, created_by, _interval) = if let Some(ref json) = cli.payload_json {
|
|
match serde_json::from_str::<JobPayload>(json) {
|
|
Ok(p) => (p.url, p.collection_id, p.created_by, p.interval),
|
|
Err(e) => {
|
|
eprintln!("Ugyldig --payload-json: {e}");
|
|
process::exit(1);
|
|
}
|
|
}
|
|
} else {
|
|
let url = cli.url.unwrap_or_else(|| {
|
|
eprintln!("--url er påkrevd");
|
|
process::exit(1);
|
|
});
|
|
let collection_id = cli.collection_id.unwrap_or_else(|| {
|
|
eprintln!("--collection-id er påkrevd");
|
|
process::exit(1);
|
|
});
|
|
let created_by = cli.created_by.unwrap_or_else(|| {
|
|
eprintln!("--created-by er påkrevd");
|
|
process::exit(1);
|
|
});
|
|
(url, collection_id, created_by, cli.interval)
|
|
};
|
|
|
|
match fetch_and_process(&url, collection_id, created_by).await {
|
|
Ok(result) => {
|
|
let json = serde_json::to_string_pretty(&result).unwrap();
|
|
println!("{json}");
|
|
if result.status == "error" {
|
|
process::exit(1);
|
|
}
|
|
}
|
|
Err(e) => {
|
|
eprintln!("Feil: {e}");
|
|
process::exit(1);
|
|
}
|
|
}
|
|
}
|
|
|
|
// =============================================================================
|
|
// Feed-henting og -prosessering
|
|
// =============================================================================
|
|
|
|
async fn fetch_and_process(
|
|
url: &str,
|
|
collection_id: Uuid,
|
|
created_by: Uuid,
|
|
) -> Result<FeedResult, String> {
|
|
// 1. Hent feed-XML
|
|
info!(url, "Henter feed");
|
|
let client = reqwest::Client::builder()
|
|
.user_agent("synops-feed/0.1")
|
|
.timeout(std::time::Duration::from_secs(30))
|
|
.build()
|
|
.map_err(|e| format!("HTTP-klient feilet: {e}"))?;
|
|
|
|
let response = client
|
|
.get(url)
|
|
.send()
|
|
.await
|
|
.map_err(|e| format!("Kunne ikke hente feed: {e}"))?;
|
|
|
|
if !response.status().is_success() {
|
|
return Err(format!("HTTP {}: {}", response.status(), url));
|
|
}
|
|
|
|
let body = response
|
|
.bytes()
|
|
.await
|
|
.map_err(|e| format!("Kunne ikke lese respons: {e}"))?;
|
|
|
|
// 2. Parse feed (RSS 2.0, Atom 1.0, etc.)
|
|
let feed = parser::parse(&body[..]).map_err(|e| format!("Kunne ikke parse feed: {e}"))?;
|
|
let feed_title = feed.title.map(|t| t.content);
|
|
|
|
info!(
|
|
feed_title = feed_title.as_deref().unwrap_or("(uten tittel)"),
|
|
entries = feed.entries.len(),
|
|
"Feed parset"
|
|
);
|
|
|
|
// 3. Koble til database
|
|
let db = synops_common::db::connect().await?;
|
|
|
|
// 4. Hent eksisterende source_url-er for denne samlingen for deduplisering
|
|
let existing_urls: Vec<String> = sqlx::query_scalar(
|
|
r#"
|
|
SELECT n.metadata->>'source_url'
|
|
FROM nodes n
|
|
INNER JOIN edges e ON e.source_id = n.id
|
|
AND e.target_id = $1
|
|
AND e.edge_type = 'belongs_to'
|
|
WHERE n.node_kind = 'content'
|
|
AND n.metadata->>'source_url' IS NOT NULL
|
|
"#,
|
|
)
|
|
.bind(collection_id)
|
|
.fetch_all(&db)
|
|
.await
|
|
.map_err(|e| format!("PG-feil ved dedupliseringssjekk: {e}"))?;
|
|
|
|
debug!(existing = existing_urls.len(), "Eksisterende feed-entries i samling");
|
|
|
|
// 5. Prosesser nye entries
|
|
let mut nodes_created = 0usize;
|
|
let mut items_skipped = 0usize;
|
|
let mut errors = Vec::new();
|
|
|
|
for entry in &feed.entries {
|
|
// Finn entry-URL
|
|
let entry_url = entry
|
|
.links
|
|
.first()
|
|
.map(|l| l.href.clone())
|
|
.or_else(|| entry.id.clone().into())
|
|
.unwrap_or_default();
|
|
|
|
if entry_url.is_empty() {
|
|
debug!("Entry uten URL, hopper over");
|
|
items_skipped += 1;
|
|
continue;
|
|
}
|
|
|
|
// Dedupliser
|
|
if existing_urls.iter().any(|u| u == &entry_url) {
|
|
debug!(url = %entry_url, "Allerede importert, hopper over");
|
|
items_skipped += 1;
|
|
continue;
|
|
}
|
|
|
|
// Opprett node
|
|
match create_feed_entry_node(&db, entry, &entry_url, collection_id, created_by).await {
|
|
Ok(node_id) => {
|
|
info!(node_id = %node_id, url = %entry_url, "Feed-entry opprettet");
|
|
nodes_created += 1;
|
|
}
|
|
Err(e) => {
|
|
warn!(url = %entry_url, error = %e, "Kunne ikke opprette entry");
|
|
errors.push(format!("{entry_url}: {e}"));
|
|
}
|
|
}
|
|
}
|
|
|
|
info!(
|
|
nodes_created,
|
|
items_skipped,
|
|
errors = errors.len(),
|
|
"Feed-prosessering ferdig"
|
|
);
|
|
|
|
Ok(FeedResult {
|
|
status: if errors.is_empty() {
|
|
"completed".to_string()
|
|
} else {
|
|
"partial".to_string()
|
|
},
|
|
feed_url: url.to_string(),
|
|
feed_title,
|
|
items_found: feed.entries.len(),
|
|
nodes_created,
|
|
items_skipped,
|
|
errors,
|
|
})
|
|
}
|
|
|
|
// =============================================================================
|
|
// Node-opprettelse
|
|
// =============================================================================
|
|
|
|
async fn create_feed_entry_node(
|
|
db: &sqlx::PgPool,
|
|
entry: &feed_rs::model::Entry,
|
|
entry_url: &str,
|
|
collection_id: Uuid,
|
|
created_by: Uuid,
|
|
) -> Result<Uuid, String> {
|
|
let node_id = Uuid::now_v7();
|
|
|
|
// Tittel
|
|
let title = entry
|
|
.title
|
|
.as_ref()
|
|
.map(|t| t.content.clone())
|
|
.unwrap_or_else(|| truncate(entry_url, 80).to_string());
|
|
|
|
// Innhold: bruk summary eller content fra feed
|
|
let content_text = entry
|
|
.summary
|
|
.as_ref()
|
|
.map(|s| s.content.clone())
|
|
.or_else(|| {
|
|
entry
|
|
.content
|
|
.as_ref()
|
|
.and_then(|c| c.body.as_ref().map(|b| strip_html_tags(b)))
|
|
})
|
|
.unwrap_or_default();
|
|
|
|
// Forfatter
|
|
let author = entry.authors.first().map(|a| a.name.clone());
|
|
|
|
// Publiseringsdato
|
|
let published: Option<DateTime<Utc>> = entry.published.or(entry.updated);
|
|
|
|
// Paywall-sjekk på innholdet
|
|
let raw_html = entry
|
|
.content
|
|
.as_ref()
|
|
.and_then(|c| c.body.clone())
|
|
.unwrap_or_default();
|
|
let paywall = detect_paywall(&content_text, &raw_html);
|
|
if paywall {
|
|
debug!(url = %entry_url, "Betalingsmur detektert");
|
|
}
|
|
|
|
// Metadata
|
|
let metadata = serde_json::json!({
|
|
"source_url": entry_url,
|
|
"original_author": author,
|
|
"published_at": published.map(|d| d.to_rfc3339()),
|
|
"paywall": paywall,
|
|
"feed_entry_id": entry.id,
|
|
});
|
|
|
|
// 1. Opprett content-node
|
|
sqlx::query(
|
|
r#"
|
|
INSERT INTO nodes (id, node_kind, title, content, visibility, metadata, created_by)
|
|
VALUES ($1, 'content', $2, $3, 'hidden'::visibility, $4, $5)
|
|
"#,
|
|
)
|
|
.bind(node_id)
|
|
.bind(&title)
|
|
.bind(&content_text)
|
|
.bind(&metadata)
|
|
.bind(created_by)
|
|
.execute(db)
|
|
.await
|
|
.map_err(|e| format!("INSERT node feilet: {e}"))?;
|
|
|
|
// 2. tagged-edge "feed" (selv-edge)
|
|
sqlx::query(
|
|
r#"
|
|
INSERT INTO edges (id, source_id, target_id, edge_type, metadata, system, created_by)
|
|
VALUES ($1, $2, $2, 'tagged', $3, false, $4)
|
|
ON CONFLICT (source_id, target_id, edge_type) DO NOTHING
|
|
"#,
|
|
)
|
|
.bind(Uuid::now_v7())
|
|
.bind(node_id)
|
|
.bind(serde_json::json!({"tag": "feed"}))
|
|
.bind(created_by)
|
|
.execute(db)
|
|
.await
|
|
.map_err(|e| format!("INSERT tagged-edge feilet: {e}"))?;
|
|
|
|
// 3. belongs_to-edge (content → samling)
|
|
sqlx::query(
|
|
r#"
|
|
INSERT INTO edges (id, source_id, target_id, edge_type, metadata, system, created_by)
|
|
VALUES ($1, $2, $3, 'belongs_to', '{}', false, $4)
|
|
ON CONFLICT (source_id, target_id, edge_type) DO NOTHING
|
|
"#,
|
|
)
|
|
.bind(Uuid::now_v7())
|
|
.bind(node_id)
|
|
.bind(collection_id)
|
|
.bind(created_by)
|
|
.execute(db)
|
|
.await
|
|
.map_err(|e| format!("INSERT belongs_to-edge feilet: {e}"))?;
|
|
|
|
Ok(node_id)
|
|
}
|
|
|
|
// =============================================================================
|
|
// Paywall-deteksjon (gjenbrukt fra synops-clip)
|
|
// =============================================================================
|
|
|
|
/// Detekter betalingsmur basert på innhold og HTML-mønstre.
|
|
fn detect_paywall(content: &str, html: &str) -> bool {
|
|
let content_lower = content.to_lowercase();
|
|
let html_lower = html.to_lowercase();
|
|
|
|
// 1. Veldig kort innhold (under 200 tegn) tyder på betalingsmur
|
|
let trimmed_len = content.trim().len();
|
|
if trimmed_len > 0 && trimmed_len < 200 {
|
|
debug!(trimmed_len, "Kort innhold — mulig betalingsmur");
|
|
return true;
|
|
}
|
|
|
|
// 2. Kjente betalingsmur-mønstre i innholdet
|
|
let paywall_content_patterns = [
|
|
"logg inn for å lese",
|
|
"log in to read",
|
|
"sign in to continue",
|
|
"subscribe to continue",
|
|
"abonner for å lese",
|
|
"kun for abonnenter",
|
|
"for subscribers only",
|
|
"du må være innlogget",
|
|
"denne artikkelen er for abonnenter",
|
|
"kjøp tilgang",
|
|
"buy access",
|
|
"unlock this article",
|
|
"lås opp artikkelen",
|
|
"registrer deg for å lese",
|
|
"create an account to read",
|
|
"meld deg inn",
|
|
"bli abonnent",
|
|
"allerede abonnent",
|
|
"already a subscriber",
|
|
];
|
|
|
|
for pattern in &paywall_content_patterns {
|
|
if content_lower.contains(pattern) {
|
|
debug!(pattern, "Betalingsmur-mønster funnet i innhold");
|
|
return true;
|
|
}
|
|
}
|
|
|
|
// 3. Kjente betalingsmur-klasser/elementer i HTML
|
|
let paywall_html_patterns = [
|
|
"class=\"paywall",
|
|
"class=\"pw-",
|
|
"id=\"paywall",
|
|
"data-paywall",
|
|
"class=\"subscriber-only",
|
|
"class=\"premium-content",
|
|
"class=\"locked-content",
|
|
"class=\"article-paywall",
|
|
"class=\"metered-",
|
|
"class=\"piano-",
|
|
"id=\"piano-",
|
|
"class=\"tp-modal",
|
|
"data-piano",
|
|
"class=\"ns-paywall",
|
|
"class=\"nettavisen-paywall",
|
|
"class=\"schibsted-paywall",
|
|
"class=\"amedia-paywall",
|
|
];
|
|
|
|
for pattern in &paywall_html_patterns {
|
|
if html_lower.contains(pattern) {
|
|
debug!(pattern, "Betalingsmur-mønster funnet i HTML");
|
|
return true;
|
|
}
|
|
}
|
|
|
|
// 4. Sjekk for meta-tagger som indikerer betalt innhold
|
|
if html_lower.contains(r#"content="locked""#)
|
|
|| html_lower.contains(r#"content="premium""#)
|
|
|| html_lower.contains("isaccessibleforfree") && html_lower.contains(r#""false""#)
|
|
{
|
|
debug!("Betalingsmur-meta-tagg funnet");
|
|
return true;
|
|
}
|
|
|
|
false
|
|
}
|
|
|
|
// =============================================================================
|
|
// Hjelpefunksjoner
|
|
// =============================================================================
|
|
|
|
/// Naive HTML-tag stripping for feed content: drops everything between
/// `<` and `>` and collapses runs of whitespace to single spaces.
///
/// Not a real HTML parser — entities are left as-is and `>` inside
/// attribute values would confuse it — but sufficient for feed snippets.
fn strip_html_tags(html: &str) -> String {
    let mut text = String::with_capacity(html.len());
    let mut inside_tag = false;
    html.chars().for_each(|c| match c {
        '<' => inside_tag = true,
        '>' => inside_tag = false,
        c if !inside_tag => text.push(c),
        _ => {}
    });
    // Collapse and trim whitespace in one pass; split_whitespace yields no
    // empty fragments, so the join result has no leading/trailing spaces.
    text.split_whitespace().collect::<Vec<_>>().join(" ")
}
|
|
|
|
/// Truncate `s` to at most `max` bytes without splitting a UTF-8 character.
///
/// Returns the longest prefix of `s` whose byte length is <= `max`, backing
/// off to the previous char boundary when `max` falls inside a multi-byte
/// character. Replaces the unstable `str::floor_char_boundary` (nightly-only
/// feature `round_char_boundary`) with equivalent stable code.
fn truncate(s: &str, max: usize) -> &str {
    if s.len() <= max {
        return s;
    }
    // Walk back from `max` to the nearest char boundary. Terminates because
    // `is_char_boundary(0)` is always true.
    let mut end = max;
    while !s.is_char_boundary(end) {
        end -= 1;
    }
    &s[..end]
}
|