synops/frontend/src/lib/livekit.ts
vegard 8acb5a8731 Fullfører oppgave 16.5: Sound pads med 4×2 pad-grid
Implementerer lydpads (inspirert av RødeCaster Pro II) i mixeren:

- mixer.ts: Nytt pad-system med AudioBuffer-caching, GainNode per pad,
  og one-shot AudioBufferSourceNode-avspilling. Funksjoner for load,
  play, stop, og gain-kontroll.

- livekit.ts: Data message-støtte (sendDataMessage, onDataMessage) for
  synkronisert pad-avspilling på tvers av LiveKit-deltakere. Bruker
  reliable delivery med topic-filtrering.

- SoundPadGrid.svelte: 4×2 responsivt pad-grid med fargede knapper.
  Forhåndslaster lydfiler fra CAS til AudioBuffer. Visuell feedback
  ved avspilling (scale-animasjon). Konfigurasjonsmodus for å sette
  label, farge og laste opp lydfil per pad. Pad-konfig lagres i
  metadata.mixer.pads på samlingsnoden.

- MixerTrait.svelte: Integrerer SoundPadGrid mellom kanalstriper og
  master-seksjon. Sender isViewer-prop for tilgangskontroll.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-18 05:17:30 +00:00

267 lines
7.7 KiB
TypeScript

/**
* LiveKit client wrapper for Synops.
*
* Handles room connection, participant tracking, and Web Audio routing.
* LiveKit's auto-attach of <audio> elements is disabled — all audio is
* routed through the Web Audio API via the mixer module (mixer.ts).
*/
import {
  Room,
  RoomEvent,
  Track,
  ConnectionState,
  type DataPacket_Kind,
  type RemoteTrack,
  type RemoteTrackPublication,
  type RemoteParticipant,
  type Participant,
} from 'livekit-client';
import { addChannel, removeChannel, destroyMixer, ensureAudioContext } from './mixer';
// ─── Types ──────────────────────────────────────────────────────────────────
/**
 * UI-facing snapshot of one participant's state.
 * Rebuilt from scratch on every relevant room event (see buildParticipantList).
 */
export interface LiveKitParticipant {
  /** Server-assigned unique identity string. */
  identity: string;
  /** Display name; falls back to identity when no name is set. */
  displayName: string;
  isSpeaking: boolean;
  audioLevel: number;
  /**
   * Local participant: microphone disabled.
   * Remote participant: audio publication muted, or no audio track published.
   */
  isMuted: boolean;
}
/** Connection lifecycle of the single module-level room. */
export type RoomStatus = 'disconnected' | 'connecting' | 'connected' | 'reconnecting';
// ─── State ──────────────────────────────────────────────────────────────────
// Single active room per module; null when disconnected.
let room: Room | null = null;
// Reactive state via callbacks: listeners are invoked (no payload) on any
// change; consumers re-read state through the getters below.
type StateListener = () => void;
const listeners = new Set<StateListener>();
let _status: RoomStatus = 'disconnected';
let _participants: LiveKitParticipant[] = [];
let _localIdentity: string = '';
/** Current room connection status. */
export function getStatus(): RoomStatus {
  return _status;
}

/** Latest participant snapshot (local participant first). */
export function getParticipants(): LiveKitParticipant[] {
  return _participants;
}

/** Identity of the local participant; empty string when disconnected. */
export function getLocalIdentity(): string {
  return _localIdentity;
}
/**
 * Register a listener that fires on every state change.
 * @returns an unsubscribe function.
 */
export function subscribe(fn: StateListener): () => void {
  listeners.add(fn);
  const unsubscribe = () => {
    listeners.delete(fn);
  };
  return unsubscribe;
}
/** Invoke every registered state listener. */
function notify() {
  listeners.forEach((fn) => fn());
}

/** Update the connection status and notify subscribers. */
function setStatus(s: RoomStatus) {
  _status = s;
  notify();
}
// ─── Participant tracking ───────────────────────────────────────────────────
/**
 * Assemble the participant snapshot: local participant first, then all
 * remote participants in the room's iteration order.
 */
function buildParticipantList(): LiveKitParticipant[] {
  if (!room) return [];
  const local = room.localParticipant;
  const self: LiveKitParticipant = {
    identity: local.identity,
    displayName: local.name || local.identity,
    isSpeaking: local.isSpeaking,
    audioLevel: local.audioLevel,
    isMuted: !local.isMicrophoneEnabled,
  };
  const remotes: LiveKitParticipant[] = [...room.remoteParticipants.values()].map((p) => ({
    identity: p.identity,
    displayName: p.name || p.identity,
    isSpeaking: p.isSpeaking,
    audioLevel: p.audioLevel,
    isMuted: isParticipantMuted(p),
  }));
  return [self, ...remotes];
}
/**
 * A remote participant counts as muted when their first audio publication
 * is muted — or when they have published no audio track at all.
 */
function isParticipantMuted(p: RemoteParticipant): boolean {
  for (const pub of p.trackPublications.values()) {
    if (pub.kind === Track.Kind.Audio) return pub.isMuted;
  }
  // No audio publication found: effectively muted.
  return true;
}
// Rebuild the participant snapshot and notify subscribers.
function refreshParticipants() {
_participants = buildParticipantList();
notify();
}
// ─── Web Audio routing (delegated to mixer.ts) ────────────────────────────
/**
 * Route a remote participant's audio track through the mixer graph
 * instead of letting LiveKit auto-attach an <audio> element.
 * Non-audio tracks and tracks without a MediaStream are ignored.
 */
function attachTrackToWebAudio(track: RemoteTrack, participant: RemoteParticipant) {
  if (track.kind !== Track.Kind.Audio) return;
  const stream = track.mediaStream;
  if (!stream) return;
  // Make sure the AudioContext exists before wiring a channel into it.
  ensureAudioContext();
  addChannel(participant.identity, stream);
}
// Tear down the mixer channel for a participant that left or unpublished audio.
function detachParticipantAudio(identity: string) {
removeChannel(identity);
}
// ─── Room connection ────────────────────────────────────────────────────────
/**
 * Connect to a LiveKit room and wire up all state and audio event handlers.
 *
 * Any previously active room is disconnected first. On success the local
 * microphone is enabled. On connection failure the half-built room is
 * discarded, status is reset to 'disconnected', and the error is rethrown.
 *
 * @param wsUrl LiveKit server websocket URL.
 * @param token Room access token.
 */
export async function connect(wsUrl: string, token: string): Promise<void> {
  if (room) {
    await disconnect();
  }
  const newRoom = new Room({
    // Capture defaults for the local microphone. Note: these options do NOT
    // control playback attachment — remote audio is never auto-attached here;
    // it is routed into the Web Audio graph in attachTrackToWebAudio().
    audioCaptureDefaults: {
      autoGainControl: true,
      echoCancellation: true,
      noiseSuppression: true,
    },
    adaptiveStream: true,
    dynacast: true,
  });
  // Register event handlers before connecting so no early events are missed.
  newRoom
    .on(RoomEvent.Connected, () => {
      _localIdentity = newRoom.localParticipant.identity;
      setStatus('connected');
      refreshParticipants();
    })
    .on(RoomEvent.Reconnecting, () => {
      setStatus('reconnecting');
    })
    .on(RoomEvent.Reconnected, () => {
      setStatus('connected');
      refreshParticipants();
    })
    .on(RoomEvent.Disconnected, () => {
      cleanupAudio();
      setStatus('disconnected');
      _participants = [];
      notify();
    })
    .on(RoomEvent.ParticipantConnected, () => {
      refreshParticipants();
    })
    .on(RoomEvent.ParticipantDisconnected, (participant: RemoteParticipant) => {
      detachParticipantAudio(participant.identity);
      refreshParticipants();
    })
    .on(RoomEvent.TrackSubscribed, (track: RemoteTrack, _pub: RemoteTrackPublication, participant: RemoteParticipant) => {
      attachTrackToWebAudio(track, participant);
      refreshParticipants();
    })
    .on(RoomEvent.TrackUnsubscribed, (_track: RemoteTrack, _pub: RemoteTrackPublication, participant: RemoteParticipant) => {
      detachParticipantAudio(participant.identity);
      refreshParticipants();
    })
    .on(RoomEvent.TrackMuted, () => {
      refreshParticipants();
    })
    .on(RoomEvent.TrackUnmuted, () => {
      refreshParticipants();
    })
    .on(RoomEvent.ActiveSpeakersChanged, () => {
      refreshParticipants();
    })
    .on(RoomEvent.DataReceived, (payload: Uint8Array, participant?: RemoteParticipant, _kind?: DataPacket_Kind, topic?: string) => {
      // Fan incoming data messages out to all registered handlers;
      // topic filtering is left to each handler.
      const senderIdentity = participant?.identity;
      for (const handler of dataListeners) {
        handler(payload, senderIdentity, topic);
      }
    });
  room = newRoom;
  setStatus('connecting');
  try {
    await newRoom.connect(wsUrl, token, {
      autoSubscribe: true,
    });
  } catch (err: unknown) {
    // Connection failed: don't leave a dangling Room reference or a stuck
    // 'connecting' status behind; surface the error to the caller.
    room = null;
    setStatus('disconnected');
    throw err;
  }
  // Enable microphone after connecting.
  await newRoom.localParticipant.setMicrophoneEnabled(true);
  refreshParticipants();
}
/**
 * Leave the current room (no-op when not connected) and reset all module
 * state back to its disconnected defaults.
 */
export async function disconnect(): Promise<void> {
  const current = room;
  if (!current) return;
  cleanupAudio();
  await current.disconnect();
  room = null;
  _status = 'disconnected';
  _participants = [];
  _localIdentity = '';
  notify();
}
// Tear down the entire Web Audio mixer graph (all channels and nodes).
function cleanupAudio() {
destroyMixer();
}
/**
 * Toggle the local microphone.
 * @returns the new enabled state — true when the mic is now live,
 *          false when it was disabled or there is no active room.
 */
export async function toggleMute(): Promise<boolean> {
  if (!room) return false;
  const next = !room.localParticipant.isMicrophoneEnabled;
  await room.localParticipant.setMicrophoneEnabled(next);
  refreshParticipants();
  return next;
}
// Mixer controls are now exported from mixer.ts directly.
// Use: import { getChannel, setChannelGain, ... } from './mixer';
/** Whether the room currently reports the Connected state. */
export function isConnected(): boolean {
  return room !== null && room.state === ConnectionState.Connected;
}
// ─── Data Messages (for sound pad sync) ────────────────────────────────────
/**
 * Callback for incoming data messages. senderIdentity is undefined for
 * server-originated packets; topic is undefined when the sender set none.
 */
export type DataMessageHandler = (payload: Uint8Array, senderIdentity: string | undefined, topic: string | undefined) => void;
// Registered handlers; fanned out from the RoomEvent.DataReceived listener.
const dataListeners = new Set<DataMessageHandler>();
/**
 * Subscribe to incoming data messages from other participants.
 * Returns an unsubscribe function.
 */
export function onDataMessage(handler: DataMessageHandler): () => void {
  dataListeners.add(handler);
  const unsubscribe = () => {
    dataListeners.delete(handler);
  };
  return unsubscribe;
}
/**
 * Send a reliable data message to all participants in the room.
 * Uses reliable delivery for guaranteed arrival; silently dropped
 * when there is no connected room to deliver through.
 */
export async function sendDataMessage(payload: Uint8Array, topic?: string): Promise<void> {
  const current = room;
  if (!current || current.state !== ConnectionState.Connected) return;
  await current.localParticipant.publishData(payload, { reliable: true, topic });
}