synops/frontend/src/lib/livekit.ts
vegard a838e0e1c2 Fullfører oppgave 16.1: LiveKit-klient i frontend
Installerer livekit-client og bygger grunnlaget for sanntidslyd:
- $lib/livekit.ts: LiveKit-wrapper med romtilkobling, deltakersporing,
  og Web Audio-ruting. Auto-attach av <audio> er deaktivert — all lyd
  rutes gjennom AudioContext med GainNode per deltaker (klar for mixer).
- api.ts: joinCommunication/leaveCommunication API-funksjoner
- RecordingTrait.svelte: UI for tilkobling, mikrofon-toggle og deltakerliste
  med live speaking-indikator

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-18 04:49:02 +00:00

274 lines
7.6 KiB
TypeScript

/**
* LiveKit client wrapper for Synops.
*
* Handles room connection, participant tracking, and Web Audio routing.
* LiveKit's auto-attach of <audio> elements is disabled — all audio is
* routed through the Web Audio API so the mixer (Fase 16) can process it.
*/
import {
Room,
RoomEvent,
Track,
ConnectionState,
type RemoteTrack,
type RemoteTrackPublication,
type RemoteParticipant,
type Participant,
} from 'livekit-client';
// ─── Types ──────────────────────────────────────────────────────────────────
/**
 * UI-facing snapshot of one participant (local or remote).
 * Rebuilt from the Room on every relevant LiveKit event.
 */
export interface LiveKitParticipant {
  /** Unique LiveKit identity string for this participant. */
  identity: string;
  /** Display name; falls back to the identity when no name is set. */
  displayName: string;
  /** True while LiveKit reports this participant as actively speaking. */
  isSpeaking: boolean;
  /** Most recent audio level as reported by LiveKit. */
  audioLevel: number;
  /** Local: mic disabled. Remote: first audio publication muted (or none published). */
  isMuted: boolean;
}
/** Room connection lifecycle, mirrored from LiveKit connection events. */
export type RoomStatus = 'disconnected' | 'connecting' | 'connected' | 'reconnecting';
// ─── State ──────────────────────────────────────────────────────────────────
/** The active LiveKit room, or null when disconnected. */
let room: Room | null = null;
/** Shared AudioContext for all remote audio; created lazily, recreated if closed. */
let audioContext: AudioContext | null = null;
/** Map from participant identity → their Web Audio source node */
const audioSources = new Map<string, MediaStreamAudioSourceNode>();
/** Map from participant identity → their GainNode (for future mixer control) */
const gainNodes = new Map<string, GainNode>();
// Reactive state via callbacks: every listener is invoked (no args) on each change.
type StateListener = () => void;
const listeners = new Set<StateListener>();
// Mirrors of room state, exposed read-only through the getters below.
let _status: RoomStatus = 'disconnected';
let _participants: LiveKitParticipant[] = [];
let _localIdentity: string = '';
/** Current room connection status. */
export function getStatus(): RoomStatus {
  return _status;
}

/** Latest participant snapshot (local participant first). */
export function getParticipants(): LiveKitParticipant[] {
  return _participants;
}

/** Identity of the local participant, or '' when not connected. */
export function getLocalIdentity(): string {
  return _localIdentity;
}

/**
 * Register a listener called on every state change.
 *
 * @param fn listener invoked with no arguments after each update.
 * @returns an unsubscribe function that removes the listener.
 */
export function subscribe(fn: StateListener): () => void {
  listeners.add(fn);
  return () => {
    listeners.delete(fn);
  };
}
/** Invoke every registered state listener. */
function notify() {
  listeners.forEach((fn) => fn());
}

/** Update the connection status and broadcast the change. */
function setStatus(s: RoomStatus) {
  _status = s;
  notify();
}
// ─── Participant tracking ───────────────────────────────────────────────────
/**
 * Build a fresh snapshot of everyone in the room.
 *
 * The local participant is always first; remote participants follow in the
 * room map's iteration order. Returns an empty list when not in a room.
 */
function buildParticipantList(): LiveKitParticipant[] {
  if (!room) return [];

  const local = room.localParticipant;
  const localEntry: LiveKitParticipant = {
    identity: local.identity,
    displayName: local.name || local.identity,
    isSpeaking: local.isSpeaking,
    audioLevel: local.audioLevel,
    // Local mute state comes from the mic toggle, not track publications.
    isMuted: !local.isMicrophoneEnabled,
  };

  const remoteEntries: LiveKitParticipant[] = [...room.remoteParticipants.values()].map((p) => ({
    identity: p.identity,
    displayName: p.name || p.identity,
    isSpeaking: p.isSpeaking,
    audioLevel: p.audioLevel,
    isMuted: isParticipantMuted(p),
  }));

  return [localEntry, ...remoteEntries];
}
/**
 * Whether a remote participant should be shown as muted.
 *
 * Uses the mute flag of the first audio publication found; a participant
 * with no audio publication at all is treated as muted.
 */
function isParticipantMuted(p: RemoteParticipant): boolean {
  for (const pub of p.trackPublications.values()) {
    if (pub.kind === Track.Kind.Audio) return pub.isMuted;
  }
  return true;
}
/** Rebuild the participant snapshot and notify all subscribers. */
function refreshParticipants() {
  _participants = buildParticipantList();
  notify();
}
// ─── Web Audio routing ─────────────────────────────────────────────────────
/**
 * Lazily create (or re-create) the shared AudioContext.
 *
 * Browsers start AudioContexts in the 'suspended' state when they are not
 * created inside a user gesture (autoplay policy). The original code never
 * resumed, which can leave all routed audio silent — so resume() is
 * requested here whenever the context is suspended.
 */
function ensureAudioContext(): AudioContext {
  if (!audioContext || audioContext.state === 'closed') {
    audioContext = new AudioContext();
  }
  if (audioContext.state === 'suspended') {
    // Fire-and-forget: the browser may defer this until a user gesture.
    void audioContext.resume();
  }
  return audioContext;
}
/**
* Route a remote participant's audio track through Web Audio API
* instead of letting LiveKit auto-attach an <audio> element.
*/
/**
 * Route a remote participant's audio track through the Web Audio API
 * instead of attaching an <audio> element.
 *
 * Graph: MediaStreamSource → GainNode (per participant) → destination.
 * The GainNode is stored in `gainNodes` so the future mixer can adjust it.
 */
function attachTrackToWebAudio(track: RemoteTrack, participant: RemoteParticipant) {
  if (track.kind !== Track.Kind.Audio) return;

  // track.mediaStream is only populated once the track has been attached to
  // an element. Since auto-attach is not used here, it may be undefined —
  // the original code then silently dropped the audio. Fall back to wrapping
  // the raw MediaStreamTrack ourselves.
  const mediaStream = track.mediaStream ?? new MediaStream([track.mediaStreamTrack]);

  const ctx = ensureAudioContext();

  // Replace any previous routing for this participant (e.g. re-subscribe).
  detachParticipantAudio(participant.identity);

  const source = ctx.createMediaStreamSource(mediaStream);
  const gain = ctx.createGain();
  gain.gain.value = 1.0;
  source.connect(gain);
  gain.connect(ctx.destination);

  audioSources.set(participant.identity, source);
  gainNodes.set(participant.identity, gain);
}
/**
 * Tear down the Web Audio routing for one participant, if any exists.
 * Safe to call for identities that were never attached.
 */
function detachParticipantAudio(identity: string) {
  audioSources.get(identity)?.disconnect();
  audioSources.delete(identity);

  gainNodes.get(identity)?.disconnect();
  gainNodes.delete(identity);
}
// ─── Room connection ────────────────────────────────────────────────────────
/**
 * Connect to a LiveKit room and enable the local microphone.
 *
 * Any existing room is disconnected first. Event handlers are registered
 * before connecting so early events are not missed. On failure the module
 * state is rolled back to 'disconnected' and the error is rethrown — the
 * original left `room` set and the status stuck at 'connecting'.
 *
 * @param wsUrl LiveKit server websocket URL.
 * @param token Room access token.
 * @throws rethrows any connection or microphone error after cleanup.
 */
export async function connect(wsUrl: string, token: string): Promise<void> {
  if (room) {
    await disconnect();
  }
  const newRoom = new Room({
    // NOTE: these are capture constraints for the local microphone only.
    // Remote audio is routed manually through the Web Audio API (see
    // attachTrackToWebAudio) instead of attaching <audio> elements.
    audioCaptureDefaults: {
      autoGainControl: true,
      echoCancellation: true,
      noiseSuppression: true,
    },
    adaptiveStream: true,
    dynacast: true,
  });
  // Set up event handlers before connecting
  newRoom
    .on(RoomEvent.Connected, () => {
      _localIdentity = newRoom.localParticipant.identity;
      setStatus('connected');
      refreshParticipants();
    })
    .on(RoomEvent.Reconnecting, () => {
      setStatus('reconnecting');
    })
    .on(RoomEvent.Reconnected, () => {
      setStatus('connected');
      refreshParticipants();
    })
    .on(RoomEvent.Disconnected, () => {
      cleanupAudio();
      setStatus('disconnected');
      _participants = [];
      notify();
    })
    .on(RoomEvent.ParticipantConnected, () => {
      refreshParticipants();
    })
    .on(RoomEvent.ParticipantDisconnected, (participant: RemoteParticipant) => {
      detachParticipantAudio(participant.identity);
      refreshParticipants();
    })
    .on(RoomEvent.TrackSubscribed, (track: RemoteTrack, _pub: RemoteTrackPublication, participant: RemoteParticipant) => {
      attachTrackToWebAudio(track, participant);
      refreshParticipants();
    })
    .on(RoomEvent.TrackUnsubscribed, (_track: RemoteTrack, _pub: RemoteTrackPublication, participant: RemoteParticipant) => {
      detachParticipantAudio(participant.identity);
      refreshParticipants();
    })
    .on(RoomEvent.TrackMuted, () => {
      refreshParticipants();
    })
    .on(RoomEvent.TrackUnmuted, () => {
      refreshParticipants();
    })
    .on(RoomEvent.ActiveSpeakersChanged, () => {
      refreshParticipants();
    });
  room = newRoom;
  setStatus('connecting');
  try {
    await newRoom.connect(wsUrl, token, {
      autoSubscribe: true,
    });
    // Enable microphone after connecting
    await newRoom.localParticipant.setMicrophoneEnabled(true);
  } catch (err) {
    // Roll back so the UI is not stuck in 'connecting' with a dead room.
    room = null;
    // Best-effort teardown of a partially-established connection.
    await newRoom.disconnect().catch(() => {});
    cleanupAudio();
    setStatus('disconnected');
    throw err;
  }
  refreshParticipants();
}
/**
 * Leave the current room (if any) and reset all module state.
 * No-op when not connected.
 */
export async function disconnect(): Promise<void> {
  const current = room;
  if (!current) return;

  cleanupAudio();
  await current.disconnect();

  room = null;
  _status = 'disconnected';
  _participants = [];
  _localIdentity = '';
  notify();
}
function cleanupAudio() {
for (const [identity] of audioSources) {
detachParticipantAudio(identity);
}
if (audioContext && audioContext.state !== 'closed') {
audioContext.close();
audioContext = null;
}
}
/** Toggle local microphone mute */
/**
 * Toggle the local microphone on/off.
 *
 * @returns the NEW enabled state (true = mic on), or false when not in a room.
 */
export async function toggleMute(): Promise<boolean> {
  if (!room) return false;
  const next = !room.localParticipant.isMicrophoneEnabled;
  await room.localParticipant.setMicrophoneEnabled(next);
  refreshParticipants();
  return next;
}
/** Get the GainNode for a participant (for future mixer integration) */
/**
 * Get the GainNode for a participant (for future mixer integration).
 *
 * @returns the node, or undefined when no audio is routed for this identity.
 */
export function getParticipantGain(identity: string): GainNode | undefined {
  return gainNodes.get(identity);
}
/** True when a room exists and its connection state is Connected. */
export function isConnected(): boolean {
  return room !== null && room.state === ConnectionState.Connected;
}