Files
siprouter/ts_web/state/webrtc-client.ts
Juergen Kunz f3e1c96872 initial commit — SIP B2BUA + WebRTC bridge with Rust codec engine
Full-featured SIP router with multi-provider trunking, browser softphone
via WebRTC, real-time Opus/G.722/PCM transcoding in Rust, RNNoise ML
noise suppression, Kokoro neural TTS announcements, and a Lit-based
web dashboard with live call monitoring and REST API.
2026-04-09 23:03:55 +00:00

249 lines
7.2 KiB
TypeScript

/**
* Browser-side WebRTC client — manages audio capture, playback, and peer connection.
*/
/**
 * Audio hardware discovered via getAudioDevices().
 * Lists may be empty if microphone permission was denied or media devices
 * are unavailable.
 */
export interface IAudioDevices {
// Capture devices (microphones): entries with kind === 'audioinput'.
inputs: MediaDeviceInfo[];
// Playback devices (speakers/headsets): entries with kind === 'audiooutput'.
outputs: MediaDeviceInfo[];
}
/**
 * Browser-side WebRTC client — manages microphone capture, remote audio
 * playback, and the RTCPeerConnection that carries audio to the SIP bridge.
 *
 * Signaling travels over an externally-owned WebSocket (injected via
 * setWebSocket); this class only produces and consumes the `webrtc-*`
 * signaling messages, tagged with a per-instance session id.
 */
export class WebRtcClient {
  private pc: RTCPeerConnection | null = null;
  private localStream: MediaStream | null = null;
  private remoteAudio: HTMLAudioElement | null = null;
  private ws: WebSocket | null = null;
  private sessionId: string;
  private onStateChange: (state: string) => void;
  // Audio analysis for level meters.
  private localAnalyser: AnalyserNode | null = null;
  private remoteAnalyser: AnalyserNode | null = null;
  private audioCtx: AudioContext | null = null;
  // Device selection ('' = browser default device).
  private selectedInputId: string = '';
  private selectedOutputId: string = '';

  /** Coarse call lifecycle, surfaced to the UI through onStateChange. */
  state: 'idle' | 'requesting-mic' | 'connecting' | 'connected' | 'error' = 'idle';

  /**
   * @param onStateChange Invoked on every setState call, including
   *   transitions to the same state.
   */
  constructor(onStateChange: (state: string) => void) {
    // Random suffix + timestamp lets the server multiplex several browser
    // sessions over a single signaling socket.
    this.sessionId = `web-${Math.random().toString(36).slice(2, 10)}-${Date.now().toString(36)}`;
    this.onStateChange = onStateChange;
  }

  /** Attach the signaling WebSocket (owned and reconnected by the caller). */
  setWebSocket(ws: WebSocket): void {
    this.ws = ws;
  }

  /** Select the microphone to use on the NEXT startCall(). */
  setInputDevice(deviceId: string): void {
    this.selectedInputId = deviceId;
  }

  /** Select the speaker; applied immediately if remote audio is playing. */
  setOutputDevice(deviceId: string): void {
    this.selectedOutputId = deviceId;
    if (this.remoteAudio && 'setSinkId' in this.remoteAudio) {
      // setSinkId is not yet in all lib.dom typings; best-effort, ignore failure.
      (this.remoteAudio as any).setSinkId(deviceId).catch(() => {});
    }
  }

  /**
   * Dispatch one incoming signaling message. Messages carrying a different
   * sessionId are ignored so multiple clients can share one socket.
   */
  handleSignaling(msg: { type: string; sessionId?: string; sdp?: string; candidate?: any; error?: string }): void {
    if (msg.sessionId !== this.sessionId) return;
    if (msg.type === 'webrtc-answer' && msg.sdp) {
      // Fire-and-forget, but trap rejection: setRemoteDescription rejects on
      // malformed SDP, and an unhandled rejection would hide the failure
      // from the UI entirely.
      void this.handleAnswer(msg.sdp).catch((err) => {
        console.error('[webrtc] failed to apply answer:', err);
        this.setState('error');
      });
    } else if (msg.type === 'webrtc-ice' && msg.candidate) {
      void this.handleRemoteIce(msg.candidate); // errors logged internally
    } else if (msg.type === 'webrtc-error') {
      this.setState('error');
      console.error('[webrtc] server error:', msg.error);
    }
  }

  /**
   * Capture the microphone, build the peer connection, and send an SDP offer
   * to the server. State transitions: requesting-mic → connecting →
   * connected, or → error on mic denial / offer failure.
   */
  async startCall(): Promise<void> {
    // Re-entry guard: tear down any previous session so a double-click can
    // never leak a live mic track, AudioContext, or RTCPeerConnection.
    if (this.pc || this.localStream) this.cleanup();
    this.setState('requesting-mic');
    const audioConstraints: MediaTrackConstraints = {
      echoCancellation: true,
      noiseSuppression: true,
      autoGainControl: true,
    };
    if (this.selectedInputId) {
      audioConstraints.deviceId = { exact: this.selectedInputId };
    }
    try {
      this.localStream = await navigator.mediaDevices.getUserMedia({
        audio: audioConstraints,
        video: false,
      });
    } catch (err) {
      console.error('[webrtc] mic access denied:', err);
      this.setState('error');
      return;
    }
    this.setState('connecting');
    // Set up AudioContext for level meters.
    this.audioCtx = new AudioContext();
    const localSource = this.audioCtx.createMediaStreamSource(this.localStream);
    this.localAnalyser = this.audioCtx.createAnalyser();
    this.localAnalyser.fftSize = 256;
    localSource.connect(this.localAnalyser);
    this.pc = new RTCPeerConnection({
      iceServers: [{ urls: 'stun:stun.l.google.com:19302' }],
    });
    // Add local audio track.
    for (const track of this.localStream.getTracks()) {
      this.pc.addTrack(track, this.localStream);
    }
    // Handle remote audio (incoming from SIP provider via proxy).
    this.pc.ontrack = (event) => {
      console.log('[webrtc] ontrack fired, streams:', event.streams.length);
      // ontrack can fire more than once; detach any prior element so a stale
      // <audio> does not keep playing alongside the new one.
      if (this.remoteAudio) {
        this.remoteAudio.pause();
        this.remoteAudio.srcObject = null;
      }
      this.remoteAudio = new Audio();
      this.remoteAudio.autoplay = true;
      this.remoteAudio.srcObject = event.streams[0] || new MediaStream([event.track]);
      // Route to selected output device.
      if (this.selectedOutputId && 'setSinkId' in this.remoteAudio) {
        (this.remoteAudio as any).setSinkId(this.selectedOutputId).catch(() => {});
      }
      this.remoteAudio.play().catch((e) => console.warn('[webrtc] autoplay blocked:', e));
      // Set up remote audio analyser for level meter.
      if (this.audioCtx && event.streams[0]) {
        const remoteSource = this.audioCtx.createMediaStreamSource(event.streams[0]);
        this.remoteAnalyser = this.audioCtx.createAnalyser();
        this.remoteAnalyser.fftSize = 256;
        remoteSource.connect(this.remoteAnalyser);
      }
    };
    // Send ICE candidates to server (trickle ICE).
    this.pc.onicecandidate = (event) => {
      if (event.candidate) {
        this.wsSend({
          type: 'webrtc-ice',
          sessionId: this.sessionId,
          candidate: event.candidate.toJSON(),
        });
      }
    };
    this.pc.onconnectionstatechange = () => {
      if (this.pc?.connectionState === 'connected') {
        this.setState('connected');
      } else if (this.pc?.connectionState === 'failed') {
        this.setState('error');
      }
    };
    // Create offer and send to server. createOffer/setLocalDescription can
    // reject; previously such a rejection escaped startCall unhandled and
    // left a half-initialized session behind.
    try {
      const offer = await this.pc.createOffer();
      await this.pc.setLocalDescription(offer);
      this.wsSend({
        type: 'webrtc-offer',
        sessionId: this.sessionId,
        sdp: offer.sdp,
      });
    } catch (err) {
      console.error('[webrtc] offer failed:', err);
      this.cleanup();
      this.setState('error');
    }
  }

  /** Get current mic input level (0-1 RMS). */
  getLocalLevel(): number {
    return this.getLevel(this.localAnalyser);
  }

  /** Get current remote audio level (0-1 RMS). */
  getRemoteLevel(): number {
    return this.getLevel(this.remoteAnalyser);
  }

  /** RMS of the analyser's time-domain samples; 0 when no analyser exists. */
  private getLevel(analyser: AnalyserNode | null): number {
    if (!analyser) return 0;
    const data = new Uint8Array(analyser.frequencyBinCount);
    analyser.getByteTimeDomainData(data);
    let sum = 0;
    for (let i = 0; i < data.length; i++) {
      const v = (data[i] - 128) / 128; // center unsigned byte around zero
      sum += v * v;
    }
    return Math.sqrt(sum / data.length);
  }

  /** Notify the server, then release all local media/network resources. */
  hangup(): void {
    this.wsSend({
      type: 'webrtc-hangup',
      sessionId: this.sessionId,
    });
    this.cleanup();
    this.setState('idle');
  }

  /** Apply the server's SDP answer to the peer connection. */
  private async handleAnswer(sdp: string): Promise<void> {
    if (!this.pc) return;
    await this.pc.setRemoteDescription({ type: 'answer', sdp });
  }

  /** Add a trickled ICE candidate; log (don't throw) on failure. */
  private async handleRemoteIce(candidate: RTCIceCandidateInit): Promise<void> {
    if (!this.pc) return;
    try {
      await this.pc.addIceCandidate(candidate);
    } catch (err) {
      console.error('[webrtc] ice error:', err);
    }
  }

  /** Record the new state and notify the UI (always fires the callback). */
  private setState(state: typeof this.state): void {
    this.state = state;
    this.onStateChange(state);
  }

  /** Stop tracks, detach handlers, and close audio context + peer connection. */
  private cleanup(): void {
    this.localStream?.getTracks().forEach((t) => t.stop());
    this.localStream = null;
    this.localAnalyser = null;
    this.remoteAnalyser = null;
    if (this.remoteAudio) {
      this.remoteAudio.pause();
      this.remoteAudio.srcObject = null;
      this.remoteAudio = null;
    }
    this.audioCtx?.close().catch(() => {});
    this.audioCtx = null;
    if (this.pc) {
      // Detach callbacks before closing so late ICE/track/state events cannot
      // fire into a torn-down client.
      this.pc.ontrack = null;
      this.pc.onicecandidate = null;
      this.pc.onconnectionstatechange = null;
      this.pc.close();
      this.pc = null;
    }
  }

  /** Send a JSON signaling message if the socket is open; drop otherwise. */
  private wsSend(data: unknown): void {
    if (this.ws?.readyState === WebSocket.OPEN) {
      this.ws.send(JSON.stringify(data));
    }
  }

  /** Stable per-client session id used to tag all signaling messages. */
  get id(): string {
    return this.sessionId;
  }
}
/**
 * Enumerate the user's audio input/output devices.
 *
 * Briefly opens (and immediately releases) the microphone first, because
 * browsers only expose human-readable device labels after a successful
 * getUserMedia call. Any failure — permission denied, no media support —
 * yields empty lists rather than throwing.
 */
export async function getAudioDevices(): Promise<IAudioDevices> {
  try {
    // Prime permissions so enumerateDevices() returns labeled entries.
    const probe = await navigator.mediaDevices.getUserMedia({ audio: true });
    for (const track of probe.getTracks()) {
      track.stop();
    }
    const inputs: MediaDeviceInfo[] = [];
    const outputs: MediaDeviceInfo[] = [];
    for (const device of await navigator.mediaDevices.enumerateDevices()) {
      if (device.kind === 'audioinput') {
        inputs.push(device);
      } else if (device.kind === 'audiooutput') {
        outputs.push(device);
      }
    }
    return { inputs, outputs };
  } catch {
    return { inputs: [], outputs: [] };
  }
}