/**
 * RecorderService - Handles all MediaRecorder, audio monitoring, and video export logic
 */

export interface IRecorderEvents {
  onDurationUpdate?: (duration: number) => void;
  onRecordingComplete?: (blob: Blob) => void;
  onAudioLevelUpdate?: (level: number) => void;
  onError?: (error: Error) => void;
  onStreamEnded?: () => void;
}

export interface IRecordingOptions {
  mode: 'viewport' | 'screen';
  audioDeviceId?: string;
  viewportElement?: HTMLElement;
}

export class RecorderService {
  // Recording state
  private mediaRecorder: MediaRecorder | null = null;
  private recordedChunks: Blob[] = [];
  private durationInterval: number | null = null;
  private _duration: number = 0;
  private _recordedBlob: Blob | null = null;
  private _isRecording: boolean = false;

  // Audio monitoring state
  private audioContext: AudioContext | null = null;
  private audioAnalyser: AnalyserNode | null = null;
  private audioMonitoringInterval: number | null = null;
  private monitoringStream: MediaStream | null = null;

  // Current recording stream
  private currentStream: MediaStream | null = null;

  // Event callbacks
  private events: IRecorderEvents = {};

  constructor(events?: IRecorderEvents) {
    if (events) {
      this.events = events;
    }
  }

  // Public getters
  get isRecording(): boolean {
    return this._isRecording;
  }

  get duration(): number {
    return this._duration;
  }

  get recordedBlob(): Blob | null {
    return this._recordedBlob;
  }

  // Update event callbacks
  setEvents(events: IRecorderEvents): void {
    this.events = { ...this.events, ...events };
  }

  // ==================== Microphone Management ====================

  async loadMicrophones(requestPermission: boolean = false): Promise<MediaDeviceInfo[]> {
    try {
      if (requestPermission) {
        // Request permission by getting a temporary stream
        const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
        stream.getTracks().forEach(track => track.stop());
      }

      const devices = await navigator.mediaDevices.enumerateDevices();
      return devices.filter(d => d.kind === 'audioinput');
    } catch (error) {
      console.error('Error loading microphones:', error);
      return [];
    }
  }

  async startAudioMonitoring(deviceId: string): Promise<void> {
    this.stopAudioMonitoring();

    if (!deviceId) return;

    try {
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: { deviceId: { exact: deviceId } }
      });

      this.monitoringStream = stream;
      this.audioContext = new AudioContext();
      const source = this.audioContext.createMediaStreamSource(stream);
      this.audioAnalyser = this.audioContext.createAnalyser();
      this.audioAnalyser.fftSize = 256;
      source.connect(this.audioAnalyser);

      const dataArray = new Uint8Array(this.audioAnalyser.frequencyBinCount);

      this.audioMonitoringInterval = window.setInterval(() => {
        if (this.audioAnalyser) {
          this.audioAnalyser.getByteFrequencyData(dataArray);
          const average = dataArray.reduce((a, b) => a + b) / dataArray.length;
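          // Bytes are 0-255; dividing by 128 treats half amplitude as full scale,
          // which keeps typical speech visible on the 0-100 meter.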
          const level = Math.min(100, (average / 128) * 100);
          this.events.onAudioLevelUpdate?.(level);
        }
      }, 50);
    } catch (error) {
      console.error('Error starting audio monitoring:', error);
      this.events.onAudioLevelUpdate?.(0);
    }
  }

  stopAudioMonitoring(): void {
    if (this.audioMonitoringInterval) {
      clearInterval(this.audioMonitoringInterval);
      this.audioMonitoringInterval = null;
    }
    if (this.audioContext) {
      this.audioContext.close();
      this.audioContext = null;
    }
    if (this.monitoringStream) {
      this.monitoringStream.getTracks().forEach(track => track.stop());
      this.monitoringStream = null;
    }
    this.audioAnalyser = null;
  }

  // ==================== Recording Control ====================

  async startRecording(options: IRecordingOptions): Promise<void> {
    try {
      // Stop audio monitoring before recording
      this.stopAudioMonitoring();

      // Get video stream based on mode
      const displayMediaOptions: DisplayMediaStreamOptions = {
        video: {
          displaySurface: options.mode === 'viewport' ? 'browser' : 'monitor'
        } as MediaTrackConstraints,
        audio: false
      };

      // Prefer offering the current tab in viewport mode. preferCurrentTab is a
      // non-standard Chromium hint (absent from the TS lib typings, hence the cast).
      if (options.mode === 'viewport') {
        (displayMediaOptions as any).preferCurrentTab = true;
      }

      const videoStream = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions);

      // In viewport mode, try to crop the capture to the viewport element using the
      // Region Capture API (CropTarget / cropTo)
      if (options.mode === 'viewport' && options.viewportElement) {
        try {
          if ('CropTarget' in window) {
            const cropTarget = await (window as any).CropTarget.fromElement(options.viewportElement);
            const [videoTrack] = videoStream.getVideoTracks();
            await (videoTrack as any).cropTo(cropTarget);
          }
        } catch (e) {
          console.warn('Region Capture not supported, recording full tab:', e);
        }
      }

      // Combine video with audio if enabled
      let combinedStream = videoStream;
      if (options.audioDeviceId) {
        try {
          const audioStream = await navigator.mediaDevices.getUserMedia({
            audio: { deviceId: { exact: options.audioDeviceId } }
          });
          combinedStream = new MediaStream([
            ...videoStream.getVideoTracks(),
            ...audioStream.getAudioTracks()
          ]);
        } catch (audioError) {
          console.warn('Could not add audio:', audioError);
        }
      }

      // Store stream for cleanup
      this.currentStream = combinedStream;

      // Create MediaRecorder
      const mimeType = MediaRecorder.isTypeSupported('video/webm;codecs=vp9')
        ? 'video/webm;codecs=vp9'
        : 'video/webm';

      this.mediaRecorder = new MediaRecorder(combinedStream, { mimeType });
      this.recordedChunks = [];

      this.mediaRecorder.ondataavailable = (e) => {
        if (e.data.size > 0) {
          this.recordedChunks.push(e.data);
        }
      };

      this.mediaRecorder.onstop = () => this.handleRecordingComplete();

      // Handle stream ending (user clicks "Stop sharing")
      videoStream.getVideoTracks()[0].onended = () => {
        if (this._isRecording) {
          this.stopRecording();
          this.events.onStreamEnded?.();
        }
      };

      this.mediaRecorder.start(1000); // Capture in 1-second chunks
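      // The 1000 ms timeslice makes ondataavailable fire roughly once per second,
      // so chunks accumulate during recording instead of arriving as one blob at stop().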

      // Start duration timer
      this._duration = 0;
      this.durationInterval = window.setInterval(() => {
        this._duration++;
        this.events.onDurationUpdate?.(this._duration);
      }, 1000);

      this._isRecording = true;
    } catch (error) {
      console.error('Error starting recording:', error);
      this._isRecording = false;
      this.events.onError?.(error as Error);
      throw error;
    }
  }

  stopRecording(): void {
    if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
      this.mediaRecorder.stop();
    }

    if (this.durationInterval) {
      clearInterval(this.durationInterval);
      this.durationInterval = null;
    }
  }

  private handleRecordingComplete(): void {
    // Create blob from recorded chunks
    this._recordedBlob = new Blob(this.recordedChunks, { type: 'video/webm' });

    // Stop all tracks
    if (this.currentStream) {
      this.currentStream.getTracks().forEach(track => track.stop());
      this.currentStream = null;
    }

    this._isRecording = false;
    this.events.onRecordingComplete?.(this._recordedBlob);
  }

  // ==================== Trim & Export ====================
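
  // Trimming works by replaying the recorded clip between trimStart and trimEnd,
  // painting each frame onto a canvas and re-recording canvas video plus the
  // element's audio with a second MediaRecorder. This re-encodes the clip (lossy),
  // but requires no server-side or wasm encoder.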

  async exportTrimmedVideo(
    videoElement: HTMLVideoElement,
    trimStart: number,
    trimEnd: number
  ): Promise<Blob> {
    return new Promise((resolve, reject) => {
      // Create a canvas for capturing frames
      const canvas = document.createElement('canvas');
      canvas.width = videoElement.videoWidth || 1280;
      canvas.height = videoElement.videoHeight || 720;
      const ctx = canvas.getContext('2d');

      if (!ctx) {
        reject(new Error('Could not get canvas context'));
        return;
      }

      // Create canvas stream for video
      const canvasStream = canvas.captureStream(30);
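      // The frameRate argument (30) is an upper bound: frames are only captured
      // when the canvas actually repaints.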

      // Try to capture audio from video element
      let combinedStream: MediaStream;

      try {
        // Create audio context to capture video's audio
        const audioCtx = new AudioContext();
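        // Note: createMediaElementSource can only be attached once per media element;
        // a second export from the same element throws here, and the catch below
        // falls back to a video-only export.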
        const source = audioCtx.createMediaElementSource(videoElement);
        const destination = audioCtx.createMediaStreamDestination();
        source.connect(destination);
        source.connect(audioCtx.destination); // Also play through speakers

        // Combine video (from canvas) and audio (from video element)
        combinedStream = new MediaStream([
          ...canvasStream.getVideoTracks(),
          ...destination.stream.getAudioTracks()
        ]);

        // Store audioCtx for cleanup
        const cleanup = () => {
          audioCtx.close();
        };

        this.recordTrimmedStream(videoElement, canvas, ctx, combinedStream, trimStart, trimEnd, cleanup, resolve, reject);
      } catch (audioError) {
        console.warn('Could not capture audio, recording video only:', audioError);
        combinedStream = canvasStream;
        this.recordTrimmedStream(videoElement, canvas, ctx, combinedStream, trimStart, trimEnd, () => {}, resolve, reject);
      }
    });
  }

  private recordTrimmedStream(
    video: HTMLVideoElement,
    canvas: HTMLCanvasElement,
    ctx: CanvasRenderingContext2D,
    stream: MediaStream,
    trimStart: number,
    trimEnd: number,
    cleanup: () => void,
    resolve: (blob: Blob) => void,
    reject: (error: Error) => void
  ): void {
    const mimeType = MediaRecorder.isTypeSupported('video/webm;codecs=vp9')
      ? 'video/webm;codecs=vp9'
      : 'video/webm';

    const recorder = new MediaRecorder(stream, { mimeType });
    const chunks: Blob[] = [];

    recorder.ondataavailable = (e) => {
      if (e.data.size > 0) {
        chunks.push(e.data);
      }
    };

    recorder.onstop = () => {
      cleanup();
      resolve(new Blob(chunks, { type: 'video/webm' }));
    };

    recorder.onerror = (e) => {
      cleanup();
      // The raw event stringifies as "[object Event]"; report the underlying error instead
      reject(new Error('Recording error: ' + ((e as any).error ?? e.type)));
    };

    // Seek to trim start
    video.currentTime = trimStart;

    video.onseeked = () => {
      // Start recording
      recorder.start(100);

      // Start playing
      video.play();

      // Draw frames to canvas
      const drawFrame = () => {
        if (video.currentTime >= trimEnd || video.paused || video.ended) {
          video.pause();
          video.onseeked = null;

          // Give a small delay before stopping to ensure last frame is captured
          setTimeout(() => {
            if (recorder.state === 'recording') {
              recorder.stop();
            }
          }, 100);
          return;
        }

        ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
        requestAnimationFrame(drawFrame);
      };

      drawFrame();
    };
  }

  // ==================== Cleanup ====================

  reset(): void {
    this._recordedBlob = null;
    this.recordedChunks = [];
    this._duration = 0;
    this._isRecording = false;
  }

  dispose(): void {
    this.stopRecording();
    this.stopAudioMonitoring();
    this.reset();

    if (this.currentStream) {
      this.currentStream.getTracks().forEach(track => track.stop());
      this.currentStream = null;
    }
  }
}
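
/*
 * Usage sketch (illustrative only; the element id and UI wiring are assumptions,
 * not part of this module):
 *
 *   const recorder = new RecorderService({
 *     onDurationUpdate: (s) => console.log(`recording: ${s}s`),
 *     onRecordingComplete: (blob) => {
 *       const url = URL.createObjectURL(blob); // e.g. preview in a <video> element
 *     },
 *     onStreamEnded: () => console.log('user stopped sharing'),
 *   });
 *
 *   await recorder.startRecording({
 *     mode: 'viewport',
 *     viewportElement: document.getElementById('viewport')!, // hypothetical element
 *   });
 *   // ...later
 *   recorder.stopRecording();
 */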