This commit is contained in:
2025-12-11 12:06:18 +00:00
parent 7c8c194fd8
commit 6cbfd714eb
7 changed files with 1516 additions and 761 deletions

View File

@@ -28,7 +28,7 @@
"@git.zone/tsbundle": "^2.6.3",
"@git.zone/tsrun": "^2.0.0",
"@git.zone/tstest": "^3.1.3",
"@git.zone/tswatch": "^2.3.9",
"@git.zone/tswatch": "^2.3.10",
"@push.rocks/projectinfo": "^5.0.2",
"@types/node": "^25.0.0"
},

24
pnpm-lock.yaml generated
View File

@@ -37,8 +37,8 @@ importers:
specifier: ^3.1.3
version: 3.1.3(@push.rocks/smartserve@1.4.0)(socks@2.8.7)(typescript@5.9.3)
'@git.zone/tswatch':
specifier: ^2.3.9
version: 2.3.9(@tiptap/pm@2.27.1)
specifier: ^2.3.10
version: 2.3.10(@tiptap/pm@2.27.1)
'@push.rocks/projectinfo':
specifier: ^5.0.2
version: 5.0.2
@@ -489,8 +489,8 @@ packages:
resolution: {integrity: sha512-t+/cKV21JHK8X7NGAmihs5M/eMm+V+jn4R5rzfwGG97WJFAcP5qE1Os9VYtyZw3tx/NZXA2yA4abo/ELluTuRA==}
hasBin: true
'@git.zone/tswatch@2.3.9':
resolution: {integrity: sha512-lm3rwkeLXrT8arsQYTTnLSobyXYio+Q70vciBTflpf2Sf4I9fd4QH/89EmKSLysJso2Gnrz63brLzTYCtbdlQQ==}
'@git.zone/tswatch@2.3.10':
resolution: {integrity: sha512-88bdzD15mYoG0T0AUTg8ATNkV/dN5ecqfiYcQRX1gJHmLrE2yqymFGkb0W0/xWgpcRakc08V+wRbSI7pqg+EOQ==}
hasBin: true
'@happy-dom/global-registrator@15.11.7':
@@ -912,6 +912,10 @@ packages:
resolution: {integrity: sha512-M7rMLdcO423JIF7PbMnqy730h4seAx8lXkP3d7yGhIXep2jizPP+KlkdbdkBdaVp7YupcFZiTnu2HY66SKVtpQ==}
engines: {node: '>=20.0.0'}
'@push.rocks/smartwatch@6.2.4':
resolution: {integrity: sha512-cxGx/RJXSU45cfyJn0DNgXA1jPwmzraJhy+8J8hL2Bjn0K+DxatQRyeIvRVCSLLgBhVTN6yYaUjUtjs19gJLkA==}
engines: {node: '>=20.0.0'}
'@push.rocks/smartxml@2.0.0':
resolution: {integrity: sha512-1d06zYJX4Zt8s5w5qFOUg2LAEz9ykrh9d6CQPK4WAgOBIefb1xzVEWHc7yoxicc2OkzNgC3IBCEg3s6BncZKWw==}
@@ -4803,7 +4807,7 @@ snapshots:
- utf-8-validate
- vue
'@git.zone/tswatch@2.3.9(@tiptap/pm@2.27.1)':
'@git.zone/tswatch@2.3.10(@tiptap/pm@2.27.1)':
dependencies:
'@api.global/typedserver': 7.11.1(@tiptap/pm@2.27.1)
'@git.zone/tsbundle': 2.6.3
@@ -4816,7 +4820,7 @@ snapshots:
'@push.rocks/smartlog': 3.1.10
'@push.rocks/smartlog-destination-local': 9.0.2
'@push.rocks/smartshell': 3.3.0
'@push.rocks/smartwatch': 6.2.3
'@push.rocks/smartwatch': 6.2.4
'@push.rocks/taskbuffer': 3.5.0
transitivePeerDependencies:
- '@nuxt/kit'
@@ -5783,6 +5787,14 @@ snapshots:
'@push.rocks/smartrx': 3.0.10
picomatch: 4.0.3
'@push.rocks/smartwatch@6.2.4':
dependencies:
'@push.rocks/lik': 6.2.2
'@push.rocks/smartenv': 6.0.0
'@push.rocks/smartpromise': 4.2.3
'@push.rocks/smartrx': 3.0.10
picomatch: 4.0.3
'@push.rocks/smartxml@2.0.0':
dependencies:
fast-xml-parser: 5.3.2

View File

@@ -1,6 +1,8 @@
import { DeesElement, property, html, customElement, type TemplateResult, state } from '@design.estate/dees-element';
import { WccDashboard } from './wcc-dashboard.js';
import type { TTemplateFactory } from './wcctools.helpers.js';
import './wcc-record-button.js';
import './wcc-recording-panel.js';
export type TPropertyType = 'String' | 'Number' | 'Boolean' | 'Object' | 'Enum' | 'Array';
@@ -48,43 +50,16 @@ export class WccProperties extends DeesElement {
editorError: string;
}> = [];
// Recording state properties
// Recording coordination state
@state()
accessor recordingState: 'idle' | 'options' | 'recording' | 'preview' = 'idle';
accessor showRecordingPanel: boolean = false;
@state()
accessor recordingMode: 'viewport' | 'screen' = 'screen';
@state()
accessor audioEnabled: boolean = false;
@state()
accessor selectedMicrophoneId: string = '';
@state()
accessor availableMicrophones: MediaDeviceInfo[] = [];
@state()
accessor audioLevel: number = 0;
accessor isRecording: boolean = false;
@state()
accessor recordingDuration: number = 0;
@state()
accessor recordedBlob: Blob | null = null;
@state()
accessor previewVideoUrl: string = '';
// Recording private members
private mediaRecorder: MediaRecorder | null = null;
private recordedChunks: Blob[] = [];
private durationInterval: number | null = null;
private audioContext: AudioContext | null = null;
private audioAnalyser: AnalyserNode | null = null;
private audioMonitoringInterval: number | null = null;
private currentStream: MediaStream | null = null;
public editorHeight: number = 300;
public render(): TemplateResult {
@@ -550,360 +525,6 @@ export class WccProperties extends DeesElement {
bottom: 0;
height: 100px;
}
/* Recording styles */
.recordingButton {
display: flex;
align-items: center;
justify-content: center;
background: transparent;
cursor: pointer;
transition: all 0.15s ease;
color: #666;
border-left: 1px solid var(--border);
}
.recordingButton:hover {
background: rgba(239, 68, 68, 0.05);
color: #f87171;
}
.recordingButton.recording {
background: rgba(239, 68, 68, 0.15);
color: #f87171;
}
.recordingButton .rec-icon {
width: 12px;
height: 12px;
border-radius: 50%;
background: currentColor;
}
.recordingButton.recording .rec-icon {
animation: pulse-recording 1s ease-in-out infinite;
}
@keyframes pulse-recording {
0%, 100% { opacity: 1; transform: scale(1); }
50% { opacity: 0.5; transform: scale(0.9); }
}
.recording-timer {
font-family: 'Consolas', 'Monaco', monospace;
font-size: 0.7rem;
margin-left: 0.25rem;
}
/* Recording Options Panel */
.recording-options-panel {
position: fixed;
right: 16px;
bottom: 116px;
width: 360px;
background: #0c0c0c;
border: 1px solid rgba(255, 255, 255, 0.1);
border-radius: var(--radius-md);
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.4);
z-index: 1000;
overflow: hidden;
}
.recording-options-header {
padding: 0.75rem 1rem;
background: rgba(255, 255, 255, 0.02);
border-bottom: 1px solid rgba(255, 255, 255, 0.05);
display: flex;
justify-content: space-between;
align-items: center;
}
.recording-options-title {
font-size: 0.8rem;
font-weight: 500;
color: #ccc;
}
.recording-options-close {
width: 24px;
height: 24px;
background: transparent;
border: none;
color: #666;
cursor: pointer;
display: flex;
align-items: center;
justify-content: center;
border-radius: var(--radius-sm);
transition: all 0.15s ease;
}
.recording-options-close:hover {
background: rgba(255, 255, 255, 0.05);
color: #999;
}
.recording-options-content {
padding: 1rem;
}
.recording-option-group {
margin-bottom: 1rem;
}
.recording-option-group:last-child {
margin-bottom: 0;
}
.recording-option-label {
font-size: 0.7rem;
font-weight: 500;
color: #888;
text-transform: uppercase;
letter-spacing: 0.05em;
margin-bottom: 0.5rem;
}
.recording-mode-buttons {
display: flex;
gap: 0.5rem;
}
.recording-mode-btn {
flex: 1;
padding: 0.6rem 0.75rem;
background: var(--input);
border: 1px solid transparent;
border-radius: var(--radius-sm);
color: #999;
font-size: 0.75rem;
cursor: pointer;
transition: all 0.15s ease;
text-align: center;
}
.recording-mode-btn:hover {
border-color: var(--primary);
color: #ccc;
}
.recording-mode-btn.selected {
background: rgba(59, 130, 246, 0.15);
border-color: var(--primary);
color: var(--primary);
}
.audio-toggle {
display: flex;
align-items: center;
gap: 0.5rem;
margin-bottom: 0.75rem;
}
.audio-toggle input[type="checkbox"] {
width: 1rem;
height: 1rem;
accent-color: var(--primary);
}
.audio-toggle label {
font-size: 0.75rem;
color: #999;
cursor: pointer;
}
.microphone-select {
width: 100%;
padding: 0.5rem 0.75rem;
background: var(--input);
border: 1px solid transparent;
border-radius: var(--radius-sm);
color: var(--foreground);
font-size: 0.75rem;
outline: none;
cursor: pointer;
transition: all 0.15s ease;
}
.microphone-select:focus {
border-color: var(--primary);
}
.microphone-select:disabled {
opacity: 0.5;
cursor: not-allowed;
}
.audio-level-container {
margin-top: 0.75rem;
padding: 0.5rem;
background: rgba(255, 255, 255, 0.02);
border-radius: var(--radius-sm);
}
.audio-level-label {
font-size: 0.65rem;
color: #666;
margin-bottom: 0.25rem;
}
.audio-level-bar {
height: 8px;
background: var(--input);
border-radius: 4px;
overflow: hidden;
}
.audio-level-fill {
height: 100%;
background: linear-gradient(90deg, #22c55e, #84cc16, #eab308);
border-radius: 4px;
transition: width 0.1s ease;
}
.start-recording-btn {
width: 100%;
padding: 0.75rem;
background: #dc2626;
border: none;
border-radius: var(--radius-sm);
color: white;
font-size: 0.8rem;
font-weight: 500;
cursor: pointer;
transition: all 0.15s ease;
margin-top: 1rem;
display: flex;
align-items: center;
justify-content: center;
gap: 0.5rem;
}
.start-recording-btn:hover {
background: #b91c1c;
}
.start-recording-btn .rec-dot {
width: 10px;
height: 10px;
background: white;
border-radius: 50%;
}
/* Preview Modal */
.preview-modal-overlay {
position: fixed;
top: 0;
left: 0;
right: 0;
bottom: 0;
background: rgba(0, 0, 0, 0.8);
display: flex;
align-items: center;
justify-content: center;
z-index: 1000;
backdrop-filter: blur(4px);
}
.preview-modal {
width: 90%;
max-width: 800px;
background: #0c0c0c;
border: 1px solid rgba(255, 255, 255, 0.1);
border-radius: var(--radius-lg);
overflow: hidden;
box-shadow: 0 25px 50px rgba(0, 0, 0, 0.5);
}
.preview-modal-header {
padding: 1rem 1.25rem;
background: rgba(255, 255, 255, 0.02);
border-bottom: 1px solid rgba(255, 255, 255, 0.05);
display: flex;
justify-content: space-between;
align-items: center;
}
.preview-modal-title {
font-size: 0.9rem;
font-weight: 500;
color: #ccc;
}
.preview-modal-close {
width: 28px;
height: 28px;
background: transparent;
border: none;
color: #666;
cursor: pointer;
display: flex;
align-items: center;
justify-content: center;
border-radius: var(--radius-sm);
font-size: 1.2rem;
transition: all 0.15s ease;
}
.preview-modal-close:hover {
background: rgba(255, 255, 255, 0.05);
color: #999;
}
.preview-modal-content {
padding: 1.25rem;
}
.preview-video-container {
background: #000;
border-radius: var(--radius-sm);
overflow: hidden;
aspect-ratio: 16 / 9;
}
.preview-video {
width: 100%;
height: 100%;
object-fit: contain;
}
.preview-modal-actions {
padding: 1rem 1.25rem;
border-top: 1px solid rgba(255, 255, 255, 0.05);
display: flex;
justify-content: flex-end;
gap: 0.75rem;
}
.preview-btn {
padding: 0.6rem 1.25rem;
border-radius: var(--radius-sm);
font-size: 0.8rem;
font-weight: 500;
cursor: pointer;
transition: all 0.15s ease;
}
.preview-btn.secondary {
background: transparent;
border: 1px solid rgba(255, 255, 255, 0.1);
color: #999;
}
.preview-btn.secondary:hover {
border-color: rgba(255, 255, 255, 0.2);
color: #ccc;
}
.preview-btn.primary {
background: var(--primary);
border: none;
color: white;
}
.preview-btn.primary:hover {
background: #2563eb;
}
</style>
${this.editingProperties.length > 0 ? html`
<div class="advanced-editor-container">
@@ -1046,118 +667,24 @@ export class WccProperties extends DeesElement {
</i>
</div>
<!-- Recording Button -->
<div
class="recordingButton ${this.recordingState === 'recording' ? 'recording' : ''}"
@click=${() => this.handleRecordingButtonClick()}
>
${this.recordingState === 'recording' ? html`
<div class="rec-icon"></div>
<span class="recording-timer">${this.formatDuration(this.recordingDuration)}</span>
` : html`
<div class="rec-icon"></div>
`}
</div>
<wcc-record-button
.state=${this.isRecording ? 'recording' : 'idle'}
.duration=${this.recordingDuration}
@record-click=${() => this.handleRecordButtonClick()}
></wcc-record-button>
</div>
${this.warning ? html`<div class="warning">${this.warning}</div>` : null}
</div>
<!-- Recording Options Panel -->
${this.recordingState === 'options' ? html`
<div class="recording-options-panel">
<div class="recording-options-header">
<span class="recording-options-title">Recording Settings</span>
<button class="recording-options-close" @click=${() => this.recordingState = 'idle'}>✕</button>
</div>
<div class="recording-options-content">
<div class="recording-option-group">
<div class="recording-option-label">Record Area</div>
<div class="recording-mode-buttons">
<button
class="recording-mode-btn ${this.recordingMode === 'viewport' ? 'selected' : ''}"
@click=${() => this.recordingMode = 'viewport'}
>
Viewport Only
</button>
<button
class="recording-mode-btn ${this.recordingMode === 'screen' ? 'selected' : ''}"
@click=${() => this.recordingMode = 'screen'}
>
Entire Screen
</button>
</div>
</div>
<div class="recording-option-group">
<div class="recording-option-label">Audio</div>
<div class="audio-toggle">
<input
type="checkbox"
id="audioToggle"
?checked=${this.audioEnabled}
@change=${(e: Event) => this.handleAudioToggle((e.target as HTMLInputElement).checked)}
/>
<label for="audioToggle">Enable Microphone</label>
</div>
${this.audioEnabled ? html`
<select
class="microphone-select"
.value=${this.selectedMicrophoneId}
@change=${(e: Event) => this.handleMicrophoneChange((e.target as HTMLSelectElement).value)}
>
<option value="">Select Microphone...</option>
${this.availableMicrophones.map(mic => html`
<option value=${mic.deviceId}>${mic.label || `Microphone ${mic.deviceId.slice(0, 8)}`}</option>
`)}
</select>
${this.selectedMicrophoneId ? html`
<div class="audio-level-container">
<div class="audio-level-label">Input Level</div>
<div class="audio-level-bar">
<div class="audio-level-fill" style="width: ${this.audioLevel}%"></div>
</div>
</div>
` : null}
` : null}
</div>
<button class="start-recording-btn" @click=${() => this.startRecording()}>
<div class="rec-dot"></div>
Start Recording
</button>
</div>
</div>
` : null}
<!-- Preview Modal -->
${this.recordingState === 'preview' && this.previewVideoUrl ? html`
<div class="preview-modal-overlay" @click=${(e: Event) => {
if ((e.target as HTMLElement).classList.contains('preview-modal-overlay')) {
this.discardRecording();
}
}}>
<div class="preview-modal">
<div class="preview-modal-header">
<span class="preview-modal-title">Recording Preview</span>
<button class="preview-modal-close" @click=${() => this.discardRecording()}>✕</button>
</div>
<div class="preview-modal-content">
<div class="preview-video-container">
<video
class="preview-video"
src=${this.previewVideoUrl}
controls
autoplay
></video>
</div>
</div>
<div class="preview-modal-actions">
<button class="preview-btn secondary" @click=${() => this.discardRecording()}>Discard</button>
<button class="preview-btn primary" @click=${() => this.downloadRecording()}>Download</button>
</div>
</div>
</div>
<!-- Recording Panel (options + preview) -->
${this.showRecordingPanel ? html`
<wcc-recording-panel
.dashboardRef=${this.dashboardRef}
@recording-start=${() => { this.isRecording = true; }}
@recording-stop=${() => { this.isRecording = false; }}
@duration-update=${(e: CustomEvent) => { this.recordingDuration = e.detail.duration; }}
@close=${() => { this.showRecordingPanel = false; this.isRecording = false; this.recordingDuration = 0; }}
></wcc-recording-panel>
` : null}
`;
}
@@ -1499,270 +1026,16 @@ export class WccProperties extends DeesElement {
// ==================== Recording Methods ====================
// Render a whole-second count as a zero-padded mm:ss string (e.g. 75 -> "01:15").
private formatDuration(seconds: number): string {
  const pad = (value: number): string => value.toString().padStart(2, '0');
  return `${pad(Math.floor(seconds / 60))}:${pad(seconds % 60)}`;
}
// Cycle the recording UI on toolbar-button clicks:
// recording -> stop; idle -> open options; options -> close again.
// (Clicks while in 'preview' are deliberately ignored.)
private handleRecordingButtonClick() {
  switch (this.recordingState) {
    case 'recording':
      this.stopRecording();
      break;
    case 'idle':
      // Only show the options panel here. Permissions are requested
      // later, when the user enables audio or actually starts recording.
      this.recordingState = 'options';
      break;
    case 'options':
      this.recordingState = 'idle';
      this.stopAudioMonitoring();
      break;
  }
}
private async loadMicrophones(requestPermission: boolean = false) {
try {
// Only request permission if explicitly asked (when user enables audio toggle)
if (requestPermission) {
await navigator.mediaDevices.getUserMedia({ audio: true }).then(stream => {
stream.getTracks().forEach(track => track.stop());
});
private handleRecordButtonClick() {
if (this.isRecording) {
// Stop recording by calling the panel's stopRecording method
const panel = this.shadowRoot?.querySelector('wcc-recording-panel') as any;
if (panel && panel.stopRecording) {
panel.stopRecording();
}
const devices = await navigator.mediaDevices.enumerateDevices();
this.availableMicrophones = devices.filter(d => d.kind === 'audioinput');
// Auto-select the first microphone if available and we have permission
if (requestPermission && this.availableMicrophones.length > 0 && !this.selectedMicrophoneId) {
this.selectedMicrophoneId = this.availableMicrophones[0].deviceId;
// Start monitoring after auto-selecting
await this.startAudioMonitoring();
}
} catch (error) {
console.error('Error loading microphones:', error);
this.availableMicrophones = [];
}
}
private async handleAudioToggle(enabled: boolean) {
this.audioEnabled = enabled;
if (enabled) {
// Request permission and load microphones when user explicitly enables audio
await this.loadMicrophones(true);
} else {
this.stopAudioMonitoring();
this.selectedMicrophoneId = '';
this.audioLevel = 0;
// Toggle the recording panel
this.showRecordingPanel = !this.showRecordingPanel;
}
}
// Remember the chosen input device and (re)start the live level meter for it.
// An empty deviceId means the "Select Microphone..." placeholder: mute the meter.
private async handleMicrophoneChange(deviceId: string) {
  this.selectedMicrophoneId = deviceId;
  if (!deviceId) {
    this.stopAudioMonitoring();
    this.audioLevel = 0;
    return;
  }
  await this.startAudioMonitoring();
}
/**
 * Open the currently selected microphone and poll its input level into
 * `this.audioLevel` (0-100) so the options panel can show a VU meter.
 * Any previous monitor is torn down first, so at most one AudioContext
 * and capture stream are live at a time. No-op when no mic is selected.
 * On failure the error is logged and the level is reset to 0.
 */
private async startAudioMonitoring() {
  this.stopAudioMonitoring();
  if (!this.selectedMicrophoneId) return;
  try {
    const stream = await navigator.mediaDevices.getUserMedia({
      audio: { deviceId: { exact: this.selectedMicrophoneId } }
    });
    // mic stream -> MediaStreamSource -> AnalyserNode
    this.audioContext = new AudioContext();
    const source = this.audioContext.createMediaStreamSource(stream);
    this.audioAnalyser = this.audioContext.createAnalyser();
    // Small FFT keeps per-tick averaging cheap (128 frequency bins).
    this.audioAnalyser.fftSize = 256;
    source.connect(this.audioAnalyser);
    const dataArray = new Uint8Array(this.audioAnalyser.frequencyBinCount);
    // Every 50ms, average the byte spectrum (0-255) and map it onto a
    // 0-100 scale, clamped at 100 (values >= 128 saturate the meter).
    this.audioMonitoringInterval = window.setInterval(() => {
      if (this.audioAnalyser) {
        this.audioAnalyser.getByteFrequencyData(dataArray);
        const average = dataArray.reduce((a, b) => a + b) / dataArray.length;
        this.audioLevel = Math.min(100, (average / 128) * 100);
      }
    }, 50);
    // Store stream for cleanup (stopAudioMonitoring stops its tracks)
    this.currentStream = stream;
  } catch (error) {
    console.error('Error starting audio monitoring:', error);
    this.audioLevel = 0;
  }
}
// Tear down the mic level meter: polling timer, audio graph, and capture
// stream. Safe to call repeatedly or when nothing is being monitored.
private stopAudioMonitoring() {
  const timer = this.audioMonitoringInterval;
  if (timer) {
    clearInterval(timer);
    this.audioMonitoringInterval = null;
  }
  const ctx = this.audioContext;
  if (ctx) {
    ctx.close();
    this.audioContext = null;
  }
  const stream = this.currentStream;
  if (stream) {
    // Stopping each track releases the microphone hardware.
    for (const track of stream.getTracks()) {
      track.stop();
    }
    this.currentStream = null;
  }
  this.audioAnalyser = null;
}
/**
 * Start a screen/viewport capture session.
 *
 * Pipeline: acquire a display stream (monitor or browser tab per
 * `recordingMode`), optionally crop it to the dashboard viewport via the
 * Element Capture API, optionally mix in the selected microphone, then
 * feed the combined stream to a MediaRecorder emitting webm chunks.
 * On success transitions `recordingState` to 'recording' and starts the
 * 1s duration ticker; on failure logs and falls back to 'idle'.
 */
private async startRecording() {
  try {
    // Stop audio monitoring before recording — the mic is re-acquired
    // below so the recorder owns its own stream.
    this.stopAudioMonitoring();
    // Get video stream based on mode ('browser' = current tab surface)
    const displayMediaOptions: DisplayMediaStreamOptions = {
      video: {
        displaySurface: this.recordingMode === 'viewport' ? 'browser' : 'monitor'
      } as MediaTrackConstraints,
      audio: false
    };
    // Add preferCurrentTab hint for viewport mode (non-standard, cast needed)
    if (this.recordingMode === 'viewport') {
      (displayMediaOptions as any).preferCurrentTab = true;
    }
    const videoStream = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions);
    // If viewport mode, try to crop to viewport element using Element Capture API
    if (this.recordingMode === 'viewport') {
      try {
        const wccFrame = await this.dashboardRef.wccFrame;
        const viewport = await wccFrame.getViewportElement();
        // Check if Element Capture API is available (Chrome 104+)
        if ('CropTarget' in window) {
          const cropTarget = await (window as any).CropTarget.fromElement(viewport);
          const [videoTrack] = videoStream.getVideoTracks();
          await (videoTrack as any).cropTo(cropTarget);
        }
      } catch (e) {
        // Best-effort: fall back to recording the whole tab.
        console.warn('Element Capture not supported, recording full tab:', e);
      }
    }
    // Combine video with audio if enabled; audio failure is non-fatal.
    let combinedStream = videoStream;
    if (this.audioEnabled && this.selectedMicrophoneId) {
      try {
        const audioStream = await navigator.mediaDevices.getUserMedia({
          audio: { deviceId: { exact: this.selectedMicrophoneId } }
        });
        combinedStream = new MediaStream([
          ...videoStream.getVideoTracks(),
          ...audioStream.getAudioTracks()
        ]);
      } catch (audioError) {
        console.warn('Could not add audio:', audioError);
      }
    }
    // Store stream for cleanup (handleRecordingComplete stops its tracks)
    this.currentStream = combinedStream;
    // Create MediaRecorder, preferring VP9 when the browser supports it
    const mimeType = MediaRecorder.isTypeSupported('video/webm;codecs=vp9')
      ? 'video/webm;codecs=vp9'
      : 'video/webm';
    this.mediaRecorder = new MediaRecorder(combinedStream, { mimeType });
    this.recordedChunks = [];
    this.mediaRecorder.ondataavailable = (e) => {
      if (e.data.size > 0) {
        this.recordedChunks.push(e.data);
      }
    };
    // onstop fires after stopRecording() and assembles the preview blob.
    this.mediaRecorder.onstop = () => this.handleRecordingComplete();
    // Handle stream ending (user clicks "Stop sharing")
    videoStream.getVideoTracks()[0].onended = () => {
      if (this.recordingState === 'recording') {
        this.stopRecording();
      }
    };
    this.mediaRecorder.start(1000); // Capture in 1-second chunks
    // Start duration timer (drives the mm:ss display in the toolbar)
    this.recordingDuration = 0;
    this.durationInterval = window.setInterval(() => {
      this.recordingDuration++;
    }, 1000);
    this.recordingState = 'recording';
  } catch (error) {
    // Covers getDisplayMedia permission denial/cancel as well.
    console.error('Error starting recording:', error);
    this.recordingState = 'idle';
  }
}
// Finalize the capture: asking MediaRecorder to stop fires its `onstop`
// handler (-> handleRecordingComplete). Also halts the duration ticker.
private stopRecording() {
  const recorder = this.mediaRecorder;
  if (recorder && recorder.state !== 'inactive') {
    recorder.stop();
  }
  const timer = this.durationInterval;
  if (timer) {
    clearInterval(timer);
    this.durationInterval = null;
  }
}
// MediaRecorder `onstop` handler: assemble the captured chunks into one
// webm blob, expose it via an object URL, release the capture hardware,
// and switch the UI into the preview modal.
private handleRecordingComplete() {
  const blob = new Blob(this.recordedChunks, { type: 'video/webm' });
  this.recordedBlob = blob;
  // Revoke any stale preview URL first so object URLs never leak.
  if (this.previewVideoUrl) {
    URL.revokeObjectURL(this.previewVideoUrl);
  }
  this.previewVideoUrl = URL.createObjectURL(blob);
  // Stop all tracks to release the screen/microphone capture.
  const stream = this.currentStream;
  if (stream) {
    for (const track of stream.getTracks()) {
      track.stop();
    }
    this.currentStream = null;
  }
  this.recordingState = 'preview';
}
// Throw away the captured video and reset the recorder UI back to idle.
private discardRecording() {
  const url = this.previewVideoUrl;
  if (url) {
    // Free the blob memory backing the preview <video>.
    URL.revokeObjectURL(url);
    this.previewVideoUrl = '';
  }
  this.recordedChunks = [];
  this.recordedBlob = null;
  this.recordingDuration = 0;
  this.recordingState = 'idle';
}
// Save the previewed recording as `wcctools-recording-<timestamp>.webm`
// by clicking a temporary anchor, then discard the in-memory recording.
private downloadRecording() {
  if (!this.recordedBlob) return;
  // ISO timestamp with ':' and '.' made filename-safe, truncated to seconds.
  const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19);
  const anchor = document.createElement('a');
  anchor.href = this.previewVideoUrl;
  anchor.download = `wcctools-recording-${timestamp}.webm`;
  document.body.appendChild(anchor);
  anchor.click();
  document.body.removeChild(anchor);
  // Clean up after download
  this.discardRecording();
}
}

View File

@@ -0,0 +1,108 @@
import { DeesElement, customElement, html, css, property, type TemplateResult } from '@design.estate/dees-element';

/**
 * Toolbar button that toggles screen recording.
 *
 * Purely presentational: it renders a red dot (plus a pulsing animation
 * and an mm:ss timer while `state` is 'recording') and emits a composed
 * `record-click` CustomEvent on every click. The parent component owns
 * the actual recording logic and drives `state`/`duration` back in.
 */
@customElement('wcc-record-button')
export class WccRecordButton extends DeesElement {
  // 'recording' turns on the pulse animation and the timer readout.
  @property({ type: String })
  accessor state: 'idle' | 'recording' = 'idle';
  // Elapsed recording time in whole seconds, supplied by the parent.
  @property({ type: Number })
  accessor duration: number = 0;
  public static styles = [
    css`
      :host {
        display: flex;
        align-items: center;
        justify-content: center;
        background: transparent;
        cursor: pointer;
        transition: all 0.15s ease;
        color: #666;
        user-select: none;
      }
      :host(:hover) {
        background: rgba(239, 68, 68, 0.05);
        color: #f87171;
      }
      :host(.recording) {
        background: rgba(239, 68, 68, 0.15);
        color: #f87171;
      }
      .content {
        display: flex;
        align-items: center;
        justify-content: center;
        gap: 0.25rem;
      }
      .rec-icon {
        width: 12px;
        height: 12px;
        border-radius: 50%;
        background: currentColor;
      }
      :host(.recording) .rec-icon {
        animation: pulse-recording 1s ease-in-out infinite;
      }
      @keyframes pulse-recording {
        0%, 100% { opacity: 1; transform: scale(1); }
        50% { opacity: 0.5; transform: scale(0.9); }
      }
      .recording-timer {
        font-family: 'Consolas', 'Monaco', monospace;
        font-size: 0.7rem;
      }
    `
  ];
  // Formats whole seconds as zero-padded "mm:ss" for the timer readout.
  private formatDuration(seconds: number): string {
    const mins = Math.floor(seconds / 60);
    const secs = seconds % 60;
    return `${mins.toString().padStart(2, '0')}:${secs.toString().padStart(2, '0')}`;
  }
  // Red dot always; elapsed-time label only while recording.
  public render(): TemplateResult {
    return html`
      <div class="content">
        <div class="rec-icon"></div>
        ${this.state === 'recording' ? html`
          <span class="recording-timer">${this.formatDuration(this.duration)}</span>
        ` : null}
      </div>
    `;
  }
  // Listener is attached on connect and removed on disconnect so a
  // detached element cannot keep dispatching events.
  // NOTE(review): async lifecycle overrides assume DeesElement's
  // connectedCallback returns a Promise — confirm against the base class.
  async connectedCallback(): Promise<void> {
    await super.connectedCallback();
    this.addEventListener('click', this.handleClick);
  }
  async disconnectedCallback(): Promise<void> {
    await super.disconnectedCallback();
    this.removeEventListener('click', this.handleClick);
  }
  // Arrow property keeps `this` bound and gives a stable reference for
  // removeEventListener. Event is composed so it crosses shadow roots.
  private handleClick = (): void => {
    this.dispatchEvent(new CustomEvent('record-click', {
      bubbles: true,
      composed: true
    }));
  };
  // Mirror `state` onto a host-level `recording` class so the :host(.recording)
  // CSS rules (background, pulse animation) can react to it.
  updated(changedProperties: Map<string, unknown>): void {
    super.updated(changedProperties);
    if (changedProperties.has('state')) {
      if (this.state === 'recording') {
        this.classList.add('recording');
      } else {
        this.classList.remove('recording');
      }
    }
  }
}

View File

@@ -0,0 +1,966 @@
import { DeesElement, customElement, html, css, property, state, type TemplateResult } from '@design.estate/dees-element';
import { RecorderService } from '../services/recorder.service.js';
import type { WccDashboard } from './wcc-dashboard.js';
@customElement('wcc-recording-panel')
export class WccRecordingPanel extends DeesElement {
// External configuration
@property({ attribute: false })
accessor dashboardRef: WccDashboard;
// Panel state
@state()
accessor panelState: 'options' | 'recording' | 'preview' = 'options';
// Recording options
@state()
accessor recordingMode: 'viewport' | 'screen' = 'viewport';
@state()
accessor audioEnabled: boolean = false;
@state()
accessor selectedMicrophoneId: string = '';
@state()
accessor availableMicrophones: MediaDeviceInfo[] = [];
@state()
accessor audioLevel: number = 0;
// Recording state
@state()
accessor recordingDuration: number = 0;
// Preview/trim state
@state()
accessor previewVideoUrl: string = '';
@state()
accessor trimStart: number = 0;
@state()
accessor trimEnd: number = 0;
@state()
accessor videoDuration: number = 0;
@state()
accessor isDraggingTrim: 'start' | 'end' | null = null;
@state()
accessor isExporting: boolean = false;
// Service instance
private recorderService: RecorderService;
constructor() {
super();
this.recorderService = new RecorderService({
onDurationUpdate: (duration) => {
this.recordingDuration = duration;
this.dispatchEvent(new CustomEvent('duration-update', {
detail: { duration },
bubbles: true,
composed: true
}));
},
onRecordingComplete: (blob) => {
this.handleRecordingComplete(blob);
},
onAudioLevelUpdate: (level) => {
this.audioLevel = level;
},
onStreamEnded: () => {
this.stopRecording();
}
});
}
public static styles = [
css`
:host {
/* CSS Variables */
--background: #0a0a0a;
--foreground: #e5e5e5;
--input: #141414;
--primary: #3b82f6;
--border: rgba(255, 255, 255, 0.06);
--radius-sm: 2px;
--radius-md: 4px;
--radius-lg: 6px;
}
/* Recording Options Panel */
.recording-options-panel {
position: fixed;
right: 16px;
bottom: 116px;
width: 360px;
background: #0c0c0c;
border: 1px solid rgba(255, 255, 255, 0.1);
border-radius: var(--radius-md);
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.4);
z-index: 1000;
overflow: hidden;
font-family: 'Inter', -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', sans-serif;
}
.recording-options-header {
padding: 0.75rem 1rem;
background: rgba(255, 255, 255, 0.02);
border-bottom: 1px solid rgba(255, 255, 255, 0.05);
display: flex;
justify-content: space-between;
align-items: center;
}
.recording-options-title {
font-size: 0.8rem;
font-weight: 500;
color: #ccc;
}
.recording-options-close {
width: 24px;
height: 24px;
background: transparent;
border: none;
color: #666;
cursor: pointer;
display: flex;
align-items: center;
justify-content: center;
border-radius: var(--radius-sm);
transition: all 0.15s ease;
}
.recording-options-close:hover {
background: rgba(255, 255, 255, 0.05);
color: #999;
}
.recording-options-content {
padding: 1rem;
}
.recording-option-group {
margin-bottom: 1rem;
}
.recording-option-group:last-child {
margin-bottom: 0;
}
.recording-option-label {
font-size: 0.7rem;
font-weight: 500;
color: #888;
text-transform: uppercase;
letter-spacing: 0.05em;
margin-bottom: 0.5rem;
}
.recording-mode-buttons {
display: flex;
gap: 0.5rem;
}
.recording-mode-btn {
flex: 1;
padding: 0.6rem 0.75rem;
background: var(--input);
border: 1px solid transparent;
border-radius: var(--radius-sm);
color: #999;
font-size: 0.75rem;
cursor: pointer;
transition: all 0.15s ease;
text-align: center;
}
.recording-mode-btn:hover {
border-color: var(--primary);
color: #ccc;
}
.recording-mode-btn.selected {
background: rgba(59, 130, 246, 0.15);
border-color: var(--primary);
color: var(--primary);
}
.audio-toggle {
display: flex;
align-items: center;
gap: 0.5rem;
margin-bottom: 0.75rem;
}
.audio-toggle input[type="checkbox"] {
width: 1rem;
height: 1rem;
accent-color: var(--primary);
}
.audio-toggle label {
font-size: 0.75rem;
color: #999;
cursor: pointer;
}
.microphone-select {
width: 100%;
padding: 0.5rem 0.75rem;
background: var(--input);
border: 1px solid transparent;
border-radius: var(--radius-sm);
color: var(--foreground);
font-size: 0.75rem;
outline: none;
cursor: pointer;
transition: all 0.15s ease;
}
.microphone-select:focus {
border-color: var(--primary);
}
.microphone-select:disabled {
opacity: 0.5;
cursor: not-allowed;
}
.audio-level-container {
margin-top: 0.75rem;
padding: 0.5rem;
background: rgba(255, 255, 255, 0.02);
border-radius: var(--radius-sm);
}
.audio-level-label {
font-size: 0.65rem;
color: #666;
margin-bottom: 0.25rem;
}
.audio-level-bar {
height: 8px;
background: var(--input);
border-radius: 4px;
overflow: hidden;
}
.audio-level-fill {
height: 100%;
background: linear-gradient(90deg, #22c55e, #84cc16, #eab308);
border-radius: 4px;
transition: width 0.1s ease;
}
.start-recording-btn {
width: 100%;
padding: 0.75rem;
background: #dc2626;
border: none;
border-radius: var(--radius-sm);
color: white;
font-size: 0.8rem;
font-weight: 500;
cursor: pointer;
transition: all 0.15s ease;
margin-top: 1rem;
display: flex;
align-items: center;
justify-content: center;
gap: 0.5rem;
}
.start-recording-btn:hover {
background: #b91c1c;
}
.start-recording-btn .rec-dot {
width: 10px;
height: 10px;
background: white;
border-radius: 50%;
}
/* Preview Modal */
.preview-modal-overlay {
position: fixed;
top: 0;
left: 0;
right: 0;
bottom: 0;
background: rgba(0, 0, 0, 0.8);
display: flex;
align-items: center;
justify-content: center;
z-index: 1000;
backdrop-filter: blur(4px);
font-family: 'Inter', -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', sans-serif;
}
.preview-modal {
width: 90%;
max-width: 800px;
background: #0c0c0c;
border: 1px solid rgba(255, 255, 255, 0.1);
border-radius: var(--radius-lg);
overflow: hidden;
box-shadow: 0 25px 50px rgba(0, 0, 0, 0.5);
}
.preview-modal-header {
padding: 1rem 1.25rem;
background: rgba(255, 255, 255, 0.02);
border-bottom: 1px solid rgba(255, 255, 255, 0.05);
display: flex;
justify-content: space-between;
align-items: center;
}
.preview-modal-title {
font-size: 0.9rem;
font-weight: 500;
color: #ccc;
}
.preview-modal-close {
width: 28px;
height: 28px;
background: transparent;
border: none;
color: #666;
cursor: pointer;
display: flex;
align-items: center;
justify-content: center;
border-radius: var(--radius-sm);
font-size: 1.2rem;
transition: all 0.15s ease;
}
.preview-modal-close:hover {
background: rgba(255, 255, 255, 0.05);
color: #999;
}
.preview-modal-content {
padding: 1.25rem;
}
.preview-video-container {
background: #000;
border-radius: var(--radius-sm);
overflow: hidden;
aspect-ratio: 16 / 9;
}
.preview-video {
width: 100%;
height: 100%;
object-fit: contain;
}
.preview-modal-actions {
padding: 1rem 1.25rem;
border-top: 1px solid rgba(255, 255, 255, 0.05);
display: flex;
justify-content: flex-end;
gap: 0.75rem;
}
.preview-btn {
padding: 0.6rem 1.25rem;
border-radius: var(--radius-sm);
font-size: 0.8rem;
font-weight: 500;
cursor: pointer;
transition: all 0.15s ease;
}
.preview-btn.secondary {
background: transparent;
border: 1px solid rgba(255, 255, 255, 0.1);
color: #999;
}
.preview-btn.secondary:hover {
border-color: rgba(255, 255, 255, 0.2);
color: #ccc;
}
.preview-btn.primary {
background: var(--primary);
border: none;
color: white;
}
.preview-btn.primary:hover {
background: #2563eb;
}
.preview-btn.primary:disabled {
background: #1e3a5f;
cursor: not-allowed;
opacity: 0.7;
}
/* Trim Timeline Styles */
.trim-section {
margin-top: 1.25rem;
padding-top: 1.25rem;
border-top: 1px solid rgba(255, 255, 255, 0.05);
}
.trim-section-header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 0.75rem;
}
.trim-section-title {
font-size: 0.75rem;
font-weight: 500;
color: #888;
text-transform: uppercase;
letter-spacing: 0.05em;
}
.trim-duration-info {
font-size: 0.7rem;
color: #666;
font-family: 'Consolas', 'Monaco', monospace;
}
.trim-timeline {
position: relative;
height: 48px;
background: var(--input);
border-radius: var(--radius-sm);
margin-bottom: 0.75rem;
user-select: none;
}
.trim-track {
position: absolute;
top: 50%;
left: 12px;
right: 12px;
height: 6px;
background: #333;
transform: translateY(-50%);
border-radius: 3px;
}
.trim-selected {
position: absolute;
top: 50%;
height: 6px;
background: var(--primary);
transform: translateY(-50%);
border-radius: 3px;
pointer-events: none;
}
.trim-handle {
position: absolute;
top: 50%;
width: 16px;
height: 36px;
background: white;
border: 2px solid var(--primary);
border-radius: 4px;
transform: translate(-50%, -50%);
cursor: ew-resize;
z-index: 2;
display: flex;
align-items: center;
justify-content: center;
transition: background 0.15s ease, transform 0.1s ease;
}
.trim-handle:hover {
background: #e0e0e0;
}
.trim-handle:active {
background: var(--primary);
transform: translate(-50%, -50%) scale(1.05);
}
.trim-handle::before {
content: '';
width: 2px;
height: 16px;
background: #666;
border-radius: 1px;
}
.trim-handle:active::before {
background: white;
}
.trim-time-labels {
display: flex;
justify-content: space-between;
font-size: 0.65rem;
color: #666;
font-family: 'Consolas', 'Monaco', monospace;
padding: 0 12px;
}
.trim-actions {
display: flex;
gap: 0.5rem;
margin-top: 0.75rem;
}
.trim-action-btn {
flex: 1;
padding: 0.5rem 0.75rem;
background: var(--input);
border: 1px solid transparent;
border-radius: var(--radius-sm);
color: #999;
font-size: 0.75rem;
cursor: pointer;
transition: all 0.15s ease;
text-align: center;
}
.trim-action-btn:hover {
border-color: var(--primary);
color: #ccc;
}
.export-spinner {
display: inline-block;
width: 14px;
height: 14px;
border: 2px solid rgba(255, 255, 255, 0.3);
border-radius: 50%;
border-top-color: white;
animation: spin 0.8s linear infinite;
margin-right: 0.5rem;
}
@keyframes spin {
to { transform: rotate(360deg); }
}
`
];
/**
 * Renders the panel for the current UI state: the settings panel before
 * recording, the preview modal afterwards, or nothing otherwise
 * (e.g. while a recording is in progress).
 */
public render(): TemplateResult {
  switch (this.panelState) {
    case 'options':
      return this.renderOptionsPanel();
    case 'preview':
      return this.renderPreviewModal();
    default:
      return html``;
  }
}
/**
 * Renders the pre-recording settings panel: capture-area choice
 * (viewport vs. entire screen), the microphone enable toggle with device
 * selection and a live input-level meter, and the start button.
 * Microphone UI is only shown while `audioEnabled` is true; the level
 * meter additionally requires a selected device.
 */
private renderOptionsPanel(): TemplateResult {
  return html`
    <div class="recording-options-panel">
      <div class="recording-options-header">
        <span class="recording-options-title">Recording Settings</span>
        <button class="recording-options-close" @click=${() => this.close()}>✕</button>
      </div>
      <div class="recording-options-content">
        <div class="recording-option-group">
          <div class="recording-option-label">Record Area</div>
          <div class="recording-mode-buttons">
            <button
              class="recording-mode-btn ${this.recordingMode === 'viewport' ? 'selected' : ''}"
              @click=${() => this.recordingMode = 'viewport'}
            >
              Viewport Only
            </button>
            <button
              class="recording-mode-btn ${this.recordingMode === 'screen' ? 'selected' : ''}"
              @click=${() => this.recordingMode = 'screen'}
            >
              Entire Screen
            </button>
          </div>
        </div>
        <div class="recording-option-group">
          <div class="recording-option-label">Audio</div>
          <div class="audio-toggle">
            <input
              type="checkbox"
              id="audioToggle"
              ?checked=${this.audioEnabled}
              @change=${(e: Event) => this.handleAudioToggle((e.target as HTMLInputElement).checked)}
            />
            <label for="audioToggle">Enable Microphone</label>
          </div>
          ${this.audioEnabled ? html`
            <select
              class="microphone-select"
              .value=${this.selectedMicrophoneId}
              @change=${(e: Event) => this.handleMicrophoneChange((e.target as HTMLSelectElement).value)}
            >
              <option value="">Select Microphone...</option>
              ${this.availableMicrophones.map(mic => html`
                <option value=${mic.deviceId}>${mic.label || `Microphone ${mic.deviceId.slice(0, 8)}`}</option>
              `)}
            </select>
            ${this.selectedMicrophoneId ? html`
              <div class="audio-level-container">
                <div class="audio-level-label">Input Level</div>
                <div class="audio-level-bar">
                  <div class="audio-level-fill" style="width: ${this.audioLevel}%"></div>
                </div>
              </div>
            ` : null}
          ` : null}
        </div>
        <button class="start-recording-btn" @click=${() => this.startRecording()}>
          <div class="rec-dot"></div>
          Start Recording
        </button>
      </div>
    </div>
  `;
}
/**
 * Renders the post-recording preview modal: the playback <video>, the
 * trim timeline (draggable start/end handles over a track), trim action
 * buttons, and the Discard/Download footer. Clicking the backdrop (but
 * not the modal itself) discards the recording.
 */
private renderPreviewModal(): TemplateResult {
  return html`
    <div class="preview-modal-overlay" @click=${(e: Event) => {
      if ((e.target as HTMLElement).classList.contains('preview-modal-overlay')) {
        this.discardRecording();
      }
    }}>
      <div class="preview-modal">
        <div class="preview-modal-header">
          <span class="preview-modal-title">Recording Preview</span>
          <button class="preview-modal-close" @click=${() => this.discardRecording()}>✕</button>
        </div>
        <div class="preview-modal-content">
          <div class="preview-video-container">
            <video
              class="preview-video"
              src=${this.previewVideoUrl}
              controls
              @loadedmetadata=${(e: Event) => this.handleVideoLoaded(e.target as HTMLVideoElement)}
            ></video>
          </div>
          <!-- Trim Section -->
          <div class="trim-section">
            <div class="trim-section-header">
              <span class="trim-section-title">Trim Video</span>
              <span class="trim-duration-info">
                ${this.formatDuration(Math.floor(this.trimEnd - this.trimStart))}
                ${this.trimStart > 0 || this.trimEnd < this.videoDuration
                  ? `(trimmed from ${this.formatDuration(Math.floor(this.videoDuration))})`
                  : ''}
              </span>
            </div>
            <div
              class="trim-timeline"
              @mousedown=${(e: MouseEvent) => this.handleTimelineClick(e)}
              @mousemove=${(e: MouseEvent) => this.handleTimelineDrag(e)}
              @mouseup=${() => this.handleTimelineDragEnd()}
              @mouseleave=${() => this.handleTimelineDragEnd()}
            >
              <div class="trim-track"></div>
              <div
                class="trim-selected"
                style="left: ${this.getHandlePosition(this.trimStart)}px; right: ${this.getHandlePositionFromEnd(this.trimEnd)}px;"
              ></div>
              <div
                class="trim-handle start-handle"
                style="left: ${this.getHandlePosition(this.trimStart)}px;"
                @mousedown=${(e: MouseEvent) => { e.stopPropagation(); this.isDraggingTrim = 'start'; }}
              ></div>
              <div
                class="trim-handle end-handle"
                style="left: ${this.getHandlePosition(this.trimEnd)}px;"
                @mousedown=${(e: MouseEvent) => { e.stopPropagation(); this.isDraggingTrim = 'end'; }}
              ></div>
            </div>
            <div class="trim-time-labels">
              <span>${this.formatDuration(Math.floor(this.trimStart))}</span>
              <span>${this.formatDuration(Math.floor(this.trimEnd))}</span>
            </div>
            <div class="trim-actions">
              <button class="trim-action-btn" @click=${() => this.resetTrim()}>
                Reset Trim
              </button>
              <button class="trim-action-btn" @click=${() => this.previewTrimmedSection()}>
                Preview Selection
              </button>
            </div>
          </div>
        </div>
        <div class="preview-modal-actions">
          <button class="preview-btn secondary" @click=${() => this.discardRecording()}>Discard</button>
          <button
            class="preview-btn primary"
            ?disabled=${this.isExporting}
            @click=${() => this.downloadRecording()}
          >
            ${this.isExporting ? html`<span class="export-spinner"></span>Exporting...` : 'Download'}
          </button>
        </div>
      </div>
    </div>
  `;
}
// ==================== Audio Methods ====================
private async handleAudioToggle(enabled: boolean): Promise<void> {
this.audioEnabled = enabled;
if (enabled) {
this.availableMicrophones = await this.recorderService.loadMicrophones(true);
if (this.availableMicrophones.length > 0 && !this.selectedMicrophoneId) {
this.selectedMicrophoneId = this.availableMicrophones[0].deviceId;
await this.recorderService.startAudioMonitoring(this.selectedMicrophoneId);
}
} else {
this.recorderService.stopAudioMonitoring();
this.selectedMicrophoneId = '';
this.audioLevel = 0;
}
}
private async handleMicrophoneChange(deviceId: string): Promise<void> {
this.selectedMicrophoneId = deviceId;
if (deviceId) {
await this.recorderService.startAudioMonitoring(deviceId);
} else {
this.recorderService.stopAudioMonitoring();
this.audioLevel = 0;
}
}
// ==================== Recording Methods ====================
/**
 * Kicks off a recording via the recorder service and emits a composed
 * 'recording-start' event on success. In viewport mode the dashboard's
 * viewport element is resolved first so the service can crop to it.
 * On failure the panel falls back to the options state.
 */
private async startRecording(): Promise<void> {
  try {
    // Only resolve the crop target when recording the dashboard viewport.
    let cropTargetElement: HTMLElement | undefined;
    if (this.recordingMode === 'viewport' && this.dashboardRef) {
      const frame = await this.dashboardRef.wccFrame;
      cropTargetElement = await frame.getViewportElement();
    }
    await this.recorderService.startRecording({
      mode: this.recordingMode,
      audioDeviceId: this.audioEnabled ? this.selectedMicrophoneId : undefined,
      viewportElement: cropTargetElement
    });
    this.panelState = 'recording';
    this.dispatchEvent(new CustomEvent('recording-start', { bubbles: true, composed: true }));
  } catch (error) {
    console.error('Failed to start recording:', error);
    this.panelState = 'options';
  }
}
/**
 * Public entry point to end an in-progress recording. The finished blob
 * arrives asynchronously through the service's onRecordingComplete
 * callback (see handleRecordingComplete).
 */
public stopRecording(): void {
  this.recorderService.stopRecording();
}
/**
 * Receives the finished recording blob, swaps it into the preview
 * player (revoking any stale object URL first), switches to the preview
 * state, and emits a composed 'recording-stop' event.
 */
private handleRecordingComplete(blob: Blob): void {
  // Release the previous preview URL before replacing it.
  if (this.previewVideoUrl) {
    URL.revokeObjectURL(this.previewVideoUrl);
  }
  this.previewVideoUrl = URL.createObjectURL(blob);
  this.panelState = 'preview';
  this.dispatchEvent(new CustomEvent('recording-stop', { bubbles: true, composed: true }));
}
private discardRecording(): void {
if (this.previewVideoUrl) {
URL.revokeObjectURL(this.previewVideoUrl);
this.previewVideoUrl = '';
}
this.recorderService.reset();
this.trimStart = 0;
this.trimEnd = 0;
this.videoDuration = 0;
this.isExporting = false;
this.recordingDuration = 0;
this.close();
}
/**
 * Exports the recording to a timestamped .webm download. If the user
 * moved a trim handle (beyond a 0.1s tolerance) the video is re-encoded
 * to the trimmed range first; otherwise the raw blob is used. On success
 * the recording is discarded and the panel closes; on failure the
 * exporting flag is cleared so the button re-enables.
 */
private async downloadRecording(): Promise<void> {
  const recordedBlob = this.recorderService.recordedBlob;
  if (!recordedBlob) return;
  this.isExporting = true;
  try {
    // Only pay the re-encode cost when a trim handle actually moved.
    const wasTrimmed = this.trimStart > 0.1 || this.trimEnd < this.videoDuration - 0.1;
    let blobToDownload = recordedBlob;
    if (wasTrimmed) {
      const video = this.shadowRoot?.querySelector('.preview-video') as HTMLVideoElement;
      if (video) {
        blobToDownload = await this.recorderService.exportTrimmedVideo(video, this.trimStart, this.trimEnd);
      }
    }
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19);
    const url = URL.createObjectURL(blobToDownload);
    const anchor = document.createElement('a');
    anchor.href = url;
    anchor.download = `wcctools-recording-${timestamp}.webm`;
    document.body.appendChild(anchor);
    anchor.click();
    document.body.removeChild(anchor);
    URL.revokeObjectURL(url);
    this.discardRecording();
  } catch (error) {
    console.error('Error exporting video:', error);
    this.isExporting = false;
  }
}
// ==================== Trim Methods ====================
private handleVideoLoaded(video: HTMLVideoElement): void {
this.videoDuration = video.duration;
this.trimStart = 0;
this.trimEnd = video.duration;
}
/**
 * Formats a duration in seconds as zero-padded "MM:SS".
 *
 * Robustness: fractional input is floored and negative input is clamped
 * to 0, so transient values mid-drag can never render as e.g. "00:-1"
 * or "00:1.5". For the non-negative integers current callers pass
 * (they always Math.floor first), output is unchanged.
 */
private formatDuration(seconds: number): string {
  const total = Math.max(0, Math.floor(seconds));
  const mins = Math.floor(total / 60);
  const secs = total % 60;
  return `${mins.toString().padStart(2, '0')}:${secs.toString().padStart(2, '0')}`;
}
/**
 * Maps a time (seconds) to the left offset (px) of a trim handle.
 * The timeline has 12px padding on each side and a 336px usable track
 * (NOTE(review): 336 mirrors the fixed panel width in the stylesheet —
 * confirm if the panel ever becomes resizable). Before metadata loads
 * (duration 0) the handle parks at the left edge.
 */
private getHandlePosition(time: number): number {
  const TRACK_PADDING = 12;
  const TRACK_WIDTH = 336;
  if (this.videoDuration === 0) return TRACK_PADDING;
  return TRACK_PADDING + (time / this.videoDuration) * TRACK_WIDTH;
}
/**
 * Maps a time (seconds) to the offset (px) measured from the timeline's
 * right edge — used for the `right:` style of the selected-range bar.
 * Uses the same 12px padding / 336px track geometry as getHandlePosition.
 */
private getHandlePositionFromEnd(time: number): number {
  const TRACK_PADDING = 12;
  const TRACK_WIDTH = 336;
  if (this.videoDuration === 0) return TRACK_PADDING;
  const remainingFraction = (this.videoDuration - time) / this.videoDuration;
  return TRACK_PADDING + remainingFraction * TRACK_WIDTH;
}
/**
 * Seeks the preview video to the clicked position on the timeline.
 * Ignored while a trim handle is being dragged (the handles call
 * stopPropagation, so this only fires for clicks on the track itself).
 */
private handleTimelineClick(e: MouseEvent): void {
  if (this.isDraggingTrim) return;
  const rect = (e.currentTarget as HTMLElement).getBoundingClientRect();
  const offsetX = e.clientX - rect.left;
  // Map the click into the padded (12px each side) track, clamped to [0, 1].
  const fraction = Math.max(0, Math.min(1, (offsetX - 12) / (rect.width - 24)));
  const video = this.shadowRoot?.querySelector('.preview-video') as HTMLVideoElement;
  if (video) {
    video.currentTime = fraction * this.videoDuration;
  }
}
private handleTimelineDrag(e: MouseEvent): void {
if (!this.isDraggingTrim) return;
const timeline = e.currentTarget as HTMLElement;
const rect = timeline.getBoundingClientRect();
const x = e.clientX - rect.left;
const percentage = Math.max(0, Math.min(1, (x - 12) / (rect.width - 24)));
const time = percentage * this.videoDuration;
const minDuration = 1;
if (this.isDraggingTrim === 'start') {
this.trimStart = Math.min(time, this.trimEnd - minDuration);
this.trimStart = Math.max(0, this.trimStart);
} else if (this.isDraggingTrim === 'end') {
this.trimEnd = Math.max(time, this.trimStart + minDuration);
this.trimEnd = Math.min(this.videoDuration, this.trimEnd);
}
const video = this.shadowRoot?.querySelector('.preview-video') as HTMLVideoElement;
if (video) {
video.currentTime = this.isDraggingTrim === 'start' ? this.trimStart : this.trimEnd;
}
}
/** Ends any active trim-handle drag (fired on mouseup/mouseleave of the timeline). */
private handleTimelineDragEnd(): void {
  this.isDraggingTrim = null;
}
/**
 * Restores the trim selection to the full recording and rewinds the
 * preview player to the start.
 */
private resetTrim(): void {
  this.trimStart = 0;
  this.trimEnd = this.videoDuration;
  const video = this.shadowRoot?.querySelector('.preview-video') as HTMLVideoElement;
  if (video) {
    video.currentTime = 0;
  }
}
/**
 * Plays back only the selected [trimStart, trimEnd] range: seeks to the
 * start, plays, and pauses (removing its own listener) once playback
 * reaches the end of the selection.
 *
 * BUGFIX: `video.play()` returns a promise that rejects under autoplay
 * restrictions or when the source is invalid; the original ignored it,
 * producing an unhandled rejection and leaving the timeupdate listener
 * attached. The rejection is now handled and the listener detached.
 */
private previewTrimmedSection(): void {
  const video = this.shadowRoot?.querySelector('.preview-video') as HTMLVideoElement;
  if (!video) return;
  video.currentTime = this.trimStart;
  const stopAtTrimEnd = () => {
    if (video.currentTime >= this.trimEnd) {
      video.pause();
      video.removeEventListener('timeupdate', stopAtTrimEnd);
    }
  };
  video.addEventListener('timeupdate', stopAtTrimEnd);
  video.play().catch((err) => {
    video.removeEventListener('timeupdate', stopAtTrimEnd);
    console.warn('Preview playback failed:', err);
  });
}
// ==================== Lifecycle ====================
/**
 * Stops microphone level monitoring (releasing the mic) and notifies
 * the host via a composed 'close' event that this panel should be
 * hidden/removed. Does not touch any in-progress recording.
 */
private close(): void {
  this.recorderService.stopAudioMonitoring();
  this.dispatchEvent(new CustomEvent('close', {
    bubbles: true,
    composed: true
  }));
}
/**
 * Lit lifecycle hook: when the element leaves the DOM, disposes the
 * recorder service (stopping any recording/monitoring and releasing
 * streams) and revokes the preview blob URL to avoid leaking memory.
 */
async disconnectedCallback(): Promise<void> {
  await super.disconnectedCallback();
  this.recorderService.dispose();
  if (this.previewVideoUrl) {
    URL.revokeObjectURL(this.previewVideoUrl);
  }
}
}

View File

@@ -2,6 +2,11 @@ import { WccDashboard } from './elements/wcc-dashboard.js';
import { LitElement } from 'lit';
import type { TTemplateFactory } from './elements/wcctools.helpers.js';
// Export recording components and service
export { RecorderService, type IRecorderEvents, type IRecordingOptions } from './services/recorder.service.js';
export { WccRecordButton } from './elements/wcc-record-button.js';
export { WccRecordingPanel } from './elements/wcc-recording-panel.js';
const setupWccTools = (
elementsArg?: { [key: string]: LitElement },
pagesArg?: Record<string, TTemplateFactory>

View File

@@ -0,0 +1,391 @@
/**
 * RecorderService - Handles all MediaRecorder, audio monitoring, and video export logic
 */

/**
 * Callbacks the recorder service invokes as recording progresses.
 * All members are optional; missing callbacks are simply skipped.
 */
export interface IRecorderEvents {
  /** Fired once per second with the elapsed recording time in seconds. */
  onDurationUpdate?: (duration: number) => void;
  /** Fired with the final webm blob after the recorder has fully stopped. */
  onRecordingComplete?: (blob: Blob) => void;
  /** Fired periodically (every 50ms while monitoring) with the mic level, 0-100. */
  onAudioLevelUpdate?: (level: number) => void;
  /** Fired when starting a recording fails (the error is also re-thrown). */
  onError?: (error: Error) => void;
  /** Fired when the user ends capture via the browser's own "Stop sharing" UI. */
  onStreamEnded?: () => void;
}

/** Options accepted by {@link RecorderService.startRecording}. */
export interface IRecordingOptions {
  /** 'viewport' captures the current tab (optionally cropped); 'screen' captures a monitor. */
  mode: 'viewport' | 'screen';
  /** Microphone deviceId to mix into the recording; omit to record without audio. */
  audioDeviceId?: string;
  /** Element to crop to via the Element Capture API (viewport mode only, Chromium only). */
  viewportElement?: HTMLElement;
}

export class RecorderService {
  // Recording state
  private mediaRecorder: MediaRecorder | null = null;
  private recordedChunks: Blob[] = [];
  private durationInterval: number | null = null;
  private _duration: number = 0;
  private _recordedBlob: Blob | null = null;
  private _isRecording: boolean = false;
  // Audio monitoring state
  private audioContext: AudioContext | null = null;
  private audioAnalyser: AnalyserNode | null = null;
  private audioMonitoringInterval: number | null = null;
  private monitoringStream: MediaStream | null = null;
  // Stream currently being recorded (display video + optional mic audio)
  private currentStream: MediaStream | null = null;
  // Event callbacks
  private events: IRecorderEvents = {};

  constructor(events?: IRecorderEvents) {
    if (events) {
      this.events = events;
    }
  }

  /** True while a recording is in progress. */
  get isRecording(): boolean {
    return this._isRecording;
  }

  /** Elapsed recording time in whole seconds. */
  get duration(): number {
    return this._duration;
  }

  /** The finished recording, or null if none has completed yet. */
  get recordedBlob(): Blob | null {
    return this._recordedBlob;
  }

  /** Merges new callbacks into the existing set; previously set ones are kept unless overridden. */
  setEvents(events: IRecorderEvents): void {
    this.events = { ...this.events, ...events };
  }

  // ==================== Microphone Management ====================

  /**
   * Lists the available audio input devices.
   * @param requestPermission When true, opens (and immediately stops) a
   *   temporary mic stream first so that device labels are populated.
   * @returns The audio-input devices, or an empty array on error.
   */
  async loadMicrophones(requestPermission: boolean = false): Promise<MediaDeviceInfo[]> {
    try {
      if (requestPermission) {
        // Request permission by getting a temporary stream
        const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
        stream.getTracks().forEach(track => track.stop());
      }
      const devices = await navigator.mediaDevices.enumerateDevices();
      return devices.filter(d => d.kind === 'audioinput');
    } catch (error) {
      console.error('Error loading microphones:', error);
      return [];
    }
  }

  /**
   * Starts level monitoring for the given microphone, reporting through
   * onAudioLevelUpdate every 50ms. Any previous monitoring session is
   * stopped first; an empty deviceId just stops monitoring.
   */
  async startAudioMonitoring(deviceId: string): Promise<void> {
    this.stopAudioMonitoring();
    if (!deviceId) return;
    try {
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: { deviceId: { exact: deviceId } }
      });
      this.monitoringStream = stream;
      this.audioContext = new AudioContext();
      const source = this.audioContext.createMediaStreamSource(stream);
      this.audioAnalyser = this.audioContext.createAnalyser();
      this.audioAnalyser.fftSize = 256;
      source.connect(this.audioAnalyser);
      const dataArray = new Uint8Array(this.audioAnalyser.frequencyBinCount);
      this.audioMonitoringInterval = window.setInterval(() => {
        if (this.audioAnalyser) {
          this.audioAnalyser.getByteFrequencyData(dataArray);
          const average = dataArray.reduce((a, b) => a + b) / dataArray.length;
          // 128 of 255 is treated as "full scale"; clamp to 100.
          const level = Math.min(100, (average / 128) * 100);
          this.events.onAudioLevelUpdate?.(level);
        }
      }, 50);
    } catch (error) {
      console.error('Error starting audio monitoring:', error);
      this.events.onAudioLevelUpdate?.(0);
    }
  }

  /** Stops level monitoring and releases the mic stream and AudioContext. Safe to call when idle. */
  stopAudioMonitoring(): void {
    if (this.audioMonitoringInterval) {
      clearInterval(this.audioMonitoringInterval);
      this.audioMonitoringInterval = null;
    }
    if (this.audioContext) {
      // close() is async; a rejection (e.g. context already closed) is not
      // actionable here, so swallow it instead of surfacing an unhandled rejection.
      this.audioContext.close().catch(() => {});
      this.audioContext = null;
    }
    if (this.monitoringStream) {
      this.monitoringStream.getTracks().forEach(track => track.stop());
      this.monitoringStream = null;
    }
    this.audioAnalyser = null;
  }

  // ==================== Recording Control ====================

  /**
   * Acquires a display capture (plus optional microphone), wires up a
   * MediaRecorder, and starts recording in 1-second chunks with a
   * once-per-second duration callback.
   * @throws Re-throws any acquisition/setup error after reporting it via onError.
   */
  async startRecording(options: IRecordingOptions): Promise<void> {
    let videoStream: MediaStream | null = null;
    try {
      // Level monitoring holds the mic open; release it before recording.
      this.stopAudioMonitoring();
      // Get video stream based on mode
      const displayMediaOptions: DisplayMediaStreamOptions = {
        video: {
          displaySurface: options.mode === 'viewport' ? 'browser' : 'monitor'
        } as MediaTrackConstraints,
        audio: false
      };
      // Hint the picker towards the current tab for viewport recordings.
      if (options.mode === 'viewport') {
        (displayMediaOptions as any).preferCurrentTab = true;
      }
      videoStream = await navigator.mediaDevices.getDisplayMedia(displayMediaOptions);
      // If viewport mode, try to crop to the viewport element using the
      // Element Capture API (Chromium only); otherwise record the full tab.
      if (options.mode === 'viewport' && options.viewportElement) {
        try {
          if ('CropTarget' in window) {
            const cropTarget = await (window as any).CropTarget.fromElement(options.viewportElement);
            const [videoTrack] = videoStream.getVideoTracks();
            await (videoTrack as any).cropTo(cropTarget);
          }
        } catch (e) {
          console.warn('Element Capture not supported, recording full tab:', e);
        }
      }
      // Mix in microphone audio when requested; fall back to video-only on failure.
      let combinedStream = videoStream;
      if (options.audioDeviceId) {
        try {
          const audioStream = await navigator.mediaDevices.getUserMedia({
            audio: { deviceId: { exact: options.audioDeviceId } }
          });
          combinedStream = new MediaStream([
            ...videoStream.getVideoTracks(),
            ...audioStream.getAudioTracks()
          ]);
        } catch (audioError) {
          console.warn('Could not add audio:', audioError);
        }
      }
      // Store stream for cleanup
      this.currentStream = combinedStream;
      // Prefer VP9 when the browser supports it.
      const mimeType = MediaRecorder.isTypeSupported('video/webm;codecs=vp9')
        ? 'video/webm;codecs=vp9'
        : 'video/webm';
      this.mediaRecorder = new MediaRecorder(combinedStream, { mimeType });
      this.recordedChunks = [];
      this.mediaRecorder.ondataavailable = (e) => {
        if (e.data.size > 0) {
          this.recordedChunks.push(e.data);
        }
      };
      this.mediaRecorder.onstop = () => this.handleRecordingComplete();
      // Handle stream ending (user clicks "Stop sharing" in the browser UI)
      videoStream.getVideoTracks()[0].onended = () => {
        if (this._isRecording) {
          this.stopRecording();
          this.events.onStreamEnded?.();
        }
      };
      this.mediaRecorder.start(1000); // Capture in 1-second chunks
      // Start duration timer
      this._duration = 0;
      this.durationInterval = window.setInterval(() => {
        this._duration++;
        this.events.onDurationUpdate?.(this._duration);
      }, 1000);
      this._isRecording = true;
    } catch (error) {
      // BUGFIX: stop any tracks acquired before the failure so the browser's
      // capture indicator does not stay lit after a failed start (the original
      // leaked the display/mic streams here). Stopping a track twice is a no-op.
      videoStream?.getTracks().forEach(track => track.stop());
      if (this.currentStream) {
        this.currentStream.getTracks().forEach(track => track.stop());
        this.currentStream = null;
      }
      console.error('Error starting recording:', error);
      this._isRecording = false;
      this.events.onError?.(error as Error);
      throw error;
    }
  }

  /** Stops the active recorder (its onstop finishes the blob) and the duration timer. Safe to call when idle. */
  stopRecording(): void {
    if (this.mediaRecorder && this.mediaRecorder.state !== 'inactive') {
      this.mediaRecorder.stop();
    }
    if (this.durationInterval) {
      clearInterval(this.durationInterval);
      this.durationInterval = null;
    }
  }

  /** MediaRecorder onstop handler: assembles the blob, releases all tracks, notifies listeners. */
  private handleRecordingComplete(): void {
    // Create blob from recorded chunks
    this._recordedBlob = new Blob(this.recordedChunks, { type: 'video/webm' });
    // Stop all tracks
    if (this.currentStream) {
      this.currentStream.getTracks().forEach(track => track.stop());
      this.currentStream = null;
    }
    this._isRecording = false;
    this.events.onRecordingComplete?.(this._recordedBlob);
  }

  // ==================== Trim & Export ====================

  /**
   * Re-records the [trimStart, trimEnd] window of the given <video>
   * element into a new webm blob by playing it back through a canvas
   * capture in real time. Audio is preserved when the element's audio can
   * be routed through a MediaStreamAudioDestinationNode; otherwise the
   * export falls back to video-only.
   *
   * NOTE(review): createMediaElementSource can only be called once per
   * media element, so a second export of the same element takes the
   * video-only fallback path — confirm whether repeated exports matter.
   */
  async exportTrimmedVideo(
    videoElement: HTMLVideoElement,
    trimStart: number,
    trimEnd: number
  ): Promise<Blob> {
    return new Promise((resolve, reject) => {
      // Create a canvas for capturing frames
      const canvas = document.createElement('canvas');
      canvas.width = videoElement.videoWidth || 1280;
      canvas.height = videoElement.videoHeight || 720;
      const ctx = canvas.getContext('2d');
      if (!ctx) {
        reject(new Error('Could not get canvas context'));
        return;
      }
      // Create canvas stream for video
      const canvasStream = canvas.captureStream(30);
      // Try to capture audio from video element
      let combinedStream: MediaStream;
      try {
        // Create audio context to capture video's audio
        const audioCtx = new AudioContext();
        const source = audioCtx.createMediaElementSource(videoElement);
        const destination = audioCtx.createMediaStreamDestination();
        source.connect(destination);
        source.connect(audioCtx.destination); // Also play through speakers
        // Combine video (from canvas) and audio (from video element)
        combinedStream = new MediaStream([
          ...canvasStream.getVideoTracks(),
          ...destination.stream.getAudioTracks()
        ]);
        // Store audioCtx for cleanup
        const cleanup = () => {
          audioCtx.close();
        };
        this.recordTrimmedStream(videoElement, canvas, ctx, combinedStream, trimStart, trimEnd, cleanup, resolve, reject);
      } catch (audioError) {
        console.warn('Could not capture audio, recording video only:', audioError);
        combinedStream = canvasStream;
        this.recordTrimmedStream(videoElement, canvas, ctx, combinedStream, trimStart, trimEnd, () => {}, resolve, reject);
      }
    });
  }

  /**
   * Plays the source video from trimStart, draws each frame onto the
   * canvas, and records the given stream until trimEnd (or until the
   * video pauses/ends), then resolves with the recorded blob.
   */
  private recordTrimmedStream(
    video: HTMLVideoElement,
    canvas: HTMLCanvasElement,
    ctx: CanvasRenderingContext2D,
    stream: MediaStream,
    trimStart: number,
    trimEnd: number,
    cleanup: () => void,
    resolve: (blob: Blob) => void,
    reject: (error: Error) => void
  ): void {
    const mimeType = MediaRecorder.isTypeSupported('video/webm;codecs=vp9')
      ? 'video/webm;codecs=vp9'
      : 'video/webm';
    const recorder = new MediaRecorder(stream, { mimeType });
    const chunks: Blob[] = [];
    recorder.ondataavailable = (e) => {
      if (e.data.size > 0) {
        chunks.push(e.data);
      }
    };
    recorder.onstop = () => {
      cleanup();
      resolve(new Blob(chunks, { type: 'video/webm' }));
    };
    recorder.onerror = (e) => {
      cleanup();
      reject(new Error('Recording error: ' + e));
    };
    // Seek to trim start; recording begins once the seek completes.
    video.currentTime = trimStart;
    video.onseeked = () => {
      // Start recording
      recorder.start(100);
      // Start playing
      video.play();
      // Draw frames to canvas until the end of the selection.
      const drawFrame = () => {
        if (video.currentTime >= trimEnd || video.paused || video.ended) {
          video.pause();
          video.onseeked = null;
          // Give a small delay before stopping to ensure last frame is captured
          setTimeout(() => {
            if (recorder.state === 'recording') {
              recorder.stop();
            }
          }, 100);
          return;
        }
        ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
        requestAnimationFrame(drawFrame);
      };
      drawFrame();
    };
  }

  // ==================== Cleanup ====================

  /** Clears the recorded blob and counters so a fresh recording can start. */
  reset(): void {
    this._recordedBlob = null;
    this.recordedChunks = [];
    this._duration = 0;
    this._isRecording = false;
  }

  /** Releases every resource held by the service; call when the owner is destroyed. */
  dispose(): void {
    this.stopRecording();
    this.stopAudioMonitoring();
    this.reset();
    if (this.currentStream) {
      this.currentStream.getTracks().forEach(track => track.stop());
      this.currentStream = null;
    }
  }
}