From 5dff56179a372ddd1734127bfe9410702d77511c Mon Sep 17 00:00:00 2001
From: Flatlogic Bot
Date: Tue, 17 Feb 2026 20:16:00 +0000
Subject: [PATCH] Final V2

---
 assets/js/voice.js | 34 ++++++++++++++++++++++++++++------
 requests.log       |  3 +++
 2 files changed, 31 insertions(+), 6 deletions(-)

diff --git a/assets/js/voice.js b/assets/js/voice.js
index f42f043..f8edd4d 100644
--- a/assets/js/voice.js
+++ b/assets/js/voice.js
@@ -19,6 +19,8 @@ class VoiceChannel {
         this.analyser = null;
         this.microphone = null;
         this.scriptProcessor = null;
+        this.delayNode = null;
+        this.voxDestination = null;
 
         this.isTalking = false;
         this.pttPressed = false;
@@ -85,10 +87,12 @@ class VoiceChannel {
             console.log('Requesting microphone access...');
             this.localStream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
             console.log('Microphone access granted');
-            this.setMute(true);
 
             // Always setup VOX logic for volume meter and detection
             this.setupVOX();
+
+            // Initial mute (on the buffered stream if it exists)
+            this.setMute(true);
 
             // Join via PHP
             console.log('Calling API join...');
@@ -227,9 +231,10 @@ class VoiceChannel {
         };
 
         if (this.localStream) {
-            this.localStream.getTracks().forEach(track => {
+            const streamToShare = (this.voxDestination && this.voxDestination.stream) ? this.voxDestination.stream : this.localStream;
+            streamToShare.getTracks().forEach(track => {
                 console.log(`Adding track ${track.kind} to peer ${userId}`);
-                pc.addTrack(track, this.localStream);
+                pc.addTrack(track, streamToShare);
             });
         }
 
@@ -387,6 +392,14 @@ class VoiceChannel {
         this.microphone = this.audioContext.createMediaStreamSource(this.analysisStream);
         this.scriptProcessor = this.audioContext.createScriptProcessor(2048, 1, 1);
 
+        // Setup Delay Buffer for VOX
+        this.delayNode = this.audioContext.createDelay(1.0);
+        this.delayNode.delayTime.value = 0.3; // 300ms buffer
+        this.voxDestination = this.audioContext.createMediaStreamDestination();
+
+        this.microphone.connect(this.delayNode);
+        this.delayNode.connect(this.voxDestination);
+
         this.microphone.connect(this.analyser);
         this.analyser.connect(this.scriptProcessor);
 
@@ -457,9 +470,12 @@ class VoiceChannel {
     }
 
     setMute(mute) {
-        if (this.localStream) {
-            console.log('Setting mute to:', mute);
-            this.localStream.getAudioTracks().forEach(track => { track.enabled = !mute; });
+        // We mute the destination stream (delayed) instead of the localStream source
+        // to ensure the delay buffer keeps filling with live audio.
+        const streamToMute = (this.voxDestination && this.voxDestination.stream) ? this.voxDestination.stream : this.localStream;
+        if (streamToMute) {
+            console.log('Setting mute to:', mute, 'on stream:', streamToMute.id);
+            streamToMute.getAudioTracks().forEach(track => { track.enabled = !mute; });
         }
     }
 
@@ -509,6 +525,12 @@ class VoiceChannel {
             try { this.microphone.disconnect(); } catch(e) {}
             this.microphone = null;
         }
+        if (this.delayNode) {
+            try { this.delayNode.disconnect(); } catch(e) {}
+            this.delayNode = null;
+        }
+        this.voxDestination = null;
+
         if (this.audioContext && this.audioContext.state !== 'closed') {
             try { this.audioContext.suspend(); } catch(e) {}
         }
diff --git a/requests.log b/requests.log
index 7812737..c5c7cb6 100644
--- a/requests.log
+++ b/requests.log
@@ -644,3 +644,6 @@
 2026-02-17 19:14:39 - GET /?fl_project=38527 - POST: []
 2026-02-17 19:15:28 - GET /?fl_project=38527 - POST: []
 2026-02-17 19:16:15 - GET /?fl_project=38527 - POST: []
+2026-02-17 19:20:01 - GET /?fl_project=38527 - POST: []
+2026-02-17 20:01:09 - GET / - POST: []
+2026-02-17 20:15:23 - GET /?fl_project=38527 - POST: []
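Note (not part of the patch): below is a minimal standalone sketch of the delayed-VOX capture graph this change introduces, assuming a browser with Web Audio and getUserMedia support. The function name createDelayedVoxCapture and the returned object shape are illustrative only; the patch itself wires the same nodes directly into VoiceChannel and keeps its ScriptProcessor-based level detection.

// Sketch: raw mic -> DelayNode -> MediaStreamAudioDestinationNode.
// An AnalyserNode taps the live signal for VOX detection, while peers
// receive the delayed copy, so un-muting on voice onset does not clip
// the first ~300 ms of speech.
async function createDelayedVoxCapture({ delaySeconds = 0.3 } = {}) {
  // Raw microphone input; never muted, so the delay buffer keeps filling.
  const localStream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });

  const audioContext = new AudioContext();
  const microphone = audioContext.createMediaStreamSource(localStream);

  // Live analysis tap for the volume meter / VOX threshold.
  const analyser = audioContext.createAnalyser();
  analyser.fftSize = 2048;
  microphone.connect(analyser);

  // Delay buffer feeding a capturable destination stream.
  const delayNode = audioContext.createDelay(1.0);   // max 1 s of delay
  delayNode.delayTime.value = delaySeconds;          // 300 ms by default, matching the patch
  const voxDestination = audioContext.createMediaStreamDestination();
  microphone.connect(delayNode);
  delayNode.connect(voxDestination);

  return {
    localStream,
    analyser,
    // The delayed stream is what gets shared over WebRTC and muted.
    outgoingStream: voxDestination.stream,
    setMute(mute) {
      voxDestination.stream.getAudioTracks().forEach(track => { track.enabled = !mute; });
    },
  };
}

// Usage sketch: share the delayed stream, not the raw microphone.
//   const capture = await createDelayedVoxCapture();
//   capture.outgoingStream.getTracks().forEach(track => pc.addTrack(track, capture.outgoingStream));
//   capture.setMute(true); // stay muted until VOX or push-to-talk opens the channel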