console.log('voice.js loaded');

/**
 * Browser-side voice chat client.
 *
 * Peer audio is carried over WebRTC (RTCPeerConnection, STUN-only), while
 * signaling (join/poll/signal/leave) goes through `api_v1_voice.php` via
 * fetch long-polling rather than a WebSocket.
 *
 * Responsibilities visible in this class:
 *  - microphone capture and device selection (getUserMedia)
 *  - push-to-talk (PTT) and voice-activation (VOX) gating of the mic track
 *  - "whisper" targeting: temporarily transmitting only to selected peers
 *  - mute/deafen state and per-peer track enabling
 *  - DOM management of hidden <audio> elements for remote streams
 */
class VoiceChannel {
  /**
   * @param {*} ws - legacy WebSocket handle; unused (PHP signaling replaced it).
   * @param {Object} [settings] - audio/input settings; defaults below.
   */
  constructor(ws, settings) {
    // ws is ignored now as we use PHP signaling, but kept for compatibility
    this.settings = settings || {
      mode: 'vox',              // 'vox' (voice-activated) or 'ptt' (push-to-talk)
      pttKey: 'v',
      voxThreshold: 0.1,        // normalized 0..1 average volume trigger
      inputDevice: 'default',
      outputDevice: 'default',
      inputVolume: 1.0,
      outputVolume: 1.0,
      echoCancellation: true,
      noiseSuppression: true
    };
    console.log('VoiceChannel constructor called with settings:', this.settings);

    // Media / connection state
    this.localStream = null;      // MediaStream from getUserMedia (transmitted)
    this.analysisStream = null;   // cloned stream used only for VOX analysis
    this.peers = {};              // userId -> RTCPeerConnection
    this.participants = {};       // userId -> {name}
    this.currentChannelId = null;
    this.myPeerId = null;         // assigned by the server on join
    this.pollInterval = null;     // setInterval handle for signaling polls
    this.canSpeak = true;         // server-controlled speak permission
    this.remoteAudios = {};       // userId -> Audio element
    this.isSelfMuted = false;
    this.isDeafened = false;
    this.peerStates = {};         // userId -> { makingOffer, ignoreOffer } (perfect-negotiation state)

    // Whisper state
    this.whisperSettings = [];    // from DB
    this.whisperPeers = new Set(); // active whisper target peer_ids
    this.isWhispering = false;
    this.whisperListeners = [];   // NOTE(review): never populated in the visible code

    // Web Audio nodes for the VOX volume meter
    this.audioContext = null;
    this.analyser = null;
    this.microphone = null;
    this.scriptProcessor = null;
    this.inputGain = null;        // NOTE(review): declared but not used in the visible code

    // Talk-state flags
    this.isTalking = false;
    this.pttPressed = false;
    this.voxActive = false;
    this.lastVoiceTime = 0;
    this.voxHoldTime = 500;       // ms of silence before VOX releases

    // Track who is speaking to persist across UI refreshes
    this.speakingUsers = new Set();

    this.setupPTTListeners();
    this.loadWhisperSettings();

    window.addEventListener('beforeunload', () => {
      // We don't want to leave on page refresh if we want persistence
      // but we might want to tell the server we are "still here" soon.
      // Actually, for a simple refresh, we just let the session timeout or re-join.
    });

    // Auto-rejoin if we were in a channel (state survives refresh via sessionStorage)
    setTimeout(() => {
      const savedChannelId = sessionStorage.getItem('activeVoiceChannel');
      const savedPeerId = sessionStorage.getItem('activeVoicePeerId');
      if (savedChannelId) {
        console.log('Auto-rejoining voice channel:', savedChannelId);
        if (savedPeerId) this.myPeerId = savedPeerId;
        this.join(savedChannelId, true); // Pass true to indicate auto-rejoin
      }
    }, 200);
  }

  /**
   * Installs global keydown/keyup handlers for push-to-talk and whisper keys.
   * Key presses inside INPUT/TEXTAREA are ignored on keydown.
   * Matches `pttKey` against `e.key` or `e.code` (case-insensitive), plus a
   * special case mapping pttKey '0' to the Numpad0 code.
   */
  setupPTTListeners() {
    window.addEventListener('keydown', (e) => {
      // Ignore if in input field
      if (e.target.tagName === 'INPUT' || e.target.tagName === 'TEXTAREA') return;
      // Normal PTT
      if (this.settings.mode === 'ptt') {
        const isMatch = e.key.toLowerCase() === this.settings.pttKey.toLowerCase() ||
          (e.code && e.code.toLowerCase() === this.settings.pttKey.toLowerCase()) ||
          (this.settings.pttKey === '0' && e.code === 'Numpad0');
        if (isMatch) {
          if (!this.pttPressed) { // guard against key auto-repeat
            this.pttPressed = true;
            this.updateMuteState();
          }
          return;
        }
      }
      // Whispers
      this.whisperSettings.forEach(w => {
        if (e.key.toLowerCase() === w.whisper_key.toLowerCase()) {
          this.startWhisper(w);
        }
      });
    });
    window.addEventListener('keyup', (e) => {
      if (this.settings.mode === 'ptt') {
        const isMatch = e.key.toLowerCase() === this.settings.pttKey.toLowerCase() ||
          (e.code && e.code.toLowerCase() === this.settings.pttKey.toLowerCase()) ||
          (this.settings.pttKey === '0' && e.code === 'Numpad0');
        if (isMatch) {
          this.pttPressed = false;
          this.updateMuteState();
          return;
        }
      }
      // Whispers
      this.whisperSettings.forEach(w => {
        if (e.key.toLowerCase() === w.whisper_key.toLowerCase()) {
          this.stopWhisper(w);
        }
      });
    });
  }

  /**
   * Fetches the user's whisper key bindings from the server.
   * On success replaces `this.whisperSettings`; failures are logged only
   * (whispers simply stay unavailable).
   */
  async loadWhisperSettings() {
    try {
      const resp = await fetch('api_v1_voice.php?action=get_whispers');
      const data = await resp.json();
      if (data.success) {
        this.whisperSettings = data.whispers;
        console.log('VoiceChannel: Loaded whispers:', this.whisperSettings);
      }
    } catch (e) {
      console.error('Failed to load whispers in VoiceChannel:', e);
    }
  }

  setupWhisperListeners() {
    // This is called when settings are updated in
    // the UI
    this.loadWhisperSettings();
  }

  /**
   * Begins a whisper keyed by one whisper binding: resolves the binding's
   * target(s) to currently-active peer_ids, opens temporary peer connections
   * to any not yet connected, then re-evaluates mute state so transmission
   * is routed only to the whisper targets.
   * No-op if a whisper is already in progress.
   * @param {Object} config - one row of whisperSettings ({target_type, target_id, whisper_key, ...}).
   */
  async startWhisper(config) {
    if (this.isWhispering) return;
    console.log('Starting whisper to:', config.target_type, config.target_id);
    try {
      const resp = await fetch(`api_v1_voice.php?action=find_whisper_targets&target_type=${config.target_type}&target_id=${config.target_id}`);
      const data = await resp.json();
      if (data.success && data.targets.length > 0) {
        this.isWhispering = true;
        this.whisperPeers.clear();
        for (const target of data.targets) {
          if (target.peer_id === this.myPeerId) continue; // never whisper to self
          this.whisperPeers.add(target.peer_id);
          // Establish connection if not exists
          if (!this.peers[target.peer_id]) {
            console.log('Establishing temporary connection for whisper to:', target.peer_id);
            this.createPeerConnection(target.peer_id, true);
          }
        }
        this.updateMuteState();
      } else {
        console.log('No active targets found for whisper.');
      }
    } catch (e) {
      console.error('Whisper start error:', e);
    }
  }

  /**
   * Ends the active whisper and restores normal channel transmission.
   * @param {Object} config - the binding whose key was released (unused here).
   */
  stopWhisper(config) {
    if (!this.isWhispering) return;
    console.log('Stopping whisper');
    this.isWhispering = false;
    this.whisperPeers.clear();
    this.updateMuteState();
    // Optionally cleanup peers that are NOT in current channel
    // For now, keep them for future whispers to avoid re-handshake
  }

  /**
   * Joins a voice channel: acquires the microphone, sets up VOX analysis,
   * registers with the PHP signaling server, and starts the poll loop.
   * Persists channel/peer ids in sessionStorage so a refresh can auto-rejoin.
   * On microphone failure the join is aborted and the user is alerted.
   * @param {string|number} channelId
   * @param {boolean} [isAutoRejoin=false] - true when re-entering after a refresh.
   */
  async join(channelId, isAutoRejoin = false) {
    console.log('VoiceChannel.join process started for channel:', channelId, 'isAutoRejoin:', isAutoRejoin);
    if (this.currentChannelId === channelId && !isAutoRejoin) {
      console.log('Already in this channel');
      return;
    }
    // Switching channels: tear down the old one first.
    if (this.currentChannelId && this.currentChannelId != channelId) {
      console.log('Leaving previous channel:', this.currentChannelId);
      this.leave();
    }
    this.currentChannelId = channelId;
    sessionStorage.setItem('activeVoiceChannel', channelId);
    try {
      console.log('Requesting microphone access with device:', this.settings.inputDevice);
      const constraints = {
        audio: {
          echoCancellation: this.settings.echoCancellation,
          noiseSuppression: this.settings.noiseSuppression,
          autoGainControl: true
        },
        video: false
      };
      if
      (this.settings.inputDevice !== 'default') {
        constraints.audio.deviceId = { exact: this.settings.inputDevice };
      }
      this.localStream = await navigator.mediaDevices.getUserMedia(constraints);
      console.log('Microphone access granted');
      this.setMute(false); // Join unmuted by default (self-mute off)
      // Always setup VOX logic for volume meter and detection
      this.setupVOX();
      // Join via PHP (re-sends our old peer_id when auto-rejoining so the server can keep our slot)
      console.log('Calling API join...');
      const url = `api_v1_voice.php?action=join&room=${channelId}&name=${encodeURIComponent(window.currentUsername || 'Unknown')}${this.myPeerId ? '&peer_id='+this.myPeerId : ''}`;
      const resp = await fetch(url);
      const data = await resp.json();
      console.log('API join response:', data);
      if (data.success) {
        this.myPeerId = data.peer_id;
        this.canSpeak = data.can_speak !== false; // default to speakable unless server says otherwise
        sessionStorage.setItem('activeVoicePeerId', this.myPeerId);
        console.log('Joined room with peer_id:', this.myPeerId);
        // Start polling
        this.startPolling();
        this.updateVoiceUI();
      } else {
        console.error('API join failed:', data.error);
      }
    } catch (e) {
      console.error('Failed to join voice:', e);
      alert('Microphone access required for voice channels. Error: ' + e.message);
      this.currentChannelId = null;
    }
  }

  /**
   * (Re)starts the 500 ms signaling poll loop and performs one immediate poll.
   */
  startPolling() {
    if (this.pollInterval) clearInterval(this.pollInterval);
    this.pollInterval = setInterval(() => this.poll(), 500);
    this.poll(); // Initial poll
  }

  /**
   * One signaling poll: reports our mute/deafen flags, receives the current
   * participant roster and queued signals. Side effects:
   *  - opens peer connections to newly-seen participants,
   *  - closes connections/audio for participants who left (unless they are
   *    whisper targets or still marked speaking),
   *  - dispatches each queued signal to handleSignaling, then refreshes the UI.
   */
  async poll() {
    if (!this.myPeerId || !this.currentChannelId) return;
    try {
      const resp = await fetch(`api_v1_voice.php?action=poll&room=${this.currentChannelId}&peer_id=${this.myPeerId}&is_muted=${this.isSelfMuted ? 1 : 0}&is_deafened=${this.isDeafened ?
 1 : 0}`);
      const data = await resp.json();
      if (data.success) {
        this.canSpeak = data.can_speak !== false;
        // Update participants
        const oldPs = Object.keys(this.participants);
        this.participants = data.participants;
        const newPs = Object.keys(this.participants);
        // If new people joined, initiate offer if I'm the "older" one (not really necessary here, can just offer to anyone I don't have a peer for)
        newPs.forEach(pid => {
          if (pid !== this.myPeerId && !this.peers[pid]) {
            console.log('New peer found via poll:', pid);
            this.createPeerConnection(pid, true);
          }
        });
        // Cleanup left peers
        oldPs.forEach(pid => {
          if (!this.participants[pid] && this.peers[pid] && !this.whisperPeers.has(pid) && !this.speakingUsers.has(pid)) {
            console.log('Peer left or not in channel anymore:', pid);
            this.peers[pid].close();
            delete this.peers[pid];
            if (this.remoteAudios[pid]) {
              this.remoteAudios[pid].pause();
              this.remoteAudios[pid].remove();
              delete this.remoteAudios[pid];
            }
          }
        });
        // Handle incoming signals (sequentially, preserving server order)
        if (data.signals && data.signals.length > 0) {
          for (const sig of data.signals) {
            await this.handleSignaling(sig);
          }
        }
        this.updateVoiceUI();
      }
    } catch (e) {
      console.error('Polling error:', e);
    }
  }

  /**
   * Relays one signaling payload to another peer through the PHP server.
   * @param {string} to - destination peer_id.
   * @param {Object} data - signal body (offer/answer/ice_candidate/voice_speaking).
   */
  async sendSignal(to, data) {
    if (!this.myPeerId || !this.currentChannelId) return;
    await fetch(`api_v1_voice.php?action=signal&room=${this.currentChannelId}&peer_id=${this.myPeerId}&to=${to}&data=${encodeURIComponent(JSON.stringify(data))}`);
  }

  /**
   * Creates (or returns the cached) RTCPeerConnection for a peer, wiring up
   * negotiation, ICE, and remote-track playback. Uses Google STUN servers
   * only — no TURN, so symmetric-NAT pairs may fail to connect.
   * @param {string} userId - remote peer_id.
   * @param {boolean} isOfferor - if true, kick off an offer immediately.
   * @returns {RTCPeerConnection}
   */
  createPeerConnection(userId, isOfferor) {
    if (this.peers[userId]) return this.peers[userId];
    console.log('Creating PeerConnection for:', userId, 'as offeror:', isOfferor);
    if (!this.peerStates[userId]) {
      // Perfect-negotiation bookkeeping (see handleOffer for the politeness rule).
      this.peerStates[userId] = { makingOffer: false, ignoreOffer: false };
    }
    const pc = new RTCPeerConnection({
      iceServers: [
        { urls: 'stun:stun.l.google.com:19302' },
        { urls: 'stun:stun1.l.google.com:19302' }
      ]
    });
    this.peers[userId] = pc;
    pc.oniceconnectionstatechange = () => {
      console.log(`ICE Connection State with ${userId}: ${pc.iceConnectionState}`);
      if
      (pc.iceConnectionState === 'failed' || pc.iceConnectionState === 'disconnected') {
        console.log(`ICE failure with ${userId}, attempting to restart...`);
        // If it failed, we could try to renegotiate, but for now let's just wait for poll to maybe clean it up
      }
    };
    // Standard perfect-negotiation offer path: parameterless setLocalDescription()
    // creates the appropriate offer; makingOffer guards against glare.
    pc.onnegotiationneeded = async () => {
      try {
        this.peerStates[userId].makingOffer = true;
        await pc.setLocalDescription();
        this.sendSignal(userId, { type: 'offer', offer: pc.localDescription });
      } catch (err) {
        console.error('onnegotiationneeded error:', err);
      } finally {
        this.peerStates[userId].makingOffer = false;
      }
    };
    if (this.localStream) {
      this.localStream.getTracks().forEach(track => {
        console.log(`Adding track ${track.kind} to peer ${userId}`);
        pc.addTrack(track, this.localStream);
      });
    }
    pc.onicecandidate = (event) => {
      if (event.candidate) {
        this.sendSignal(userId, { type: 'ice_candidate', candidate: event.candidate });
      }
    };
    // Remote audio arrives here: attach it to a hidden <audio> element,
    // honoring current deafen state, output volume and output device.
    pc.ontrack = (event) => {
      console.log('Received remote track from:', userId, 'Stream count:', event.streams.length);
      const stream = event.streams[0] || new MediaStream([event.track]);
      // Ensure AudioContext is running
      if (this.audioContext && this.audioContext.state === 'suspended') {
        this.audioContext.resume();
      }
      if (this.remoteAudios[userId]) {
        console.log('Replacing existing audio element for:', userId);
        this.remoteAudios[userId].pause();
        this.remoteAudios[userId].srcObject = null;
        this.remoteAudios[userId].remove();
      }
      const remoteAudio = new Audio();
      remoteAudio.autoplay = true;
      remoteAudio.style.display = 'none';
      remoteAudio.srcObject = stream;
      remoteAudio.muted = this.isDeafened;
      remoteAudio.volume = this.settings.outputVolume || 1.0;
      // setSinkId is not available in all browsers; feature-detect it.
      if (this.settings.outputDevice !== 'default' && typeof remoteAudio.setSinkId === 'function') {
        remoteAudio.setSinkId(this.settings.outputDevice);
      }
      document.body.appendChild(remoteAudio);
      this.remoteAudios[userId] = remoteAudio;
      console.log('Playing remote audio for:', userId);
      remoteAudio.play().then(() => {
        console.log('Remote audio playing successfully for:', userId);
      }).catch(e => {
        console.warn('Autoplay prevented or play failed for:', userId, e);
        // In case of autoplay prevention, we might need a user gesture
      });
    };
    // Manual offer if explicitly requested (though onnegotiationneeded should handle it)
    if (isOfferor && pc.signalingState === 'stable') {
      pc.onnegotiationneeded();
    }
    return pc;
  }

  /**
   * Dispatches one incoming signal by type to the offer/answer/candidate
   * handlers, or to the speaking-indicator UI update.
   * @param {{from: string, data: Object}} sig
   */
  async handleSignaling(sig) {
    const from = sig.from;
    const data = sig.data;
    console.log('Handling signaling from:', from, 'type:', data.type);
    try {
      switch (data.type) {
        case 'offer':
          await this.handleOffer(from, data.offer);
          break;
        case 'answer':
          await this.handleAnswer(from, data.answer);
          break;
        case 'ice_candidate':
          await this.handleCandidate(from, data.candidate);
          break;
        case 'voice_speaking':
          // updateSpeakingUI is defined outside this view — presumably toggles
          // the speaking indicator for data.user_id; verify in the full file.
          this.updateSpeakingUI(data.user_id, data.speaking, data.is_whisper);
          break;
      }
    } catch (err) {
      console.error('Signaling error:', err);
    }
  }

  /**
   * Handles a remote SDP offer using the perfect-negotiation pattern:
   * on offer collision the impolite side (lower peer_id) ignores the incoming
   * offer; the polite side (higher peer_id) rolls back implicitly via
   * setRemoteDescription and answers.
   * @param {string} from - sender peer_id.
   * @param {RTCSessionDescriptionInit} offer
   */
  async handleOffer(from, offer) {
    const pc = this.createPeerConnection(from, false);
    const state = this.peerStates[from];
    const offerCollision = (offer.type === "offer") && (state.makingOffer || pc.signalingState !== "stable");
    // Politeness: higher peer_id is polite
    const isPolite = this.myPeerId > from;
    state.ignoreOffer = !isPolite && offerCollision;
    if (state.ignoreOffer) {
      console.log('Polite peer: ignoring offer from impolite peer to avoid collision', from);
      return;
    }
    await pc.setRemoteDescription(new RTCSessionDescription(offer));
    if (offer.type === "offer") {
      await pc.setLocalDescription(); // parameterless form creates the answer
      this.sendSignal(from, { type: 'answer', answer: pc.localDescription });
    }
  }

  /**
   * Applies a remote SDP answer to the existing connection (if any).
   * @param {string} from
   * @param {RTCSessionDescriptionInit} answer
   */
  async handleAnswer(from, answer) {
    const pc = this.peers[from];
    if (pc) {
      await pc.setRemoteDescription(new RTCSessionDescription(answer));
    }
  }

  /**
   * Adds a remote ICE candidate. Failures are expected (and suppressed)
   * while we are deliberately ignoring an offer from this peer.
   * @param {string} from
   * @param {RTCIceCandidateInit} candidate
   */
  async handleCandidate(from, candidate) {
    const pc = this.peers[from];
    const state = this.peerStates[from];
    try {
      if (pc) {
        await pc.addIceCandidate(new RTCIceCandidate(candidate));
      }
    } catch (err) {
      if (!state || !state.ignoreOffer) {
        console.warn('Failed to add ICE candidate', err);
      }
    }
  }

  /**
   * Builds the Web Audio analysis chain used for the input volume meter and
   * VOX detection: clone of localStream -> MediaStreamSource -> Analyser ->
   * ScriptProcessor -> zero-gain node -> destination. Safe to call repeatedly
   * (old nodes are disconnected first). Updates `this.currentVolume` on every
   * audio block and toggles `voxActive` around `voxThreshold`/`voxHoldTime`.
   * NOTE(review): ScriptProcessorNode is deprecated in favor of AudioWorklet,
   * but is kept here as-is.
   */
  setupVOX() {
    if (!this.localStream) {
      console.warn('Cannot setup VOX: no localStream');
      return;
    }
    console.log('Setting up VOX logic...');
    try {
      if (!this.audioContext) {
        this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
      }
      // Re-ensure context is running
      if (this.audioContext.state === 'suspended') {
        this.audioContext.resume().then(() => console.log('AudioContext resumed'));
      }
      // Cleanup old nodes
      if (this.scriptProcessor) {
        this.scriptProcessor.onaudioprocess = null;
        try { this.scriptProcessor.disconnect(); } catch(e) {}
      }
      if (this.microphone) {
        try { this.microphone.disconnect(); } catch(e) {}
      }
      this.analyser = this.audioContext.createAnalyser();
      this.analyser.fftSize = 512;
      // Use a cloned stream for analysis so VOX works even when localStream is muted/disabled
      if (this.analysisStream) {
        this.analysisStream.getTracks().forEach(t => t.stop());
      }
      this.analysisStream = this.localStream.clone();
      this.analysisStream.getAudioTracks().forEach(t => t.enabled = true); // Ensure analysis stream is NOT muted
      this.microphone = this.audioContext.createMediaStreamSource(this.analysisStream);
      this.scriptProcessor = this.audioContext.createScriptProcessor(2048, 1, 1);
      this.microphone.connect(this.analyser);
      this.analyser.connect(this.scriptProcessor);
      // Avoid feedback: connect to a gain node with 0 volume then to destination
      const silence = this.audioContext.createGain();
      silence.gain.value = 0;
      this.scriptProcessor.connect(silence);
      silence.connect(this.audioContext.destination);
      this.voxActive = false;
      this.currentVolume = 0;
      this.scriptProcessor.onaudioprocess = () => {
        // Average the byte frequency bins, normalize to 0..1.
        const array = new Uint8Array(this.analyser.frequencyBinCount);
        this.analyser.getByteFrequencyData(array);
        let values = 0;
        for (let i = 0; i < array.length; i++) values += array[i];
        const average = values / array.length;
        this.currentVolume = average / 255;
        if (this.settings.mode !== 'vox') {
          this.voxActive = false; // meter still runs, but VOX gating is off in PTT mode
          return;
        }
        if (this.currentVolume > this.settings.voxThreshold) {
          this.lastVoiceTime = Date.now();
          if (!this.voxActive) {
            this.voxActive = true;
            this.updateMuteState();
          }
        } else {
          // Hold the gate open for voxHoldTime ms after the last loud block.
          if (this.voxActive && Date.now() - this.lastVoiceTime > this.voxHoldTime) {
            this.voxActive = false;
            this.updateMuteState();
          }
        }
      };
      console.log('VOX logic setup complete');
    } catch (e) {
      console.error('Failed to setup VOX:', e);
    }
  }

  /** @returns {number} last measured input level, normalized 0..1. */
  getVolume() {
    return this.currentVolume || 0;
  }

  /**
   * Recomputes whether we should be transmitting (PTT pressed / VOX active /
   * whispering, gated by canSpeak), applies it to the audio tracks, and
   * broadcasts `voice_speaking` notifications. While whispering, channel
   * peers that are not whisper targets are told we are silent.
   */
  updateMuteState() {
    if (!this.localStream) return;
    // If we are not in a channel, we can still whisper!
    // But for normal talking, we need currentChannelId.
    let shouldTalk = (this.settings.mode === 'ptt') ? this.pttPressed : this.voxActive;
    if (this.canSpeak === false) {
      shouldTalk = false;
    }
    // Always allow talking if whispering
    if (this.isWhispering) {
      shouldTalk = true;
    }
    console.log('updateMuteState: shouldTalk =', shouldTalk, 'isWhispering =', this.isWhispering);
    if (this.isTalking !== shouldTalk || this.lastWhisperState !== this.isWhispering) {
      this.isTalking = shouldTalk;
      this.lastWhisperState = this.isWhispering;
      this.applyAudioState();
      this.updateSpeakingUI(window.currentUserId, shouldTalk, this.isWhispering);
      // Notify others in current channel
      const msg = {
        type: 'voice_speaking',
        channel_id: this.currentChannelId,
        user_id: window.currentUserId,
        speaking: shouldTalk,
        is_whisper: this.isWhispering
      };
      // Send to channel peers
      Object.keys(this.peers).forEach(pid => {
        // If we are whispering, only send voice_speaking to whisper targets
        // but actually it's better to notify channel peers that we are NOT talking to them
        if (this.isWhispering) {
          if (this.whisperPeers.has(pid)) {
            this.sendSignal(pid, msg);
          } else {
            // Tell channel peers we are silent to them
            this.sendSignal(pid, { ...msg, speaking: false });
          }
        } else {
          this.sendSignal(pid, msg);
        }
      });
      // Also notify whisper peers that are NOT in the channel
      if (this.isWhispering) {
        this.whisperPeers.forEach(pid => {
          if (!this.peers[pid]) {
            // This should have been established in startWhisper
          }
          else {
            this.sendSignal(pid, msg);
          }
        });
      }
    }
  }

  /**
   * Pushes the computed talk/mute/whisper state onto the actual audio tracks:
   * the local capture track is enabled only when transmitting, and each peer
   * connection's outgoing sender track is enabled per-peer (whisper targets
   * only while whispering, channel participants otherwise). Finishes by
   * refreshing the mute/deafen panel buttons.
   */
  applyAudioState() {
    if (this.localStream) {
      const shouldTransmit = !this.isSelfMuted && this.isTalking && (this.canSpeak || this.isWhispering);
      console.log('applyAudioState: transmitting =', shouldTransmit, '(whisper=', this.isWhispering, ')');
      this.localStream.getAudioTracks().forEach(track => {
        track.enabled = shouldTransmit;
      });
      // We also need to ensure the audio only goes to the right peers
      // In P2P, we do this by enabling/disabling the track in the peer connection
      // or by simply enabling/disabling the local track (which affects all peers).
      // To be truly private, we should only enable the track for whisper peers.
      Object.entries(this.peers).forEach(([pid, pc]) => {
        const sender = pc.getSenders().find(s => s.track && s.track.kind === 'audio');
        if (sender) {
          if (this.isWhispering) {
            sender.track.enabled = this.whisperPeers.has(pid);
          } else {
            // Normal mode: only send to people in the current channel participants
            sender.track.enabled = !!this.participants[pid];
          }
        }
      });
    }
    this.updateUserPanelButtons();
  }

  /**
   * Sets self-mute and re-applies audio state.
   * @param {boolean} mute
   */
  setMute(mute) {
    this.isSelfMuted = mute;
    this.applyAudioState();
  }

  /** Toggles self-mute; ignored when the server denies speak permission. */
  toggleMute() {
    if (this.canSpeak === false) return;
    this.setMute(!this.isSelfMuted);
  }

  /**
   * Toggles deafen: mutes/unmutes all remote <audio> elements.
   * Deafening also self-mutes (Discord-style coupling); undeafening does
   * NOT automatically unmute.
   */
  toggleDeafen() {
    this.isDeafened = !this.isDeafened;
    console.log('Setting deafen to:', this.isDeafened);
    Object.values(this.remoteAudios).forEach(audio => {
      audio.muted = this.isDeafened;
      if (!this.isDeafened) audio.volume = this.settings.outputVolume || 1.0;
    });
    // If we deafen, we usually also mute in Discord
    if (this.isDeafened && !this.isSelfMuted) {
      this.setMute(true);
    }
    this.applyAudioState();
  }

  /**
   * Sets playback volume on all remote audio elements.
   * @param {number|string} vol - 0..1, coerced via parseFloat.
   */
  setOutputVolume(vol) {
    this.settings.outputVolume = parseFloat(vol);
    Object.values(this.remoteAudios).forEach(audio => {
      audio.volume = this.settings.outputVolume;
    });
  }

  /**
   * Stores the input volume setting.
   * NOTE(review): only recorded — nothing in the visible code applies it to
   * the capture chain (inputGain is never connected); confirm intended use.
   * @param {number|string} vol
   */
  setInputVolume(vol) {
    this.settings.inputVolume = parseFloat(vol);
  }

  /**
   * Switches the capture device. If currently in a channel, acquires a new
   * stream with the same constraints, swaps it into every peer connection via
   * replaceTrack (no renegotiation), stops the old stream, and rebuilds VOX.
   * @param {string} deviceId - MediaDeviceInfo.deviceId or 'default'.
   */
  async setInputDevice(deviceId) {
    this.settings.inputDevice = deviceId;
    if (this.currentChannelId && this.localStream) {
      const
      constraints = {
        audio: {
          echoCancellation: this.settings.echoCancellation,
          noiseSuppression: this.settings.noiseSuppression,
          autoGainControl: true
        },
        video: false
      };
      if (deviceId !== 'default') {
        constraints.audio.deviceId = { exact: deviceId };
      }
      const newStream = await navigator.mediaDevices.getUserMedia(constraints);
      const newTrack = newStream.getAudioTracks()[0];
      Object.values(this.peers).forEach(pc => {
        const sender = pc.getSenders().find(s => s.track && s.track.kind === 'audio');
        if (sender) sender.replaceTrack(newTrack);
      });
      this.localStream.getTracks().forEach(t => t.stop());
      this.localStream = newStream;
      this.setupVOX();
      this.applyAudioState();
    }
  }

  /**
   * Switches the playback device on all remote audio elements (where the
   * browser supports setSinkId).
   * @param {string} deviceId
   */
  async setOutputDevice(deviceId) {
    this.settings.outputDevice = deviceId;
    Object.values(this.remoteAudios).forEach(audio => {
      if (typeof audio.setSinkId === 'function') {
        audio.setSinkId(deviceId).catch(e => console.error('setSinkId failed:', e));
      }
    });
  }

  /**
   * Re-acquires the microphone with the current echoCancellation /
   * noiseSuppression settings and hot-swaps the track into all peers
   * (same replaceTrack flow as setInputDevice).
   */
  async updateAudioConstraints() {
    if (this.currentChannelId && this.localStream) {
      console.log('Updating audio constraints:', this.settings.echoCancellation, this.settings.noiseSuppression);
      const constraints = {
        audio: {
          echoCancellation: this.settings.echoCancellation,
          noiseSuppression: this.settings.noiseSuppression,
          autoGainControl: true
        },
        video: false
      };
      if (this.settings.inputDevice !== 'default') {
        constraints.audio.deviceId = { exact: this.settings.inputDevice };
      }
      try {
        const newStream = await navigator.mediaDevices.getUserMedia(constraints);
        const newTrack = newStream.getAudioTracks()[0];
        Object.values(this.peers).forEach(pc => {
          const sender = pc.getSenders().find(s => s.track && s.track.kind === 'audio');
          if (sender) sender.replaceTrack(newTrack);
        });
        this.localStream.getTracks().forEach(t => t.stop());
        this.localStream = newStream;
        this.setupVOX();
        this.applyAudioState();
      } catch (e) {
        console.error('Failed to update audio constraints:', e);
      }
    }
  }

  /**
   * Syncs the user-panel mute/deafen buttons with current state; shows the
   * mute button as forced-on (dimmed, explanatory tooltip) when the server
   * denies speak permission.
   */
  updateUserPanelButtons() {
    const btnMute = document.getElementById('btn-panel-mute');
    const btnDeafen =
    document.getElementById('btn-panel-deafen');
    let displayMuted = this.isSelfMuted;
    if (this.canSpeak === false) {
      displayMuted = true;
    }
    if (btnMute) {
      btnMute.classList.toggle('active', displayMuted);
      btnMute.style.color = displayMuted ? '#f23f43' : 'var(--text-muted)';
      // NOTE(review): both branches are empty strings — the icon markup appears
      // to have been lost; confirm against the original asset/icons.
      btnMute.innerHTML = displayMuted ? '' : '';
      if (this.canSpeak === false) {
        btnMute.title = "You do not have permission to speak in this channel";
        btnMute.style.opacity = '0.5';
      } else {
        btnMute.title = "Mute";
        btnMute.style.opacity = '1';
      }
    }
    if (btnDeafen) {
      btnDeafen.classList.toggle('active', this.isDeafened);
      btnDeafen.style.color = this.isDeafened ? '#f23f43' : 'var(--text-muted)';
      // NOTE(review): empty-string icons here too — see btnMute above.
      btnDeafen.innerHTML = this.isDeafened ? '' : '';
    }
  }

  /**
   * Leaves the current voice channel and releases all resources: notifies the
   * server (keepalive fetch so it survives unload), stops capture and analysis
   * streams, tears down Web Audio nodes (AudioContext is suspended, not
   * closed, so it can be reused on rejoin), closes every peer connection,
   * removes remote audio elements, clears state, and refreshes the UI.
   */
  leave() {
    if (!this.currentChannelId) {
      console.log('VoiceChannel.leave called but no active channel');
      return;
    }
    console.log('Leaving voice channel:', this.currentChannelId, 'myPeerId:', this.myPeerId);
    const cid = this.currentChannelId;
    const pid = this.myPeerId;
    sessionStorage.removeItem('activeVoiceChannel');
    sessionStorage.removeItem('activeVoicePeerId');
    if (this.pollInterval) clearInterval(this.pollInterval);
    // Use keepalive for the leave fetch to ensure it reaches the server during page unload
    fetch(`api_v1_voice.php?action=leave&room=${cid}&peer_id=${pid}`, { keepalive: true });
    if (this.localStream) {
      console.log('Stopping local stream tracks');
      this.localStream.getTracks().forEach(track => {
        track.stop();
        console.log('Track stopped:', track.kind);
      });
      this.localStream = null;
    }
    if (this.analysisStream) {
      this.analysisStream.getTracks().forEach(track => track.stop());
      this.analysisStream = null;
    }
    if (this.scriptProcessor) {
      try {
        this.scriptProcessor.disconnect();
        this.scriptProcessor.onaudioprocess = null;
      } catch(e) {}
      this.scriptProcessor = null;
    }
    if (this.microphone) {
      try { this.microphone.disconnect(); } catch(e) {}
      this.microphone = null;
    }
    if (this.audioContext && this.audioContext.state !== 'closed') {
      // Keep AudioContext alive but suspended
      // to reuse it
      this.audioContext.suspend();
    }
    Object.values(this.peers).forEach(pc => pc.close());
    Object.values(this.remoteAudios).forEach(audio => {
      audio.pause();
      audio.remove();
      audio.srcObject = null;
    });
    this.peers = {};
    this.remoteAudios = {};
    this.participants = {};
    this.currentChannelId = null;
    this.myPeerId = null;
    this.speakingUsers.clear();
    // Also remove 'active' class from all voice items
    document.querySelectorAll('.voice-item').forEach(el => el.classList.remove('active'));
    this.updateVoiceUI();
  }

  /**
   * Refreshes the voice UI: delegates roster rendering to the static
   * VoiceChannel.refreshAllVoiceUsers (defined outside this view) and,
   * while connected, injects the bottom voice-controls bar if absent.
   * NOTE(review): method continues beyond this excerpt (innerHTML template
   * is truncated here).
   */
  updateVoiceUI() {
    // We now use a global update mechanism for all channels
    VoiceChannel.refreshAllVoiceUsers();
    if (this.currentChannelId) {
      if (!document.querySelector('.voice-controls')) {
        const controls = document.createElement('div');
        controls.className = 'voice-controls p-2 d-flex justify-content-between align-items-center border-top bg-dark';
        controls.style.backgroundColor = '#232428';
        controls.innerHTML = `