console.log('voice.js loaded');

class VoiceChannel {
    constructor(ws, settings) {
        // ws is ignored now as we use PHP signaling, but kept for compatibility
        this.settings = settings || {
            mode: 'vox',
            pttKey: 'v',
            voxThreshold: 0.1,
            inputDevice: 'default',
            outputDevice: 'default',
            inputVolume: 1.0,
            outputVolume: 1.0,
            echoCancellation: true,
            noiseSuppression: true
        };
        console.log('VoiceChannel constructor called with settings:', this.settings);
        this.localStream = null;
        this.analysisStream = null;
        this.peers = {}; // userId -> RTCPeerConnection
        this.participants = {}; // userId -> {name}
        this.currentChannelId = null;
        this.myPeerId = null;
        this.pollInterval = null;
        this.remoteAudios = {}; // userId -> Audio element
        this.isMuted = false;
        this.isDeafened = false;

        this.audioContext = null;
        this.analyser = null;
        this.microphone = null;
        this.scriptProcessor = null;
        this.inputGain = null;

        this.isTalking = false;
        this.pttPressed = false;
        this.voxActive = false;
        this.lastVoiceTime = 0;
        this.voxHoldTime = 500;

        // Track who is speaking to persist across UI refreshes
        this.speakingUsers = new Set();

        this.setupPTTListeners();
        window.addEventListener('beforeunload', () => {
            // Intentionally empty: we don't tell the server we're leaving on unload,
            // so a simple refresh keeps the session alive (see auto-rejoin below);
            // stale sessions just time out server-side.
        });

        // Auto-rejoin if we were in a channel
        setTimeout(() => {
            const savedChannelId = sessionStorage.getItem('activeVoiceChannel');
            const savedPeerId = sessionStorage.getItem('activeVoicePeerId');
            if (savedChannelId) {
                console.log('Auto-rejoining voice channel:', savedChannelId);
                if (savedPeerId) this.myPeerId = savedPeerId;
                this.join(savedChannelId, true); // Pass true to indicate auto-rejoin
            }
        }, 200);
    }

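    // Push-to-talk: global keydown/keyup listeners toggle pttPressed while the
    // configured key is held. Ignored when typing in inputs or when mode !== 'ptt'.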
    setupPTTListeners() {
        window.addEventListener('keydown', (e) => {
            // Ignore if in input field
            if (e.target.tagName === 'INPUT' || e.target.tagName === 'TEXTAREA') return;

            if (this.settings.mode !== 'ptt') return;

            const isMatch = e.key.toLowerCase() === this.settings.pttKey.toLowerCase() ||
                (e.code && e.code.toLowerCase() === this.settings.pttKey.toLowerCase()) ||
                (this.settings.pttKey === '0' && e.code === 'Numpad0');

            if (isMatch) {
                if (!this.pttPressed) {
                    console.log('PTT Key Pressed:', e.key, e.code, 'Expected:', this.settings.pttKey);
                    this.pttPressed = true;
                    this.updateMuteState();
                }
            }
        });

        window.addEventListener('keyup', (e) => {
            if (this.settings.mode !== 'ptt') return;

            const isMatch = e.key.toLowerCase() === this.settings.pttKey.toLowerCase() ||
                (e.code && e.code.toLowerCase() === this.settings.pttKey.toLowerCase()) ||
                (this.settings.pttKey === '0' && e.code === 'Numpad0');

            if (isMatch) {
                console.log('PTT Key Released:', e.key, e.code, 'Expected:', this.settings.pttKey);
                this.pttPressed = false;
                this.updateMuteState();
            }
        });
    }

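    // Join a voice channel: grab the microphone (honoring the selected input device),
    // start muted, set up VOX analysis, register with the PHP signaling API, then
    // begin polling for peers and signals.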
    async join(channelId, isAutoRejoin = false) {
        console.log('VoiceChannel.join process started for channel:', channelId, 'isAutoRejoin:', isAutoRejoin);
        if (this.currentChannelId === channelId && !isAutoRejoin) {
            console.log('Already in this channel');
            return;
        }
        if (this.currentChannelId && this.currentChannelId != channelId) {
            console.log('Leaving previous channel:', this.currentChannelId);
            this.leave();
        }

        this.currentChannelId = channelId;
        sessionStorage.setItem('activeVoiceChannel', channelId);

        try {
            console.log('Requesting microphone access with device:', this.settings.inputDevice);
            const constraints = {
                audio: {
                    echoCancellation: this.settings.echoCancellation,
                    noiseSuppression: this.settings.noiseSuppression,
                    autoGainControl: true
                },
                video: false
            };
            if (this.settings.inputDevice !== 'default') {
                constraints.audio.deviceId = { exact: this.settings.inputDevice };
            }
            this.localStream = await navigator.mediaDevices.getUserMedia(constraints);
            console.log('Microphone access granted');
            this.setMute(true);

            // Always setup VOX logic for volume meter and detection
            this.setupVOX();

            // Join via PHP
            console.log('Calling API join...');
            const url = `api_v1_voice.php?action=join&room=${channelId}&name=${encodeURIComponent(window.currentUsername || 'Unknown')}${this.myPeerId ? '&peer_id=' + this.myPeerId : ''}`;
            const resp = await fetch(url);
            const data = await resp.json();
            console.log('API join response:', data);

            if (data.success) {
                this.myPeerId = data.peer_id;
                sessionStorage.setItem('activeVoicePeerId', this.myPeerId);
                console.log('Joined room with peer_id:', this.myPeerId);

                // Start polling
                this.startPolling();
                this.updateVoiceUI();
            } else {
                console.error('API join failed:', data.error);
            }
        } catch (e) {
            console.error('Failed to join voice:', e);
            alert('Microphone access required for voice channels. Error: ' + e.message);
            this.currentChannelId = null;
        }
    }

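    // Signaling transport: instead of a WebSocket, the client polls the PHP API once
    // per second. Each poll reports our mute/deafen state, returns the current
    // participant list, and delivers any queued signaling messages (offers, answers,
    // ICE candidates, speaking notifications). sendSignal() queues a message for a
    // specific peer through the same API.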
    startPolling() {
        if (this.pollInterval) clearInterval(this.pollInterval);
        this.pollInterval = setInterval(() => this.poll(), 1000);
        this.poll(); // Initial poll
    }

    async poll() {
        if (!this.myPeerId || !this.currentChannelId) return;

        try {
            const resp = await fetch(`api_v1_voice.php?action=poll&room=${this.currentChannelId}&peer_id=${this.myPeerId}&is_muted=${this.isMuted ? 1 : 0}&is_deafened=${this.isDeafened ? 1 : 0}`);
            const data = await resp.json();

            if (data.success) {
                // Update participants
                const oldPs = Object.keys(this.participants);
                this.participants = data.participants;
                const newPs = Object.keys(this.participants);

                // Offer to every participant we don't have a peer connection for yet
                newPs.forEach(pid => {
                    if (pid !== this.myPeerId && !this.peers[pid]) {
                        console.log('New peer found via poll:', pid);
                        this.createPeerConnection(pid, true);
                    }
                });

                // Clean up connections for peers that left
                oldPs.forEach(pid => {
                    if (!this.participants[pid] && this.peers[pid]) {
                        console.log('Peer left:', pid);
                        this.peers[pid].close();
                        delete this.peers[pid];
                    }
                });

                // Handle incoming signals
                if (data.signals && data.signals.length > 0) {
                    for (const sig of data.signals) {
                        await this.handleSignaling(sig);
                    }
                }

                this.updateVoiceUI();
            }
        } catch (e) {
            console.error('Polling error:', e);
        }
    }

    async sendSignal(to, data) {
        if (!this.myPeerId || !this.currentChannelId) return;
        await fetch(`api_v1_voice.php?action=signal&room=${this.currentChannelId}&peer_id=${this.myPeerId}&to=${to}&data=${encodeURIComponent(JSON.stringify(data))}`);
    }

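    // Create (or return the existing) RTCPeerConnection for a peer. Local audio tracks
    // are attached, ICE candidates are relayed through sendSignal(), and the remote
    // track is played through a hidden <audio> element that respects the current
    // deafen state, output volume and output device. If isOfferor is true, this side
    // creates and sends the SDP offer.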
    createPeerConnection(userId, isOfferor) {
        if (this.peers[userId]) return this.peers[userId];

        console.log('Creating PeerConnection for:', userId, 'as offeror:', isOfferor);
        const pc = new RTCPeerConnection({
            iceServers: [
                { urls: 'stun:stun.l.google.com:19302' },
                { urls: 'stun:stun1.l.google.com:19302' }
            ]
        });

        this.peers[userId] = pc;

        pc.oniceconnectionstatechange = () => {
            console.log(`ICE Connection State with ${userId}: ${pc.iceConnectionState}`);
        };

        if (this.localStream) {
            this.localStream.getTracks().forEach(track => {
                console.log(`Adding track ${track.kind} to peer ${userId}`);
                pc.addTrack(track, this.localStream);
            });
        }

        pc.onicecandidate = (event) => {
            if (event.candidate) {
                this.sendSignal(userId, { type: 'ice_candidate', candidate: event.candidate });
            }
        };

        pc.ontrack = (event) => {
            console.log('Received remote track from:', userId, 'Stream count:', event.streams.length);
            const stream = event.streams[0] || new MediaStream([event.track]);

            if (this.remoteAudios[userId]) {
                console.log('Replacing existing audio element for:', userId);
                this.remoteAudios[userId].pause();
                this.remoteAudios[userId].srcObject = null;
                this.remoteAudios[userId].remove();
            }

            const remoteAudio = new Audio();
            remoteAudio.autoplay = true;
            remoteAudio.style.display = 'none';
            remoteAudio.srcObject = stream;
            remoteAudio.muted = this.isDeafened;
            remoteAudio.volume = this.settings.outputVolume || 1.0;
            if (this.settings.outputDevice !== 'default' && typeof remoteAudio.setSinkId === 'function') {
                remoteAudio.setSinkId(this.settings.outputDevice).catch(e => console.warn('setSinkId failed:', e));
            }
            document.body.appendChild(remoteAudio);
            this.remoteAudios[userId] = remoteAudio;

            console.log('Playing remote audio for:', userId);
            remoteAudio.play().then(() => {
                console.log('Remote audio playing successfully for:', userId);
            }).catch(e => {
                console.warn('Autoplay prevented or play failed for:', userId, e);
                // Autoplay prevention would need a user gesture, but the user just
                // clicked a channel so it should be fine.
            });
        };

        if (isOfferor) {
            pc.createOffer().then(offer => {
                return pc.setLocalDescription(offer);
            }).then(() => {
                this.sendSignal(userId, { type: 'offer', offer: pc.localDescription });
            });
        }

        return pc;
    }

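    // Dispatch a signaling message received via poll(): SDP offers/answers and ICE
    // candidates drive the WebRTC handshake, while 'voice_speaking' only updates the
    // speaking indicator in the UI.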
    async handleSignaling(sig) {
        const from = sig.from;
        const data = sig.data;

        console.log('Handling signaling from:', from, 'type:', data.type);

        switch (data.type) {
            case 'offer':
                await this.handleOffer(from, data.offer);
                break;
            case 'answer':
                await this.handleAnswer(from, data.answer);
                break;
            case 'ice_candidate':
                await this.handleCandidate(from, data.candidate);
                break;
            case 'voice_speaking':
                this.updateSpeakingUI(data.user_id, data.speaking);
                break;
        }
    }

    async handleOffer(from, offer) {
        const pc = this.createPeerConnection(from, false);
        await pc.setRemoteDescription(new RTCSessionDescription(offer));
        const answer = await pc.createAnswer();
        await pc.setLocalDescription(answer);
        this.sendSignal(from, { type: 'answer', answer: pc.localDescription });
    }

    async handleAnswer(from, answer) {
        const pc = this.peers[from];
        if (pc) await pc.setRemoteDescription(new RTCSessionDescription(answer));
    }

    async handleCandidate(from, candidate) {
        const pc = this.peers[from];
        if (pc) await pc.addIceCandidate(new RTCIceCandidate(candidate));
    }

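    // Voice-activity detection and input metering. A clone of the mic stream is fed
    // through an AnalyserNode + ScriptProcessorNode (deprecated, but widely supported)
    // so levels can be measured even while the outgoing track is muted. In 'vox' mode,
    // crossing voxThreshold opens transmission, which stays open for voxHoldTime ms
    // after the level drops.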
    setupVOX() {
        if (!this.localStream) {
            console.warn('Cannot setup VOX: no localStream');
            return;
        }

        console.log('Setting up VOX logic...');
        try {
            if (!this.audioContext) {
                this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
            }

            // Re-ensure context is running
            if (this.audioContext.state === 'suspended') {
                this.audioContext.resume().then(() => console.log('AudioContext resumed'));
            }

            // Cleanup old nodes
            if (this.scriptProcessor) {
                this.scriptProcessor.onaudioprocess = null;
                try { this.scriptProcessor.disconnect(); } catch (e) {}
            }
            if (this.microphone) {
                try { this.microphone.disconnect(); } catch (e) {}
            }

            this.analyser = this.audioContext.createAnalyser();
            this.analyser.fftSize = 512;

            // Use a cloned stream for analysis so VOX works even when localStream is muted/disabled
            if (this.analysisStream) {
                this.analysisStream.getTracks().forEach(t => t.stop());
            }
            this.analysisStream = this.localStream.clone();
            this.analysisStream.getAudioTracks().forEach(t => t.enabled = true); // Ensure analysis stream is NOT muted

            this.microphone = this.audioContext.createMediaStreamSource(this.analysisStream);
            this.scriptProcessor = this.audioContext.createScriptProcessor(2048, 1, 1);

            this.microphone.connect(this.analyser);
            this.analyser.connect(this.scriptProcessor);

            // Avoid feedback: connect to a gain node with 0 volume then to destination
            const silence = this.audioContext.createGain();
            silence.gain.value = 0;
            this.scriptProcessor.connect(silence);
            silence.connect(this.audioContext.destination);

            this.voxActive = false;
            this.currentVolume = 0;

            this.scriptProcessor.onaudioprocess = () => {
                const array = new Uint8Array(this.analyser.frequencyBinCount);
                this.analyser.getByteFrequencyData(array);
                let values = 0;
                for (let i = 0; i < array.length; i++) values += array[i];
                const average = values / array.length;
                this.currentVolume = average / 255;

                if (this.settings.mode !== 'vox') {
                    this.voxActive = false;
                    return;
                }

                if (this.currentVolume > this.settings.voxThreshold) {
                    this.lastVoiceTime = Date.now();
                    if (!this.voxActive) {
                        this.voxActive = true;
                        this.updateMuteState();
                    }
                } else {
                    if (this.voxActive && Date.now() - this.lastVoiceTime > this.voxHoldTime) {
                        this.voxActive = false;
                        this.updateMuteState();
                    }
                }
            };
            console.log('VOX logic setup complete');
        } catch (e) {
            console.error('Failed to setup VOX:', e);
        }
    }

    getVolume() {
        return this.currentVolume || 0;
    }

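    // Decide whether we should be transmitting (PTT key held or VOX active), flip the
    // outgoing mute state when that changes, and notify other participants so they can
    // show the speaking indicator.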
    updateMuteState() {
        if (!this.currentChannelId || !this.localStream) return;
        let shouldTalk = (this.settings.mode === 'ptt') ? this.pttPressed : this.voxActive;
        console.log('updateMuteState: shouldTalk =', shouldTalk, 'mode =', this.settings.mode);
        if (this.isTalking !== shouldTalk) {
            this.isTalking = shouldTalk;
            this.setMute(!shouldTalk);
            this.updateSpeakingUI(window.currentUserId, shouldTalk);

            // Notify others
            const msg = { type: 'voice_speaking', channel_id: this.currentChannelId, user_id: window.currentUserId, speaking: shouldTalk };
            if (this.ws && this.ws.readyState === WebSocket.OPEN) {
                // Legacy WebSocket path; this.ws is not set when using PHP signaling
                this.ws.send(JSON.stringify(msg));
            } else {
                Object.keys(this.peers).forEach(pid => {
                    this.sendSignal(pid, msg);
                });
            }
        }
    }

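    // Local audio controls: setMute() toggles the outgoing track, toggleDeafen()
    // mutes every remote <audio> element (and also mutes the mic, Discord-style),
    // and the volume setters adjust playback / store the input level.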
    setMute(mute) {
        this.isMuted = mute;
        if (this.localStream) {
            console.log('Setting mute to:', mute);
            this.localStream.getAudioTracks().forEach(track => { track.enabled = !mute; });
        }
        this.updateUserPanelButtons();
    }

    toggleMute() {
        this.setMute(!this.isMuted);
    }

    toggleDeafen() {
        this.isDeafened = !this.isDeafened;
        console.log('Setting deafen to:', this.isDeafened);
        Object.values(this.remoteAudios).forEach(audio => {
            audio.muted = this.isDeafened;
            if (!this.isDeafened) audio.volume = this.settings.outputVolume || 1.0;
        });
        // Deafening also mutes, as in Discord. Undeafening does not automatically
        // unmute; we just refresh the UI.
        if (this.isDeafened && !this.isMuted) {
            this.setMute(true);
        }
        this.updateUserPanelButtons();
    }

    setOutputVolume(vol) {
        this.settings.outputVolume = parseFloat(vol);
        Object.values(this.remoteAudios).forEach(audio => {
            audio.volume = this.settings.outputVolume;
        });
    }

    setInputVolume(vol) {
        this.settings.inputVolume = parseFloat(vol);
        // Only stored for now; changing the transmitted level would require inserting
        // a GainNode into the outgoing stream.
    }

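    // Device / constraint switching: acquire a new stream with the requested device or
    // echo/noise settings, swap the audio track into every existing peer connection via
    // RTCRtpSender.replaceTrack(), then rebuild VOX on the new stream.
    // setOutputDevice() only re-routes the remote <audio> elements with setSinkId().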
    async setInputDevice(deviceId) {
        this.settings.inputDevice = deviceId;
        if (this.currentChannelId && this.localStream) {
            // Switch the live track rather than re-joining
            const constraints = {
                audio: {
                    echoCancellation: this.settings.echoCancellation,
                    noiseSuppression: this.settings.noiseSuppression,
                    autoGainControl: true
                },
                video: false
            };
            if (deviceId !== 'default') {
                constraints.audio.deviceId = { exact: deviceId };
            }
            try {
                const newStream = await navigator.mediaDevices.getUserMedia(constraints);
                const newTrack = newStream.getAudioTracks()[0];

                Object.values(this.peers).forEach(pc => {
                    const sender = pc.getSenders().find(s => s.track && s.track.kind === 'audio');
                    if (sender) sender.replaceTrack(newTrack);
                });

                this.localStream.getTracks().forEach(t => t.stop());
                this.localStream = newStream;
                this.setupVOX();
                this.setMute(this.isMuted);
            } catch (e) {
                console.error('Failed to switch input device:', e);
            }
        }
    }

    async setOutputDevice(deviceId) {
        this.settings.outputDevice = deviceId;
        Object.values(this.remoteAudios).forEach(audio => {
            if (typeof audio.setSinkId === 'function') {
                audio.setSinkId(deviceId).catch(e => console.error('setSinkId failed:', e));
            }
        });
    }

    async updateAudioConstraints() {
        if (this.currentChannelId && this.localStream) {
            console.log('Updating audio constraints:', this.settings.echoCancellation, this.settings.noiseSuppression);
            const constraints = {
                audio: {
                    echoCancellation: this.settings.echoCancellation,
                    noiseSuppression: this.settings.noiseSuppression,
                    autoGainControl: true
                },
                video: false
            };
            if (this.settings.inputDevice !== 'default') {
                constraints.audio.deviceId = { exact: this.settings.inputDevice };
            }
            try {
                const newStream = await navigator.mediaDevices.getUserMedia(constraints);
                const newTrack = newStream.getAudioTracks()[0];

                Object.values(this.peers).forEach(pc => {
                    const sender = pc.getSenders().find(s => s.track && s.track.kind === 'audio');
                    if (sender) sender.replaceTrack(newTrack);
                });

                this.localStream.getTracks().forEach(t => t.stop());
                this.localStream = newStream;
                this.setupVOX();
                this.setMute(this.isMuted);
            } catch (e) {
                console.error('Failed to update audio constraints:', e);
            }
        }
    }

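    // Sync the mute/deafen buttons in the user panel with the current state
    // (red, crossed-out icon while active).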
    updateUserPanelButtons() {
        const btnMute = document.getElementById('btn-panel-mute');
        const btnDeafen = document.getElementById('btn-panel-deafen');

        if (btnMute) {
            btnMute.classList.toggle('active', this.isMuted);
            btnMute.style.color = this.isMuted ? '#f23f43' : 'var(--text-muted)';
            btnMute.innerHTML = this.isMuted ?
                '<svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"><line x1="1" y1="1" x2="23" y2="23"></line><path d="M9 9v3a3 3 0 0 0 5.12 2.12M15 9.34V4a3 3 0 0 0-5.94-.6"></path><path d="M17 16.95A7 7 0 0 1 5 12v-2m14 0v2a7 7 0 0 1-.11 1.23"></path><line x1="12" y1="19" x2="12" y2="23"></line><line x1="8" y1="23" x2="16" y2="23"></line></svg>' :
                '<svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"><path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path><path d="M19 10v2a7 7 0 0 1-14 0v-2"></path><line x1="12" y1="19" x2="12" y2="23"></line><line x1="8" y1="23" x2="16" y2="23"></line></svg>';
        }

        if (btnDeafen) {
            btnDeafen.classList.toggle('active', this.isDeafened);
            btnDeafen.style.color = this.isDeafened ? '#f23f43' : 'var(--text-muted)';
            btnDeafen.innerHTML = this.isDeafened ?
                '<svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"><line x1="1" y1="1" x2="23" y2="23"></line><path d="M8.85 4.11A9 9 0 1 1 20 12"></path><path d="M11.64 6.64A5 5 0 1 1 15 10"></path></svg>' :
                '<svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"><path d="M3 18v-6a9 9 0 0 1 18 0v6"></path><path d="M21 19a2 2 0 0 1-2 2h-1a2 2 0 0 1-2-2v-3a2 2 0 0 1 2-2h3zM3 19a2 2 0 0 0 2 2h1a2 2 0 0 0 2-2v-3a2 2 0 0 0-2-2H3z"></path></svg>';
        }
    }

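    // Tear everything down: notify the server (keepalive so the request survives page
    // unload), stop the local and analysis streams, detach the analysis nodes, close
    // all peer connections, remove remote audio elements and clear the saved session.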
    leave() {
        if (!this.currentChannelId) {
            console.log('VoiceChannel.leave called but no active channel');
            return;
        }
        console.log('Leaving voice channel:', this.currentChannelId, 'myPeerId:', this.myPeerId);
        const cid = this.currentChannelId;
        const pid = this.myPeerId;

        sessionStorage.removeItem('activeVoiceChannel');
        sessionStorage.removeItem('activeVoicePeerId');
        if (this.pollInterval) clearInterval(this.pollInterval);

        // Use keepalive for the leave fetch to ensure it reaches the server during page unload
        fetch(`api_v1_voice.php?action=leave&room=${cid}&peer_id=${pid}`, { keepalive: true });

        if (this.localStream) {
            console.log('Stopping local stream tracks');
            this.localStream.getTracks().forEach(track => {
                track.stop();
                console.log('Track stopped:', track.kind);
            });
            this.localStream = null;
        }
        if (this.analysisStream) {
            this.analysisStream.getTracks().forEach(track => track.stop());
            this.analysisStream = null;
        }

        if (this.scriptProcessor) {
            try {
                this.scriptProcessor.disconnect();
                this.scriptProcessor.onaudioprocess = null;
            } catch (e) {}
            this.scriptProcessor = null;
        }
        if (this.microphone) {
            try { this.microphone.disconnect(); } catch (e) {}
            this.microphone = null;
        }
        if (this.audioContext && this.audioContext.state !== 'closed') {
            // Keep AudioContext alive but suspended to reuse it
            this.audioContext.suspend();
        }

        Object.values(this.peers).forEach(pc => pc.close());
        Object.values(this.remoteAudios).forEach(audio => {
            audio.pause();
            audio.srcObject = null;
            audio.remove();
        });
        this.peers = {};
        this.remoteAudios = {};
        this.participants = {};
        this.currentChannelId = null;
        this.myPeerId = null;
        this.speakingUsers.clear();

        // Also remove 'active' class from all voice items
        document.querySelectorAll('.voice-item').forEach(el => el.classList.remove('active'));

        this.updateVoiceUI();
    }

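    // Refresh the voice UI: re-render the per-channel user lists and, while connected,
    // keep a small "Voice (MODE)" status/disconnect bar pinned to the channel sidebar.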
    updateVoiceUI() {
        // We now use a global update mechanism for all channels
        VoiceChannel.refreshAllVoiceUsers();

        if (this.currentChannelId) {
            if (!document.querySelector('.voice-controls')) {
                const controls = document.createElement('div');
                controls.className = 'voice-controls p-2 d-flex justify-content-between align-items-center border-top bg-dark';
                controls.style.backgroundColor = '#232428';
                controls.innerHTML = `
                    <div class="d-flex align-items-center">
                        <div class="voice-status-icon text-success me-2" style="font-size: 8px;">●</div>
                        <div class="small fw-bold" style="font-size: 11px; color: #248046;">Voice (${this.settings.mode.toUpperCase()})</div>
                    </div>
                    <div>
                        <button class="btn btn-sm text-muted" id="btn-voice-leave" title="Disconnect">
                            <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"><path d="M10.68 13.31a16 16 0 0 0 3.41 2.6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7 2 2 0 0 1 1.72 2v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.42 19.42 0 0 1-3.33-2.67m-2.67-3.34a19.79 19.79 0 0 1-3.07-8.63A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91"></path><line x1="23" y1="1" x2="1" y2="23"></line></svg>
                        </button>
                    </div>
                `;
                const sidebar = document.querySelector('.channels-sidebar');
                if (sidebar) sidebar.appendChild(controls);
                const btnLeave = document.getElementById('btn-voice-leave');
                if (btnLeave) btnLeave.onclick = () => this.leave();
            }
        } else {
            const controls = document.querySelector('.voice-controls');
            if (controls) controls.remove();
        }
    }

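    // Track speaking state per user (as strings, so DOM data-user-id lookups match)
    // and draw or clear the green ring around their avatar in every rendered list.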
    updateSpeakingUI(userId, isSpeaking) {
        userId = String(userId);
        if (isSpeaking) {
            this.speakingUsers.add(userId);
        } else {
            this.speakingUsers.delete(userId);
        }

        const userEls = document.querySelectorAll(`.voice-user[data-user-id="${userId}"]`);
        userEls.forEach(el => {
            const avatar = el.querySelector('.message-avatar');
            if (avatar) {
                avatar.style.boxShadow = isSpeaking ? '0 0 0 2px #23a559' : 'none';
            }
        });
    }

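    // Static helper: redraw every voice channel's member list from
    // api_v1_voice.php?action=list_all, restoring speaking rings and mute/deafen
    // badges from the shared window.voiceHandler instance.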
    static async refreshAllVoiceUsers() {
        try {
            const resp = await fetch('api_v1_voice.php?action=list_all');
            const data = await resp.json();
            if (data.success) {
                // Clear all lists first
                document.querySelectorAll('.voice-users-list').forEach(el => el.innerHTML = '');

                // Remove connected highlight from all voice items
                document.querySelectorAll('.voice-item').forEach(el => {
                    el.classList.remove('connected');
                });

                // Populate based on data
                Object.keys(data.channels).forEach(channelId => {
                    const voiceItem = document.querySelector(`.voice-item[data-channel-id="${channelId}"]`);
                    if (voiceItem) {
                        // Highlight channel as connected only if I am in it
                        if (window.voiceHandler && window.voiceHandler.currentChannelId == channelId) {
                            voiceItem.classList.add('connected');
                        }

                        const container = voiceItem.closest('.channel-item-container');
                        if (container) {
                            const listEl = container.querySelector('.voice-users-list');
                            if (listEl) {
                                data.channels[channelId].forEach(p => {
                                    const pid = String(p.user_id);
                                    const isSpeaking = window.voiceHandler && window.voiceHandler.speakingUsers.has(pid);
                                    VoiceChannel.renderUserToUI(listEl, p.user_id, p.display_name || p.username, p.avatar_url, isSpeaking, p.is_muted, p.is_deafened);
                                });
                            }
                        }
                    }
                });
            }
        } catch (e) {
            console.error('Failed to refresh voice users:', e);
        }
    }

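    // Render one participant row (avatar, name, optional mute/deafen badge) into a
    // channel's .voice-users-list container.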
    static renderUserToUI(container, userId, username, avatarUrl, isSpeaking = false, isMuted = false, isDeafened = false) {
        const userEl = document.createElement('div');
        userEl.className = 'voice-user small text-muted d-flex align-items-center mb-1';
        userEl.dataset.userId = userId;
        userEl.style.paddingLeft = '8px';
        const avatarStyle = avatarUrl ? `background-image: url('${avatarUrl}'); background-size: cover;` : "background-color: #555;";
        const boxShadow = isSpeaking ? 'box-shadow: 0 0 0 2px #23a559;' : '';

        let icons = '';
        if (isDeafened) {
            icons += '<i class="fa-solid fa-volume-xmark ms-auto text-danger" style="font-size: 10px;"></i>';
        } else if (isMuted) {
            icons += '<i class="fa-solid fa-microphone-slash ms-auto text-danger" style="font-size: 10px;"></i>';
        }

        userEl.innerHTML = `
            <div class="message-avatar me-2" style="width: 16px; height: 16px; border-radius: 50%; transition: box-shadow 0.2s; ${avatarStyle} ${boxShadow}"></div>
            <span class="text-truncate" style="font-size: 13px; max-width: 100px;"></span>
            ${icons}
        `;
        // Set the name via textContent so user-supplied display names can't inject HTML
        userEl.querySelector('.text-truncate').textContent = username;
        container.appendChild(userEl);
    }
}
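// Minimal wiring sketch. Assumption: the surrounding app defines window.currentUserId,
// window.currentUsername, a persisted settings object, and .voice-item elements carrying
// data-channel-id; none of that lives in this file, and the 'voiceSettings' storage key
// below is purely illustrative. Shown only to indicate how a page might hook the class up:
//
//   window.voiceHandler = new VoiceChannel(null, JSON.parse(localStorage.getItem('voiceSettings') || 'null'));
//   document.querySelectorAll('.voice-item').forEach(el => {
//       el.addEventListener('click', () => window.voiceHandler.join(el.dataset.channelId));
//   });
//   document.getElementById('btn-panel-mute')?.addEventListener('click', () => window.voiceHandler.toggleMute());
//   document.getElementById('btn-panel-deafen')?.addEventListener('click', () => window.voiceHandler.toggleDeafen());
//   setInterval(() => VoiceChannel.refreshAllVoiceUsers(), 5000); // keep lists fresh while not connected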