// 38443-vm/assets/js/voice.js

console.log('voice.js loaded');
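// Usage sketch (an assumption, inferred from the references to window.voiceHandler,
// window.currentUserId and window.currentUsername in this file — the actual bootstrap
// code lives elsewhere):
//
//   window.voiceHandler = new VoiceChannel(null, {
//     mode: 'ptt',          // 'ptt' (push-to-talk) or 'vox' (voice activation)
//     pttKey: 'v',          // key or KeyboardEvent.code held down for PTT
//     voxThreshold: 0.1     // 0..1 volume level that triggers VOX
//   });
//   window.voiceHandler.join(channelId);   // connect to a voice channel
//   window.voiceHandler.leave();           // disconnect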
class VoiceChannel {
constructor(ws, settings) {
// ws is ignored now as we use PHP signaling, but kept for compatibility
this.settings = settings || { mode: 'vox', pttKey: 'v', voxThreshold: 0.1 };
console.log('VoiceChannel constructor called with settings:', this.settings);
this.localStream = null;
this.analysisStream = null;
this.peers = {}; // userId -> RTCPeerConnection
this.participants = {}; // userId -> {name}
this.currentChannelId = null;
this.myPeerId = null;
this.pollInterval = null;
this.remoteAudios = {}; // userId -> Audio element
this.audioContext = null;
this.analyser = null;
this.microphone = null;
this.scriptProcessor = null;
this.isTalking = false;
this.pttPressed = false;
this.voxActive = false;
this.lastVoiceTime = 0;
this.voxHoldTime = 500;
// Track who is speaking to persist across UI refreshes
this.speakingUsers = new Set();
this.setupPTTListeners();
window.addEventListener('beforeunload', () => this.leave());
}
setupPTTListeners() {
// Shared matcher so keydown and keyup use identical PTT key logic
const matchesPTTKey = (e) => {
const key = this.settings.pttKey.toLowerCase();
return e.key.toLowerCase() === key ||
(e.code && e.code.toLowerCase() === key) ||
(this.settings.pttKey === '0' && e.code === 'Numpad0');
};
window.addEventListener('keydown', (e) => {
// Ignore if in input field
if (e.target.tagName === 'INPUT' || e.target.tagName === 'TEXTAREA') return;
if (this.settings.mode !== 'ptt') return;
if (matchesPTTKey(e) && !this.pttPressed) {
console.log('PTT Key Pressed:', e.key, e.code, 'Expected:', this.settings.pttKey);
this.pttPressed = true;
this.updateMuteState();
}
});
window.addEventListener('keyup', (e) => {
if (this.settings.mode !== 'ptt') return;
if (matchesPTTKey(e)) {
console.log('PTT Key Released:', e.key, e.code, 'Expected:', this.settings.pttKey);
this.pttPressed = false;
this.updateMuteState();
}
});
}
async join(channelId) {
console.log('VoiceChannel.join process started for channel:', channelId);
if (this.currentChannelId === channelId) {
console.log('Already in this channel');
return;
}
if (this.currentChannelId) {
console.log('Leaving previous channel:', this.currentChannelId);
this.leave();
}
this.currentChannelId = channelId;
try {
console.log('Requesting microphone access...');
this.localStream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
console.log('Microphone access granted');
this.setMute(true);
// Always setup VOX logic for volume meter and detection
this.setupVOX();
// Join via PHP
console.log('Calling API join...');
const url = `api_v1_voice.php?action=join&room=${channelId}&name=${encodeURIComponent(window.currentUsername || 'Unknown')}`;
const resp = await fetch(url);
const data = await resp.json();
console.log('API join response:', data);
if (data.success) {
this.myPeerId = data.peer_id;
console.log('Joined room with peer_id:', this.myPeerId);
// Start polling
this.startPolling();
this.updateVoiceUI();
} else {
console.error('API join failed:', data.error);
}
} catch (e) {
console.error('Failed to join voice:', e);
alert('Microphone access required for voice channels. Error: ' + e.message);
this.currentChannelId = null;
}
}
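// Signaling is poll-based: every second we fetch the current participant list and any
// queued signals from the PHP endpoint. The 1s interval is a trade-off between
// call-setup latency and request volume.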
startPolling() {
if (this.pollInterval) clearInterval(this.pollInterval);
this.pollInterval = setInterval(() => this.poll(), 1000);
this.poll(); // Initial poll
}
async poll() {
if (!this.myPeerId || !this.currentChannelId) return;
try {
const resp = await fetch(`api_v1_voice.php?action=poll&room=${this.currentChannelId}&peer_id=${this.myPeerId}`);
const data = await resp.json();
if (data.success) {
// Update participants
const oldPs = Object.keys(this.participants);
this.participants = data.participants;
const newPs = Object.keys(this.participants);
// Create an offer to any participant we don't yet have a peer connection for
// (see the note after handleSignaling about simultaneous offers)
newPs.forEach(pid => {
if (pid !== this.myPeerId && !this.peers[pid]) {
console.log('New peer found via poll:', pid);
this.createPeerConnection(pid, true);
}
});
// Cleanup left peers
oldPs.forEach(pid => {
if (!this.participants[pid] && this.peers[pid]) {
console.log('Peer left:', pid);
this.peers[pid].close();
delete this.peers[pid];
}
});
// Handle incoming signals
if (data.signals && data.signals.length > 0) {
for (const sig of data.signals) {
await this.handleSignaling(sig);
}
}
this.updateVoiceUI();
}
} catch (e) {
console.error('Polling error:', e);
}
}
async sendSignal(to, data) {
if (!this.myPeerId || !this.currentChannelId) return;
await fetch(`api_v1_voice.php?action=signal&room=${this.currentChannelId}&peer_id=${this.myPeerId}&to=${to}&data=${encodeURIComponent(JSON.stringify(data))}`);
}
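// Expected signaling API shapes (inferred from how responses are used in this file;
// the PHP side may return additional fields):
//   join  -> { success: true, peer_id: "..." } or { success: false, error: "..." }
//   poll  -> { success: true, participants: { peerId: { name } }, signals: [ { from, data } ] }
//   signal payloads (the `data` objects exchanged above):
//     { type: 'offer', offer: RTCSessionDescriptionInit }
//     { type: 'answer', answer: RTCSessionDescriptionInit }
//     { type: 'ice_candidate', candidate: RTCIceCandidateInit }
//     { type: 'voice_speaking', channel_id, user_id, speaking }
// Note: SDP offers can be several KB, so passing them in a GET query string may hit URL
// length limits on some servers; the PHP endpoint is assumed to accept payloads this size.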
createPeerConnection(userId, isOfferor) {
if (this.peers[userId]) return this.peers[userId];
console.log('Creating PeerConnection for:', userId, 'as offeror:', isOfferor);
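// STUN-only ICE configuration: the public Google STUN servers handle NAT discovery, but
// without a TURN relay, peers behind symmetric NATs or strict firewalls may fail to
// connect. Adding TURN would be a deployment decision outside this file.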
const pc = new RTCPeerConnection({
iceServers: [
{ urls: 'stun:stun.l.google.com:19302' },
{ urls: 'stun:stun1.l.google.com:19302' }
]
});
this.peers[userId] = pc;
pc.oniceconnectionstatechange = () => {
console.log(`ICE Connection State with ${userId}: ${pc.iceConnectionState}`);
};
if (this.localStream) {
this.localStream.getTracks().forEach(track => {
console.log(`Adding track ${track.kind} to peer ${userId}`);
pc.addTrack(track, this.localStream);
});
}
pc.onicecandidate = (event) => {
if (event.candidate) {
this.sendSignal(userId, { type: 'ice_candidate', candidate: event.candidate });
}
};
pc.ontrack = (event) => {
console.log('Received remote track from:', userId, event);
if (this.remoteAudios[userId]) {
this.remoteAudios[userId].pause();
this.remoteAudios[userId].remove();
this.remoteAudios[userId].srcObject = null;
}
const remoteAudio = new Audio();
remoteAudio.style.display = 'none';
remoteAudio.srcObject = event.streams[0];
document.body.appendChild(remoteAudio);
this.remoteAudios[userId] = remoteAudio;
remoteAudio.play().catch(e => console.warn('Autoplay prevented:', e));
};
if (isOfferor) {
pc.createOffer().then(offer => {
return pc.setLocalDescription(offer);
}).then(() => {
this.sendSignal(userId, { type: 'offer', offer: pc.localDescription });
}).catch(e => console.error('Failed to create offer for', userId, e));
}
return pc;
}
async handleSignaling(sig) {
const from = sig.from;
const data = sig.data;
console.log('Handling signaling from:', from, 'type:', data.type);
switch (data.type) {
case 'offer':
await this.handleOffer(from, data.offer);
break;
case 'answer':
await this.handleAnswer(from, data.answer);
break;
case 'ice_candidate':
await this.handleCandidate(from, data.candidate);
break;
case 'voice_speaking':
this.updateSpeakingUI(data.user_id, data.speaking);
break;
}
}
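// Note on simultaneous offers: because both sides discover a new participant via polling,
// two peers can offer to each other in the same cycle ("glare"). This code does not
// resolve that explicitly; a common mitigation (not implemented here) would be to let only
// the peer with, say, the lexicographically smaller peer_id create the offer.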
async handleOffer(from, offer) {
const pc = this.createPeerConnection(from, false);
await pc.setRemoteDescription(new RTCSessionDescription(offer));
const answer = await pc.createAnswer();
await pc.setLocalDescription(answer);
this.sendSignal(from, { type: 'answer', answer: pc.localDescription });
}
async handleAnswer(from, answer) {
const pc = this.peers[from];
if (pc) await pc.setRemoteDescription(new RTCSessionDescription(answer));
}
async handleCandidate(from, candidate) {
const pc = this.peers[from];
if (!pc) return;
// Candidates can arrive before the remote description is set; log those failures and continue
await pc.addIceCandidate(new RTCIceCandidate(candidate)).catch(e => console.warn('Failed to add ICE candidate from', from, e));
}
setupVOX() {
if (!this.localStream) {
console.warn('Cannot setup VOX: no localStream');
return;
}
console.log('Setting up VOX logic...');
try {
if (!this.audioContext) {
this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
}
// Re-ensure context is running
if (this.audioContext.state === 'suspended') {
this.audioContext.resume().then(() => console.log('AudioContext resumed'));
}
// Cleanup old nodes
if (this.scriptProcessor) {
this.scriptProcessor.onaudioprocess = null;
try { this.scriptProcessor.disconnect(); } catch(e) {}
}
if (this.microphone) {
try { this.microphone.disconnect(); } catch(e) {}
}
this.analyser = this.audioContext.createAnalyser();
this.analyser.fftSize = 512;
// Use a cloned stream for analysis so VOX works even when localStream is muted/disabled
if (this.analysisStream) {
this.analysisStream.getTracks().forEach(t => t.stop());
}
this.analysisStream = this.localStream.clone();
this.analysisStream.getAudioTracks().forEach(t => t.enabled = true); // Ensure analysis stream is NOT muted
this.microphone = this.audioContext.createMediaStreamSource(this.analysisStream);
this.scriptProcessor = this.audioContext.createScriptProcessor(2048, 1, 1);
this.microphone.connect(this.analyser);
this.analyser.connect(this.scriptProcessor);
// Avoid feedback: connect to a gain node with 0 volume then to destination
const silence = this.audioContext.createGain();
silence.gain.value = 0;
this.scriptProcessor.connect(silence);
silence.connect(this.audioContext.destination);
this.voxActive = false;
this.currentVolume = 0;
this.scriptProcessor.onaudioprocess = () => {
const array = new Uint8Array(this.analyser.frequencyBinCount);
this.analyser.getByteFrequencyData(array);
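// Average the byte frequency bins and normalize to 0..1. This is a rough loudness
// proxy (not true RMS), but it is cheap and adequate for a VOX threshold check.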
let values = 0;
for (let i = 0; i < array.length; i++) values += array[i];
const average = values / array.length;
this.currentVolume = average / 255;
if (this.settings.mode !== 'vox') {
this.voxActive = false;
return;
}
if (this.currentVolume > this.settings.voxThreshold) {
this.lastVoiceTime = Date.now();
if (!this.voxActive) {
this.voxActive = true;
this.updateMuteState();
}
} else {
if (this.voxActive && Date.now() - this.lastVoiceTime > this.voxHoldTime) {
this.voxActive = false;
this.updateMuteState();
}
}
};
console.log('VOX logic setup complete');
} catch (e) {
console.error('Failed to setup VOX:', e);
}
}
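// Note: ScriptProcessorNode (used above) is deprecated in the Web Audio API in favour of
// AudioWorklet. It still works in current browsers; migrating would mean moving the volume
// measurement into an AudioWorkletProcessor, which is left as a possible refactor.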
getVolume() {
return this.currentVolume || 0;
}
updateMuteState() {
if (!this.currentChannelId || !this.localStream) return;
let shouldTalk = (this.settings.mode === 'ptt') ? this.pttPressed : this.voxActive;
console.log('updateMuteState: shouldTalk =', shouldTalk, 'mode =', this.settings.mode);
if (this.isTalking !== shouldTalk) {
this.isTalking = shouldTalk;
this.setMute(!shouldTalk);
this.updateSpeakingUI(window.currentUserId, shouldTalk);
// Notify others
const msg = { type: 'voice_speaking', channel_id: this.currentChannelId, user_id: window.currentUserId, speaking: shouldTalk };
// this.ws is never assigned in this class (PHP polling replaced the WebSocket signaler),
// so broadcast the speaking state to peers via the signaling endpoint
Object.keys(this.peers).forEach(pid => {
this.sendSignal(pid, msg);
});
}
}
setMute(mute) {
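// Muting by toggling track.enabled keeps the peer connections and senders alive (the
// track just produces silence), so unmuting is instant and needs no renegotiation.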
if (this.localStream) {
console.log('Setting mute to:', mute);
this.localStream.getAudioTracks().forEach(track => { track.enabled = !mute; });
}
}
leave() {
if (!this.currentChannelId) return;
console.log('Leaving voice channel:', this.currentChannelId);
if (this.pollInterval) clearInterval(this.pollInterval);
// keepalive lets this request complete even when leave() runs from the beforeunload handler
fetch(`api_v1_voice.php?action=leave&room=${this.currentChannelId}&peer_id=${this.myPeerId}`, { keepalive: true });
if (this.localStream) {
this.localStream.getTracks().forEach(track => track.stop());
this.localStream = null;
}
if (this.analysisStream) {
this.analysisStream.getTracks().forEach(track => track.stop());
this.analysisStream = null;
}
if (this.scriptProcessor) {
try {
this.scriptProcessor.disconnect();
this.scriptProcessor.onaudioprocess = null;
} catch(e) {}
this.scriptProcessor = null;
}
if (this.microphone) {
try { this.microphone.disconnect(); } catch(e) {}
this.microphone = null;
}
if (this.audioContext && this.audioContext.state !== 'closed') {
// Keep AudioContext alive but suspended to reuse it
this.audioContext.suspend();
}
Object.values(this.peers).forEach(pc => pc.close());
Object.values(this.remoteAudios).forEach(audio => {
audio.pause();
audio.remove();
audio.srcObject = null;
});
this.peers = {};
this.remoteAudios = {};
this.participants = {};
this.currentChannelId = null;
this.myPeerId = null;
this.speakingUsers.clear();
this.updateVoiceUI();
}
updateVoiceUI() {
// We now use a global update mechanism for all channels
VoiceChannel.refreshAllVoiceUsers();
if (this.currentChannelId) {
if (!document.querySelector('.voice-controls')) {
const controls = document.createElement('div');
controls.className = 'voice-controls p-2 d-flex justify-content-between align-items-center border-top bg-dark';
controls.style.backgroundColor = '#232428';
controls.innerHTML = `
<div class="d-flex align-items-center">
<div class="voice-status-icon text-success me-2" style="font-size: 8px;">●</div>
<div class="small fw-bold" style="font-size: 11px; color: #248046;">Voice (${this.settings.mode.toUpperCase()})</div>
</div>
<div>
<button class="btn btn-sm text-muted" id="btn-voice-leave" title="Disconnect">
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"><path d="M10.68 13.31a16 16 0 0 0 3.41 2.6l1.27-1.27a2 2 0 0 1 2.11-.45 12.84 12.84 0 0 0 2.81.7 2 2 0 0 1 1.72 2v3a2 2 0 0 1-2.18 2 19.79 19.79 0 0 1-8.63-3.07 19.42 19.42 0 0 1-3.33-2.67m-2.67-3.34a19.79 19.79 0 0 1-3.07-8.63A2 2 0 0 1 4.11 2h3a2 2 0 0 1 2 1.72 12.84 12.84 0 0 0 .7 2.81 2 2 0 0 1-.45 2.11L8.09 9.91"></path><line x1="23" y1="1" x2="1" y2="23"></line></svg>
</button>
</div>
`;
const sidebar = document.querySelector('.channels-sidebar');
if (sidebar) sidebar.appendChild(controls);
const btnLeave = document.getElementById('btn-voice-leave');
if (btnLeave) btnLeave.onclick = () => this.leave();
}
} else {
const controls = document.querySelector('.voice-controls');
if (controls) controls.remove();
}
}
updateSpeakingUI(userId, isSpeaking) {
if (isSpeaking) {
this.speakingUsers.add(userId);
} else {
this.speakingUsers.delete(userId);
}
const userEls = document.querySelectorAll(`.voice-user[data-user-id="${userId}"]`);
userEls.forEach(el => {
const avatar = el.querySelector('.message-avatar');
if (avatar) {
avatar.style.boxShadow = isSpeaking ? '0 0 0 2px #23a559' : 'none';
}
});
}
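// Expected list_all response shape (inferred from the rendering code below):
//   { success: true, channels: { channelId: [ { user_id, username, display_name, avatar_url } ] } }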
static async refreshAllVoiceUsers() {
try {
const resp = await fetch('api_v1_voice.php?action=list_all');
const data = await resp.json();
if (data.success) {
// Clear all lists first
document.querySelectorAll('.voice-users-list').forEach(el => el.innerHTML = '');
// Populate based on data
Object.keys(data.channels).forEach(channelId => {
// The .voice-users-list lives in the same .channel-item-container as its .voice-item, so resolve it via the shared container
const voiceItem = document.querySelector(`.voice-item[data-channel-id="${channelId}"]`);
if (voiceItem) {
const container = voiceItem.closest('.channel-item-container');
if (container) {
const listEl = container.querySelector('.voice-users-list');
if (listEl) {
data.channels[channelId].forEach(p => {
const isSpeaking = window.voiceHandler && window.voiceHandler.speakingUsers.has(p.user_id);
VoiceChannel.renderUserToUI(listEl, p.user_id, p.display_name || p.username, p.avatar_url, isSpeaking);
});
}
}
}
});
}
} catch (e) {
console.error('Failed to refresh voice users:', e);
}
}
static renderUserToUI(container, userId, username, avatarUrl, isSpeaking = false) {
const userEl = document.createElement('div');
userEl.className = 'voice-user small text-muted d-flex align-items-center mb-1';
userEl.dataset.userId = userId;
userEl.style.paddingLeft = '8px';
const avatar = document.createElement('div');
avatar.className = 'message-avatar me-2';
avatar.style.cssText = 'width: 16px; height: 16px; border-radius: 50%; transition: box-shadow 0.2s;';
if (avatarUrl) {
avatar.style.backgroundImage = `url('${avatarUrl}')`;
avatar.style.backgroundSize = 'cover';
} else {
avatar.style.backgroundColor = '#555';
}
if (isSpeaking) avatar.style.boxShadow = '0 0 0 2px #23a559';
const nameEl = document.createElement('span');
nameEl.style.fontSize = '13px';
// Use textContent so user-controlled display names cannot inject HTML
nameEl.textContent = username;
userEl.appendChild(avatar);
userEl.appendChild(nameEl);
container.appendChild(userEl);
}
}