Demo Update 27

This commit is contained in:
2025-03-30 11:17:24 -04:00
parent ea7e742b96
commit 665a3b6ce3
2 changed files with 14 additions and 120 deletions

View File

@@ -274,7 +274,6 @@
const whisperStatus = document.getElementById('whisper-status');
const csmStatus = document.getElementById('csm-status');
const llmStatus = document.getElementById('llm-status');
const webrtcStatus = document.getElementById('webrtc-status');
const micAnimation = document.getElementById('mic-animation');
const loadingDiv = document.getElementById('loading');
const loadingText = document.getElementById('loading-text');
@@ -286,14 +285,7 @@
let isAiSpeaking = false;
let audioContext;
let mediaStream;
let audioRecorder;
let audioProcessor;
const audioChunks = [];
// WebRTC variables
let peerConnection;
let dataChannel;
let hasActiveConnection = false;
// Audio playback
let audioQueue = [];
@@ -302,7 +294,6 @@
// Configuration variables
let serverSampleRate = 24000;
let clientSampleRate = 44100;
let iceServers = [];
// Initialize the application
initApp();
@@ -329,7 +320,6 @@
updateConnectionStatus('disconnected');
isConnected = false;
cleanupAudio();
cleanupWebRTC();
});
socket.on('session_ready', (data) => {
@@ -337,11 +327,13 @@
updateModelStatus(data);
clientSampleRate = data.client_sample_rate;
serverSampleRate = data.server_sample_rate;
iceServers = data.ice_servers;
// Initialize WebRTC if models are available
if (data.whisper_available && data.llm_available) {
initializeWebRTC();
// Enable start button if models are available
if (data.whisper_available && data.csm_available) {
startButton.disabled = false;
addInfoMessage('Ready for conversation. Click "Start Listening" to begin.');
} else {
addInfoMessage('Some models are not available. Voice chat might not work properly.');
}
});
@@ -351,10 +343,6 @@
addInfoMessage('Ready for conversation. Click "Start Listening" to begin.');
});
// Forward WebRTC signalling messages relayed by the server to the handler.
socket.on('webrtc_signal', (signal) => handleWebRTCSignal(signal));
socket.on('transcription', (data) => {
console.log('Transcription:', data);
addUserMessage(data.text);
@@ -460,98 +448,6 @@
llmStatus.style.color = data.llm_available ? 'green' : 'red';
}
// Initialize WebRTC connection.
// Creates an RTCPeerConnection (using the ICE servers delivered by the
// server in 'session_ready'), opens an ordered data channel, wires ICE
// candidate exchange through Socket.IO, and sends the initial SDP offer.
// The answer and remote candidates arrive via the 'webrtc_signal' event.
function initializeWebRTC() {
    if (!isConnected) return;
    // Guard against re-initialization (e.g. a repeated 'session_ready'):
    // tear down the previous connection so it is not leaked.
    if (peerConnection) {
        cleanupWebRTC();
    }
    const configuration = {
        iceServers: iceServers
    };
    peerConnection = new RTCPeerConnection(configuration);
    // Create data channel for WebRTC communication
    dataChannel = peerConnection.createDataChannel('audioData', {
        ordered: true
    });
    dataChannel.onopen = () => {
        console.log('WebRTC data channel open');
        hasActiveConnection = true;
        webrtcStatus.textContent = 'Connected';
        webrtcStatus.style.color = 'green';
        // Let the server know the channel is usable.
        socket.emit('webrtc_connected', { status: 'connected' });
    };
    dataChannel.onclose = () => {
        console.log('WebRTC data channel closed');
        hasActiveConnection = false;
        webrtcStatus.textContent = 'Disconnected';
        webrtcStatus.style.color = 'red';
    };
    // Surface channel errors instead of failing silently.
    dataChannel.onerror = (event) => {
        console.error('WebRTC data channel error:', event);
    };
    // Relay locally gathered ICE candidates to the server for the peer.
    peerConnection.onicecandidate = (event) => {
        if (event.candidate) {
            socket.emit('webrtc_signal', {
                type: 'ice_candidate',
                candidate: event.candidate
            });
        }
    };
    // Log ICE connection state changes (useful for diagnosing NAT issues).
    peerConnection.oniceconnectionstatechange = () => {
        console.log('ICE connection state:', peerConnection.iceConnectionState);
    };
    // Create and send the SDP offer; failure is reflected in the status UI.
    peerConnection.createOffer()
        .then(offer => peerConnection.setLocalDescription(offer))
        .then(() => {
            socket.emit('webrtc_signal', {
                type: 'offer',
                sdp: peerConnection.localDescription
            });
        })
        .catch(error => {
            console.error('Error creating WebRTC offer:', error);
            webrtcStatus.textContent = 'Failed to Connect';
            webrtcStatus.style.color = 'red';
        });
}
// Apply a signalling message ('answer' or 'ice_candidate') relayed by
// the server to the current peer connection; no-op before initialization.
function handleWebRTCSignal(data) {
    if (!peerConnection) {
        return;
    }
    switch (data.type) {
        case 'answer':
            peerConnection
                .setRemoteDescription(new RTCSessionDescription(data.sdp))
                .catch(error => console.error('Error setting remote description:', error));
            break;
        case 'ice_candidate':
            peerConnection
                .addIceCandidate(new RTCIceCandidate(data.candidate))
                .catch(error => console.error('Error adding ICE candidate:', error));
            break;
    }
}
// Tear down the WebRTC data channel and peer connection (if any),
// reset the related state flags, and update the status indicator.
function cleanupWebRTC() {
    dataChannel?.close();
    peerConnection?.close();
    dataChannel = null;
    peerConnection = null;
    hasActiveConnection = false;
    webrtcStatus.textContent = 'Not Connected';
    webrtcStatus.style.color = 'red';
}
// Toggle audio listening
function toggleListening() {
if (isListening) {
@@ -648,8 +544,6 @@
if (audioContext && audioContext.state !== 'closed') {
audioContext.close().catch(error => console.error('Error closing AudioContext:', error));
}
audioChunks.length = 0;
}
// Convert Float32Array to Int16Array for sending to server
@@ -669,7 +563,7 @@
// Convert to base64 for transmission
const base64Audio = arrayBufferToBase64(audioData.buffer);
// Send via Socket.IO (could use WebRTC's DataChannel for lower latency in production)
// Send via Socket.IO
socket.emit('audio_stream', { audio: base64Audio });
}