// app.js
/**
* Voice Code Assistant - Client Application
*/
// Configuration
const WS_URL = 'ws://localhost:3001';   // WebSocket endpoint for the voice assistant server
const API_URL = 'http://localhost:3000'; // REST base URL — NOTE(review): not referenced in this file; verify it is used elsewhere
// State
let ws = null;                 // active WebSocket; replaced on every reconnect (see initWebSocket)
let recognition = null;        // SpeechRecognition instance, lazily created by initSpeechRecognition
let isListening = false;       // true while the mic session is active (drives continuous-mode restart)
let mode = 'push'; // push, continuous, wake
let currentVoice = 'rachel';   // TTS voice id, persisted under 'voiceAssistant.voice'
let wakeWord = 'hey assistant'; // trigger phrase for wake mode, persisted under 'voiceAssistant.wakeWord'
let conversationHistory = [];  // {type, text, time} entries, rendered by addMessage / exported
let currentContext = {};       // latest editor context pushed from VS Code (file, language, line, symbol)
let isProcessing = false;      // request-in-flight flag; cleared by response/error handlers
// Initialize WebSocket connection.
// Reconnects automatically 3 s after every close; on (re)connect the
// connection indicator and a toast are updated.
function initWebSocket() {
  ws = new WebSocket(WS_URL);

  ws.onopen = () => {
    console.log('Connected to Voice Assistant server');
    updateStatus('scsStatus', true);
    showNotification('Connected to server', 'success');
  };

  ws.onmessage = (event) => {
    // Guard the parse: one malformed frame must not throw inside the
    // handler and silently kill message processing.
    let message;
    try {
      message = JSON.parse(event.data);
    } catch (err) {
      console.error('Invalid server message:', err);
      return;
    }
    handleServerMessage(message);
  };

  ws.onerror = (error) => {
    console.error('WebSocket error:', error);
    showNotification('Connection error', 'error');
  };

  ws.onclose = () => {
    updateStatus('scsStatus', false);
    showNotification('Disconnected from server', 'warning');
    // Reconnect after 3 seconds
    setTimeout(initWebSocket, 3000);
  };
}
// Initialize Speech Recognition.
// Creates the (webkit-prefixed or standard) SpeechRecognition instance and
// wires its lifecycle handlers; bails with a toast when unsupported.
function initSpeechRecognition() {
  const hasStandard = 'SpeechRecognition' in window;
  const hasWebkit = 'webkitSpeechRecognition' in window;
  if (!hasStandard && !hasWebkit) {
    showNotification('Speech recognition not supported in this browser', 'error');
    return;
  }

  const Recognizer = window.SpeechRecognition || window.webkitSpeechRecognition;
  recognition = new Recognizer();
  recognition.continuous = true;
  recognition.interimResults = true;
  recognition.lang = 'en-US';

  recognition.onstart = () => {
    console.log('Speech recognition started');
    updateStatus('voiceStatus', true);
    document.getElementById('waveform').style.display = 'flex';
  };

  recognition.onresult = (event) => {
    let finalText = '';
    let interimText = '';
    const results = event.results;
    // Accumulate only results newer than resultIndex, split by finality
    for (let idx = event.resultIndex; idx < results.length; idx++) {
      const chunk = results[idx][0].transcript;
      if (results[idx].isFinal) {
        finalText += chunk;
      } else {
        interimText += chunk;
      }
    }
    // Show the best transcript available so far, then act on final text
    document.getElementById('transcript').textContent = finalText || interimText;
    if (finalText) {
      processSpeech(finalText);
    }
  };

  recognition.onerror = (event) => {
    console.error('Speech recognition error:', event.error);
    // Silence gaps are expected in continuous mode — not a failure
    if (event.error === 'no-speech' && mode === 'continuous') return;
    showNotification(`Speech error: ${event.error}`, 'error');
    stopListening();
  };

  recognition.onend = () => {
    console.log('Speech recognition ended');
    updateStatus('voiceStatus', false);
    document.getElementById('waveform').style.display = 'none';
    // Continuous mode auto-restarts shortly after the engine stops
    if (mode === 'continuous' && isListening) {
      setTimeout(() => {
        if (isListening) startListening();
      }, 100);
    }
  };
}
// Process speech input.
// Push/continuous modes forward the text verbatim; wake mode only reacts
// when the configured wake word is heard, stripping it off first.
function processSpeech(text) {
  const normalized = text.toLowerCase().trim();

  if (mode !== 'wake') {
    sendVoiceCommand(text);
    return;
  }

  const wake = wakeWord.toLowerCase();
  if (!normalized.includes(wake)) return; // no wake word — ignore the utterance

  // Everything after the wake word is the actual command
  const command = text
    .substring(text.toLowerCase().indexOf(wake) + wakeWord.length)
    .trim();
  if (command) {
    sendVoiceCommand(command);
  } else {
    showNotification('Listening...', 'info');
  }
}
// Send voice command to server.
// Drops the command with a toast when disconnected or when a previous
// request is still in flight; otherwise logs it and ships it over the socket.
function sendVoiceCommand(text) {
  const connected = ws && ws.readyState === WebSocket.OPEN;
  if (!connected) {
    showNotification('Not connected to server', 'error');
    return;
  }
  if (isProcessing) {
    showNotification('Still processing previous request...', 'info');
    return;
  }

  isProcessing = true;
  addMessage('user', text);

  const payload = { type: 'voice', text, context: currentContext };
  ws.send(JSON.stringify(payload));

  document.getElementById('transcript').textContent = 'Processing...';
}
// Handle server messages by dispatching on message.type.
// Unknown types are ignored, mirroring the original switch's fall-through.
function handleServerMessage(message) {
  const routes = new Map([
    ['status', () => handleStatusUpdate(message.data)],
    ['context_update', () => handleContextUpdate(message.data)],
    ['response', () => handleVoiceResponse(message.data)],
    ['audio', () => handleAudioResponse(message.data)],
    ['error', () => {
      showNotification(message.error, 'error');
      isProcessing = false; // unblock the next command after a server error
    }],
  ]);
  routes.get(message.type)?.();
}
// Handle status updates: reflect backend component states in the UI dots.
function handleStatusUpdate(status) {
  const { scsMcp, vscode, elevenLabs } = status;
  updateStatus('scsStatus', scsMcp);
  updateStatus('vscodeStatus', vscode);
  if (elevenLabs) {
    console.log('ElevenLabs connected');
  }
}
// Handle context updates from VS Code: cache the context and mirror the
// current file/language/line/symbol into the sidebar.
function handleContextUpdate(context) {
  currentContext = context;
  updateStatus('vscodeStatus', true);

  // Basename split handles both / and \ separators (VS Code on Windows
  // reports backslash paths, which a plain split('/') would not shorten)
  const fileName = context.currentFile
    ? context.currentFile.split(/[\\/]/).pop()
    : 'No file';
  document.getElementById('currentFile').textContent = fileName;
  document.getElementById('currentLanguage').textContent =
    context.currentLanguage || '-';
  // ?? (not ||) so a legitimate line number of 0 is still displayed
  document.getElementById('currentLine').textContent =
    context.currentLine ?? '-';
  document.getElementById('currentSymbol').textContent =
    context.currentSymbol || '-';
}
// Handle voice response: log it, refresh the transcript, optionally show
// the code panel and auto-play the audio.
function handleVoiceResponse(data) {
  isProcessing = false; // request finished — allow the next command

  addMessage('assistant', data.text);
  document.getElementById('transcript').textContent = data.text;

  if (data.code) {
    showCodePanel(data.code, data.action);
  }

  const autoPlayEnabled = document.getElementById('autoPlay').checked;
  if (autoPlayEnabled && data.audio) {
    playAudio(data.audio);
  }
}
// Handle a standalone audio message: play it only when auto-play is enabled.
function handleAudioResponse(audioData) {
  const autoPlay = document.getElementById('autoPlay');
  if (autoPlay.checked) {
    playAudio(audioData);
  }
}
// Play audio response (base64-encoded MPEG) via a data: URL.
// Both the synchronous Audio setup and the async play() promise are guarded.
function playAudio(audioData) {
  try {
    const source = `data:audio/mpeg;base64,${audioData}`;
    const player = new Audio(source);
    player.play().catch(e => {
      console.error('Failed to play audio:', e);
    });
  } catch (error) {
    console.error('Audio playback error:', error);
  }
}
// Toggle voice input on/off.
function toggleVoice() {
  if (!isListening) {
    startListening();
  } else {
    stopListening();
  }
}
// Start listening: lazily create the recognizer, start it, and mark the
// mic button as active (plus 'continuous' styling in that mode).
function startListening() {
  if (!recognition) {
    initSpeechRecognition();
  }
  if (!recognition) return; // browser has no speech support

  isListening = true;
  try {
    recognition.start();
  } catch (err) {
    // start() throws InvalidStateError when recognition is already running,
    // which can happen when the continuous-mode auto-restart in onend races
    // a manual start. Already-running is the desired state, so swallow it.
    if (err.name !== 'InvalidStateError') {
      isListening = false;
      throw err;
    }
  }

  const mic = document.getElementById('micButton');
  mic.classList.add('listening');
  if (mode === 'continuous') {
    mic.classList.add('continuous');
  }
}
// Stop listening and clear the mic button's active styling.
function stopListening() {
  if (!recognition) return;
  isListening = false;
  recognition.stop();
  const mic = document.getElementById('micButton');
  mic.classList.remove('listening', 'continuous');
}
// Set listening mode ('push' | 'continuous' | 'wake').
// Updates the mode-button highlight and starts/stops listening so the mic
// state matches the new mode.
function setMode(newMode) {
  mode = newMode;

  // Update UI
  document.querySelectorAll('.mode-button').forEach(btn => {
    btn.classList.remove('active');
  });
  // ?. — don't crash if the button for this mode is missing from the DOM
  document.getElementById(newMode + 'Mode')?.classList.add('active');

  // Continuous mode listens immediately; the other modes stop any session
  if (mode === 'continuous' && !isListening) {
    startListening();
  } else if (mode !== 'continuous' && isListening) {
    stopListening();
  }
  showNotification(`Switched to ${newMode} mode`, 'info');
}
// Quick action handler: translate a toolbar action id into a canned voice
// command and send it; unknown ids are ignored.
function quickAction(action) {
  const commands = new Map([
    ['review', 'review this code'],
    ['explain', 'explain what this does'],
    ['similar', 'find similar code patterns'],
    ['test', 'generate tests for this'],
    ['debt', 'analyze technical debt'],
    ['model', 'what model are you using'],
  ]);
  const command = commands.get(action);
  if (command) {
    sendVoiceCommand(command);
  }
}
// Add a message to the conversation history and render it.
// type: 'user' | 'assistant'; text: the message body.
function addMessage(type, text) {
  const message = {
    type: type,
    text: text,
    time: new Date().toLocaleTimeString()
  };
  conversationHistory.push(message);

  const historyDiv = document.getElementById('conversationHistory');
  const messageDiv = document.createElement('div');
  messageDiv.className = `message ${type}`;

  // Build with textContent (not innerHTML): the text comes from speech
  // transcripts and server responses, so interpolating it into innerHTML
  // was an XSS vector.
  const textDiv = document.createElement('div');
  textDiv.textContent = text;
  const timeDiv = document.createElement('div');
  timeDiv.className = 'message-time';
  timeDiv.textContent = message.time;
  messageDiv.append(textDiv, timeDiv);

  historyDiv.appendChild(messageDiv);
  // Keep the newest message in view
  historyDiv.scrollTop = historyDiv.scrollHeight;
}
// Show code panel: centered modal displaying an analysis result.
// Both `code` and `action` originate from the server, so both are escaped
// before being interpolated into innerHTML (the original escaped only code).
function showCodePanel(code, action) {
  const modal = document.createElement('div');
  modal.style.cssText = `
    position: fixed;
    top: 50%;
    left: 50%;
    transform: translate(-50%, -50%);
    background: white;
    padding: 2rem;
    border-radius: 10px;
    box-shadow: 0 10px 30px rgba(0,0,0,0.3);
    max-width: 80%;
    max-height: 80%;
    overflow: auto;
    z-index: 2000;
  `;
  modal.innerHTML = `
<h3>Analysis: ${escapeHtml(action)}</h3>
<pre style="background: #f8f9fa; padding: 1rem; border-radius: 5px; overflow-x: auto;">
${escapeHtml(code)}
</pre>
<button onclick="this.parentElement.remove()" style="
margin-top: 1rem;
padding: 0.5rem 1rem;
background: linear-gradient(135deg, #667eea, #764ba2);
color: white;
border: none;
border-radius: 5px;
cursor: pointer;
">Close</button>
`;
  document.body.appendChild(modal);
}
// Export conversation as a dated plain-text download.
function exportConversation() {
  const lines = [];
  for (const msg of conversationHistory) {
    lines.push(`[${msg.time}] ${msg.type.toUpperCase()}: ${msg.text}`);
  }

  const blob = new Blob([lines.join('\n\n')], { type: 'text/plain' });
  const url = URL.createObjectURL(blob);
  const link = document.createElement('a');
  link.href = url;
  const stamp = new Date().toISOString().slice(0, 10); // YYYY-MM-DD
  link.download = `conversation-${stamp}.txt`;
  link.click();
  URL.revokeObjectURL(url); // release the blob URL once the download starts
  showNotification('Conversation exported', 'success');
}
// Toggle the settings panel open/closed.
function toggleSettings() {
  document.getElementById('settingsPanel').classList.toggle('open');
}
// Select TTS voice, highlight the clicked option, and persist the choice.
function selectVoice(voice) {
  currentVoice = voice;

  // Update UI
  document.querySelectorAll('.voice-option').forEach(opt => {
    opt.classList.remove('selected');
  });
  // NOTE(review): this relied on the bare non-standard global `event`
  // (window.event), which Firefox does not define — guarded so the click
  // still saves the preference there. TODO: pass the clicked element in
  // from the onclick handler instead.
  window.event?.target?.classList.add('selected');

  // Save preference
  localStorage.setItem('voiceAssistant.voice', voice);
  showNotification(`Voice changed to ${voice}`, 'info');
}
// Update a status indicator dot: 'connected' class on when connected.
function updateStatus(elementId, connected) {
  const element = document.getElementById(elementId);
  if (!element) return;
  // toggle with a force flag ≡ add when truthy / remove when falsy
  element.classList.toggle('connected', Boolean(connected));
}
// Show a toast notification that auto-dismisses after 3 seconds.
// type selects the background color; anything unknown falls back to info blue.
function showNotification(message, type = 'info') {
  const palette = new Map([
    ['error', '#dc3545'],
    ['success', '#28a745'],
    ['warning', '#ffc107'],
  ]);
  const background = palette.get(type) ?? '#17a2b8';

  const toast = document.createElement('div');
  toast.style.cssText = `
    position: fixed;
    top: 20px;
    right: 20px;
    padding: 1rem 1.5rem;
    background: ${background};
    color: white;
    border-radius: 5px;
    box-shadow: 0 5px 15px rgba(0,0,0,0.2);
    z-index: 3000;
    animation: slideIn 0.3s ease;
  `;
  toast.textContent = message;
  document.body.appendChild(toast);

  // Animate out, then remove once the 0.3 s animation has finished
  setTimeout(() => {
    toast.style.animation = 'slideOut 0.3s ease';
    setTimeout(() => toast.remove(), 300);
  }, 3000);
}
// Escape HTML special characters by round-tripping the text through a
// detached element's text node.
function escapeHtml(text) {
  const scratch = document.createElement('div');
  scratch.appendChild(document.createTextNode(text));
  return scratch.innerHTML;
}
// Load saved preferences (voice, wake word) from localStorage into state
// and reflect the wake word in its input field.
function loadPreferences() {
  const savedVoice = localStorage.getItem('voiceAssistant.voice');
  if (savedVoice) {
    currentVoice = savedVoice;
  }
  const savedWakeWord = localStorage.getItem('voiceAssistant.wakeWord');
  if (savedWakeWord) {
    wakeWord = savedWakeWord;
    // Guard: the input may be absent from the DOM (the change-listener for
    // this element already uses the same ?. guard)
    const input = document.getElementById('wakeWord');
    if (input) {
      input.value = wakeWord;
    }
  }
}
// Persist wake-word edits as soon as the input's value changes.
document.getElementById('wakeWord')?.addEventListener('change', ({ target }) => {
  wakeWord = target.value;
  localStorage.setItem('voiceAssistant.wakeWord', wakeWord);
});
// Initialize on load: preferences, socket, speech engine, and a heartbeat.
window.addEventListener('load', () => {
  loadPreferences();
  initWebSocket();
  initSpeechRecognition();

  // Heartbeat: ping the server every 30 s while the socket is open
  const PING_INTERVAL_MS = 30000;
  setInterval(() => {
    if (ws?.readyState === WebSocket.OPEN) {
      ws.send(JSON.stringify({ type: 'ping' }));
    }
  }, PING_INTERVAL_MS);
});
// Keyboard shortcuts (Ctrl/Cmd + Shift + key).
document.addEventListener('keydown', (e) => {
  const chordHeld = (e.ctrlKey || e.metaKey) && e.shiftKey;
  if (!chordHeld) return;

  if (e.key === 'V') {
    // Ctrl/Cmd + Shift + V: toggle voice input
    e.preventDefault();
    toggleVoice();
  } else if (e.key === 'M') {
    // Ctrl/Cmd + Shift + M: cycle push -> continuous -> wake -> push
    e.preventDefault();
    const modes = ['push', 'continuous', 'wake'];
    const nextIndex = (modes.indexOf(mode) + 1) % modes.length;
    setMode(modes[nextIndex]);
  }
});
// Add animation styles: inject the slideOut keyframes used when dismissing
// notification toasts (slideIn is expected to be defined in the page's CSS —
// NOTE(review): verify, it is referenced by showNotification but not defined here).
const style = document.createElement('style');
style.textContent = `
@keyframes slideOut {
to {
opacity: 0;
transform: translateX(100%);
}
}
`;
document.head.appendChild(style);