extension.ts • 13.6 kB
import * as vscode from 'vscode';
import * as WebSocket from 'ws';
import * as path from 'path';
// Active WebSocket connection to the assistant server (null while disconnected).
let ws: WebSocket | null = null;
// Status bar entry showing connection state; clicking it toggles the connection.
let statusBarItem: vscode.StatusBarItem;
// True while the socket is open (set in the 'open'/'close' handlers).
let isConnected = false;
// Last editor-context payload sent to the server; used to suppress duplicate sends.
let lastContext: any = {};
/**
 * Extension entry point: creates the status bar toggle, registers all
 * commands and editor listeners, and optionally auto-connects to the
 * voice assistant server per user configuration.
 */
export function activate(context: vscode.ExtensionContext) {
  console.log('Voice Assistant extension activated');

  // Status bar indicator; doubles as a connect/disconnect control.
  statusBarItem = vscode.window.createStatusBarItem(
    vscode.StatusBarAlignment.Right,
    100
  );
  statusBarItem.text = "$(unmute) Voice Assistant";
  statusBarItem.tooltip = "Click to connect/disconnect voice assistant";
  statusBarItem.command = 'voiceAssistant.toggle';
  statusBarItem.show();
  context.subscriptions.push(statusBarItem);

  // Command id -> handler table, registered in one pass below.
  const commands: Array<[string, () => unknown]> = [
    ['voiceAssistant.toggle', toggleConnection],
    ['voiceAssistant.ask', askQuestion],
    ['voiceAssistant.reviewCurrent', () => sendCommand('review this code')],
    ['voiceAssistant.findSimilar', () => sendCommand('find similar code')],
    ['voiceAssistant.explain', () => sendCommand('explain this')],
    // Media commands
    ['voiceAssistant.screenshot', takeScreenshot],
    ['voiceAssistant.startRecording', startRecording],
    ['voiceAssistant.stopRecording', stopRecording],
    ['voiceAssistant.showMediaHistory', showMediaHistory],
    ['voiceAssistant.annotateLastCapture', annotateLastCapture],
  ];
  for (const [id, handler] of commands) {
    context.subscriptions.push(vscode.commands.registerCommand(id, handler));
  }

  // Keep the server-side editor context in sync as the user moves around.
  context.subscriptions.push(
    vscode.window.onDidChangeActiveTextEditor(updateContext),
    vscode.window.onDidChangeTextEditorSelection((e) => {
      if (e.textEditor === vscode.window.activeTextEditor) {
        updateContext();
      }
    }),
    vscode.workspace.onDidSaveTextDocument((document) => {
      if (document === vscode.window.activeTextEditor?.document) {
        sendContextUpdate('save');
      }
    })
  );

  // Auto-connect on activation when the user has opted in.
  const config = vscode.workspace.getConfiguration('voiceAssistant');
  if (config.get('autoConnect')) {
    connectToServer();
  }
}
// Flip between connected and disconnected states (status bar click handler).
async function toggleConnection() {
  if (!isConnected) {
    connectToServer();
    return;
  }
  disconnectFromServer();
}
// Connect to voice assistant server
function connectToServer() {
try {
const config = vscode.workspace.getConfiguration('voiceAssistant');
const serverUrl = config.get<string>('serverUrl') || 'ws://localhost:3001';
ws = new WebSocket(serverUrl + '/vscode');
ws.on('open', () => {
isConnected = true;
updateStatusBar(true);
vscode.window.showInformationMessage('Connected to Voice Assistant');
updateContext();
});
ws.on('message', (data: WebSocket.Data) => {
handleServerMessage(JSON.parse(data.toString()));
});
ws.on('error', (error: Error) => {
console.error('WebSocket error:', error);
vscode.window.showErrorMessage(`Voice Assistant error: ${error.message}`);
});
ws.on('close', () => {
isConnected = false;
updateStatusBar(false);
ws = null;
// Auto-reconnect after 5 seconds
setTimeout(() => {
if (!isConnected) {
connectToServer();
}
}, 5000);
});
} catch (error: any) {
vscode.window.showErrorMessage(`Failed to connect: ${error.message}`);
}
}
/**
 * Close the server connection deliberately. Listeners are detached before
 * closing so the socket's 'close' handler (which schedules an auto-reconnect
 * in connectToServer) cannot fire and undo a manual disconnect.
 */
function disconnectFromServer() {
  if (ws) {
    // ws extends EventEmitter; dropping handlers prevents the auto-reconnect.
    ws.removeAllListeners();
    ws.close();
    ws = null;
  }
  isConnected = false;
  updateStatusBar(false);
}
// Paint the status bar for the given connection state. `customText`
// overrides the default "connected" label (e.g. while recording).
function updateStatusBar(connected: boolean, customText?: string) {
  if (!connected) {
    statusBarItem.text = "$(mute) Voice Disconnected";
    statusBarItem.backgroundColor = undefined;
    return;
  }
  statusBarItem.text = customText || "$(unmute) Voice Connected";
  statusBarItem.backgroundColor = new vscode.ThemeColor('statusBarItem.prominentBackground');
}
/**
 * Dispatch a decoded server message to the matching UI reaction.
 * Message shape is `{ type, data?, error? }` as produced by the server;
 * unknown types are logged rather than silently dropped.
 */
function handleServerMessage(message: any) {
  switch (message.type) {
    case 'response':
      handleVoiceResponse(message.data);
      break;
    case 'status':
      console.log('Server status:', message.data);
      break;
    case 'screenshot_saved':
      vscode.window.showInformationMessage('Screenshot saved');
      break;
    case 'recording_started':
      // NOTE: the original literal was a mojibake-garbled red-circle emoji;
      // restored to the intended recording indicator.
      updateStatusBar(true, '🔴 Recording');
      break;
    case 'recording_stopped':
      updateStatusBar(true);
      vscode.window.showInformationMessage('Recording saved');
      break;
    case 'error':
      vscode.window.showErrorMessage(`Voice Assistant: ${message.error}`);
      break;
    default:
      // Surface protocol drift instead of ignoring it silently.
      console.log('Unhandled server message type:', message.type);
  }
}
// Show the assistant's textual answer and offer follow-up actions on any
// code it returned. `data` is `{ text, code?, action? }` from the server.
async function handleVoiceResponse(data: any) {
  const { text, code, action } = data;
  const choice = await vscode.window.showInformationMessage(
    text,
    'Show Details',
    'Insert Code',
    'Open Panel'
  );
  switch (choice) {
    case 'Show Details':
      if (code) showCodeDetails(code, action);
      break;
    case 'Insert Code':
      if (code) insertCode(code);
      break;
    case 'Open Panel':
      openVoicePanel();
      break;
  }
}
// Render returned code in a read-only webview beside the active editor.
function showCodeDetails(code: string, action: string) {
  const panel = vscode.window.createWebviewPanel(
    'voiceAssistantDetails',
    `Voice Assistant: ${action}`,
    vscode.ViewColumn.Two,
    {}
  );
  // Escape before interpolating into the webview markup.
  const body = escapeHtml(code);
  panel.webview.html = `
<!DOCTYPE html>
<html>
<head>
<style>
body { font-family: var(--vscode-font-family); padding: 20px; }
pre { background: var(--vscode-textBlockQuote-background);
padding: 15px; border-radius: 5px; overflow-x: auto; }
h2 { color: var(--vscode-foreground); }
</style>
</head>
<body>
<h2>Analysis Results</h2>
<pre><code>${body}</code></pre>
</body>
</html>
`;
}
// Insert the given snippet at the active cursor position; no-op when no
// editor has focus.
function insertCode(code: string) {
  const editor = vscode.window.activeTextEditor;
  if (!editor) {
    return;
  }
  const cursor = editor.selection.active;
  editor.edit((edit) => edit.insert(cursor, code));
}
/**
 * Open the voice panel web UI in the default browser.
 * The URL is derived from the `voiceAssistant.serverUrl` setting
 * (ws://host:3001 -> http://host:3000), matching showMediaHistory(), instead
 * of the previous hard-coded http://localhost:3000 which ignored the setting.
 */
function openVoicePanel() {
  const config = vscode.workspace.getConfiguration('voiceAssistant');
  const serverUrl = config.get<string>('serverUrl') || 'http://localhost:3000';
  const httpUrl = serverUrl
    .replace('ws://', 'http://')
    .replace('wss://', 'https://')
    .replace(':3001', ':3000');
  vscode.env.openExternal(vscode.Uri.parse(httpUrl));
}
// Snapshot the active editor state and push it to the server, but only
// when it differs from the previously sent snapshot.
function updateContext() {
  const editor = vscode.window.activeTextEditor;
  if (!editor) return;
  const { document, selection } = editor;
  const snapshot = {
    currentFile: document.fileName,
    currentDirectory: path.dirname(document.fileName),
    currentLanguage: document.languageId,
    currentLine: selection.active.line + 1,
    currentSymbol: getSymbolAtPosition(document, selection.active),
    selectedCode: selection.isEmpty ? null : document.getText(selection),
    currentFunction: getCurrentFunction(document, selection.active),
    fileSize: document.getText().length,
    lineCount: document.lineCount
  };
  // Cheap structural comparison; skips redundant network traffic.
  if (JSON.stringify(snapshot) === JSON.stringify(lastContext)) {
    return;
  }
  lastContext = snapshot;
  sendContextUpdate('change', snapshot);
}
// Push a context payload to the server; silently no-ops while disconnected.
// Falls back to the last known context when none is supplied.
function sendContextUpdate(event: string, context?: any) {
  if (ws?.readyState !== WebSocket.OPEN) {
    return;
  }
  ws.send(JSON.stringify({
    type: 'context',
    event: event,
    data: context || lastContext,
    timestamp: Date.now()
  }));
}
// Forward a command string (voice or synthetic) to the server together with
// the current editor context; warns the user when not connected.
function sendCommand(command: string) {
  if (ws === null || ws.readyState !== WebSocket.OPEN) {
    vscode.window.showWarningMessage('Voice Assistant not connected');
    return;
  }
  const payload = { type: 'command', command, context: lastContext };
  ws.send(JSON.stringify(payload));
}
// Prompt the user for a free-form question and forward it to the assistant.
async function askQuestion() {
  const question = await vscode.window.showInputBox({
    prompt: 'Ask the Voice Assistant',
    placeHolder: 'e.g., "What does this function do?"'
  });
  if (!question) {
    return;
  }
  sendCommand(question);
}
// Return the word under the cursor, or null when the position is not on a word.
function getSymbolAtPosition(
  document: vscode.TextDocument,
  position: vscode.Position
): string | null {
  const range = document.getWordRangeAtPosition(position);
  return range ? document.getText(range) : null;
}
// Approximate the enclosing function with a simple heuristic: the 5 lines
// above and below the cursor, clamped to the document bounds.
function getCurrentFunction(
  document: vscode.TextDocument,
  position: vscode.Position
): string | null {
  const firstLine = Math.max(0, position.line - 5);
  const lastLine = Math.min(document.lineCount - 1, position.line + 5);
  const start = new vscode.Position(firstLine, 0);
  const end = new vscode.Position(lastLine, document.lineAt(lastLine).text.length);
  return document.getText(new vscode.Range(start, end));
}
// Escape HTML for webview
function escapeHtml(text: string): string {
return text
.replace(/&/g, '&')
.replace(/</g, '<')
.replace(/>/g, '>')
.replace(/"/g, '"')
.replace(/'/g, ''');
}
// Media command handlers
async function takeScreenshot() {
if (!ws || ws.readyState !== WebSocket.OPEN) {
vscode.window.showWarningMessage('Voice Assistant not connected');
return;
}
const editor = vscode.window.activeTextEditor;
const context = {
...lastContext,
selectedText: editor?.document.getText(editor.selection) || null
};
ws.send(JSON.stringify({
type: 'screenshot',
method: 'vscode',
context: context
}));
}
// Tracks whether a server-side recording session is currently active.
let isRecording = false;

// Ask the server to begin a recording session; refuses to start a second
// one while a session is already in flight.
async function startRecording() {
  if (!ws || ws.readyState !== WebSocket.OPEN) {
    vscode.window.showWarningMessage('Voice Assistant not connected');
    return;
  }
  if (isRecording) {
    vscode.window.showWarningMessage('Recording already in progress');
    return;
  }
  ws.send(JSON.stringify({ type: 'start_recording', context: lastContext }));
  isRecording = true;
}
// Ask the server to end the active recording session; warns when there is
// nothing to stop.
async function stopRecording() {
  if (!ws || ws.readyState !== WebSocket.OPEN) {
    vscode.window.showWarningMessage('Voice Assistant not connected');
    return;
  }
  if (!isRecording) {
    vscode.window.showWarningMessage('No recording in progress');
    return;
  }
  ws.send(JSON.stringify({ type: 'stop_recording' }));
  isRecording = false;
}
// Open the media history page in the default browser. The HTTP UI origin is
// derived from the configured WebSocket URL (ws://host:3001 -> http://host:3000).
function showMediaHistory() {
  const config = vscode.workspace.getConfiguration('voiceAssistant');
  const serverUrl = config.get<string>('serverUrl') || 'http://localhost:3000';
  const httpUrl = serverUrl
    .replace('ws://', 'http://')
    .replace('wss://', 'https://')
    .replace(':3001', ':3000');
  vscode.env.openExternal(vscode.Uri.parse(`${httpUrl}/media-ui.html`));
}
// Collect a free-text annotation from the user and attach it to the most
// recent capture on the server.
async function annotateLastCapture() {
  if (!ws || ws.readyState !== WebSocket.OPEN) {
    vscode.window.showWarningMessage('Voice Assistant not connected');
    return;
  }
  const annotation = await vscode.window.showInputBox({
    prompt: 'Add annotation to last capture',
    placeHolder: 'Enter your annotation...'
  });
  if (!annotation) {
    return;
  }
  ws.send(JSON.stringify({
    type: 'annotate_last',
    annotation: annotation,
    context: lastContext
  }));
  vscode.window.showInformationMessage('Annotation added');
}
// Extension teardown: close the server connection cleanly on shutdown.
export function deactivate() {
  disconnectFromServer();
}