
SRT Translation MCP Server

by omd0
test-metadata-detection.js (3.32 kB)
#!/usr/bin/env node

/**
 * Test script to demonstrate the improved detect_conversations function
 * that returns metadata only (no actual text content)
 */

import { spawn } from 'child_process';
import fs from 'fs';

async function testMetadataDetection() {
  console.log('Testing improved detect_conversations function...\n');

  // Read a sample SRT file
  const srtContent = fs.readFileSync('/home/omd/Documents/Mix/SRT-MCP/example.srt', 'utf8');

  // Start MCP server
  const serverProcess = spawn('node', ['dist/mcp/server.js'], {
    stdio: ['pipe', 'pipe', 'inherit']
  });

  try {
    // Send MCP request to detect conversations
    const request = {
      jsonrpc: '2.0',
      id: 1,
      method: 'tools/call',
      params: {
        name: 'detect_conversations',
        arguments: {
          content: srtContent
        }
      }
    };

    console.log('Sending detect_conversations request...\n');
    serverProcess.stdin.write(JSON.stringify(request) + '\n');
    serverProcess.stdin.end();

    // Wait for response
    let responseData = '';
    serverProcess.stdout.on('data', (data) => {
      responseData += data.toString();
    });

    await new Promise((resolve) => {
      serverProcess.stdout.on('end', resolve);
    });

    // Parse and display response
    const response = JSON.parse(responseData);
    console.log('Response received:');
    console.log('==================\n');

    if (response.result && response.result.content) {
      const metadata = JSON.parse(response.result.content[0].text);

      console.log('📊 METADATA SUMMARY:');
      console.log(`Total chunks: ${metadata.chunkCount}`);
      console.log(`Total duration: ${(metadata.totalDuration / 1000).toFixed(2)} seconds`);
      console.log(`Language distribution:`, metadata.languageDistribution);
      console.log(`Speaker distribution:`, metadata.speakerDistribution);

      console.log('\n📝 CHUNK DETAILS (first 3 chunks):');
      metadata.chunks.slice(0, 3).forEach((chunk, index) => {
        console.log(`\nChunk ${index + 1}:`);
        console.log(`  ID: ${chunk.id}`);
        console.log(`  Time: ${chunk.startTime} - ${chunk.endTime} (${(chunk.duration / 1000).toFixed(2)}s)`);
        console.log(`  Subtitles: ${chunk.subtitleCount}`);
        console.log(`  Speaker: ${chunk.speaker || 'none'}`);
        console.log(`  Language: ${chunk.languageInfo.primary} (confidence: ${chunk.languageInfo.confidence})`);
        console.log(`  Content type: ${chunk.contentType}`);
        console.log(`  Complexity: ${chunk.complexity}`);
        console.log(`  Priority: ${chunk.translationPriority}`);
        console.log(`  Topic keywords: ${chunk.topicKeywords.join(', ')}`);
        console.log(`  Language indicators: ${chunk.languageInfo.indicators.join(', ')}`);
      });

      console.log('\n✅ SUCCESS: Function returns metadata only - no actual text content!');
      console.log('   This allows AI to make informed translation decisions without being overwhelmed by text.');
    } else {
      console.log('❌ Error:', response.error || 'Unknown error');
    }
  } catch (error) {
    console.error('❌ Test failed:', error.message);
  } finally {
    serverProcess.kill();
  }
}

// Run the test
testMetadataDetection().catch(console.error);
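
For reference, the fields the test script prints imply a metadata payload shaped roughly as follows. This is an illustrative sketch inferred only from the properties read above; the field names come from test-metadata-detection.js, while the example values are hypothetical and not the server's authoritative schema.

// Illustrative only: shape inferred from the fields the test script reads.
// Field names come from test-metadata-detection.js; the values are made up.
const exampleMetadata = {
  chunkCount: 2,
  totalDuration: 15500,              // milliseconds, per the /1000 conversion above
  languageDistribution: { en: 2 },
  speakerDistribution: { 'Speaker 1': 2 },
  chunks: [
    {
      id: 'chunk-1',
      startTime: '00:00:01,000',
      endTime: '00:00:08,500',
      duration: 7500,                // milliseconds
      subtitleCount: 3,
      speaker: 'Speaker 1',
      languageInfo: {
        primary: 'en',
        confidence: 0.95,
        indicators: ['common English words']
      },
      contentType: 'dialogue',
      complexity: 'medium',
      translationPriority: 'high',
      topicKeywords: ['greeting', 'introduction']
      // Note: no subtitle text is included, only metadata
    }
  ]
};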

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/omd0/srt-mcp'
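
If you prefer to query the directory API from Node.js instead of curl, a minimal sketch using the same endpoint is shown below. It assumes Node.js 18+ (built-in fetch) and a JSON response; the exact response schema is not documented here.

// Minimal sketch: fetch the server entry from the Glama MCP directory API.
// Assumes Node.js 18+ and that the endpoint returns JSON.
const res = await fetch('https://glama.ai/api/mcp/v1/servers/omd0/srt-mcp');
if (!res.ok) throw new Error(`Request failed: ${res.status}`);
const server = await res.json();
console.log(server);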

If you have feedback or need assistance with the MCP directory API, please join our Discord server.