
Model Context Protocol Server

by hyen43
stdio.js (1.28 kB)
import {getStreamOutput} from '../io/contents.js';
import {waitForStream, isInputFileDescriptor} from './wait-stream.js';

// Read the contents of `subprocess.std*` and|or wait for its completion
export const waitForStdioStreams = ({subprocess, encoding, buffer, maxBuffer, lines, stripFinalNewline, verboseInfo, streamInfo}) => subprocess.stdio.map((stream, fdNumber) => waitForSubprocessStream({
	stream,
	fdNumber,
	encoding,
	buffer: buffer[fdNumber],
	maxBuffer: maxBuffer[fdNumber],
	lines: lines[fdNumber],
	allMixed: false,
	stripFinalNewline,
	verboseInfo,
	streamInfo,
}));

// Read the contents of `subprocess.std*` or `subprocess.all` and|or wait for its completion
export const waitForSubprocessStream = async ({stream, fdNumber, encoding, buffer, maxBuffer, lines, allMixed, stripFinalNewline, verboseInfo, streamInfo}) => {
	if (!stream) {
		return;
	}

	const onStreamEnd = waitForStream(stream, fdNumber, streamInfo);

	if (isInputFileDescriptor(streamInfo, fdNumber)) {
		await onStreamEnd;
		return;
	}

	const [output] = await Promise.all([
		getStreamOutput({
			stream,
			onStreamEnd,
			fdNumber,
			encoding,
			buffer,
			maxBuffer,
			lines,
			allMixed,
			stripFinalNewline,
			verboseInfo,
			streamInfo,
		}),
		onStreamEnd,
	]);
	return output;
};
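The key pattern in waitForSubprocessStream is racing the stream's contents against its completion: Promise.all awaits both getStreamOutput() and onStreamEnd, so an 'error' event rejects the combined promise even while output is still being buffered. A minimal self-contained sketch of that pattern using only Node built-ins (readAndWait is an illustrative name, not part of this file):

import {once} from 'node:events';
import {Readable} from 'node:stream';

// Illustrative sketch: collect a stream's output while also waiting for
// its completion, so a stream 'error' rejects the combined promise.
const readAndWait = async stream => {
	// Resolves on 'end'; rejects if the stream emits 'error' first.
	const onStreamEnd = once(stream, 'end');

	const getOutput = async () => {
		const chunks = [];
		for await (const chunk of stream) {
			chunks.push(chunk);
		}

		return chunks.join('');
	};

	const [output] = await Promise.all([getOutput(), onStreamEnd]);
	return output;
};

const stream = Readable.from(['hello ', 'world']);
console.log(await readAndWait(stream)); // 'hello world'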

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/hyen43/mcpServer'
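The same endpoint can also be queried programmatically. A minimal sketch using the fetch API built into Node 18+ (the payload is logged as-is; no response schema is assumed here):

// Query the Glama MCP directory API for this server's metadata.
const response = await fetch('https://glama.ai/api/mcp/v1/servers/hyen43/mcpServer');

if (!response.ok) {
	throw new Error(`MCP directory API request failed: ${response.status}`);
}

const server = await response.json();
console.log(server);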

If you have feedback or need assistance with the MCP directory API, please join our Discord server.