Skip to main content
Glama

Model Context Protocol Server

by hyen43
buffer-messages.js (1.15 kB)
import {checkIpcMaxBuffer} from '../io/max-buffer.js';
import {shouldLogIpc, logIpcOutput} from '../verbose/ipc.js';
import {getFdSpecificValue} from '../arguments/specific.js';
import {loopOnMessages} from './get-each.js';

// Collect the IPC messages emitted by the subprocess as they arrive.
// Each message may be buffered (subject to the `maxBuffer` limit) and/or
// logged in verbose mode. Resolves with the shared `ipcOutput` array once
// the message iteration completes.
export const waitForIpcOutput = async ({
	subprocess,
	buffer: bufferOption,
	maxBuffer: maxBufferOption,
	ipc,
	ipcOutput,
	verboseInfo,
}) => {
	// Without an IPC channel there is nothing to listen for.
	if (!ipc) {
		return ipcOutput;
	}

	const shouldLog = shouldLogIpc(verboseInfo);
	// `buffer` and `maxBuffer` are fd-specific options; resolve their
	// values for the 'ipc' pseudo file descriptor.
	const shouldBuffer = getFdSpecificValue(bufferOption, 'ipc');
	const maxBufferLimit = getFdSpecificValue(maxBufferOption, 'ipc');

	const messages = loopOnMessages({
		anyProcess: subprocess,
		channel: subprocess.channel,
		isSubprocess: false,
		ipc,
		shouldAwait: false,
		reference: true,
	});

	for await (const ipcMessage of messages) {
		if (shouldBuffer) {
			// Enforce the `maxBuffer` limit before storing this message.
			checkIpcMaxBuffer(subprocess, ipcOutput, maxBufferLimit);
			ipcOutput.push(ipcMessage);
		}

		if (shouldLog) {
			logIpcOutput(ipcMessage, verboseInfo);
		}
	}

	return ipcOutput;
};

// Return the buffered IPC messages once iteration has settled.
// `Promise.allSettled` is used (instead of `await`) so a rejected
// iteration does not rethrow here — the buffered output is returned
// regardless of how the iteration ended.
export const getBufferedIpcOutput = async (ipcOutputPromise, ipcOutput) => {
	await Promise.allSettled([ipcOutputPromise]);
	return ipcOutput;
};

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/hyen43/mcpServer'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.