Skip to main content
Glama

Model Context Protocol Server

by hyen43
iterable.js (951 B)
import {BINARY_ENCODINGS} from '../arguments/encoding-option.js';
import {getFromStream} from '../arguments/fd-options.js';
import {iterateOnSubprocessStream} from '../io/iterate.js';

// Expose one of the subprocess's output streams as an async iterable.
export const createIterable = (subprocess, encoding, {
	from,
	binary: isBinary = false,
	preserveNewlines = false,
} = {}) => {
	// Binary mode is forced when the encoding itself is a binary one,
	// even if the `binary` option was not set.
	const binary = isBinary || BINARY_ENCODINGS.has(encoding);
	const stream = getFromStream(subprocess, from);
	const chunkGenerator = iterateOnSubprocessStream({
		subprocessStdout: stream,
		subprocess,
		binary,
		shouldEncode: true,
		encoding,
		preserveNewlines,
	});
	return iterateOverChunks(chunkGenerator, stream, subprocess);
};

// Drain the chunk generator, then clean up the stream and wait for the
// subprocess to complete before the iterable finishes.
const iterateOverChunks = async function * (chunkGenerator, stream, subprocess) {
	try {
		yield * chunkGenerator;
	} finally {
		// If the consumer stopped iterating early, stop the underlying
		// stream so the subprocess is not left producing output.
		if (stream.readable) {
			stream.destroy();
		}

		// Propagate any subprocess failure to the iterator's consumer.
		await subprocess;
	}
};

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/hyen43/mcpServer'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.