Skip to main content
Glama
createBufferedReadable.js • 2.1 kB
import { Readable } from "node:stream"; import { ByteArrayCollector } from "./ByteArrayCollector"; import { createBufferedReadableStream, flush, merge, modeOf, sizeOf } from "./createBufferedReadableStream"; import { isReadableStream } from "./stream-type-check"; export function createBufferedReadable(upstream, size, logger) { if (isReadableStream(upstream)) { return createBufferedReadableStream(upstream, size, logger); } const downstream = new Readable({ read() { } }); let streamBufferingLoggedWarning = false; let bytesSeen = 0; const buffers = [ "", new ByteArrayCollector((size) => new Uint8Array(size)), new ByteArrayCollector((size) => Buffer.from(new Uint8Array(size))), ]; let mode = -1; upstream.on("data", (chunk) => { const chunkMode = modeOf(chunk, true); if (mode !== chunkMode) { if (mode >= 0) { downstream.push(flush(buffers, mode)); } mode = chunkMode; } if (mode === -1) { downstream.push(chunk); return; } const chunkSize = sizeOf(chunk); bytesSeen += chunkSize; const bufferSize = sizeOf(buffers[mode]); if (chunkSize >= size && bufferSize === 0) { downstream.push(chunk); } else { const newSize = merge(buffers, mode, chunk); if (!streamBufferingLoggedWarning && bytesSeen > size * 2) { streamBufferingLoggedWarning = true; logger?.warn(`@smithy/util-stream - stream chunk size ${chunkSize} is below threshold of ${size}, automatically buffering.`); } if (newSize >= size) { downstream.push(flush(buffers, mode)); } } }); upstream.on("end", () => { if (mode !== -1) { const remainder = flush(buffers, mode); if (sizeOf(remainder) > 0) { downstream.push(remainder); } } downstream.push(null); }); return downstream; }

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/consigcody94/office-whisperer'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.