Skip to main content
Glama
by Microsoft
echomodel.ts — 936 B
import { LanguageModel } from "./chat" import { renderMessagesToMarkdown } from "./chatrender" import { deleteEmptyValues } from "./cleaners" import { MODEL_PROVIDER_ECHO } from "./constants" export const EchoModel = Object.freeze<LanguageModel>({ id: MODEL_PROVIDER_ECHO, completer: async (req, connection, options) => { const { messages, model, ...rest } = req const { partialCb, inner } = options const text = `## Messages ${await renderMessagesToMarkdown(messages, { textLang: "markdown", assistant: true, system: true, user: true, })} ## Request \`\`\`json ${JSON.stringify(deleteEmptyValues({ messages, ...rest }), null, 2)} \`\`\` ` partialCb?.({ responseChunk: text, tokensSoFar: 0, responseSoFar: text, inner, }) return { finishReason: "stop", text, } }, })

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/microsoft/genaiscript'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.