Skip to main content
Glama
embeddings.ts (1.85 kB)
import OpenAIClient, { type OpenAIClientConfig } from "@ideadesignmedia/open-ai.js";
import { logErr } from "./util.js";

/** Turns text into a numeric embedding vector, with separate entry points for stored documents and search queries. */
export interface EmbeddingProvider {
  embedDocument(text: string): Promise<number[]>;
  embedQuery(text: string): Promise<number[]>;
}

/** Optional overrides; each falls back to an environment variable (see createOpenAiEmbeddingProvider). */
export type OpenAiEmbeddingOptions = {
  apiKey?: string;
  model?: string;
  baseURL?: string;
};

/**
 * Pull the first embedding vector out of an OpenAI-style embeddings response.
 *
 * @param result  Response shaped like `{ data: [{ embedding: number[] }] }`.
 * @param context Label ("document" / "query") used in the error message.
 * @returns The first non-empty embedding vector.
 * @throws Error when the response carries no non-empty vector.
 */
function extractVector(result: { data?: Array<{ embedding?: number[] }> }, context: string): number[] {
  const vector = result?.data?.[0]?.embedding;
  if (!Array.isArray(vector) || vector.length === 0) {
    throw new Error(`No embedding returned for ${context}`);
  }
  return vector;
}

/**
 * Create an EmbeddingProvider backed by an OpenAI-compatible embeddings API.
 *
 * Configuration precedence (option first, then environment variable):
 * - API key:  `opts.apiKey`  → `MEMORY_EMBEDDING_KEY` (required)
 * - Base URL: `opts.baseURL` → `MEMORY_EMBEDDING_BASE_URL` (optional host override)
 * - Model:    `opts.model`   → `MEMORY_EMBED_MODEL` → "text-embedding-3-small"
 *
 * @throws Error when no API key can be resolved.
 */
export function createOpenAiEmbeddingProvider(opts: OpenAiEmbeddingOptions = {}): EmbeddingProvider {
  const apiKey = opts.apiKey || process.env.MEMORY_EMBEDDING_KEY;
  if (!apiKey) throw new Error("Missing embedding API key. Set MEMORY_EMBEDDING_KEY.");

  const config: OpenAIClientConfig = { key: apiKey };
  // Resolve the host override once instead of re-evaluating the fallback
  // chain in both the condition and the assignment (previous duplication).
  const baseURL = opts.baseURL || process.env.MEMORY_EMBEDDING_BASE_URL;
  if (baseURL) {
    config.host = baseURL;
  }

  const client = new OpenAIClient(config);
  const model = opts.model || process.env.MEMORY_EMBED_MODEL || "text-embedding-3-small";

  // Both paths use the same model; they differ only in the error-context label.
  return {
    async embedDocument(text: string) {
      const res = await client.getEmbedding(text, model);
      return extractVector(res, "document");
    },
    async embedQuery(text: string) {
      const res = await client.getEmbedding(text, model);
      return extractVector(res, "query");
    },
  };
}

/**
 * Best-effort provider construction: returns undefined (and logs at info
 * level) instead of throwing when configuration is missing, so callers can
 * run with embeddings disabled.
 */
export function createDefaultEmbeddingProvider(): EmbeddingProvider | undefined {
  try {
    return createOpenAiEmbeddingProvider();
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err);
    logErr("info: embedding provider disabled:", msg);
    return undefined;
  }
}

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ideadesignmedia/memory-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.