Skip to main content
Glama
openai.py (1.87 kB)
import os
import warnings
from typing import Literal

from openai import OpenAI

from selfmemory.configs.embeddings.base import BaseEmbedderConfig
from selfmemory.embeddings.base import EmbeddingBase


class OpenAIEmbedding(EmbeddingBase):
    """Embedding backend that delegates to the OpenAI embeddings API."""

    def __init__(self, config: BaseEmbedderConfig | None = None):
        """Initialise the OpenAI client from config and environment.

        The API key is taken from the config, falling back to the
        ``OPENAI_API_KEY`` environment variable. The base URL falls back
        from the config to ``OPENAI_API_BASE`` (deprecated), then
        ``OPENAI_BASE_URL``, then the public OpenAI endpoint. Model and
        embedding dimensions default to ``text-embedding-3-small`` / 1536
        when unset.
        """
        super().__init__(config)

        # Fill in defaults only where the config left values unset/falsy.
        self.config.model = self.config.model or "text-embedding-3-small"
        self.config.embedding_dims = self.config.embedding_dims or 1536

        resolved_key = self.config.api_key or os.getenv("OPENAI_API_KEY")

        # First truthy candidate wins; empty strings are skipped, matching
        # the usual `a or b or c` fallback semantics.
        url_candidates = (
            self.config.openai_base_url,
            os.getenv("OPENAI_API_BASE"),
            os.getenv("OPENAI_BASE_URL"),
        )
        resolved_base_url = next(
            (url for url in url_candidates if url),
            "https://api.openai.com/v1",
        )

        # Emit a deprecation notice whenever the legacy variable is set,
        # even if the config value shadowed it above.
        if os.environ.get("OPENAI_API_BASE"):
            warnings.warn(
                "The environment variable 'OPENAI_API_BASE' is deprecated and will be removed in the 0.1.80. "
                "Please use 'OPENAI_BASE_URL' instead.",
                DeprecationWarning,
                stacklevel=2,
            )

        self.client = OpenAI(api_key=resolved_key, base_url=resolved_base_url)

    def embed(
        self, text, memory_action: Literal["add", "search", "update"] | None = None
    ):
        """
        Get the embedding for the given text using OpenAI.

        Args:
            text (str): The text to embed.
            memory_action (optional): The type of embedding to use. Must be one of "add", "search", or "update".
                Defaults to None.

        Returns:
            list: The embedding vector.
        """
        # Newlines are flattened to spaces before the API call.
        flattened = text.replace("\n", " ")
        response = self.client.embeddings.create(
            input=[flattened],
            model=self.config.model,
            dimensions=self.config.embedding_dims,
        )
        return response.data[0].embedding

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/shrijayan/SelfMemory'

If you have feedback or need assistance with the MCP directory API, please join our Discord server