We provide all the information about MCP servers via our MCP API.
curl -X GET 'https://glama.ai/api/mcp/v1/servers/VinnyCarter05/investing-mcp'
If you have feedback or need assistance with the MCP directory API, please join our Discord server.
generator.py•1.61 KiB
"""Embedding generation for semantic search."""
class EmbeddingGenerator:
    """Generate text embeddings with a sentence-transformers model.

    The expensive ``sentence_transformers`` import is deferred until an
    embedding is actually requested, so constructing an instance is cheap.
    """

    def __init__(self, model_name: str = "all-MiniLM-L6-v2"):
        """Record the model configuration without loading anything.

        Args:
            model_name: Name of sentence-transformers model
        """
        self.model_name = model_name
        self._model = None

    @property
    def model(self):
        """Return the SentenceTransformer instance, loading it on first use."""
        if self._model is None:
            # Imported here (not at module top) to keep import-time cost low.
            from sentence_transformers import SentenceTransformer
            self._model = SentenceTransformer(self.model_name)
        return self._model

    def encode(self, text: str) -> list[float]:
        """Generate embedding for a single text.

        Args:
            text: Input text

        Returns:
            Embedding vector as list of floats
        """
        return self.model.encode(text).tolist()

    def encode_batch(self, texts: list[str]) -> list[list[float]]:
        """Generate embeddings for multiple texts.

        Args:
            texts: List of input texts

        Returns:
            List of embedding vectors
        """
        vectors = self.model.encode(texts)
        return [vector.tolist() for vector in vectors]

    def get_dimension(self) -> int:
        """Get embedding dimension.

        Returns:
            Dimension of embedding vectors
        """
        return self.model.get_sentence_embedding_dimension()