embeddings.py (1.22 kB)
"""Ollama embedding client wrapper.""" from typing import List import ollama class EmbeddingClient: """Client for generating embeddings via Ollama.""" def __init__( self, base_url: str, model: str = "nomic-embed-text" ): self.base_url = base_url self.model = model self.client = ollama.Client(host=base_url) async def embed_text(self, text: str) -> List[float]: """Generate embedding for a single text.""" response = self.client.embeddings( model=self.model, prompt=text ) return response["embedding"] async def embed_batch(self, texts: List[str]) -> List[List[float]]: """Generate embeddings for multiple texts.""" embeddings = [] for text in texts: embedding = await self.embed_text(text) embeddings.append(embedding) return embeddings def get_embedding_dimension(self) -> int: """Get the dimension of embeddings for this model.""" # nomic-embed-text produces 768-dimensional vectors if self.model == "nomic-embed-text": return 768 # Add other models as needed return 768


MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/timerickson/personal-rag-mcp'
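
The same lookup can be scripted. The sketch below uses only the Python standard library and assumes the endpoint returns JSON and requires no authentication:

import json
import urllib.request

# Fetch the directory entry for this server (same endpoint as the curl call above).
url = "https://glama.ai/api/mcp/v1/servers/timerickson/personal-rag-mcp"
with urllib.request.urlopen(url) as response:
    server_info = json.load(response)

# The exact keys depend on the API's response format.
print(server_info)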

If you have feedback or need assistance with the MCP directory API, please join our Discord server.