import { OpenAICompatibleEmbedder } from "./openai-compatible"
import { IEmbedder, EmbeddingResponse, EmbedderInfo } from "../interfaces/embedder"
import { GEMINI_MAX_ITEM_TOKENS } from "../constants"
/**
* Gemini embedder implementation that wraps the OpenAI Compatible embedder
* with configuration for Google's Gemini embedding API.
*
* Supported models:
* - text-embedding-004 (dimension: 768)
 * - gemini-embedding-001 (default dimension: 3072)
*/
export class GeminiEmbedder implements IEmbedder {
private readonly openAICompatibleEmbedder: OpenAICompatibleEmbedder
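	// Gemini's OpenAI-compatible endpoint; the wrapped OpenAICompatibleEmbedder
	// sends standard OpenAI-style embedding requests to this base URL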
private static readonly GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai/"
private static readonly DEFAULT_MODEL = "gemini-embedding-001"
private readonly modelId: string
/**
* Creates a new Gemini embedder
* @param apiKey The Gemini API key for authentication
* @param modelId The model ID to use (defaults to gemini-embedding-001)
*/
constructor(apiKey: string, modelId?: string) {
if (!apiKey) {
throw new Error("API key is required for Gemini embedder")
}
// Use provided model or default
this.modelId = modelId || GeminiEmbedder.DEFAULT_MODEL
// Create an OpenAI Compatible embedder with Gemini's configuration
this.openAICompatibleEmbedder = new OpenAICompatibleEmbedder(
GeminiEmbedder.GEMINI_BASE_URL,
apiKey,
this.modelId,
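			// Per-item token limit for Gemini inputs (shared constant from ../constants)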
GEMINI_MAX_ITEM_TOKENS,
)
}
/**
* Creates embeddings for the given texts using Gemini's embedding API
* @param texts Array of text strings to embed
* @param model Optional model identifier (uses constructor model if not provided)
* @returns Promise resolving to embedding response
*/
	async createEmbeddings(texts: string[], model?: string): Promise<EmbeddingResponse> {
		// Use the provided model, falling back to the model chosen at construction time
		const modelToUse = model || this.modelId
		return this.openAICompatibleEmbedder.createEmbeddings(texts, modelToUse)
	}
/**
* Validates the Gemini embedder configuration by delegating to the underlying OpenAI-compatible embedder
* @returns Promise resolving to validation result with success status and optional error message
*/
	async validateConfiguration(): Promise<{ valid: boolean; error?: string }> {
		// Delegate validation to the OpenAI-compatible embedder; because requests go
		// to Gemini's base URL, any error messages it reports are specific to Gemini
		return this.openAICompatibleEmbedder.validateConfiguration()
	}
/**
* Returns information about this embedder
*/
get embedderInfo(): EmbedderInfo {
return {
name: "gemini",
}
}
/**
* Gets the optimal batch size for this Gemini embedder
*/
get optimalBatchSize(): number {
// Return recommended batch size for Gemini
return 40
}
}
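
/*
 * Example usage (an illustrative sketch, not part of this module; the environment
 * variable name and the surrounding async context are assumptions):
 *
 *   const embedder = new GeminiEmbedder(process.env.GEMINI_API_KEY ?? "", "gemini-embedding-001")
 *
 *   const validation = await embedder.validateConfiguration()
 *   if (!validation.valid) {
 *       throw new Error(validation.error ?? "Gemini embedder configuration is invalid")
 *   }
 *
 *   const response = await embedder.createEmbeddings(["function add(a, b) { return a + b }"])
 *   // response is the EmbeddingResponse returned by the wrapped OpenAICompatibleEmbedder
 */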