
AutoDev Codebase MCP Server

by anrgct
service-factory.spec.ts (15.4 kB)
import { vitest, describe, it, expect, beforeEach } from "vitest"
import type { MockedClass, MockedFunction } from "vitest"
import { CodeIndexServiceFactory } from "../service-factory"
import { CodeIndexConfigManager } from "../config-manager"
import { CacheManager } from "../cache-manager"
import { OpenAiEmbedder } from "../embedders/openai"
import { CodeIndexOllamaEmbedder } from "../embedders/ollama"
import { OpenAICompatibleEmbedder } from "../embedders/openai-compatible"
import { QdrantVectorStore } from "../vector-store/qdrant-client"

// Mock the embedders and vector store
vitest.mock("../embedders/openai")
vitest.mock("../embedders/ollama")
vitest.mock("../embedders/openai-compatible")
vitest.mock("../vector-store/qdrant-client")

// Mock the embedding models module
vitest.mock("../../../shared/embeddingModels", () => ({
	getDefaultModelId: vitest.fn(),
	getModelDimension: vitest.fn(),
}))

const MockedOpenAiEmbedder = OpenAiEmbedder as MockedClass<typeof OpenAiEmbedder>
const MockedCodeIndexOllamaEmbedder = CodeIndexOllamaEmbedder as MockedClass<typeof CodeIndexOllamaEmbedder>
const MockedOpenAICompatibleEmbedder = OpenAICompatibleEmbedder as MockedClass<typeof OpenAICompatibleEmbedder>
const MockedQdrantVectorStore = QdrantVectorStore as MockedClass<typeof QdrantVectorStore>

// Import the mocked functions
import { getDefaultModelId, getModelDimension } from "../../../shared/embeddingModels"

const mockGetDefaultModelId = getDefaultModelId as MockedFunction<typeof getDefaultModelId>
const mockGetModelDimension = getModelDimension as MockedFunction<typeof getModelDimension>

describe("CodeIndexServiceFactory", () => {
	let factory: CodeIndexServiceFactory
	let mockConfigManager: any
	let mockCacheManager: any

	beforeEach(() => {
		vitest.clearAllMocks()

		mockConfigManager = {
			getConfig: vitest.fn(),
		}
		mockCacheManager = {}

		factory = new CodeIndexServiceFactory(mockConfigManager, "/test/workspace", mockCacheManager)
	})

	describe("createEmbedder", () => {
		it("should pass model ID to OpenAI embedder when using OpenAI provider", () => {
			// Arrange
			const testModelId = "text-embedding-3-large"
			const testConfig = {
				embedderProvider: "openai",
				modelId: testModelId,
				openAiOptions: {
					openAiNativeApiKey: "test-api-key",
				},
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)

			// Act
			factory.createEmbedder()

			// Assert
			expect(MockedOpenAiEmbedder).toHaveBeenCalledWith({
				openAiNativeApiKey: "test-api-key",
				openAiEmbeddingModelId: testModelId,
			})
		})

		it("should pass model ID to Ollama embedder when using Ollama provider", () => {
			// Arrange
			const testModelId = "nomic-embed-text:latest"
			const testConfig = {
				embedderProvider: "ollama",
				modelId: testModelId,
				ollamaOptions: {
					ollamaBaseUrl: "http://localhost:11434",
				},
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)

			// Act
			factory.createEmbedder()

			// Assert
			expect(MockedCodeIndexOllamaEmbedder).toHaveBeenCalledWith({
				ollamaBaseUrl: "http://localhost:11434",
				ollamaModelId: testModelId,
			})
		})

		it("should handle undefined model ID for OpenAI embedder", () => {
			// Arrange
			const testConfig = {
				embedderProvider: "openai",
				modelId: undefined,
				openAiOptions: {
					openAiNativeApiKey: "test-api-key",
				},
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)

			// Act
			factory.createEmbedder()

			// Assert
			expect(MockedOpenAiEmbedder).toHaveBeenCalledWith({
				openAiNativeApiKey: "test-api-key",
				openAiEmbeddingModelId: undefined,
			})
		})

		it("should handle undefined model ID for Ollama embedder", () => {
			// Arrange
			const testConfig = {
				embedderProvider: "ollama",
				modelId: undefined,
				ollamaOptions: {
					ollamaBaseUrl: "http://localhost:11434",
				},
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)

			// Act
			factory.createEmbedder()

			// Assert
			expect(MockedCodeIndexOllamaEmbedder).toHaveBeenCalledWith({
				ollamaBaseUrl: "http://localhost:11434",
				ollamaModelId: undefined,
			})
		})

		it("should throw error when OpenAI API key is missing", () => {
			// Arrange
			const testConfig = {
				embedderProvider: "openai",
				modelId: "text-embedding-3-large",
				openAiOptions: {
					openAiNativeApiKey: undefined,
				},
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)

			// Act & Assert
			expect(() => factory.createEmbedder()).toThrow("OpenAI configuration missing for embedder creation")
		})

		it("should throw error when Ollama base URL is missing", () => {
			// Arrange
			const testConfig = {
				embedderProvider: "ollama",
				modelId: "nomic-embed-text:latest",
				ollamaOptions: {
					ollamaBaseUrl: undefined,
				},
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)

			// Act & Assert
			expect(() => factory.createEmbedder()).toThrow("Ollama configuration missing for embedder creation")
		})

		it("should pass model ID to OpenAI Compatible embedder when using OpenAI Compatible provider", () => {
			// Arrange
			const testModelId = "text-embedding-3-large"
			const testConfig = {
				embedderProvider: "openai-compatible",
				modelId: testModelId,
				openAiCompatibleOptions: {
					baseUrl: "https://api.example.com/v1",
					apiKey: "test-api-key",
				},
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)

			// Act
			factory.createEmbedder()

			// Assert
			expect(MockedOpenAICompatibleEmbedder).toHaveBeenCalledWith(
				"https://api.example.com/v1",
				"test-api-key",
				testModelId,
			)
		})

		it("should handle undefined model ID for OpenAI Compatible embedder", () => {
			// Arrange
			const testConfig = {
				embedderProvider: "openai-compatible",
				modelId: undefined,
				openAiCompatibleOptions: {
					baseUrl: "https://api.example.com/v1",
					apiKey: "test-api-key",
				},
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)

			// Act
			factory.createEmbedder()

			// Assert
			expect(MockedOpenAICompatibleEmbedder).toHaveBeenCalledWith(
				"https://api.example.com/v1",
				"test-api-key",
				undefined,
			)
		})

		it("should throw error when OpenAI Compatible base URL is missing", () => {
			// Arrange
			const testConfig = {
				embedderProvider: "openai-compatible",
				modelId: "text-embedding-3-large",
				openAiCompatibleOptions: {
					baseUrl: undefined,
					apiKey: "test-api-key",
				},
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)

			// Act & Assert
			expect(() => factory.createEmbedder()).toThrow(
				"OpenAI Compatible configuration missing for embedder creation",
			)
		})

		it("should throw error when OpenAI Compatible API key is missing", () => {
			// Arrange
			const testConfig = {
				embedderProvider: "openai-compatible",
				modelId: "text-embedding-3-large",
				openAiCompatibleOptions: {
					baseUrl: "https://api.example.com/v1",
					apiKey: undefined,
				},
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)

			// Act & Assert
			expect(() => factory.createEmbedder()).toThrow(
				"OpenAI Compatible configuration missing for embedder creation",
			)
		})

		it("should throw error when OpenAI Compatible options are missing", () => {
			// Arrange
			const testConfig = {
				embedderProvider: "openai-compatible",
				modelId: "text-embedding-3-large",
				openAiCompatibleOptions: undefined,
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)

			// Act & Assert
			expect(() => factory.createEmbedder()).toThrow(
				"OpenAI Compatible configuration missing for embedder creation",
			)
		})

		it("should throw error for invalid embedder provider", () => {
			// Arrange
			const testConfig = {
				embedderProvider: "invalid-provider",
				modelId: "some-model",
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)

			// Act & Assert
			expect(() => factory.createEmbedder()).toThrow("Invalid embedder type configured: invalid-provider")
		})
	})

	describe("createVectorStore", () => {
		beforeEach(() => {
			vitest.clearAllMocks()
			mockGetDefaultModelId.mockReturnValue("default-model")
		})

		it("should use config.modelId for OpenAI provider", () => {
			// Arrange
			const testModelId = "text-embedding-3-large"
			const testConfig = {
				embedderProvider: "openai",
				modelId: testModelId,
				qdrantUrl: "http://localhost:6333",
				qdrantApiKey: "test-key",
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)
			mockGetModelDimension.mockReturnValue(3072)

			// Act
			factory.createVectorStore()

			// Assert
			expect(mockGetModelDimension).toHaveBeenCalledWith("openai", testModelId)
			expect(MockedQdrantVectorStore).toHaveBeenCalledWith(
				"/test/workspace",
				"http://localhost:6333",
				3072,
				"test-key",
			)
		})

		it("should use config.modelId for Ollama provider", () => {
			// Arrange
			const testModelId = "nomic-embed-text:latest"
			const testConfig = {
				embedderProvider: "ollama",
				modelId: testModelId,
				qdrantUrl: "http://localhost:6333",
				qdrantApiKey: "test-key",
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)
			mockGetModelDimension.mockReturnValue(768)

			// Act
			factory.createVectorStore()

			// Assert
			expect(mockGetModelDimension).toHaveBeenCalledWith("ollama", testModelId)
			expect(MockedQdrantVectorStore).toHaveBeenCalledWith(
				"/test/workspace",
				"http://localhost:6333",
				768,
				"test-key",
			)
		})

		it("should use config.modelId for OpenAI Compatible provider", () => {
			// Arrange
			const testModelId = "text-embedding-3-large"
			const testConfig = {
				embedderProvider: "openai-compatible",
				modelId: testModelId,
				qdrantUrl: "http://localhost:6333",
				qdrantApiKey: "test-key",
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)
			mockGetModelDimension.mockReturnValue(3072)

			// Act
			factory.createVectorStore()

			// Assert
			expect(mockGetModelDimension).toHaveBeenCalledWith("openai-compatible", testModelId)
			expect(MockedQdrantVectorStore).toHaveBeenCalledWith(
				"/test/workspace",
				"http://localhost:6333",
				3072,
				"test-key",
			)
		})

		it("should prioritize manual modelDimension over getModelDimension for OpenAI Compatible provider", () => {
			// Arrange
			const testModelId = "custom-model"
			const manualDimension = 1024
			const testConfig = {
				embedderProvider: "openai-compatible",
				modelId: testModelId,
				openAiCompatibleOptions: {
					modelDimension: manualDimension,
				},
				qdrantUrl: "http://localhost:6333",
				qdrantApiKey: "test-key",
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)
			mockGetModelDimension.mockReturnValue(768) // This should be ignored

			// Act
			factory.createVectorStore()

			// Assert
			expect(mockGetModelDimension).not.toHaveBeenCalled()
			expect(MockedQdrantVectorStore).toHaveBeenCalledWith(
				"/test/workspace",
				"http://localhost:6333",
				manualDimension,
				"test-key",
			)
		})

		it("should fall back to getModelDimension when manual modelDimension is not set for OpenAI Compatible", () => {
			// Arrange
			const testModelId = "custom-model"
			const testConfig = {
				embedderProvider: "openai-compatible",
				modelId: testModelId,
				openAiCompatibleOptions: {
					baseUrl: "https://api.example.com/v1",
					apiKey: "test-key",
				},
				qdrantUrl: "http://localhost:6333",
				qdrantApiKey: "test-key",
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)
			mockGetModelDimension.mockReturnValue(768)

			// Act
			factory.createVectorStore()

			// Assert
			expect(mockGetModelDimension).toHaveBeenCalledWith("openai-compatible", testModelId)
			expect(MockedQdrantVectorStore).toHaveBeenCalledWith(
				"/test/workspace",
				"http://localhost:6333",
				768,
				"test-key",
			)
		})

		it("should throw error when manual modelDimension is invalid for OpenAI Compatible", () => {
			// Arrange
			const testModelId = "custom-model"
			const testConfig = {
				embedderProvider: "openai-compatible",
				modelId: testModelId,
				openAiCompatibleOptions: {
					modelDimension: 0, // Invalid dimension
				},
				qdrantUrl: "http://localhost:6333",
				qdrantApiKey: "test-key",
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)
			mockGetModelDimension.mockReturnValue(undefined)

			// Act & Assert
			expect(() => factory.createVectorStore()).toThrow(
				"Could not determine vector dimension for model 'custom-model' with provider 'openai-compatible'. Please ensure the 'Embedding Dimension' is correctly set in the OpenAI-Compatible provider settings.",
			)
		})

		it("should throw error when both manual dimension and getModelDimension fail for OpenAI Compatible", () => {
			// Arrange
			const testModelId = "unknown-model"
			const testConfig = {
				embedderProvider: "openai-compatible",
				modelId: testModelId,
				openAiCompatibleOptions: {
					baseUrl: "https://api.example.com/v1",
					apiKey: "test-key",
				},
				qdrantUrl: "http://localhost:6333",
				qdrantApiKey: "test-key",
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)
			mockGetModelDimension.mockReturnValue(undefined)

			// Act & Assert
			expect(() => factory.createVectorStore()).toThrow(
				"Could not determine vector dimension for model 'unknown-model' with provider 'openai-compatible'. Please ensure the 'Embedding Dimension' is correctly set in the OpenAI-Compatible provider settings.",
			)
		})

		it("should use default model when config.modelId is undefined", () => {
			// Arrange
			const testConfig = {
				embedderProvider: "openai",
				modelId: undefined,
				qdrantUrl: "http://localhost:6333",
				qdrantApiKey: "test-key",
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)
			mockGetModelDimension.mockReturnValue(1536)

			// Act
			factory.createVectorStore()

			// Assert
			expect(mockGetModelDimension).toHaveBeenCalledWith("openai", "default-model")
			expect(MockedQdrantVectorStore).toHaveBeenCalledWith(
				"/test/workspace",
				"http://localhost:6333",
				1536,
				"test-key",
			)
		})

		it("should throw error when vector dimension cannot be determined", () => {
			// Arrange
			const testConfig = {
				embedderProvider: "openai",
				modelId: "unknown-model",
				qdrantUrl: "http://localhost:6333",
				qdrantApiKey: "test-key",
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)
			mockGetModelDimension.mockReturnValue(undefined)

			// Act & Assert
			expect(() => factory.createVectorStore()).toThrow(
				"Could not determine vector dimension for model 'unknown-model' with provider 'openai'. Check model profiles or configuration.",
			)
		})

		it("should throw error when Qdrant URL is missing", () => {
			// Arrange
			const testConfig = {
				embedderProvider: "openai",
				modelId: "text-embedding-3-small",
				qdrantUrl: undefined,
				qdrantApiKey: "test-key",
			}
			mockConfigManager.getConfig.mockReturnValue(testConfig as any)
			mockGetModelDimension.mockReturnValue(1536)

			// Act & Assert
			expect(() => factory.createVectorStore()).toThrow("Qdrant URL missing for vector store creation")
		})
	})
})
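
Taken together, the tests pin down the factory's public surface: a constructor taking a config manager, a workspace path, and a cache manager, plus createEmbedder() and createVectorStore(). The sketch below shows how a caller might wire these pieces up. It is inferred purely from the test fixtures above (field names such as embedderProvider, modelId, openAiOptions, and qdrantUrl come from the tests); the function name buildIndexingServices and the assumption that the create methods return the constructed instances are illustrative only, not the project's actual wiring.

// Minimal usage sketch inferred from the tests above; not the project's actual wiring.
import { CodeIndexServiceFactory } from "../service-factory"
import { CodeIndexConfigManager } from "../config-manager"
import { CacheManager } from "../cache-manager"

function buildIndexingServices(
	configManager: CodeIndexConfigManager,
	workspacePath: string,
	cacheManager: CacheManager,
) {
	// The tests construct the factory with (configManager, workspacePath, cacheManager).
	const factory = new CodeIndexServiceFactory(configManager, workspacePath, cacheManager)

	// createEmbedder() throws if provider-specific options are missing, e.g.
	// "OpenAI configuration missing for embedder creation".
	const embedder = factory.createEmbedder()

	// createVectorStore() resolves the vector dimension from the model profile, or from a
	// manual modelDimension for the OpenAI-compatible provider, and throws if it cannot
	// (or if the Qdrant URL is missing).
	const vectorStore = factory.createVectorStore()

	return { embedder, vectorStore }
}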

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/anrgct/autodev-codebase'
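
The same endpoint can be called programmatically. A minimal TypeScript sketch, assuming Node 18+ with the built-in fetch; the response schema is not documented here, so the result is typed loosely as unknown:

// Fetch this server's metadata from the Glama MCP directory API (sketch, assumes Node 18+).
async function fetchServerMetadata(): Promise<unknown> {
	const response = await fetch("https://glama.ai/api/mcp/v1/servers/anrgct/autodev-codebase")
	if (!response.ok) {
		throw new Error(`MCP directory API request failed: ${response.status} ${response.statusText}`)
	}
	return response.json()
}

fetchServerMetadata()
	.then((metadata) => console.log(metadata))
	.catch(console.error)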

If you have feedback or need assistance with the MCP directory API, please join our Discord server.