import { describe, it, expect, mock, beforeEach } from 'bun:test';
import { AnthropicProvider, LLMClient, createLLMClient } from '@/llm/client.js';

// Mock Anthropic SDK
const mockAnthropicMessage = {
  content: [{ type: 'text', text: 'Mock response from Claude' }],
};

const mockAnthropicClient = {
  messages: {
    create: mock().mockResolvedValue(mockAnthropicMessage),
  },
};

// Mock the Anthropic constructor
mock.module('@anthropic-ai/sdk', () => ({
  default: mock().mockImplementation(() => mockAnthropicClient),
}));
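
// Optional hygiene: reset the shared mock's call history before each test so that
// call-based assertions below stay scoped to a single test.
beforeEach(() => {
  mockAnthropicClient.messages.create.mockClear();
});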

describe('llm providers', () => {
  describe('anthropic provider', () => {
    it('should create provider with valid config', () => {
      const config = {
        provider: 'anthropic' as const,
        model: 'claude-3-haiku-20240307' as const,
        apiKey: 'test-key',
        maxTokens: 1000,
        temperature: 0.1,
        enabled: true,
      };

      const provider = new AnthropicProvider(config);

      expect(provider.name).toBe('anthropic');
      expect(provider.isAvailable()).toBe(true);
    });

    it('should throw error without api key', () => {
      const config = {
        provider: 'anthropic' as const,
        model: 'claude-3-haiku-20240307' as const,
        apiKey: undefined,
        maxTokens: 1000,
        temperature: 0.1,
        enabled: true,
      };

      expect(() => new AnthropicProvider(config)).toThrow('Anthropic API key is required');
    });

    it('should analyze content successfully', async () => {
      const config = {
        provider: 'anthropic' as const,
        model: 'claude-3-haiku-20240307' as const,
        apiKey: 'test-key',
        maxTokens: 1000,
        temperature: 0.1,
        enabled: true,
      };

      const provider = new AnthropicProvider(config);
      const result = await provider.analyze('test content', 'test prompt');

      expect(result).toBe('Mock response from Claude');
      expect(mockAnthropicClient.messages.create).toHaveBeenCalledWith({
        model: 'claude-3-haiku-20240307',
        max_tokens: 1000,
        temperature: 0.1,
        messages: [
          {
            role: 'user',
            content: 'test prompt\n\nContent to analyze:\ntest content',
          },
        ],
      });
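      // Assumes provider.analyze issues exactly one request per call.
      expect(mockAnthropicClient.messages.create).toHaveBeenCalledTimes(1);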
    });

    it('should handle api errors', async () => {
      const errorClient = {
        messages: {
          create: mock().mockRejectedValue(new Error('API Error')),
        },
      };

      // Re-mock the SDK constructor so providers constructed from here on receive the failing client.
      const MockAnthropic = mock().mockImplementation(() => errorClient);
      mock.module('@anthropic-ai/sdk', () => ({ default: MockAnthropic }));

      const config = {
        provider: 'anthropic' as const,
        model: 'claude-3-haiku-20240307' as const,
        apiKey: 'test-key',
        maxTokens: 1000,
        temperature: 0.1,
        enabled: true,
      };

      const provider = new AnthropicProvider(config);

      await expect(provider.analyze('test', 'prompt')).rejects.toThrow('API Error');
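      // Re-register the default module mock so any provider constructed in a later test
      // sees the stubbed client again rather than this failing one.
      mock.module('@anthropic-ai/sdk', () => ({
        default: mock().mockImplementation(() => mockAnthropicClient),
      }));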
    });
  });

  describe('llm client', () => {
    it('should create client with anthropic provider', () => {
      const config = {
        provider: 'anthropic' as const,
        model: 'claude-3-haiku-20240307' as const,
        apiKey: 'test-key',
        maxTokens: 1000,
        temperature: 0.1,
        enabled: true,
      };

      const client = new LLMClient(config);

      expect(client.isAvailable()).toBe(true);
      expect(client.getProviderName()).toBe('anthropic');
    });

    it('should handle disabled client', () => {
      const config = {
        provider: 'anthropic' as const,
        model: 'claude-3-haiku-20240307' as const,
        apiKey: 'test-key',
        maxTokens: 1000,
        temperature: 0.1,
        enabled: false,
      };

      const client = new LLMClient(config);

      expect(client.isAvailable()).toBe(false);
    });

    it('should return null when provider unavailable', async () => {
      const config = {
        provider: 'anthropic' as const,
        model: 'claude-3-haiku-20240307' as const,
        apiKey: undefined,
        maxTokens: 1000,
        temperature: 0.1,
        enabled: true,
      };

      const client = new LLMClient(config);
      const result = await client.analyze('test', 'prompt');

      expect(result).toBeNull();
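      // With call history cleared in beforeEach, this also checks the assumption that
      // an unavailable client never reaches the SDK.
      expect(mockAnthropicClient.messages.create).not.toHaveBeenCalled();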
    });
  });

  describe('client factory', () => {
    it('should create client with api key', () => {
      const client = createLLMClient('test-api-key');
      expect(client.isAvailable()).toBe(true);
    });

    it('should create client with env var', () => {
      process.env.ANTHROPIC_API_KEY = 'env-api-key';
      const client = createLLMClient();
      expect(client.isAvailable()).toBe(true);
      delete process.env.ANTHROPIC_API_KEY;
    });

    it('should create disabled client without api key', () => {
      delete process.env.ANTHROPIC_API_KEY;
      const client = createLLMClient();
      expect(client.isAvailable()).toBe(false);
    });
  });
});