import Anthropic from '@anthropic-ai/sdk';
import { logger } from '@/utils/logger.js';
import type { LLMProvider, LLMConfig } from './types.js';
/**
 * LLM provider backed by the Anthropic Messages API.
 */
export class AnthropicProvider implements LLMProvider {
  name = 'anthropic';
  private readonly client: Anthropic;
  private readonly config: LLMConfig;

  /**
   * @param config - Provider configuration; `apiKey` is required.
   * @throws Error when no API key is configured.
   */
  constructor(config: LLMConfig) {
    this.config = config;
    if (!config.apiKey) {
      throw new Error('Anthropic API key is required');
    }
    this.client = new Anthropic({
      apiKey: config.apiKey,
    });
  }

  /**
   * Sends `content` to Claude with the given analysis `prompt` and returns
   * the model's text reply.
   *
   * @param content - The material to analyze (appended after the prompt).
   * @param prompt - Instructions for the model.
   * @returns The text of the model's first text content block.
   * @throws Error when the response contains no text block; rethrows any
   *   API error after logging it.
   */
  async analyze(content: string, prompt: string): Promise<string> {
    try {
      logger.debug(`Sending request to Claude ${this.config.model}`);
      const message = await this.client.messages.create({
        model: this.config.model,
        max_tokens: this.config.maxTokens,
        temperature: this.config.temperature,
        messages: [
          {
            role: 'user',
            content: `${prompt}\n\nContent to analyze:\n${content}`,
          },
        ],
      });
      // The response may contain multiple content blocks, and content[0] can
      // be undefined for an empty response — the original `content[0].type`
      // access would throw a TypeError instead of the intended error below.
      // Select the first text block explicitly.
      const textBlock = message.content.find((block) => block.type === 'text');
      if (textBlock && textBlock.type === 'text') {
        logger.debug(`Received response from Claude: ${textBlock.text.length} characters`);
        return textBlock.text;
      }
      throw new Error('Unexpected response format from Claude');
    } catch (error) {
      logger.error('Error calling Anthropic API:', error);
      throw error;
    }
  }

  /** True when an API key is set and LLM integration is enabled. */
  isAvailable(): boolean {
    return Boolean(this.config.apiKey && this.config.enabled);
  }
}
/**
 * Thin facade over a configured LLM provider. Degrades gracefully:
 * every operation returns null / false instead of throwing when no
 * provider is available or a call fails.
 */
export class LLMClient {
  private provider: LLMProvider | null = null;
  private config: LLMConfig;

  constructor(config: LLMConfig) {
    this.config = config;
    this.initializeProvider();
  }

  /** Instantiates the provider named in the config, if enabled and usable. */
  private initializeProvider(): void {
    try {
      if (!this.config.enabled) {
        logger.info('LLM integration disabled in configuration');
        return;
      }

      if (this.config.provider === 'anthropic') {
        this.provider = new AnthropicProvider(this.config);
      } else {
        throw new Error(`Unsupported LLM provider: ${this.config.provider}`);
      }

      // Constructed but unusable (e.g. missing key) — discard it.
      if (!this.provider.isAvailable()) {
        logger.warn('LLM provider not available, will skip LLM analysis');
        this.provider = null;
        return;
      }

      logger.info(`Initialized LLM provider: ${this.provider.name}`);
    } catch (error) {
      logger.error('Failed to initialize LLM provider:', error);
      this.provider = null;
    }
  }

  /**
   * Runs analysis through the active provider.
   *
   * @returns The provider's text response, or null when no provider is
   *   configured or the underlying call fails.
   */
  async analyze(content: string, prompt: string): Promise<string | null> {
    if (this.provider === null) {
      logger.debug('LLM provider not available, skipping analysis');
      return null;
    }

    try {
      const result = await this.provider.analyze(content, prompt);
      return result;
    } catch (error) {
      logger.error('LLM analysis failed:', error);
      return null;
    }
  }

  /** Whether a usable provider is currently configured. */
  isAvailable(): boolean {
    if (this.provider !== null) {
      return this.provider.isAvailable();
    }
    return false;
  }

  /** Name of the active provider, or null when none is configured. */
  getProviderName(): string | null {
    if (this.provider !== null) {
      return this.provider.name;
    }
    return null;
  }
}
/**
 * Creates an LLMClient preconfigured for Anthropic.
 *
 * @param apiKey - Optional explicit API key; falls back to the
 *   ANTHROPIC_API_KEY environment variable when omitted or empty.
 * @returns A client whose provider is enabled only when a key was resolved.
 */
export function createLLMClient(apiKey?: string): LLMClient {
  // Resolve the key once so `apiKey` and `enabled` cannot disagree
  // (the original duplicated this expression in both fields).
  const resolvedKey = apiKey || process.env.ANTHROPIC_API_KEY;

  const config: LLMConfig = {
    provider: 'anthropic',
    model: 'claude-3-haiku-20240307', // default to haiku for cost efficiency
    apiKey: resolvedKey,
    maxTokens: 2048,
    temperature: 0.1, // low temperature for consistent analysis
    enabled: Boolean(resolvedKey),
  };

  return new LLMClient(config);
}