// LLM Adapter (T047)
// Stub implementation - returns null if no API key available
// Interface: generateSummary(chunks, question)
import { ValidationError } from '../utils/errors.mjs';
/**
 * Create an LLM adapter for summarizing retrieved documentation chunks.
 *
 * Stub implementation (T047): no external API is called; summaries are
 * template-based. Returns `null` (offline mode) when no API key is supplied,
 * so callers must null-check the result.
 *
 * @param {object} [options]
 * @param {string} [options.apiKey] - API key; absence disables the adapter.
 * @param {{log: Function}} [options.logger] - Structured event logger; defaults to a no-op.
 * @param {string} [options.endpoint] - Reserved for a real provider call; currently unused.
 * @returns {{generateSummary: Function, isAvailable: () => boolean} | null}
 *   Frozen adapter object, or null in offline mode.
 */
export function createLLMAdapter({ apiKey, logger, endpoint } = {}) {
  // Default to a no-op logger only when none was provided (??= avoids
  // reassigning over an explicitly passed logger except for null/undefined).
  logger ??= { log: () => {} };

  // Offline mode: without an API key there is nothing to call.
  if (!apiKey) {
    logger.log('llm.disabled', { reason: 'no_api_key' });
    return null;
  }

  // NOTE(review): `endpoint` is accepted but unused by this stub — wire it up
  // when a real OpenAI/Anthropic call is implemented.
  void endpoint;

  /**
   * Generate a summary of the given chunks with respect to a question.
   *
   * @param {Array<{title?: string}>} chunks - Retrieved documentation chunks.
   * @param {string} question - User question (redacted in logs, embedded in the summary).
   * @returns {Promise<{summary: string, confidence: number, model: string}>}
   * @throws {ValidationError} If chunks is not an array or question is falsy.
   */
  async function generateSummary(chunks, question) {
    if (!Array.isArray(chunks) || !question) {
      throw new ValidationError('chunks array and question required');
    }
    if (chunks.length === 0) {
      // Nothing retrieved: report zero confidence rather than fabricating text.
      return {
        summary: 'No relevant documentation found for this query.',
        confidence: 0,
        model: 'none',
      };
    }
    try {
      // Stub implementation - in real version this would call OpenAI/Anthropic/etc
      // For now, create a simple template-based response.
      // `c?.title` tolerates null/undefined chunk entries (previously a TypeError);
      // untitled chunks are filtered out of the displayed list.
      const titles = chunks.map((c) => c?.title || 'Unknown').filter((t) => t !== 'Unknown');
      const uniqueTitles = [...new Set(titles)];
      const summary =
        `Based on the retrieved documentation (${uniqueTitles.join(', ')}), ` +
        `here's what I found regarding "${question}": ` +
        `The relevant procedures and guidelines have been identified from ${chunks.length} source(s). ` +
        `Please refer to the citations below for detailed steps and specific instructions. ` +
        `Note: This summary is generated from the provided documentation only and does not include external information.`;
      // The question text is redacted from logs to avoid persisting user content.
      logger.log('llm.generate', {
        chunkCount: chunks.length,
        summaryLength: summary.length,
        question: '[REDACTED]',
      });
      return {
        summary,
        confidence: 0.7, // Mock confidence
        model: 'stub-llm-v1',
      };
    } catch (error) {
      // Log, then rethrow so callers can handle or propagate the failure.
      logger.log('llm.error', { error: error.message });
      throw error;
    }
  }

  return Object.freeze({
    generateSummary,
    isAvailable: () => true,
  });
}
/**
 * Build an adapter from environment variables.
 *
 * Checks LLM_API_KEY, then OPENAI_API_KEY, then ANTHROPIC_API_KEY for a key,
 * and LLM_ENDPOINT for an optional endpoint override.
 *
 * @param {object} [env=process.env] - Environment map to read from.
 * @param {{log: Function}} [logger] - Optional structured logger.
 * @returns {object | null} Adapter, or null when no key is configured.
 */
export function createLLMAdapterFromEnv(env = process.env, logger) {
  const { LLM_API_KEY, OPENAI_API_KEY, ANTHROPIC_API_KEY, LLM_ENDPOINT } = env;
  return createLLMAdapter({
    apiKey: LLM_API_KEY || OPENAI_API_KEY || ANTHROPIC_API_KEY,
    logger,
    endpoint: LLM_ENDPOINT,
  });
}

export default { createLLMAdapter, createLLMAdapterFromEnv };