/**
* OpenAI Service
* Centralizes access to OpenAI models used by the FitSlot MCP server
*/
import OpenAI from 'openai';
import { logger } from '../utils/logger.js';
import { BioimpedanceData, SupportFAQ } from '../types/index.js';
/**
 * Construction options for {@link OpenAIService}.
 */
export interface OpenAIServiceConfig {
/** OpenAI API key. When absent the service stays unconfigured and callers receive local fallbacks. */
apiKey?: string;
/** Chat model identifier; defaults to 'gpt-4o-mini' when omitted. */
model?: string;
/** OpenAI organization id, forwarded verbatim to the SDK client. */
organization?: string;
/** OpenAI project id, forwarded verbatim to the SDK client. */
project?: string;
/** Sampling temperature; defaults to 0.2 when omitted. */
temperature?: number;
/** Upper bound on completion tokens; defaults to 400 when omitted. */
maxTokens?: number;
}
/**
 * Locally-computed bioimpedance insight, used both as the fallback result when
 * OpenAI is unavailable/fails and as the base material sent in the prompt.
 */
export interface BioimpedanceInsightFallback {
/** Natural-language analysis of the bioimpedance data. */
analysis: string;
/** Practical recommendations derived from the data. */
recommendations: string[];
}
/**
 * Thin wrapper around the OpenAI chat-completions API used by the FitSlot MCP
 * server. When no API key is configured every method degrades gracefully:
 * `generateSupportAnswer` resolves to `undefined` and
 * `generateBioimpedanceInsights` resolves to the supplied fallback.
 */
export class OpenAIService {
  // Undefined when no API key was provided; every public method guards on this.
  private readonly client?: OpenAI;
  private readonly model: string;
  private readonly temperature: number;
  private readonly maxTokens: number;

  constructor(config: OpenAIServiceConfig) {
    this.model = config.model ?? 'gpt-4o-mini';
    this.temperature = config.temperature ?? 0.2;
    this.maxTokens = config.maxTokens ?? 400;
    if (config.apiKey) {
      this.client = new OpenAI({
        apiKey: config.apiKey,
        organization: config.organization,
        project: config.project
      });
      logger.info('OpenAI service configured', {
        model: this.model,
        temperature: this.temperature,
        hasOrganization: !!config.organization,
        hasProject: !!config.project
      });
    } else {
      logger.warn('OpenAI API key not provided. Falling back to local responses.');
    }
  }

  /**
   * Indicates whether the OpenAI client is configured
   */
  isConfigured(): boolean {
    return !!this.client;
  }

  /**
   * Generate a natural language support answer enriched by OpenAI.
   *
   * @param params.query            Raw user question.
   * @param params.relatedFAQs      FAQs matched locally; summarized into the prompt.
   * @param params.suggestedActions App actions matched locally; listed in the prompt.
   * @returns The model's answer, or `undefined` when the client is not
   *          configured, the model returns no content, or the request fails.
   */
  async generateSupportAnswer(params: {
    query: string;
    relatedFAQs: SupportFAQ[];
    suggestedActions: string[];
  }): Promise<string | undefined> {
    if (!this.client) {
      return undefined;
    }
    try {
      const faqsSummary = params.relatedFAQs
        .map((faq, index) => `${index + 1}. Q: ${faq.question}\n A: ${faq.answer}`)
        .join('\n');
      const completion = await this.client.chat.completions.create({
        model: this.model,
        temperature: this.temperature,
        // Support answers are intentionally short; cap below the service-wide limit.
        max_tokens: Math.min(this.maxTokens, 320),
        messages: [
          {
            role: 'system',
            content: 'Você é um assistente de suporte do FitSlot. Responda em português brasileiro com objetividade e empatia. '
              + 'Sugira próximos passos práticos usando no máximo 2 frases curtas.'
          },
          {
            role: 'user',
            content: [
              `Pergunta do usuário: ${params.query}`,
              params.relatedFAQs.length > 0
                ? `FAQs relevantes:\n${faqsSummary}`
                : 'Nenhuma FAQ relevante encontrada.',
              params.suggestedActions.length > 0
                ? `Ações sugeridas no app: ${params.suggestedActions.join(', ')}`
                : 'Sem ações sugeridas.'
            ].join('\n\n')
          }
        ]
      });
      const aiMessage = completion.choices[0]?.message?.content?.trim();
      // Treat an empty/whitespace-only reply the same as no reply.
      return aiMessage || undefined;
    } catch (error) {
      logger.error('Failed to generate support answer with OpenAI', error);
      return undefined;
    }
  }

  /**
   * Generate analysis and recommendations for bioimpedance data using OpenAI.
   *
   * The locally-computed `fallback` is both the safety net (returned on any
   * failure or malformed model output) and part of the prompt, so the model
   * refines rather than replaces the base insight.
   *
   * @param data     Bioimpedance measurements (without analysis fields).
   * @param fallback Locally-generated insight to return if OpenAI cannot improve on it.
   * @returns Validated model output, or `fallback` on any error or invalid payload.
   */
  async generateBioimpedanceInsights(
    data: Omit<BioimpedanceData, 'analysis' | 'recommendations'>,
    fallback: BioimpedanceInsightFallback
  ): Promise<BioimpedanceInsightFallback> {
    if (!this.client) {
      return fallback;
    }
    try {
      const completion = await this.client.chat.completions.create({
        model: this.model,
        temperature: this.temperature,
        max_tokens: this.maxTokens,
        // Force a JSON object so the response can be parsed and validated below.
        response_format: { type: 'json_object' },
        messages: [
          {
            role: 'system',
            content: 'Você é um especialista em nutrição e composição corporal. Gere uma análise personalizada em português '
              + 'com base nos dados de bioimpedância e proponha até 5 recomendações práticas e seguras. '
              + 'Responda em JSON com as chaves "analysis" (string) e "recommendations" (array de strings). '
              + 'Mantenha recomendações alinhadas às diretrizes da OMS e evite orientações médicas específicas.'
          },
          {
            role: 'user',
            content: JSON.stringify({
              dados: data,
              analise_base: fallback.analysis,
              recomendacoes_base: fallback.recommendations
            })
          }
        ]
      });
      const content = completion.choices[0]?.message?.content;
      if (!content) {
        return fallback;
      }
      // Parse as unknown and narrow with a runtime guard instead of asserting:
      // the model may return malformed JSON shapes despite the response_format hint.
      const parsed: unknown = JSON.parse(content);
      if (!OpenAIService.isInsightPayload(parsed)) {
        return fallback;
      }
      return {
        analysis: parsed.analysis,
        recommendations: parsed.recommendations
      };
    } catch (error) {
      logger.error('Failed to generate bioimpedance insights with OpenAI', error);
      return fallback;
    }
  }

  /**
   * Runtime type guard for the JSON payload expected from the bioimpedance
   * prompt: a non-empty `analysis` string and a `recommendations` array whose
   * elements are all strings.
   */
  private static isInsightPayload(value: unknown): value is BioimpedanceInsightFallback {
    if (typeof value !== 'object' || value === null) {
      return false;
    }
    const candidate = value as Partial<Record<keyof BioimpedanceInsightFallback, unknown>>;
    return typeof candidate.analysis === 'string'
      && candidate.analysis.length > 0
      && Array.isArray(candidate.recommendations)
      && candidate.recommendations.every((item) => typeof item === 'string');
  }
}