index.ts (2.98 kB)
import { readAsset } from 'utils:asset';
import type { AIConfig, AIOptions } from '@intlayer/ai';
import { generateText } from '@intlayer/ai';
import { getLocaleName } from '@intlayer/core';
import type { KeyPath } from '@intlayer/types';
import { type Locale, Locales } from '@intlayer/types';
import { logger } from '@logger';
import type { Tag } from '@/types/tag.types';

export type AuditDictionaryFieldOptions = {
  fileContent: string;
  locales: Locale[];
  keyPath: KeyPath[];
  tags: Tag[];
  aiConfig: AIConfig;
  applicationContext?: string;
};

export type AuditDictionaryFieldResultData = {
  fileContent: string;
  tokenUsed: number;
};

// The prompt template to send to the AI model
const CHAT_GPT_PROMPT = readAsset('./PROMPT.md');

export const aiDefaultOptions: AIOptions = {
  // Keep default options
};

/**
 * Format a locale with its name.
 *
 * @param locale - The locale to format.
 * @returns A string in the format "locale: name", e.g. "en: English".
 */
const formatLocaleWithName = (locale: Locale): string => {
  return `${locale}: ${getLocaleName(locale, Locales.ENGLISH)}`;
};

/**
 * Formats tag instructions for the AI prompt.
 *
 * @param tags - Array of tags to format
 * @returns A formatted string with tag instructions
 */
const formatTagInstructions = (tags: Tag[]): string => {
  if (!tags || tags.length === 0) {
    return '';
  }

  return `Based on the dictionary content, identify specific tags from the list below that would be relevant:

${tags.map(({ key, description }) => `- ${key}: ${description}`).join('\n\n')}`;
};

/**
 * Audits a content declaration file by constructing a prompt for AI models.
 * The prompt includes details about the project's locales, file paths of content declarations,
 * and requests for identifying issues or inconsistencies.
 */
export const auditDictionaryField = async ({
  fileContent,
  applicationContext,
  locales,
  keyPath,
  tags,
  aiConfig,
}: AuditDictionaryFieldOptions): Promise<
  AuditDictionaryFieldResultData | undefined
> => {
  // Prepare the prompt for AI by replacing placeholders with actual values.
  const prompt = CHAT_GPT_PROMPT.replace(
    '{{otherLocales}}',
    `{${locales.map(formatLocaleWithName).join(', ')}}`
  )
    .replace('{{keyPath}}', JSON.stringify(keyPath))
    .replace('{{applicationContext}}', applicationContext ?? '')
    .replace('{{tagsInstructions}}', formatTagInstructions(tags));

  if (!aiConfig) {
    logger.error('Failed to configure AI model');
    return undefined;
  }

  // Use the AI SDK to generate the completion
  const { text: newContent, usage } = await generateText({
    ...aiConfig,
    messages: [
      { role: 'system', content: prompt },
      {
        role: 'user',
        content: ['**File to Audit:**', fileContent].join('\n'),
      },
    ],
  });

  logger.info(`${usage?.totalTokens ?? 0} tokens used in the request`);

  return {
    fileContent: newContent,
    tokenUsed: usage?.totalTokens ?? 0,
  };
};
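For context, here is a minimal usage sketch of the exported function. The aiConfig value and the sample content are illustrative assumptions (the real AIConfig shape comes from @intlayer/ai); only the option names and the Locales.ENGLISH constant are taken from the file above.

import type { AIConfig } from '@intlayer/ai';
import { Locales } from '@intlayer/types';
import { auditDictionaryField } from './index';

const run = async () => {
  const result = await auditDictionaryField({
    // Sample content to audit; a real call would pass the declaration file content.
    fileContent: JSON.stringify({ title: 'Welcome', description: 'Home page hero copy' }),
    locales: [Locales.ENGLISH],
    keyPath: [], // a real call would pass the path to the audited field
    tags: [],    // optional project tags; empty here, so no tag instructions are added
    aiConfig: { model: 'gpt-4o-mini' } as AIConfig, // hypothetical config; actual shape depends on @intlayer/ai
    applicationContext: 'E-commerce storefront',
  });

  if (result) {
    console.log(result.fileContent);                // the audited content returned by the model
    console.log(`${result.tokenUsed} tokens used`); // token usage reported by the AI SDK
  }
};

run();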


MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/aymericzip/intlayer'
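As a sketch, the same endpoint can also be queried from TypeScript on any runtime with a global fetch (Node 18+ or browsers). Only the URL comes from the curl command above; the response is logged as-is because its exact shape is not documented here.

const res = await fetch('https://glama.ai/api/mcp/v1/servers/aymericzip/intlayer');

if (!res.ok) {
  throw new Error(`MCP directory request failed: ${res.status}`);
}

const server: unknown = await res.json();
console.log(JSON.stringify(server, null, 2));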

If you have feedback or need assistance with the MCP directory API, please join our Discord server.