customQuery.ts (975 B)
import { generateText } from 'ai';

import type { AIConfig, AIOptions, Messages } from './aiSdk';

export type CustomQueryOptions = {
  messages: Messages;
  aiConfig: AIConfig;
};

export type CustomQueryResultData = {
  fileContent: string;
  tokenUsed: number;
};

export const aiDefaultOptions: AIOptions = {
  model: 'gpt-4o-mini',
  // Keep default options
};

/**
 * Runs a custom query on a content declaration file by constructing a prompt for AI models.
 * The prompt includes details about the project's locales, file paths of content declarations,
 * and requests for identifying issues or inconsistencies.
 */
export const customQuery = async ({
  messages,
  aiConfig,
}: CustomQueryOptions): Promise<CustomQueryResultData | undefined> => {
  // Use the AI SDK to generate the completion
  const { text: newContent, usage } = await generateText({
    ...aiConfig,
    messages,
  });

  return {
    fileContent: newContent,
    tokenUsed: usage?.totalTokens ?? 0,
  };
};
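A minimal usage sketch follows. It assumes the project's AIConfig (from './aiSdk') can carry a Vercel AI SDK model instance that generateText accepts when spread; the model name and messages below are illustrative, not taken from the project.

import { openai } from '@ai-sdk/openai';

import { customQuery } from './customQuery';

// Hypothetical invocation: the real shape of AIConfig is defined in './aiSdk'
// and may differ from this sketch.
const result = await customQuery({
  aiConfig: { model: openai('gpt-4o-mini') },
  messages: [
    { role: 'system', content: 'You review content declaration files.' },
    { role: 'user', content: 'List any inconsistencies in the attached declaration.' },
  ],
});

if (result) {
  console.log(result.fileContent);
  console.log(`Tokens used: ${result.tokenUsed}`);
}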


MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/aymericzip/intlayer'
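
The same request can be made from TypeScript; this is a small sketch using fetch, with the response treated as untyped JSON since its schema is not documented here.

// Fetch the directory entry for the intlayer MCP server.
// The URL matches the curl example above; the response shape is assumed, not documented.
const response = await fetch(
  'https://glama.ai/api/mcp/v1/servers/aymericzip/intlayer',
);

if (!response.ok) {
  throw new Error(`MCP directory API request failed: ${response.status}`);
}

const server = await response.json();
console.log(server);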

If you have feedback or need assistance with the MCP directory API, please join our Discord server.