openai.int.test.ts
import OpenAI from "openai";
import { Tool } from 'openai/resources/responses/responses';

import { toolTestCases } from '../toolsTestCases';
import { listTools } from '../list-tools';
import { MAX_TOKENS, TEMPERATURE, TIMEOUT } from '../configuration';

// Convert MCP tool definitions into the shape expected by the OpenAI Responses API.
const transformToOpenAIFormat = (tools: any[]): Tool[] => {
  return tools.map((t) => ({
    type: 'function',
    name: t.name,
    description: t.description,
    // An empty input schema ({ type: "object" }) is sent as null so the
    // model treats the tool as parameterless.
    parameters:
      JSON.stringify(t.inputSchema) === JSON.stringify({ type: "object" })
        ? null
        : t.inputSchema,
    strict: false,
  }));
};

const targetModel = process.env.TARGET_MODEL || 'gpt-4o-mini';

describe(`Tool invocation tests - Open AI - ${targetModel}`, () => {
  let tools: Tool[];
  let openai: OpenAI;

  beforeAll(async () => {
    tools = transformToOpenAIFormat(await listTools());
    openai = new OpenAI({
      apiKey: process.env.OPENAI_API_KEY,
    });
  });

  it.each(toolTestCases)(
    'should handle prompt "%s"',
    async ({ prompt, expectedToolName, expectedArguments }) => {
      const response = await openai.responses.create({
        model: targetModel,
        temperature: TEMPERATURE,
        max_output_tokens: MAX_TOKENS,
        input: prompt,
        tools,
        tool_choice: 'auto',
      });

      // The Responses API returns a list of output items; pick out the function call, if any.
      const toolCall = response.output.find(
        (obj) => obj.type === "function_call"
      );

      // A test case with no expected tool name asserts that the model calls no tool.
      if (!expectedToolName) {
        expect(toolCall).toBeUndefined();
        return;
      }

      expect(toolCall).toBeDefined();
      expect(toolCall!.name).toEqual(expectedToolName);
      if (expectedArguments) {
        expect(toolCall!.arguments).toEqual(JSON.stringify(expectedArguments));
      }
    },
    TIMEOUT
  );
});
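The test table imported from ../toolsTestCases is not shown on this page. A minimal sketch of the shape the test destructures ({ prompt, expectedToolName, expectedArguments }) could look like the following; the interface name, prompts, and tool names are hypothetical, not taken from the actual repository:

// toolsTestCases.ts - hypothetical shape, for illustration only
export interface ToolTestCase {
  prompt: string;
  expectedToolName?: string; // omitted => the model is expected to call no tool
  expectedArguments?: Record<string, unknown>;
}

export const toolTestCases: ToolTestCase[] = [
  {
    prompt: 'Send "hello" to +15550100 via SMS',
    expectedToolName: 'send_sms', // hypothetical tool name
    expectedArguments: { to: '+15550100', body: 'hello' },
  },
  // A prompt unrelated to any tool should produce no function call.
  { prompt: 'What is the capital of France?' },
];

Because expectedToolName is optional, the same table drives both positive cases (the right tool is called with the right arguments) and negative cases (no tool is called at all).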

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/sinch/sinch-mcp-server'
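
The same endpoint can be queried from code. A minimal TypeScript sketch using the built-in fetch; since the response shape is not documented on this page, the result is typed loosely:

// Fetch this server's directory entry from the Glama MCP API.
const res = await fetch(
  'https://glama.ai/api/mcp/v1/servers/sinch/sinch-mcp-server'
);
if (!res.ok) throw new Error(`Request failed: ${res.status}`);
const server: unknown = await res.json(); // response schema not specified here
console.log(server);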

If you have feedback or need assistance with the MCP directory API, please join our Discord server.