Skip to main content
Glama
by sinch
anthropic.int.test.ts1.66 kB
import Anthropic from '@anthropic-ai/sdk'; import { listTools } from '../list-tools'; import { toolTestCases } from '../toolsTestCases'; import { MAX_TOKENS, TEMPERATURE, TIMEOUT } from '../configuration'; const transformToAnthropicFormat = (tools: any[]): Anthropic.Messages.Tool[] => { return tools.map((t) => ({ name: t.name, description: t.description, input_schema: t.inputSchema, })); } const targetModel = process.env.TARGET_MODEL || 'claude-3-7-sonnet-latest'; describe(`Tool invocation tests - Anthropic - ${targetModel}`, () => { let tools: Anthropic.Messages.Tool[]; let anthropic: Anthropic; beforeAll(async () => { tools = transformToAnthropicFormat(await listTools()); anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY, }); }); it.each(toolTestCases)( 'should handle prompt "%s"', async ({ prompt, expectedToolName, expectedArguments }) => { const response = await anthropic.messages.create({ model: targetModel, max_tokens: MAX_TOKENS, messages: [ {role: 'user', content: prompt} ], tools, temperature: TEMPERATURE, tool_choice: { type: 'auto', }, }); const toolCall = response.content.find( (obj) => obj.type === "tool_use" ); if (!expectedToolName) { expect(toolCall).toBeUndefined(); return; } expect(toolCall).toBeDefined(); expect(toolCall!.name).toEqual(expectedToolName); if (expectedArguments) { expect(toolCall!.input).toEqual(expectedArguments); } }, TIMEOUT ); });

Latest Blog Posts

MCP directory API

We provide all of the information about MCP servers via our MCP directory API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/sinch/sinch-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.