Skip to main content
Glama

Sentry MCP

Official
by getsentry
agent.ts (1.48 kB)
import { z } from "zod"; import type { Tool } from "ai"; import { ConfigurationError } from "../../errors"; import { callEmbeddedAgent, type ToolCall, } from "../../internal/agents/callEmbeddedAgent"; import { systemPrompt } from "./config"; /** * Output schema for the embedded agent. * The agent returns the final result string directly from its tool calls. */ const outputSchema = z.object({ result: z .string() .describe( "The final result from your tool calls that answers the user's request", ), }); export interface UseSentryAgentOptions { request: string; tools: Record<string, Tool>; } /** * use_sentry agent - executes natural language requests using Sentry MCP tools * This returns the final result AND the tool calls made by the agent */ export async function useSentryAgent(options: UseSentryAgentOptions): Promise<{ result: z.infer<typeof outputSchema>; toolCalls: ToolCall[]; }> { if (!process.env.OPENAI_API_KEY) { throw new ConfigurationError( "OPENAI_API_KEY environment variable is required for use_sentry tool", ); } // Frame the request to make clear we're asking the agent to use tools // Don't just pass the raw request as it might trigger safety responses const prompt = options.request; // Use callEmbeddedAgent with all pre-wrapped MCP tools return await callEmbeddedAgent({ system: systemPrompt, prompt: prompt, tools: options.tools, schema: outputSchema, }); }

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/getsentry/sentry-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.