@arizeai/phoenix-mcp

Official, by Arize-ai
instrumentation.ts (2.04 kB)
import { diag, DiagConsoleLogger, DiagLogLevel } from "@opentelemetry/api";
import { OTLPTraceExporter } from "@opentelemetry/exporter-trace-otlp-proto";
import { resourceFromAttributes } from "@opentelemetry/resources";
import {
  NodeTracerProvider,
  SpanProcessor,
} from "@opentelemetry/sdk-trace-node";
import { SEMRESATTRS_PROJECT_NAME } from "@arizeai/openinference-semantic-conventions";
import { HeadersOptions } from "openapi-fetch";
import {
  OpenInferenceBatchSpanProcessor,
  OpenInferenceSimpleSpanProcessor,
} from "@arizeai/openinference-vercel";

/**
 * Creates a provider that exports traces to Phoenix.
 */
export function createProvider({
  projectName,
  baseUrl,
  headers,
  useBatchSpanProcessor = true,
  diagLogLevel,
}: {
  projectName: string;
  headers: HeadersOptions;
  /**
   * Whether to use batching for the span processor.
   * @default true
   */
  useBatchSpanProcessor?: boolean;
  /**
   * The base URL of Phoenix. Does not include the /v1/traces path.
   */
  baseUrl: string;
  /**
   * The diag log level to set for the built-in DiagConsoleLogger instance.
   * Omit to disable built-in logging.
   */
  diagLogLevel?: DiagLogLevel;
}) {
  if (diagLogLevel) {
    diag.setLogger(new DiagConsoleLogger(), diagLogLevel);
  }
  // Export spans to Phoenix's OTLP endpoint. Headers may arrive as an
  // array of [name, value] pairs, so normalize them to a plain object.
  const exporter = new OTLPTraceExporter({
    url: `${baseUrl}/v1/traces`,
    headers: Array.isArray(headers) ? Object.fromEntries(headers) : headers,
  });
  // Batching reduces network overhead; the simple processor exports each
  // span immediately, which suits short-lived processes.
  let spanProcessor: SpanProcessor;
  if (useBatchSpanProcessor) {
    spanProcessor = new OpenInferenceBatchSpanProcessor({ exporter });
  } else {
    spanProcessor = new OpenInferenceSimpleSpanProcessor({ exporter });
  }
  const provider = new NodeTracerProvider({
    // Tag every span with the project name so traces are grouped under
    // the right project in the Phoenix UI.
    resource: resourceFromAttributes({
      [SEMRESATTRS_PROJECT_NAME]: projectName,
    }),
    spanProcessors: [spanProcessor],
  });
  return provider;
}

/**
 * For dry runs we create a provider that doesn't export traces.
 */
export function createNoOpProvider() {
  const provider = new NodeTracerProvider({});
  return provider;
}
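
A minimal usage sketch, assuming this module is imported at application startup before any instrumented code runs. The project name, base URL, and Authorization header below are placeholder values, not part of the file above:

import { createProvider } from "./instrumentation";

// Placeholder values: point these at your own Phoenix instance.
const provider = createProvider({
  projectName: "my-project",
  baseUrl: "http://localhost:6006",
  headers: { Authorization: "Bearer <phoenix-api-key>" },
  useBatchSpanProcessor: true,
});

// register() installs this provider as the global OpenTelemetry tracer
// provider, so spans created via @opentelemetry/api flow to Phoenix.
provider.register();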

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Arize-ai/phoenix'
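
For programmatic access, a rough TypeScript equivalent of the curl call above; the response schema is not documented in this section, so the result is left as unknown:

// Fetch the directory entry for this server (same endpoint as the curl above).
const res = await fetch("https://glama.ai/api/mcp/v1/servers/Arize-ai/phoenix");
if (!res.ok) {
  throw new Error(`MCP directory request failed: ${res.status}`);
}
// The response shape is not documented here, so keep it unknown.
const server: unknown = await res.json();
console.log(server);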

If you have feedback or need assistance with the MCP directory API, please join our Discord server.