
@arizeai/phoenix-mcp (Official, by Arize-ai)
instrumentor.py

from typing import Any

from openinference.instrumentation.anthropic import AnthropicInstrumentor
from openinference.instrumentation.anthropic._wrappers import _MessagesWrapper
from wrapt import wrap_function_wrapper


# Our default instrumentor does not capture computer use, so we are using a Beta
# version which adds support for computer use.
class AnthropicBetaInstrumentor(AnthropicInstrumentor):
    def _instrument(self, **kwargs: Any) -> None:
        super(AnthropicBetaInstrumentor, self)._instrument()
        from anthropic.resources.beta.messages import Messages

        # Save the original beta create method so it can be restored on uninstrument.
        self._original_beta_messages_create = Messages.create
        # Wrap the beta Messages.create endpoint with the same wrapper used for the
        # stable API, so beta (computer use) calls emit spans too.
        wrap_function_wrapper(
            module="anthropic.resources.beta.messages",
            name="Messages.create",
            wrapper=_MessagesWrapper(tracer=self._tracer),
        )

    def _uninstrument(self, **kwargs: Any) -> None:
        super(AnthropicBetaInstrumentor, self)._uninstrument()
        from anthropic.resources.beta.messages import Messages

        # Restore the original beta create method if it was previously wrapped.
        if (
            hasattr(self, "_original_beta_messages_create")
            and self._original_beta_messages_create is not None
        ):
            Messages.create = self._original_beta_messages_create  # type: ignore[method-assign]
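The subclass follows the standard OpenInference instrumentor interface, so it is enabled the same way as the stock AnthropicInstrumentor. Below is a minimal usage sketch, assuming Phoenix's OpenTelemetry helper (phoenix.otel.register) is installed and a Phoenix collector is running; the module path and project name are illustrative.

from phoenix.otel import register

from instrumentor import AnthropicBetaInstrumentor

# Point a tracer provider at the running Phoenix collector.
# "computer-use-demo" is an illustrative project name.
tracer_provider = register(project_name="computer-use-demo")

# Instrument both the stable Messages API (via the parent class) and the
# beta Messages API, so computer-use calls are traced as well.
AnthropicBetaInstrumentor().instrument(tracer_provider=tracer_provider)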

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Arize-ai/phoenix'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.