Skip to main content
Glama

@arizeai/phoenix-mcp

Official
by Arize-ai
PromptModelConfigurationCard.tsx (1.89 kB)
import { graphql, useFragment } from "react-relay"; import { Card, Disclosure, DisclosureGroup, DisclosurePanel, DisclosureTrigger, } from "@phoenix/components"; import { PromptModelConfigurationCard__main$key } from "@phoenix/pages/prompt/__generated__/PromptModelConfigurationCard__main.graphql"; import { PromptInvocationParameters } from "@phoenix/pages/prompt/PromptInvocationParameters"; import { PromptLLM } from "@phoenix/pages/prompt/PromptLLM"; import { PromptResponseFormat } from "@phoenix/pages/prompt/PromptResponseFormat"; import { PromptTools } from "@phoenix/pages/prompt/PromptTools"; export function PromptModelConfigurationCard({ promptVersion: promptVersionFragment, }: { promptVersion: PromptModelConfigurationCard__main$key; }) { const promptVersion = useFragment<PromptModelConfigurationCard__main$key>( graphql` fragment PromptModelConfigurationCard__main on PromptVersion { model: modelName provider: modelProvider ...PromptLLM__main ...PromptInvocationParameters__main ...PromptTools__main ...PromptResponseFormatFragment } `, promptVersionFragment ); return ( <Card title="Model Configuration" collapsible> <DisclosureGroup defaultExpandedKeys={[ "llm", "invocation-parameters", "tools", "response-format", ]} > <PromptLLM promptVersion={promptVersion} /> <Disclosure id="invocation-parameters"> <DisclosureTrigger>Invocation Parameters</DisclosureTrigger> <DisclosurePanel> <PromptInvocationParameters promptVersion={promptVersion} /> </DisclosurePanel> </Disclosure> <PromptTools promptVersion={promptVersion} /> <PromptResponseFormat promptVersion={promptVersion} /> </DisclosureGroup> </Card> ); }

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Arize-ai/phoenix'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.