Skip to main content
Glama

@arizeai/phoenix-mcp

Official
by Arize-ai
PromptLLM.tsx (1.57 kB)
import { PropsWithChildren } from "react"; import { graphql, useFragment } from "react-relay"; import { Disclosure, DisclosurePanel, DisclosureTrigger, Flex, List, ListItem, Text, View, } from "@phoenix/components"; import { ModelProviders } from "@phoenix/constants/generativeConstants"; import { PromptLLM__main$key } from "@phoenix/pages/prompt/__generated__/PromptLLM__main.graphql"; const ModelProviderItem = ({ keyName, children, }: PropsWithChildren<{ keyName: string; }>) => ( <ListItem> <View paddingStart="size-100" paddingEnd="size-100"> <Flex direction="row" justifyContent="space-between"> <Text size="XS" color="text-700"> {keyName} </Text> <Text size="XS">{children}</Text> </Flex> </View> </ListItem> ); type PromptLLMProps = { promptVersion: PromptLLM__main$key; }; export function PromptLLM({ promptVersion }: PromptLLMProps) { const data = useFragment<PromptLLM__main$key>( graphql` fragment PromptLLM__main on PromptVersion { model: modelName provider: modelProvider } `, promptVersion ); return ( <Disclosure id="llm"> <DisclosureTrigger>LLM</DisclosureTrigger> <DisclosurePanel> <List size="S"> <ModelProviderItem keyName="Model">{data.model}</ModelProviderItem> <ModelProviderItem keyName="Provider"> {ModelProviders[data.provider as ModelProvider] ?? data.provider} </ModelProviderItem> </List> </DisclosurePanel> </Disclosure> ); }

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Arize-ai/phoenix'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.