@arizeai/phoenix-mcp (Official, by Arize-ai)

schema.graphql (81.6 kB)
directive @oneOf on INPUT_OBJECT input AddAnnotationConfigToProjectInput { projectId: ID! annotationConfigId: ID! } type AddAnnotationConfigToProjectPayload { query: Query! project: Project! } input AddDatasetExamplesToDatasetSplitsInput { datasetSplitIds: [ID!]! exampleIds: [ID!]! } type AddDatasetExamplesToDatasetSplitsMutationPayload { query: Query! examples: [DatasetExample!]! } input AddExamplesToDatasetInput { datasetId: ID! examples: [DatasetExampleInput!]! datasetVersionDescription: String datasetVersionMetadata: JSON } input AddSpansToDatasetInput { datasetId: ID! spanIds: [ID!]! datasetVersionDescription: String datasetVersionMetadata: JSON } interface Annotation { """Name of the annotation, e.g. 'helpfulness' or 'relevance'.""" name: String! """Value of the annotation in the form of a numeric score.""" score: Float """ Value of the annotation in the form of a string, e.g. 'helpful' or 'not helpful'. Note that the label is not necessarily binary. """ label: String """ The annotator's explanation for the annotation result (i.e. score or label, or both) given to the subject. """ explanation: String """The date and time when the annotation was created.""" createdAt: DateTime! """The date and time when the annotation was last updated.""" updatedAt: DateTime! } union AnnotationConfig = CategoricalAnnotationConfig | ContinuousAnnotationConfig | FreeformAnnotationConfig interface AnnotationConfigBase { name: String! description: String annotationType: AnnotationType! } """A connection to a list of items.""" type AnnotationConfigConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [AnnotationConfigEdge!]! } """An edge in a connection.""" type AnnotationConfigEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: AnnotationConfig! } input AnnotationConfigInput @oneOf { categorical: CategoricalAnnotationConfigInput continuous: ContinuousAnnotationConfigInput freeform: FreeformAnnotationConfigInput } input AnnotationFilter { include: AnnotationFilterCondition exclude: AnnotationFilterCondition } input AnnotationFilterCondition { names: [String!] sources: [AnnotationSource!] userIds: [ID] } enum AnnotationSource { API APP } type AnnotationSummary { name: String! count: Int! labels: [String!]! labelFractions: [LabelFraction!]! meanScore: Float scoreCount: Int! labelCount: Int! } enum AnnotationType { CATEGORICAL CONTINUOUS FREEFORM } enum AnnotatorKind { LLM HUMAN CODE } interface ApiKey { """Name of the API key.""" name: String! """Description of the API key.""" description: String """The date and time the API key was created.""" createdAt: DateTime! """The date and time the API key will expire.""" expiresAt: DateTime } enum AuthMethod { LOCAL OAUTH2 } union Bin = NominalBin | IntervalBin | MissingValueBin type BooleanInvocationParameter implements InvocationParameterBase { invocationName: String! canonicalName: CanonicalParameterName label: String! required: Boolean! invocationInputField: InvocationInputField! defaultValue: Boolean } type BoundedFloatInvocationParameter implements InvocationParameterBase { invocationName: String! canonicalName: CanonicalParameterName label: String! required: Boolean! invocationInputField: InvocationInputField! defaultValue: Float minValue: Float! maxValue: Float! 
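# A minimal sketch (not part of the schema): the Connection/Edge/PageInfo shapes above are
# the Relay-style pagination pattern used throughout this API. Paging the top-level
# `annotationConfigs` field, defined later on Query, might look like this; the page size
# and cursor variable are arbitrary.
#
# query ListAnnotationConfigs($after: String) {
#   annotationConfigs(first: 50, after: $after) {
#     edges {
#       cursor
#       node {
#         ... on CategoricalAnnotationConfig { id name optimizationDirection values { label score } }
#         ... on ContinuousAnnotationConfig { id name lowerBound upperBound }
#         ... on FreeformAnnotationConfig { id name description }
#       }
#     }
#     pageInfo { hasNextPage endCursor }
#   }
# }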
} enum CanonicalParameterName { TEMPERATURE MAX_COMPLETION_TOKENS STOP_SEQUENCES TOP_P RANDOM_SEED TOOL_CHOICE RESPONSE_FORMAT REASONING_EFFORT ANTHROPIC_EXTENDED_THINKING } type CategoricalAnnotationConfig implements Node & AnnotationConfigBase { """The Globally Unique ID of this object""" id: ID! name: String! description: String annotationType: AnnotationType! optimizationDirection: OptimizationDirection! values: [CategoricalAnnotationValue!]! } input CategoricalAnnotationConfigInput { name: String! description: String = null optimizationDirection: OptimizationDirection! values: [CategoricalAnnotationConfigValueInput!]! } input CategoricalAnnotationConfigValueInput { label: String! score: Float = null } type CategoricalAnnotationValue { label: String! score: Float } type ChatCompletionFunctionCall { name: String! arguments: String! } input ChatCompletionInput { messages: [ChatCompletionMessageInput!]! model: GenerativeModelInput! invocationParameters: [InvocationParameterInput!]! = [] tools: [JSON!] credentials: [GenerativeCredentialInput!] template: PromptTemplateOptions promptName: Identifier = null repetitions: Int! } input ChatCompletionMessageInput { role: ChatCompletionMessageRole! """The content of the message as JSON to support various kinds of text""" content: JSON! = "" """The tool calls that were made in the message""" toolCalls: [JSON!] """ The ID that corresponds to a prior tool call. Used to link a tool message to a pre-existing tool call. """ toolCallId: String } enum ChatCompletionMessageRole { USER SYSTEM TOOL AI } type ChatCompletionMutationError { message: String! } type ChatCompletionMutationPayload { content: String toolCalls: [ChatCompletionToolCall!]! span: Span! errorMessage: String } union ChatCompletionMutationPayloadChatCompletionMutationError = ChatCompletionMutationPayload | ChatCompletionMutationError input ChatCompletionOverDatasetInput { messages: [ChatCompletionMessageInput!]! model: GenerativeModelInput! invocationParameters: [InvocationParameterInput!]! = [] tools: [JSON!] credentials: [GenerativeCredentialInput!] templateFormat: PromptTemplateFormat! = MUSTACHE repetitions: Int! datasetId: ID! datasetVersionId: ID = null splitIds: [ID!] = null experimentName: String = null experimentDescription: String = null experimentMetadata: JSON = {} promptName: Identifier = null } type ChatCompletionOverDatasetMutationExamplePayload { datasetExampleId: ID! repetitionNumber: Int! experimentRunId: ID! result: ChatCompletionMutationPayloadChatCompletionMutationError! } type ChatCompletionOverDatasetMutationPayload { datasetId: ID! datasetVersionId: ID! experimentId: ID! examples: [ChatCompletionOverDatasetMutationExamplePayload!]! } type ChatCompletionSubscriptionError implements ChatCompletionSubscriptionPayload { datasetExampleId: ID repetitionNumber: Int message: String! } type ChatCompletionSubscriptionExperiment implements ChatCompletionSubscriptionPayload { datasetExampleId: ID repetitionNumber: Int experiment: Experiment! } interface ChatCompletionSubscriptionPayload { datasetExampleId: ID repetitionNumber: Int } type ChatCompletionSubscriptionResult implements ChatCompletionSubscriptionPayload { datasetExampleId: ID repetitionNumber: Int span: Span experimentRun: ExperimentRun } type ChatCompletionToolCall { id: String! function: ChatCompletionFunctionCall! } input ChatPromptVersionInput { description: String = null templateFormat: PromptTemplateFormat! template: PromptChatTemplateInput! invocationParameters: JSON! = {} tools: [ToolDefinitionInput!]! 
= [] responseFormat: ResponseFormatInput = null modelProvider: ModelProvider! modelName: String! } input ClearProjectInput { id: ID! """The time up to which to purge data. Time is right-open /non-inclusive.""" endTime: DateTime } input ClonePromptInput { name: Identifier! description: String = null promptId: ID! } type Cluster { """The ID of the cluster""" id: ID! """The event IDs of the points in the cluster""" eventIds: [ID!]! """Ratio of primary points over reference points""" driftRatio: Float """Ratio of primary points over corpus points""" primaryToCorpusRatio: Float """ Data quality metric summarized by the respective datasets of the clustered events """ dataQualityMetric(metric: DataQualityMetricInput!): DatasetValues! """ Performance metric summarized by the respective datasets of the clustered events """ performanceMetric(metric: PerformanceMetricInput!): DatasetValues! } input ClusterInput { eventIds: [ID!]! id: ID } union ContentPart = TextContentPart | ToolCallContentPart | ToolResultContentPart input ContentPartInput @oneOf { text: TextContentValueInput toolCall: ToolCallContentValueInput toolResult: ToolResultContentValueInput } type ContinuousAnnotationConfig implements Node & AnnotationConfigBase { """The Globally Unique ID of this object""" id: ID! name: String! description: String annotationType: AnnotationType! optimizationDirection: OptimizationDirection! lowerBound: Float upperBound: Float } input ContinuousAnnotationConfigInput { name: String! description: String = null optimizationDirection: OptimizationDirection! lowerBound: Float = null upperBound: Float = null } type CostBreakdown { """ Total number of tokens, including tokens for which no cost was computed. """ tokens: Float cost: Float } input CreateAnnotationConfigInput { annotationConfig: AnnotationConfigInput! } type CreateAnnotationConfigPayload { query: Query! annotationConfig: AnnotationConfig! } input CreateApiKeyInput { name: String! description: String expiresAt: DateTime } input CreateChatPromptInput { name: Identifier! description: String = null promptVersion: ChatPromptVersionInput! } input CreateChatPromptVersionInput { promptId: ID! promptVersion: ChatPromptVersionInput! tags: [SetPromptVersionTagInput!] = null } input CreateDatasetInput { name: String! description: String metadata: JSON } input CreateDatasetLabelInput { name: String! description: String color: String! } type CreateDatasetLabelMutationPayload { datasetLabel: DatasetLabel! } input CreateDatasetSplitInput { name: String! description: String color: String! metadata: JSON } input CreateDatasetSplitWithExamplesInput { name: String! description: String color: String! metadata: JSON exampleIds: [ID!]! } input CreateModelMutationInput { name: String! provider: String = null namePattern: String! costs: [TokenPriceInput!]! startTime: DateTime = null } type CreateModelMutationPayload { model: GenerativeModel! query: Query! } input CreateProjectInput { name: String! description: String gradientStartColor: String gradientEndColor: String } input CreateProjectSessionAnnotationInput { projectSessionId: ID! name: String! annotatorKind: AnnotatorKind! = HUMAN label: String = null score: Float = null explanation: String = null metadata: JSON! = {} source: AnnotationSource! = APP identifier: String } input CreateProjectTraceRetentionPolicyInput { name: String! cronExpression: CronExpression! rule: ProjectTraceRetentionRuleInput! addProjects: [ID!] } input CreatePromptLabelInput { name: String! description: String = null color: String! 
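# Sketch only: creating a dataset with the CreateDatasetInput shape defined above. The
# `createDataset` mutation appears further down; the input is passed as a variable to avoid
# assumptions about JSON scalar literals, and the example variable values are made up.
#
# mutation CreateDataset($input: CreateDatasetInput!) {
#   createDataset(input: $input) {
#     dataset { id name description createdAt }
#   }
# }
# # variables (hypothetical): { "input": { "name": "golden-questions", "description": "hand-curated examples" } }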
} input CreateSpanAnnotationInput { spanId: ID! name: String! annotatorKind: AnnotatorKind! label: String = null score: Float = null explanation: String = null metadata: JSON! = {} source: AnnotationSource! identifier: String } input CreateSpanNoteInput { spanId: ID! note: String! } type CreateSystemApiKeyMutationPayload { jwt: String! apiKey: SystemApiKey! query: Query! } input CreateTraceAnnotationInput { traceId: ID! name: String! annotatorKind: AnnotatorKind! label: String = null score: Float = null explanation: String = null metadata: JSON! = {} source: AnnotationSource! identifier: String } input CreateUserApiKeyInput { name: String! description: String expiresAt: DateTime } type CreateUserApiKeyMutationPayload { jwt: String! apiKey: UserApiKey! query: Query! } input CreateUserInput { email: String! username: String! password: String role: UserRoleInput! sendWelcomeEmail: Boolean = false authMethod: AuthMethod = LOCAL } scalar CronExpression enum DataQualityMetric { cardinality percentEmpty mean sum min max count p01 p25 p50 p75 p99 } input DataQualityMetricInput { metric: DataQualityMetric! columnName: String } type DataQualityTimeSeries implements TimeSeries { data: [TimeSeriesDataPoint!]! } type Dataset implements Node { """The Globally Unique ID of this object""" id: ID! name: String! description: String metadata: JSON! createdAt: DateTime! updatedAt: DateTime! versions(first: Int = 50, last: Int, after: String, before: String, sort: DatasetVersionSort): DatasetVersionConnection! """ Number of examples in a specific version if version is specified, or in the latest version if version is not specified. """ exampleCount(datasetVersionId: ID, splitIds: [ID!]): Int! examples(datasetVersionId: ID, splitIds: [ID!], first: Int = 50, last: Int, after: String, before: String, filter: String): DatasetExampleConnection! splits: [DatasetSplit!]! """ Number of experiments for a specific version if version is specified, or for all versions if version is not specified. """ experimentCount(datasetVersionId: ID): Int! experiments(first: Int = 50, last: Int, after: String, before: String, filterCondition: String, filterIds: [ID!]): ExperimentConnection! experimentAnnotationSummaries: [DatasetExperimentAnnotationSummary!]! labels: [DatasetLabel!]! lastUpdatedAt: DateTime } enum DatasetColumn { createdAt name } """A connection to a list of items.""" type DatasetConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [DatasetEdge!]! } """An edge in a connection.""" type DatasetEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: Dataset! } type DatasetExample implements Node { """The Globally Unique ID of this object""" id: ID! createdAt: DateTime! revision(datasetVersionId: ID): DatasetExampleRevision! span: Span experimentRuns(first: Int = 50, last: Int, after: String, before: String, experimentIds: [ID!]): ExperimentRunConnection! experimentRepeatedRunGroups(experimentIds: [ID!]!): [ExperimentRepeatedRunGroup!]! datasetSplits: [DatasetSplit!]! } """A connection to a list of items.""" type DatasetExampleConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [DatasetExampleEdge!]! } """An edge in a connection.""" type DatasetExampleEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: DatasetExample! } input DatasetExampleInput { input: JSON! output: JSON! 
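# Illustrative sketch: recording a human annotation against a span with the
# CreateSpanAnnotationInput defined above (the `createSpanAnnotations` mutation is listed
# further down). The span ID is a placeholder, not a real node ID.
#
# mutation AnnotateSpan {
#   createSpanAnnotations(input: [{
#     spanId: "U3BhbjoxMjM="          # hypothetical Relay node ID
#     name: "helpfulness"
#     annotatorKind: HUMAN
#     score: 0.9
#     explanation: "Answer addressed the question directly."
#     source: APP
#   }]) {
#     spanAnnotations { id name score annotatorKind }
#   }
# }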
metadata: JSON! spanId: ID } input DatasetExamplePatch { exampleId: ID! input: JSON output: JSON metadata: JSON } type DatasetExampleRevision implements ExampleRevision { input: JSON! output: JSON! metadata: JSON! revisionKind: RevisionKind! createdAt: DateTime! } type DatasetExperimentAnnotationSummary { annotationName: String! minScore: Float maxScore: Float } """A filter for datasets""" input DatasetFilter { col: DatasetFilterColumn = null value: String = null filterLabels: [String!] } enum DatasetFilterColumn { name } type DatasetLabel implements Node { """The Globally Unique ID of this object""" id: ID! name: String! description: String color: String! } """A connection to a list of items.""" type DatasetLabelConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [DatasetLabelEdge!]! } """An edge in a connection.""" type DatasetLabelEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: DatasetLabel! } type DatasetMutationPayload { dataset: Dataset! } """The sort key and direction for dataset connections""" input DatasetSort { col: DatasetColumn! dir: SortDir! } type DatasetSplit implements Node { """The Globally Unique ID of this object""" id: ID! name: String! description: String metadata: JSON! color: String! createdAt: DateTime! updatedAt: DateTime! } """A connection to a list of items.""" type DatasetSplitConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [DatasetSplitEdge!]! } """An edge in a connection.""" type DatasetSplitEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: DatasetSplit! } type DatasetSplitMutationPayload { datasetSplit: DatasetSplit! query: Query! } type DatasetSplitMutationPayloadWithExamples { datasetSplit: DatasetSplit! query: Query! examples: [DatasetExample!]! } type DatasetValues { primaryValue: Float referenceValue: Float } type DatasetVersion implements Node { """The Globally Unique ID of this object""" id: ID! description: String metadata: JSON! createdAt: DateTime! } enum DatasetVersionColumn { createdAt } """A connection to a list of items.""" type DatasetVersionConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [DatasetVersionEdge!]! } """An edge in a connection.""" type DatasetVersionEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: DatasetVersion! } """The sort key and direction for dataset version connections""" input DatasetVersionSort { col: DatasetVersionColumn! dir: SortDir! } """Date with time (isoformat)""" scalar DateTime type DbTableStats { tableName: String! numBytes: Float! } input DeleteAnnotationConfigsInput { ids: [ID!]! } type DeleteAnnotationConfigsPayload { query: Query! annotationConfigs: [AnnotationConfig!]! } input DeleteAnnotationsInput { annotationIds: [ID!]! } input DeleteApiKeyInput { id: ID! } type DeleteApiKeyMutationPayload { apiKeyId: ID! query: Query! } input DeleteDatasetExamplesInput { exampleIds: [ID!]! datasetVersionDescription: String datasetVersionMetadata: JSON } input DeleteDatasetInput { datasetId: ID! } input DeleteDatasetLabelsInput { datasetLabelIds: [ID!]! } type DeleteDatasetLabelsMutationPayload { datasetLabels: [DatasetLabel!]! } input DeleteDatasetSplitInput { datasetSplitIds: [ID!]! 
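# Illustrative: combining the DatasetSort and DatasetFilter inputs above with the
# top-level `datasets` connection (defined later on Query). The substring "eval" is
# arbitrary.
#
# query RecentDatasets {
#   datasets(first: 20, sort: { col: createdAt, dir: desc }, filter: { col: name, value: "eval" }) {
#     edges { node { id name exampleCount experimentCount lastUpdatedAt } }
#     pageInfo { hasNextPage endCursor }
#   }
# }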
} type DeleteDatasetSplitsMutationPayload { datasetSplits: [DatasetSplit!]! query: Query! } input DeleteExperimentsInput { experimentIds: [ID!]! } input DeleteModelMutationInput { id: ID! } type DeleteModelMutationPayload { model: GenerativeModel! query: Query! } input DeleteProjectTraceRetentionPolicyInput { id: ID! } input DeletePromptInput { promptId: ID! } input DeletePromptLabelsInput { promptLabelIds: [ID!]! } type DeletePromptMutationPayload { query: Query! } input DeletePromptVersionTagInput { promptVersionTagId: ID! } input DeleteUsersInput { userIds: [ID!]! } type Dimension implements Node { """The Globally Unique ID of this object""" id: ID! """The name of the dimension (a.k.a. the column name)""" name: String! """ Whether the dimension represents a feature, tag, prediction, or actual. """ type: DimensionType! """The data type of the column. Categorical or numeric.""" dataType: DimensionDataType! """Whether the dimension data is continuous or discrete.""" shape: DimensionShape! driftMetric(metric: ScalarDriftMetric!, timeRange: TimeRange): Float dataQualityMetric( metric: DataQualityMetric! timeRange: TimeRange """The inferences (primary or reference) to query""" inferencesRole: InferencesRole = primary ): Float """ Returns the observed categories of a categorical dimension (usually a dimension of string values) as a list of unique string labels sorted in lexicographical order. Missing values are excluded. Non-categorical dimensions return an empty list. """ categories: [String!]! """ Returns the time series of the specified metric for data within a time range. Data points are generated starting at the end time and are separated by the sampling interval. Each data point is labeled by the end instant and contains data from their respective evaluation windows. """ dataQualityTimeSeries( metric: DataQualityMetric! timeRange: TimeRange! granularity: Granularity! """The inferences (primary or reference) to query""" inferencesRole: InferencesRole = primary ): DataQualityTimeSeries! """ The time series of the specified metric for data within a time range. Data points are generated starting at the end time and are separated by the sampling interval. Each data point is labeled by the end instant and contains data from their respective evaluation windows. """ driftTimeSeries(metric: ScalarDriftMetric!, timeRange: TimeRange!, granularity: Granularity!): DriftTimeSeries! """ The segments across both inference sets and returns the counts per segment """ segmentsComparison(primaryTimeRange: TimeRange): Segments! } """A connection to a list of items.""" type DimensionConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [DimensionEdge!]! } enum DimensionDataType { categorical numeric } """An edge in a connection.""" type DimensionEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: Dimension! } input DimensionFilter { types: [DimensionType!] shapes: [DimensionShape!] dataTypes: [DimensionDataType!] } input DimensionInput { name: String! type: DimensionType! } enum DimensionShape { continuous discrete } enum DimensionType { feature tag prediction actual } type DimensionWithValue { dimension: Dimension! """The string representation of the dimension's value""" value: String } type DocumentAnnotation implements Node & Annotation { """The Globally Unique ID of this object""" id: ID! """Name of the annotation, e.g. 'helpfulness' or 'relevance'.""" name: String! 
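# Sketch under assumptions: reading a drift time series for each dimension through the
# `model` entry point on Query. TimeRange and TimeSeriesDataPoint are defined outside this
# excerpt, so the time range is passed as a variable and the `timestamp`/`value` selections
# below are assumptions about TimeSeriesDataPoint's fields.
#
# query DimensionDrift($tr: TimeRange!) {
#   model {
#     dimensions(first: 10) {
#       edges {
#         node {
#           name
#           driftTimeSeries(
#             metric: psi
#             timeRange: $tr
#             granularity: { evaluationWindowMinutes: 4320, samplingIntervalMinutes: 1440 }
#           ) {
#             data { timestamp value }
#           }
#         }
#       }
#     }
#   }
# }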
"""Value of the annotation in the form of a numeric score.""" score: Float """ Value of the annotation in the form of a string, e.g. 'helpful' or 'not helpful'. Note that the label is not necessarily binary. """ label: String """ The annotator's explanation for the annotation result (i.e. score or label, or both) given to the subject. """ explanation: String """The date and time when the annotation was created.""" createdAt: DateTime! """The date and time when the annotation was last updated.""" updatedAt: DateTime! annotatorKind: AnnotatorKind! metadata: JSON! documentPosition: Int! identifier: String! source: AnnotationSource! span: Span! user: User } """ Summarization of retrieval metrics: Average NDCG@K, Average Precision@K, Mean Reciprocal Rank, Hit Rate, etc. """ type DocumentEvaluationSummary { evaluationName: String! averageNdcg(k: Int): Float countNdcg(k: Int): Int! averagePrecision(k: Int): Float countPrecision(k: Int): Int! meanReciprocalRank: Float countReciprocalRank: Int! hitRate: Float countHit: Int! } """ A collection of retrieval metrics computed on a list of document evaluation scores: NDCG@K, Precision@K, Reciprocal Rank, etc. """ type DocumentRetrievalMetrics { evaluationName: String! """ Normalized Discounted Cumulative Gain (NDCG) at `k` with log base 2 discounting. If `k` is None, it's set to the length of the scores. If `k` < 1, return 0.0. """ ndcg(k: Int): Float """ Precision at `k`, defined as the fraction of truthy scores among first `k` positions (1-based index). If `k` is None, then it's set to the length of the scores. If `k` < 1, return 0.0. """ precision(k: Int): Float """ Return `1/R` where `R` is the rank of the first hit, i.e. the 1-based index position of first truthy score, e.g. score=1. If a non-finite value (e.g. `NaN`) is encountered before the first (finite) truthy score, then return `NaN`, otherwise if no truthy score is found (or if the count of scores is zero), return 0.0. """ reciprocalRank: Float """ Return 1.0 if any score is truthy (i.e. is a hit), e.g. score=1. Otherwise, return `NaN` if any score is non-finite (e.g. `NaN`), or return 0.0 if all scores are falsy, e.g. all scores are 0. """ hit: Float } type DriftTimeSeries implements TimeSeries { data: [TimeSeriesDataPoint!]! } type EmbeddingDimension implements Node { """The Globally Unique ID of this object""" id: ID! name: String! """ Computes a drift metric between all reference data and the primary data belonging to the input time range (inclusive of the time range start and exclusive of the time range end). Returns None if no reference dataset exists, if no primary data exists in the input time range, or if the input time range is invalid. """ driftMetric(metric: VectorDriftMetric!, timeRange: TimeRange): Float """ Computes a retrieval metric between corpus data and the primary data belonging to the input time range (inclusive of the time range start and exclusive of the time range end). Returns None if no reference dataset exists, if no primary data exists in the input time range, or if the input time range is invalid. """ retrievalMetric(metric: VectorDriftMetric!, timeRange: TimeRange): Float """ Returns the time series of the specified metric for data within timeRange. Data points are generated starting at the end time, are separated by the sampling interval. Each data point is labeled by the end instant of and contains data from their respective evaluation window. """ dataQualityTimeSeries( metric: DataQualityMetric! timeRange: TimeRange! granularity: Granularity! 
"""The dataset (primary or reference) to query""" inferencesRole: InferencesRole = primary ): DataQualityTimeSeries! """ Computes a drift time-series between the primary and reference datasets. The output drift time-series contains one data point for each whole hour in the input time range (inclusive of the time range start and exclusive of the time range end). Each data point contains the drift metric value between all reference data and the primary data within the evaluation window ending at the corresponding time. Returns None if no reference dataset exists or if the input time range is invalid. """ driftTimeSeries(metric: VectorDriftMetric!, timeRange: TimeRange!, granularity: Granularity!): DriftTimeSeries! """ Computes a retrieval metric between the primary and corpus datasets. The output time-series contains one data point for each whole hour in the input time range (inclusive of the time range start and exclusive of the time range end). Each data point contains the metric value between all corpus data and the primary data within the evaluation window ending at the corresponding time. Returns None if no corpus dataset exists or if the input time range is invalid. """ retrievalMetricTimeSeries(metric: VectorDriftMetric!, timeRange: TimeRange!, granularity: Granularity!): DriftTimeSeries! UMAPPoints( """The time range of the primary dataset to generate the UMAP points for""" timeRange: TimeRange! """UMAP target dimension hyperparameter. Must be 2 or 3""" nComponents: Int = 3 """UMAP minimum distance hyperparameter""" minDist: Float! = 0 """UMAP N neighbors hyperparameter""" nNeighbors: Int! = 30 """UMAP N samples""" nSamples: Int! = 500 """HDBSCAN minimum cluster size""" minClusterSize: Int! = 10 """HDBSCAN minimum samples""" clusterMinSamples: Int! = 1 """HDBSCAN cluster selection epsilon""" clusterSelectionEpsilon: Float! = 0 ): UMAPPoints! } """A connection to a list of items.""" type EmbeddingDimensionConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [EmbeddingDimensionEdge!]! } """An edge in a connection.""" type EmbeddingDimensionEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: EmbeddingDimension! } type EmbeddingMetadata { predictionId: String rawData: String linkToData: String } enum EvalAttr { score label } input EvalResultKey { name: String! attr: EvalAttr! } type Event { id: ID! eventMetadata: EventMetadata! dimensions: [DimensionWithValue!]! """The prompt and response pair associated with the event""" promptAndResponse: PromptResponse """The text of the document if the event is a retrieved document record""" documentText: String } type EventMetadata { predictionId: String predictionScore: Float predictionLabel: String actualScore: Float actualLabel: String } interface ExampleRevision { input: JSON! output: JSON! metadata: JSON! } type Experiment implements Node { """The Globally Unique ID of this object""" id: ID! name: String! projectName: String description: String repetitions: Int! datasetVersionId: ID! metadata: JSON! createdAt: DateTime! updatedAt: DateTime! """Sequence number (1-based) of experiments belonging to the same dataset""" sequenceNumber: Int! runs(first: Int = 50, after: String, sort: ExperimentRunSort): ExperimentRunConnection! runCount: Int! annotationSummaries: [ExperimentAnnotationSummary!]! errorRate: Float averageRunLatencyMs: Float project: Project lastUpdatedAt: DateTime costSummary: SpanCostSummary! 
costDetailSummaryEntries: [SpanCostDetailSummaryEntry!]! datasetSplits: DatasetSplitConnection! } type ExperimentAnnotationSummary { annotationName: String! minScore: Float maxScore: Float meanScore: Float count: Int! errorCount: Int! } type ExperimentComparison implements Node { """The Globally Unique ID of this object""" id: ID! example: DatasetExample! repeatedRunGroups: [ExperimentRepeatedRunGroup!]! } """A connection to a list of items.""" type ExperimentComparisonConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [ExperimentComparisonEdge!]! } """An edge in a connection.""" type ExperimentComparisonEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: ExperimentComparison! } """A connection to a list of items.""" type ExperimentConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [ExperimentEdge!]! } """An edge in a connection.""" type ExperimentEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: Experiment! } type ExperimentMutationPayload { experiments: [Experiment!]! } type ExperimentRepeatedRunGroup implements Node { """The Globally Unique ID of this object""" id: ID! runs: [ExperimentRun!]! experimentId: ID! averageLatencyMs: Float costSummary: SpanCostSummary! costDetailSummaryEntries: [SpanCostDetailSummaryEntry!]! annotationSummaries: [ExperimentRepeatedRunGroupAnnotationSummary!]! } type ExperimentRepeatedRunGroupAnnotationSummary { annotationName: String! meanScore: Float } type ExperimentRun implements Node { """The Globally Unique ID of this object""" id: ID! experimentId: ID! repetitionNumber: Int! traceId: String output: JSON startTime: DateTime! endTime: DateTime! error: String latencyMs: Float! annotations(first: Int = 50, last: Int, after: String, before: String): ExperimentRunAnnotationConnection! trace: Trace example: DatasetExample! costSummary: SpanCostSummary! costDetailSummaryEntries: [SpanCostDetailSummaryEntry!]! } type ExperimentRunAnnotation implements Node { """The Globally Unique ID of this object""" id: ID! name: String! annotatorKind: ExperimentRunAnnotatorKind! label: String score: Float explanation: String error: String metadata: JSON! startTime: DateTime! endTime: DateTime! traceId: String trace: Trace } """A connection to a list of items.""" type ExperimentRunAnnotationConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [ExperimentRunAnnotationEdge!]! } """An edge in a connection.""" type ExperimentRunAnnotationEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: ExperimentRunAnnotation! } enum ExperimentRunAnnotatorKind { LLM HUMAN CODE } input ExperimentRunColumn @oneOf { metric: ExperimentRunMetric annotationName: String } """A connection to a list of items.""" type ExperimentRunConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [ExperimentRunEdge!]! } """An edge in a connection.""" type ExperimentRunEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: ExperimentRun! } enum ExperimentRunMetric { latencyMs } type ExperimentRunMetricComparison { """ The number of runs in which the base experiment improved on the best run in any compare experiment. 
""" numRunsImproved: Int! """ The number of runs in which the base experiment regressed on the best run in any compare experiment. """ numRunsRegressed: Int! """ The number of runs in which the base experiment is equal to the best run in any compare experiment. """ numRunsEqual: Int! """ The number of runs in the base experiment that could not be compared, either because the base experiment run was missing a value or because all compare experiment runs were missing values. """ numRunsWithoutComparison: Int! } type ExperimentRunMetricComparisons { latency: ExperimentRunMetricComparison! totalTokenCount: ExperimentRunMetricComparison! promptTokenCount: ExperimentRunMetricComparison! completionTokenCount: ExperimentRunMetricComparison! totalCost: ExperimentRunMetricComparison! promptCost: ExperimentRunMetricComparison! completionCost: ExperimentRunMetricComparison! } """The sort key and direction for experiment run connections""" input ExperimentRunSort { col: ExperimentRunColumn! dir: SortDir! } type ExportedFile { """File name without the file extension.""" fileName: String! } type FloatInvocationParameter implements InvocationParameterBase { invocationName: String! canonicalName: CanonicalParameterName label: String! required: Boolean! invocationInputField: InvocationInputField! defaultValue: Float } type FreeformAnnotationConfig implements Node & AnnotationConfigBase { """The Globally Unique ID of this object""" id: ID! name: String! description: String annotationType: AnnotationType! } input FreeformAnnotationConfigInput { name: String! description: String = null } type FunctionCallChunk implements ChatCompletionSubscriptionPayload { datasetExampleId: ID repetitionNumber: Int name: String! arguments: String! } type Functionality { """Model inferences are available for analysis""" modelInferences: Boolean! } input GenerativeCredentialInput { envVarName: String! value: String! } type GenerativeModel implements Node & ModelInterface { """The Globally Unique ID of this object""" id: ID! name: String! providerKey: GenerativeProviderKey provider: String namePattern: String! kind: GenerativeModelKind! createdAt: DateTime! updatedAt: DateTime! startTime: DateTime tokenPrices: [TokenPrice!]! costSummary(projectId: ID, timeRange: TimeRange): SpanCostSummary! costDetailSummaryEntries: [SpanCostDetailSummaryEntry!]! lastUsedAt: DateTime } input GenerativeModelInput { providerKey: GenerativeProviderKey! name: String! baseUrl: String endpoint: String apiVersion: String region: String customHeaders: JSON } enum GenerativeModelKind { CUSTOM BUILT_IN } type GenerativeProvider { name: String! key: GenerativeProviderKey! dependencies: [String!]! dependenciesInstalled: Boolean! """The credential requirements for the provider""" credentialRequirements: [GenerativeProviderCredentialConfig!]! """Whether the credentials are set on the server for the provider""" credentialsSet: Boolean! } type GenerativeProviderCredentialConfig { envVarName: String! isRequired: Boolean! } enum GenerativeProviderKey { OPENAI ANTHROPIC AZURE_OPENAI GOOGLE DEEPSEEK XAI OLLAMA AWS } """ Granularity specifies the distance between points in a time-series and the duration of time (i.e. evaluation window) by which data is aggregated for each data point. By convention all time intervals are right-open intervals, i.e. the end instant of the evaluation window is excluded from the interval. As a matter of standardization, each point in a time-series aggregates data corresponding to an interval of time (i.e. 
the evaluation window) ending at the point's timestamp, and each time-series enumerates its points starting from the end instant of the TimeRange. """ input Granularity { """ Specifies the length of time by which the data are grouped for aggregation. Each point in a time-series will have the same evaluation_window, but the evaluation_window for each point can overlap in real time. For example, when the points are 24 hours apart but the eval window is 72 hours, it means that each point in the time-series is aggregating 72 hours worth of data ending at the point's timestamp. """ evaluationWindowMinutes: Int! """ Specifies the time interval between each point in the time-series. All points in the time-series are separated by the same length of time, and are generated starting from the end time of the time range. """ samplingIntervalMinutes: Int! } scalar Identifier type InferenceModel { dimensions(first: Int = 50, last: Int, after: String, before: String, include: DimensionFilter, exclude: DimensionFilter): DimensionConnection! primaryInferences: Inferences! referenceInferences: Inferences corpusInferences: Inferences embeddingDimensions(first: Int = 50, last: Int, after: String, before: String): EmbeddingDimensionConnection! """Returns exported file names sorted by descending modification time.""" exportedFiles: [ExportedFile!]! performanceMetric( metric: PerformanceMetricInput! timeRange: TimeRange """The inferences (primary or reference) to query""" inferencesRole: InferencesRole = primary ): Float """ Returns the time series of the specified metric for data within a time range. Data points are generated starting at the end time and are separated by the sampling interval. Each data point is labeled by the end instant and contains data from their respective evaluation windows. """ performanceTimeSeries( metric: PerformanceMetricInput! timeRange: TimeRange! granularity: Granularity! """The inferences (primary or reference) to query""" inferencesRole: InferencesRole = primary ): PerformanceTimeSeries! } type Inferences { """The start bookend of the data""" startTime: DateTime! """The end bookend of the data""" endTime: DateTime! """The record count of the data""" recordCount: Int! """Returns a human friendly name for the inferences.""" name: String! events(eventIds: [ID!]!, dimensions: [DimensionInput!]): [Event!]! } enum InferencesRole { primary reference } input InputCoordinate2D { x: Float! y: Float! } input InputCoordinate3D { x: Float! y: Float! z: Float! } type IntInvocationParameter implements InvocationParameterBase { invocationName: String! canonicalName: CanonicalParameterName label: String! required: Boolean! invocationInputField: InvocationInputField! defaultValue: Int } type IntervalBin { range: NumericRange! } enum InvocationInputField { value_int value_float value_bool value_string value_json value_string_list value_boolean } union InvocationParameter = IntInvocationParameter | FloatInvocationParameter | BoundedFloatInvocationParameter | StringInvocationParameter | JSONInvocationParameter | StringListInvocationParameter | BooleanInvocationParameter interface InvocationParameterBase { invocationName: String! canonicalName: CanonicalParameterName label: String! required: Boolean! } input InvocationParameterInput { invocationName: String! canonicalName: CanonicalParameterName = null valueInt: Int valueFloat: Float valueBool: Boolean valueString: String valueJson: JSON valueStringList: [String!] 
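# Sketch, not a canonical recipe: calling the `chatCompletion` mutation (defined just
# below) with one InvocationParameterInput from the shape above. The invocation name
# "temperature" and the model name are assumptions — valid names come from the
# `modelInvocationParameters` and `playgroundModels` queries at runtime.
#
# mutation PlaygroundCompletion {
#   chatCompletion(input: {
#     messages: [{ role: USER, content: "Summarize the retrieved documents." }]
#     model: { providerKey: OPENAI, name: "gpt-4o-mini" }
#     invocationParameters: [{ invocationName: "temperature", valueFloat: 0.2 }]
#     repetitions: 1
#   }) {
#     content
#     errorMessage
#     span { id latencyMs }
#   }
# }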
valueBoolean: Boolean } """ The `JSON` scalar type represents JSON values as specified by [ECMA-404](https://ecma-international.org/wp-content/uploads/ECMA-404_2nd_edition_december_2017.pdf). """ scalar JSON @specifiedBy(url: "https://ecma-international.org/wp-content/uploads/ECMA-404_2nd_edition_december_2017.pdf") type JSONInvocationParameter implements InvocationParameterBase { invocationName: String! canonicalName: CanonicalParameterName label: String! required: Boolean! invocationInputField: InvocationInputField! defaultValue: JSON } type LabelFraction { label: String! fraction: Float! } enum MimeType { text json } type MissingValueBin { name: String } interface ModelInterface { name: String! providerKey: GenerativeProviderKey } enum ModelProvider { OPENAI AZURE_OPENAI ANTHROPIC GOOGLE DEEPSEEK XAI OLLAMA AWS } input ModelsInput { providerKey: GenerativeProviderKey modelName: String = null } type Mutation { createAnnotationConfig(input: CreateAnnotationConfigInput!): CreateAnnotationConfigPayload! updateAnnotationConfig(input: UpdateAnnotationConfigInput!): UpdateAnnotationConfigPayload! deleteAnnotationConfigs(input: DeleteAnnotationConfigsInput!): DeleteAnnotationConfigsPayload! addAnnotationConfigToProject(input: [AddAnnotationConfigToProjectInput!]!): AddAnnotationConfigToProjectPayload! removeAnnotationConfigFromProject(input: [RemoveAnnotationConfigFromProjectInput!]!): RemoveAnnotationConfigFromProjectPayload! createSystemApiKey(input: CreateApiKeyInput!): CreateSystemApiKeyMutationPayload! createUserApiKey(input: CreateUserApiKeyInput!): CreateUserApiKeyMutationPayload! deleteSystemApiKey(input: DeleteApiKeyInput!): DeleteApiKeyMutationPayload! deleteUserApiKey(input: DeleteApiKeyInput!): DeleteApiKeyMutationPayload! chatCompletionOverDataset(input: ChatCompletionOverDatasetInput!): ChatCompletionOverDatasetMutationPayload! chatCompletion(input: ChatCompletionInput!): ChatCompletionMutationPayload! createDatasetLabel(input: CreateDatasetLabelInput!): CreateDatasetLabelMutationPayload! updateDatasetLabel(input: UpdateDatasetLabelInput!): UpdateDatasetLabelMutationPayload! deleteDatasetLabels(input: DeleteDatasetLabelsInput!): DeleteDatasetLabelsMutationPayload! setDatasetLabels(input: SetDatasetLabelsInput!): SetDatasetLabelsMutationPayload! unsetDatasetLabels(input: UnsetDatasetLabelsInput!): UnsetDatasetLabelsMutationPayload! createDataset(input: CreateDatasetInput!): DatasetMutationPayload! patchDataset(input: PatchDatasetInput!): DatasetMutationPayload! addSpansToDataset(input: AddSpansToDatasetInput!): DatasetMutationPayload! addExamplesToDataset(input: AddExamplesToDatasetInput!): DatasetMutationPayload! deleteDataset(input: DeleteDatasetInput!): DatasetMutationPayload! patchDatasetExamples(input: PatchDatasetExamplesInput!): DatasetMutationPayload! deleteDatasetExamples(input: DeleteDatasetExamplesInput!): DatasetMutationPayload! createDatasetSplit(input: CreateDatasetSplitInput!): DatasetSplitMutationPayload! patchDatasetSplit(input: PatchDatasetSplitInput!): DatasetSplitMutationPayload! deleteDatasetSplits(input: DeleteDatasetSplitInput!): DeleteDatasetSplitsMutationPayload! addDatasetExamplesToDatasetSplits(input: AddDatasetExamplesToDatasetSplitsInput!): AddDatasetExamplesToDatasetSplitsMutationPayload! removeDatasetExamplesFromDatasetSplits(input: RemoveDatasetExamplesFromDatasetSplitsInput!): RemoveDatasetExamplesFromDatasetSplitsMutationPayload! createDatasetSplitWithExamples(input: CreateDatasetSplitWithExamplesInput!): DatasetSplitMutationPayloadWithExamples! 
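# Illustrative: the `addSpansToDataset` mutation listed above takes the
# AddSpansToDatasetInput defined near the top of this schema. Both IDs are placeholders.
#
# mutation AddSpans {
#   addSpansToDataset(input: {
#     datasetId: "RGF0YXNldDox"
#     spanIds: ["U3Bhbjo0Mg=="]
#     datasetVersionDescription: "spans flagged for review"
#   }) {
#     dataset { id name exampleCount }
#   }
# }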
deleteExperiments(input: DeleteExperimentsInput!): ExperimentMutationPayload! """ Given a list of event ids, export the corresponding data subset in Parquet format. File name is optional, but if specified, should be without file extension. By default the exported file name is current timestamp. """ exportEvents(eventIds: [ID!]!, fileName: String): ExportedFile! """ Given a list of clusters, export the corresponding data subset in Parquet format. File name is optional, but if specified, should be without file extension. By default the exported file name is current timestamp. """ exportClusters(clusters: [ClusterInput!]!, fileName: String): ExportedFile! createModel(input: CreateModelMutationInput!): CreateModelMutationPayload! updateModel(input: UpdateModelMutationInput!): UpdateModelMutationPayload! deleteModel(input: DeleteModelMutationInput!): DeleteModelMutationPayload! createProject(input: CreateProjectInput!): ProjectMutationPayload! deleteProject(id: ID!): Query! clearProject(input: ClearProjectInput!): Query! createProjectTraceRetentionPolicy(input: CreateProjectTraceRetentionPolicyInput!): ProjectTraceRetentionPolicyMutationPayload! patchProjectTraceRetentionPolicy(input: PatchProjectTraceRetentionPolicyInput!): ProjectTraceRetentionPolicyMutationPayload! deleteProjectTraceRetentionPolicy(input: DeleteProjectTraceRetentionPolicyInput!): ProjectTraceRetentionPolicyMutationPayload! createChatPrompt(input: CreateChatPromptInput!): Prompt! createChatPromptVersion(input: CreateChatPromptVersionInput!): Prompt! deletePrompt(input: DeletePromptInput!): DeletePromptMutationPayload! clonePrompt(input: ClonePromptInput!): Prompt! patchPrompt(input: PatchPromptInput!): Prompt! deletePromptVersionTag(input: DeletePromptVersionTagInput!): PromptVersionTagMutationPayload! setPromptVersionTag(input: SetPromptVersionTagInput!): PromptVersionTagMutationPayload! createPromptLabel(input: CreatePromptLabelInput!): PromptLabelMutationPayload! patchPromptLabel(input: PatchPromptLabelInput!): PromptLabelMutationPayload! deletePromptLabels(input: DeletePromptLabelsInput!): PromptLabelDeleteMutationPayload! setPromptLabels(input: SetPromptLabelsInput!): PromptLabelAssociationMutationPayload! unsetPromptLabels(input: UnsetPromptLabelsInput!): PromptLabelAssociationMutationPayload! createSpanAnnotations(input: [CreateSpanAnnotationInput!]!): SpanAnnotationMutationPayload! createSpanNote(annotationInput: CreateSpanNoteInput!): SpanAnnotationMutationPayload! patchSpanAnnotations(input: [PatchAnnotationInput!]!): SpanAnnotationMutationPayload! deleteSpanAnnotations(input: DeleteAnnotationsInput!): SpanAnnotationMutationPayload! createProjectSessionAnnotations(input: CreateProjectSessionAnnotationInput!): ProjectSessionAnnotationMutationPayload! updateProjectSessionAnnotations(input: UpdateAnnotationInput!): ProjectSessionAnnotationMutationPayload! deleteProjectSessionAnnotation(id: ID!): ProjectSessionAnnotationMutationPayload! createTraceAnnotations(input: [CreateTraceAnnotationInput!]!): TraceAnnotationMutationPayload! patchTraceAnnotations(input: [PatchAnnotationInput!]!): TraceAnnotationMutationPayload! deleteTraceAnnotations(input: DeleteAnnotationsInput!): TraceAnnotationMutationPayload! deleteTraces(traceIds: [ID!]!): Query! transferTracesToProject(traceIds: [ID!]!, projectId: ID!): Query! createUser(input: CreateUserInput!): UserMutationPayload! patchUser(input: PatchUserInput!): UserMutationPayload! patchViewer(input: PatchViewerInput!): UserMutationPayload! 
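# Illustrative: tagging a prompt version (e.g. promoting it to "production") via
# `setPromptVersionTag`, listed above. The version ID is a placeholder.
#
# mutation TagPromptVersion {
#   setPromptVersionTag(input: { promptVersionId: "UHJvbXB0VmVyc2lvbjo3", name: "production" }) {
#     promptVersionTag { id name }
#     prompt { name versionTags { name } }
#   }
# }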
deleteUsers(input: DeleteUsersInput!): Void } """An object with a Globally Unique ID""" interface Node { """The Globally Unique ID of this object""" id: ID! } type NominalBin { name: String! } type NumericRange { start: Float! end: Float! } enum OptimizationDirection { MINIMIZE MAXIMIZE NONE } """Information to aid in pagination.""" type PageInfo { """When paginating forwards, are there more items?""" hasNextPage: Boolean! """When paginating backwards, are there more items?""" hasPreviousPage: Boolean! """When paginating backwards, the cursor to continue.""" startCursor: String """When paginating forwards, the cursor to continue.""" endCursor: String } input PatchAnnotationInput { annotationId: ID! name: String annotatorKind: AnnotatorKind label: String score: Float explanation: String metadata: JSON identifier: String source: AnnotationSource } input PatchDatasetExamplesInput { patches: [DatasetExamplePatch!]! versionDescription: String versionMetadata: JSON } input PatchDatasetInput { datasetId: ID! name: String description: String metadata: JSON } input PatchDatasetSplitInput { datasetSplitId: ID! name: String description: String color: String metadata: JSON } input PatchProjectTraceRetentionPolicyInput { id: ID! name: String cronExpression: CronExpression rule: ProjectTraceRetentionRuleInput addProjects: [ID!] removeProjects: [ID!] } input PatchPromptInput { promptId: ID! description: String! } input PatchPromptLabelInput { promptLabelId: ID! name: String = null description: String = null } input PatchUserInput { userId: ID! newRole: UserRoleInput newUsername: String newPassword: String } input PatchViewerInput { newUsername: String newPassword: String currentPassword: String } enum PerformanceMetric { accuracyScore } input PerformanceMetricInput { metric: PerformanceMetric! } type PerformanceTimeSeries implements TimeSeries { data: [TimeSeriesDataPoint!]! } type PlaygroundModel implements ModelInterface { name: String! providerKey: GenerativeProviderKey! } type Point2D { x: Float! y: Float! } union Point2DPoint3D = Point2D | Point3D type Point3D { x: Float! y: Float! z: Float! } type Project implements Node { """The Globally Unique ID of this object""" id: ID! name: String! gradientStartColor: String! gradientEndColor: String! startTime: DateTime endTime: DateTime recordCount(timeRange: TimeRange, filterCondition: String, sessionFilterCondition: String): Int! traceCount(timeRange: TimeRange, filterCondition: String, sessionFilterCondition: String): Int! tokenCountTotal(timeRange: TimeRange, filterCondition: String): Float! tokenCountPrompt(timeRange: TimeRange, filterCondition: String): Float! tokenCountCompletion(timeRange: TimeRange, filterCondition: String): Float! costSummary(timeRange: TimeRange, filterCondition: String, sessionFilterCondition: String): SpanCostSummary! latencyMsQuantile(probability: Float!, timeRange: TimeRange, filterCondition: String, sessionFilterCondition: String): Float spanLatencyMsQuantile(probability: Float!, timeRange: TimeRange, filterCondition: String, sessionFilterCondition: String): Float trace(traceId: ID!): Trace spans(timeRange: TimeRange, first: Int = 30, last: Int, after: String, before: String, sort: SpanSort, rootSpansOnly: Boolean, filterCondition: String, orphanSpanAsRootSpan: Boolean = true): SpanConnection! sessions(timeRange: TimeRange, first: Int = 30, after: String, sort: ProjectSessionSort, filterIoSubstring: String, sessionId: String): ProjectSessionConnection! """ Names of all available annotations for traces. 
(The list contains no duplicates.) """ traceAnnotationsNames: [String!]! """ Names of all available annotations for spans. (The list contains no duplicates.) """ spanAnnotationNames: [String!]! """ Names of all available annotations for sessions. (The list contains no duplicates.) """ sessionAnnotationNames: [String!]! """Names of available document evaluations.""" documentEvaluationNames(spanId: ID): [String!]! traceAnnotationSummary(annotationName: String!, filterCondition: String, sessionFilterCondition: String, timeRange: TimeRange): AnnotationSummary spanAnnotationSummary(annotationName: String!, timeRange: TimeRange, filterCondition: String, sessionFilterCondition: String): AnnotationSummary documentEvaluationSummary(evaluationName: String!, timeRange: TimeRange, filterCondition: String): DocumentEvaluationSummary streamingLastUpdatedAt: DateTime validateSpanFilterCondition(condition: String!): ValidationResult! annotationConfigs(first: Int = 50, last: Int = null, after: String = null, before: String = null): AnnotationConfigConnection! traceRetentionPolicy: ProjectTraceRetentionPolicy! createdAt: DateTime! updatedAt: DateTime! spanCountTimeSeries(timeRange: TimeRange!, timeBinConfig: TimeBinConfig, filterCondition: String): SpanCountTimeSeries! traceCountTimeSeries(timeRange: TimeRange!, timeBinConfig: TimeBinConfig): TraceCountTimeSeries! traceCountByStatusTimeSeries(timeRange: TimeRange!, timeBinConfig: TimeBinConfig): TraceCountByStatusTimeSeries! traceLatencyMsPercentileTimeSeries(timeRange: TimeRange!, timeBinConfig: TimeBinConfig): TraceLatencyPercentileTimeSeries! traceTokenCountTimeSeries(timeRange: TimeRange!, timeBinConfig: TimeBinConfig): TraceTokenCountTimeSeries! traceTokenCostTimeSeries(timeRange: TimeRange!, timeBinConfig: TimeBinConfig): TraceTokenCostTimeSeries! spanAnnotationScoreTimeSeries(timeRange: TimeRange!, timeBinConfig: TimeBinConfig): SpanAnnotationScoreTimeSeries! topModelsByCost(timeRange: TimeRange!): [GenerativeModel!]! topModelsByTokenCount(timeRange: TimeRange!): [GenerativeModel!]! } enum ProjectColumn { name endTime } """A connection to a list of items.""" type ProjectConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [ProjectEdge!]! } """An edge in a connection.""" type ProjectEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: Project! } """The filter key and value for project connections""" input ProjectFilter { col: ProjectFilterColumn! value: String! } enum ProjectFilterColumn { name } type ProjectMutationPayload { project: Project! query: Query! } type ProjectSession implements Node { """The Globally Unique ID of this object""" id: ID! sessionId: String! startTime: DateTime! endTime: DateTime! project: Project! numTraces: Int! numTracesWithError: Int! firstInput: SpanIOValue lastOutput: SpanIOValue tokenUsage: TokenUsage! traces(first: Int = 50, last: Int, after: String, before: String): TraceConnection! traceLatencyMsQuantile(probability: Float!): Float costSummary: SpanCostSummary! costDetailSummaryEntries: [SpanCostDetailSummaryEntry!]! sessionAnnotations: [ProjectSessionAnnotation!]! """Summarizes each annotation (by name) associated with the session""" sessionAnnotationSummaries(filter: AnnotationFilter = null): [AnnotationSummary!]! } enum ProjectSessionAnnoAttr { score label } input ProjectSessionAnnoResultKey { name: String! attr: ProjectSessionAnnoAttr! 
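# Illustrative: listing a project's root spans with a span filter. SpanConnection is
# defined outside this excerpt but follows the same edges/node/pageInfo pattern used
# everywhere else here; the project name and filter expression are hypothetical.
#
# query ProjectRootSpans {
#   projects(first: 1, filter: { col: name, value: "default" }) {
#     edges {
#       node {
#         name
#         spans(first: 30, rootSpansOnly: true, filterCondition: "latency_ms > 1000") {
#           edges { node { name spanKind statusCode latencyMs tokenCountTotal } }
#           pageInfo { hasNextPage endCursor }
#         }
#       }
#     }
#   }
# }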
} type ProjectSessionAnnotation implements Node { """The Globally Unique ID of this object""" id: ID! name: String! annotatorKind: AnnotatorKind! label: String score: Float explanation: String metadata: JSON! identifier: String! source: AnnotationSource! projectSessionId: ID! user: User } type ProjectSessionAnnotationMutationPayload { projectSessionAnnotation: ProjectSessionAnnotation! query: Query! } enum ProjectSessionColumn { startTime endTime tokenCountTotal numTraces costTotal } """A connection to a list of items.""" type ProjectSessionConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [ProjectSessionEdge!]! } """An edge in a connection.""" type ProjectSessionEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: ProjectSession! } """The sort key and direction for ProjectSession connections.""" input ProjectSessionSort { col: ProjectSessionColumn annoResultKey: ProjectSessionAnnoResultKey dir: SortDir! } """The sort key and direction for project connections""" input ProjectSort { col: ProjectColumn! dir: SortDir! } type ProjectTraceRetentionPolicy implements Node { """The Globally Unique ID of this object""" id: ID! name: String! cronExpression: CronExpression! rule: TraceRetentionRule! projects(first: Int = 100, last: Int, after: String, before: String): ProjectConnection! } """A connection to a list of items.""" type ProjectTraceRetentionPolicyConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [ProjectTraceRetentionPolicyEdge!]! } """An edge in a connection.""" type ProjectTraceRetentionPolicyEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: ProjectTraceRetentionPolicy! } type ProjectTraceRetentionPolicyMutationPayload { query: Query! node: ProjectTraceRetentionPolicy! } input ProjectTraceRetentionRuleInput @oneOf { maxDays: ProjectTraceRetentionRuleMaxDaysInput maxCount: ProjectTraceRetentionRuleMaxCountInput maxDaysOrCount: ProjectTraceRetentionRuleMaxDaysOrCountInput } input ProjectTraceRetentionRuleMaxCountInput { maxCount: Int! } input ProjectTraceRetentionRuleMaxDaysInput { maxDays: Float! } input ProjectTraceRetentionRuleMaxDaysOrCountInput { maxDays: Float! maxCount: Int! } type Prompt implements Node { """The Globally Unique ID of this object""" id: ID! sourcePromptId: ID name: Identifier! description: String createdAt: DateTime! version(versionId: ID = null, tagName: Identifier = null): PromptVersion! versionTags: [PromptVersionTag!]! promptVersions(first: Int = 50, last: Int, after: String, before: String): PromptVersionConnection! sourcePrompt: Prompt labels: [PromptLabel!]! } type PromptChatTemplate { messages: [PromptMessage!]! } input PromptChatTemplateInput { messages: [PromptMessageInput!]! } """A connection to a list of items.""" type PromptConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [PromptEdge!]! } """An edge in a connection.""" type PromptEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: Prompt! } """The filter key and value for prompt connections""" input PromptFilter { col: PromptFilterColumn! value: String! } enum PromptFilterColumn { name } type PromptLabel implements Node { """The Globally Unique ID of this object""" id: ID! name: Identifier! 
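# Illustrative: because ProjectTraceRetentionRuleInput above is @oneOf, exactly one of
# maxDays / maxCount / maxDaysOrCount may be supplied. A sketch of a 30-day policy; the
# cron expression is an arbitrary example value.
#
# mutation CreateRetentionPolicy {
#   createProjectTraceRetentionPolicy(input: {
#     name: "30-day-retention"
#     cronExpression: "0 1 * * *"
#     rule: { maxDays: { maxDays: 30 } }
#   }) {
#     node { id name cronExpression }
#   }
# }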
description: String color: String! } type PromptLabelAssociationMutationPayload { query: Query! } """A connection to a list of items.""" type PromptLabelConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [PromptLabelEdge!]! } type PromptLabelDeleteMutationPayload { deletedPromptLabelIds: [ID!]! query: Query! } """An edge in a connection.""" type PromptLabelEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: PromptLabel! } type PromptLabelMutationPayload { promptLabels: [PromptLabel!]! query: Query! } type PromptMessage { role: PromptMessageRole! content: [ContentPart!]! } input PromptMessageInput { role: String! content: [ContentPartInput!]! } enum PromptMessageRole { USER SYSTEM AI TOOL } type PromptResponse { """The prompt submitted to the LLM""" prompt: String """The response generated by the LLM""" response: String } type PromptStringTemplate { template: String! } union PromptTemplate = PromptStringTemplate | PromptChatTemplate enum PromptTemplateFormat { MUSTACHE F_STRING NONE } input PromptTemplateOptions { variables: JSON! format: PromptTemplateFormat! } enum PromptTemplateType { STRING CHAT } type PromptVersion implements Node { """The Globally Unique ID of this object""" id: ID! description: String templateType: PromptTemplateType! templateFormat: PromptTemplateFormat! template: PromptTemplate! invocationParameters: JSON tools: [ToolDefinition!]! responseFormat: ResponseFormat modelName: String! modelProvider: ModelProvider! metadata: JSON! createdAt: DateTime! tags: [PromptVersionTag!]! user: User previousVersion: PromptVersion """ Sequence number (1-based) of prompt versions belonging to the same prompt """ sequenceNumber: Int! } """A connection to a list of items.""" type PromptVersionConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [PromptVersionEdge!]! } """An edge in a connection.""" type PromptVersionEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: PromptVersion! } type PromptVersionTag implements Node { """The Globally Unique ID of this object""" id: ID! promptVersionId: ID! name: Identifier! description: String user: User } type PromptVersionTagMutationPayload { promptVersionTag: PromptVersionTag prompt: Prompt! query: Query! } type Query { modelProviders: [GenerativeProvider!]! generativeModels: [GenerativeModel!]! playgroundModels(input: ModelsInput = null): [PlaygroundModel!]! modelInvocationParameters(input: ModelsInput = null): [InvocationParameter!]! users(first: Int = 50, last: Int, after: String, before: String): UserConnection! userRoles: [UserRole!]! userApiKeys: [UserApiKey!]! systemApiKeys: [SystemApiKey!]! projects(first: Int = 50, last: Int, after: String, before: String, sort: ProjectSort, filter: ProjectFilter): ProjectConnection! projectsLastUpdatedAt: DateTime datasets(first: Int = 50, last: Int, after: String, before: String, sort: DatasetSort, filter: DatasetFilter): DatasetConnection! datasetsLastUpdatedAt: DateTime compareExperiments(baseExperimentId: ID!, compareExperimentIds: [ID!]!, first: Int = 50, after: String, filterCondition: String): ExperimentComparisonConnection! experimentRunMetricComparisons(baseExperimentId: ID!, compareExperimentIds: [ID!]!): ExperimentRunMetricComparisons! validateExperimentRunFilterCondition(condition: String!, experimentIds: [ID!]!): ValidationResult! 
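# Illustrative: resolving the prompt version currently tagged "production" for a prompt
# looked up by name. The prompt name "summarizer" is hypothetical.
#
# query ProductionPrompt {
#   prompts(first: 1, filter: { col: name, value: "summarizer" }) {
#     edges {
#       node {
#         name
#         version(tagName: "production") {
#           id
#           modelProvider
#           modelName
#           templateFormat
#           invocationParameters
#         }
#       }
#     }
#   }
# }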
functionality: Functionality! model: InferenceModel! node(id: ID!): Node! viewer: User prompts(first: Int = 50, last: Int, after: String, before: String, filter: PromptFilter, labelIds: [ID!]): PromptConnection! promptLabels(first: Int = 50, last: Int, after: String, before: String): PromptLabelConnection! datasetLabels(first: Int = 50, last: Int, after: String, before: String): DatasetLabelConnection! datasetSplits(first: Int = 50, last: Int, after: String, before: String): DatasetSplitConnection! annotationConfigs(first: Int = 50, last: Int = null, after: String = null, before: String = null): AnnotationConfigConnection! clusters(clusters: [ClusterInput!]!): [Cluster!]! hdbscanClustering( """Event ID of the coordinates""" eventIds: [ID!]! """Point coordinates. Must be either 2D or 3D.""" coordinates2d: [InputCoordinate2D!] """Point coordinates. Must be either 2D or 3D.""" coordinates3d: [InputCoordinate3D!] """HDBSCAN minimum cluster size""" minClusterSize: Int! = 10 """HDBSCAN minimum samples""" clusterMinSamples: Int! = 1 """HDBSCAN cluster selection epsilon""" clusterSelectionEpsilon: Float! = 0 ): [Cluster!]! defaultProjectTraceRetentionPolicy: ProjectTraceRetentionPolicy! projectTraceRetentionPolicies(first: Int = 100, last: Int, after: String, before: String): ProjectTraceRetentionPolicyConnection! """ The allocated storage capacity of the database in bytes. Returns None if this information is unavailable. """ dbStorageCapacityBytes: Float dbTableStats: [DbTableStats!]! serverStatus: ServerStatus! validateRegularExpression(regex: String!): ValidationResult! getSpanByOtelId(spanId: String!): Span getTraceByOtelId(traceId: String!): Trace getProjectSessionById(sessionId: String!): ProjectSession } input RemoveAnnotationConfigFromProjectInput { projectId: ID! annotationConfigId: ID! } type RemoveAnnotationConfigFromProjectPayload { query: Query! project: Project! } input RemoveDatasetExamplesFromDatasetSplitsInput { datasetSplitIds: [ID!]! exampleIds: [ID!]! } type RemoveDatasetExamplesFromDatasetSplitsMutationPayload { query: Query! examples: [DatasetExample!]! } type ResponseFormat { definition: JSON! } input ResponseFormatInput { definition: JSON! } type Retrieval { queryId: ID! documentId: ID! relevance: Float } enum RevisionKind { CREATE PATCH DELETE } enum ScalarDriftMetric { psi klDivergence jsDistance } type Segment { bin: Bin! counts: DatasetValues! } type Segments { segments: [Segment!]! totalCounts: DatasetValues! } type ServerStatus { insufficientStorage: Boolean! } input SetDatasetLabelsInput { datasetLabelIds: [ID!]! datasetIds: [ID!]! } type SetDatasetLabelsMutationPayload { query: Query! } input SetPromptLabelsInput { promptId: ID! promptLabelIds: [ID!]! } input SetPromptVersionTagInput { promptVersionId: ID! name: Identifier! description: String = null } enum SortDir { asc desc } type Span implements Node { """The Globally Unique ID of this object""" id: ID! name: String! statusCode: SpanStatusCode! statusMessage: String! startTime: DateTime! endTime: DateTime latencyMs: Float """The parent span ID. If null, it is a root span""" parentId: ID spanKind: SpanKind! spanId: ID! trace: Trace! context: SpanContext! """Span attributes as a JSON string""" attributes: String! """Metadata as a JSON string""" metadata: String numDocuments: Int tokenCountTotal: Int tokenCountPrompt: Int tokenCountCompletion: Int tokenPromptDetails: TokenCountPromptDetails! input: SpanIOValue output: SpanIOValue events: [SpanEvent!]!
""" Cumulative (prompt plus completion) token count from self and all descendant spans (children, grandchildren, etc.) """ cumulativeTokenCountTotal: Int """ Cumulative (prompt) token count from self and all descendant spans (children, grandchildren, etc.) """ cumulativeTokenCountPrompt: Int """ Cumulative (completion) token count from self and all descendant spans (children, grandchildren, etc.) """ cumulativeTokenCountCompletion: Int """ Propagated status code that percolates up error status codes from descendant spans (children, grandchildren, etc.) """ propagatedStatusCode: SpanStatusCode! """ Annotations associated with the span. This encompasses both LLM and human annotations. """ spanAnnotations(sort: SpanAnnotationSort, filter: AnnotationFilter = null): [SpanAnnotation!]! """Notes associated with the span.""" spanNotes: [SpanAnnotation!]! """Summarizes each annotation (by name) associated with the span""" spanAnnotationSummaries(filter: AnnotationFilter = null): [AnnotationSummary!]! """ Evaluations of the documents associated with the span, e.g. if the span is a RETRIEVER with a list of documents in its RETRIEVAL_DOCUMENTS attribute, an evaluation for each document may assess its relevance with respect to the input query of the span. Note that RETRIEVAL_DOCUMENTS is a list, and each evaluation is identified by its document's (zero-based) index in that list. """ documentEvaluations: [DocumentAnnotation!]! """Retrieval metrics: NDCG@K, Precision@K, Reciprocal Rank, etc.""" documentRetrievalMetrics(evaluationName: String): [DocumentRetrievalMetrics!]! numChildSpans: Int! """All descendant spans (children, grandchildren, etc.)""" descendants( """ Maximum depth of breadth-first search. For example, maxDepth=1 searches for only the immediate child spans (if any); maxDepth=2 searches for the immediate child spans plus their children. maxDepth=0 (or None) means no limit. """ maxDepth: Int = 3 first: Int = 50 last: Int after: String before: String ): SpanConnection! """ The span's attributes translated into an example revision for a dataset """ asExampleRevision: SpanAsExampleRevision! """The project that this span belongs to.""" project: Project! """Indicates if the span is contained in any dataset""" containedInDataset: Boolean! """Invocation parameters for the span""" invocationParameters: [InvocationParameter!]! costSummary: SpanCostSummary costDetailSummaryEntries: [SpanCostDetailSummaryEntry!]! } type SpanAnnotation implements Node & Annotation { """The Globally Unique ID of this object""" id: ID! """Name of the annotation, e.g. 'helpfulness' or 'relevance'.""" name: String! """Value of the annotation in the form of a numeric score.""" score: Float """ Value of the annotation in the form of a string, e.g. 'helpful' or 'not helpful'. Note that the label is not necessarily binary. """ label: String """ The annotator's explanation for the annotation result (i.e. score or label, or both) given to the subject. """ explanation: String """The date and time when the annotation was created.""" createdAt: DateTime! """The date and time when the annotation was last updated.""" updatedAt: DateTime! annotatorKind: AnnotatorKind! metadata: JSON! source: AnnotationSource! identifier: String! spanId: ID! user: User } enum SpanAnnotationColumn { createdAt name } type SpanAnnotationMutationPayload { spanAnnotations: [SpanAnnotation!]! query: Query! } type SpanAnnotationScoreTimeSeries { data: [SpanAnnotationScoreTimeSeriesDataPoint!]! names: [String!]!
} type SpanAnnotationScoreTimeSeriesDataPoint { timestamp: DateTime! scoresWithLabels: [SpanAnnotationScoreWithLabel!]! } type SpanAnnotationScoreWithLabel { label: String! score: Float! } """The sort key and direction for SpanAnnotation connections""" input SpanAnnotationSort { col: SpanAnnotationColumn! dir: SortDir! } type SpanAsExampleRevision implements ExampleRevision { input: JSON! output: JSON! metadata: JSON! } enum SpanColumn { startTime endTime latencyMs tokenCountTotal tokenCountPrompt tokenCountCompletion cumulativeTokenCountTotal cumulativeTokenCountPrompt cumulativeTokenCountCompletion cumulativeTokenCostTotal tokenCostTotal } """A connection to a list of items.""" type SpanConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [SpanEdge!]! } type SpanContext { traceId: ID! spanId: ID! } type SpanCostDetailSummaryEntry { tokenType: String! isPrompt: Boolean! value: CostBreakdown! } type SpanCostSummary { prompt: CostBreakdown! completion: CostBreakdown! total: CostBreakdown! } type SpanCountTimeSeries { data: [SpanCountTimeSeriesDataPoint!]! } type SpanCountTimeSeriesDataPoint { timestamp: DateTime! okCount: Int errorCount: Int unsetCount: Int totalCount: Int } """An edge in a connection.""" type SpanEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: Span! } type SpanEvent { name: String! message: String! timestamp: DateTime! } type SpanIOValue { mimeType: MimeType! """Truncated value up to 100 characters, appending '...' if truncated.""" truncatedValue: String! value: String! } enum SpanKind { chain tool llm retriever embedding agent reranker evaluator guardrail unknown } """ The sort key and direction for span connections. Must specify one and only one of either `col` or `evalResultKey`. """ input SpanSort { col: SpanColumn evalResultKey: EvalResultKey dir: SortDir! } enum SpanStatusCode { OK ERROR UNSET } type StringInvocationParameter implements InvocationParameterBase { invocationName: String! canonicalName: CanonicalParameterName label: String! required: Boolean! invocationInputField: InvocationInputField! defaultValue: String } type StringListInvocationParameter implements InvocationParameterBase { invocationName: String! canonicalName: CanonicalParameterName label: String! required: Boolean! invocationInputField: InvocationInputField! defaultValue: [String!] } type Subscription { chatCompletion(input: ChatCompletionInput!): ChatCompletionSubscriptionPayload! chatCompletionOverDataset(input: ChatCompletionOverDatasetInput!): ChatCompletionSubscriptionPayload! } type SystemApiKey implements ApiKey & Node { """Name of the API key.""" name: String! """Description of the API key.""" description: String """The date and time the API key was created.""" createdAt: DateTime! """The date and time the API key will expire.""" expiresAt: DateTime """The Globally Unique ID of this object""" id: ID! } type TextChunk implements ChatCompletionSubscriptionPayload { datasetExampleId: ID repetitionNumber: Int content: String! } type TextContentPart { text: TextContentValue! } type TextContentValue { text: String! } input TextContentValueInput { text: String! } input TimeBinConfig { """The scale of time bins for aggregation.""" scale: TimeBinScale! = HOUR """Offset in minutes from UTC for local time binning.""" utcOffsetMinutes: Int! 
= 0 } enum TimeBinScale { MINUTE HOUR DAY WEEK MONTH YEAR } input TimeRange { """The start of the time range""" start: DateTime = null """The end of the time range. Right exclusive.""" end: DateTime = null } interface TimeSeries { data: [TimeSeriesDataPoint!]! } type TimeSeriesDataPoint { timestamp: DateTime! value: Float } type TokenCountPromptDetails { cacheRead: Int cacheWrite: Int audio: Int } enum TokenKind { PROMPT COMPLETION } type TokenPrice { tokenType: String! kind: TokenKind! costPerMillionTokens: Float! costPerToken: Float! } input TokenPriceInput { tokenType: String! costPerMillionTokens: Float! kind: TokenKind! } type TokenUsage { prompt: Float! completion: Float! total: Float! } type ToolCallChunk implements ChatCompletionSubscriptionPayload { datasetExampleId: ID repetitionNumber: Int id: String! function: FunctionCallChunk! } type ToolCallContentPart { toolCall: ToolCallContentValue! } type ToolCallContentValue { toolCallId: String! toolCall: ToolCallFunction! } input ToolCallContentValueInput { toolCallId: String! toolCall: ToolCallFunctionInput! } type ToolCallFunction { name: String! arguments: String! } input ToolCallFunctionInput { type: String = "function" name: String! arguments: String! } type ToolDefinition { definition: JSON! } input ToolDefinitionInput { definition: JSON! } type ToolResultContentPart { toolResult: ToolResultContentValue! } type ToolResultContentValue { toolCallId: String! result: JSON! } input ToolResultContentValueInput { toolCallId: String! result: JSON! } type Trace implements Node { """The Globally Unique ID of this object""" id: ID! traceId: ID! startTime: DateTime! endTime: DateTime! latencyMs: Float project: Project! projectId: ID! projectSessionId: ID session: ProjectSession rootSpan: Span numSpans: Int! spans(first: Int = 50, last: Int, after: String, before: String): SpanConnection! """Annotations associated with the trace.""" traceAnnotations(sort: TraceAnnotationSort = null): [TraceAnnotation!]! """Summarizes each annotation (by name) associated with the trace""" traceAnnotationSummaries(filter: AnnotationFilter = null): [AnnotationSummary!]! costSummary: SpanCostSummary! costDetailSummaryEntries: [SpanCostDetailSummaryEntry!]! } type TraceAnnotation implements Node { """The Globally Unique ID of this object""" id: ID! name: String! annotatorKind: AnnotatorKind! label: String score: Float explanation: String metadata: JSON! identifier: String! source: AnnotationSource! trace: Trace! user: User } enum TraceAnnotationColumn { createdAt name } type TraceAnnotationMutationPayload { traceAnnotations: [TraceAnnotation!]! query: Query! } """The sort key and direction for TraceAnnotation connections""" input TraceAnnotationSort { col: TraceAnnotationColumn! dir: SortDir! } """A connection to a list of items.""" type TraceConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [TraceEdge!]! } type TraceCountByStatusTimeSeries { data: [TraceCountByStatusTimeSeriesDataPoint!]! } type TraceCountByStatusTimeSeriesDataPoint { timestamp: DateTime! okCount: Int! errorCount: Int! totalCount: Int! } type TraceCountTimeSeries implements TimeSeries { data: [TimeSeriesDataPoint!]! } """An edge in a connection.""" type TraceEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: Trace! } type TraceLatencyMsPercentileTimeSeriesDataPoint { timestamp: DateTime! 
p50: Float p75: Float p90: Float p95: Float p99: Float p999: Float max: Float } type TraceLatencyPercentileTimeSeries { data: [TraceLatencyMsPercentileTimeSeriesDataPoint!]! } union TraceRetentionRule = TraceRetentionRuleMaxDays | TraceRetentionRuleMaxCount | TraceRetentionRuleMaxDaysOrCount type TraceRetentionRuleMaxCount { maxCount: Int! } type TraceRetentionRuleMaxDays { maxDays: Float! } type TraceRetentionRuleMaxDaysOrCount { maxDays: Float! maxCount: Int! } type TraceTokenCostTimeSeries { data: [TraceTokenCostTimeSeriesDataPoint!]! } type TraceTokenCostTimeSeriesDataPoint { timestamp: DateTime! promptCost: Float completionCost: Float totalCost: Float } type TraceTokenCountTimeSeries { data: [TraceTokenCountTimeSeriesDataPoint!]! } type TraceTokenCountTimeSeriesDataPoint { timestamp: DateTime! promptTokenCount: Float completionTokenCount: Float totalTokenCount: Float } type UMAPPoint { id: ID! """The ID of the event that the point is a projection of""" eventId: ID! coordinates: Point2DPoint3D! embeddingMetadata: EmbeddingMetadata! eventMetadata: EventMetadata! } type UMAPPoints { data: [UMAPPoint!]! referenceData: [UMAPPoint!]! clusters: [Cluster!]! corpusData: [UMAPPoint!]! contextRetrievals: [Retrieval!]! } input UnsetDatasetLabelsInput { datasetLabelIds: [ID!]! datasetIds: [ID!]! } type UnsetDatasetLabelsMutationPayload { query: Query! } input UnsetPromptLabelsInput { promptId: ID! promptLabelIds: [ID!]! } input UpdateAnnotationConfigInput { id: ID! annotationConfig: AnnotationConfigInput! } type UpdateAnnotationConfigPayload { query: Query! annotationConfig: AnnotationConfig! } input UpdateAnnotationInput { id: ID! name: String! annotatorKind: AnnotatorKind! = HUMAN label: String = null score: Float = null explanation: String = null metadata: JSON! = {} source: AnnotationSource! = APP } input UpdateDatasetLabelInput { datasetLabelId: ID! name: String! description: String = null color: String! } type UpdateDatasetLabelMutationPayload { datasetLabel: DatasetLabel! } input UpdateModelMutationInput { id: ID! name: String! provider: String namePattern: String! costs: [TokenPriceInput!]! startTime: DateTime = null } type UpdateModelMutationPayload { model: GenerativeModel! query: Query! } type User implements Node { """The Globally Unique ID of this object""" id: ID! passwordNeedsReset: Boolean! email: String! username: String! profilePictureUrl: String createdAt: DateTime! authMethod: AuthMethod! role: UserRole! apiKeys: [UserApiKey!]! isManagementUser: Boolean! } type UserApiKey implements ApiKey & Node { """Name of the API key.""" name: String! """Description of the API key.""" description: String """The date and time the API key was created.""" createdAt: DateTime! """The date and time the API key will expire.""" expiresAt: DateTime """The Globally Unique ID of this object""" id: ID! user: User! } """A connection to a list of items.""" type UserConnection { """Pagination data for this connection""" pageInfo: PageInfo! """Contains the nodes in this connection""" edges: [UserEdge!]! } """An edge in a connection.""" type UserEdge { """A cursor for use in pagination""" cursor: String! """The item at the end of the edge""" node: User! } type UserMutationPayload { user: User! } type UserRole implements Node { """The Globally Unique ID of this object""" id: ID! name: String! } enum UserRoleInput { ADMIN MEMBER VIEWER } type ValidationResult { isValid: Boolean! errorMessage: String } enum VectorDriftMetric { euclideanDistance } """Represents NULL values""" scalar Void
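
Example query (illustrative)

The schema above is easiest to read next to a concrete request. The sketch below is not part of the schema; it assumes a running Phoenix instance that serves this GraphQL API over HTTP (the endpoint URL, port, and span ID are placeholder assumptions to adjust for your deployment). Every selected field appears in the type definitions above: serverStatus, defaultProjectTraceRetentionPolicy with inline fragments on the TraceRetentionRule union, and getSpanByOtelId with its spanAnnotations.

import requests  # third-party HTTP client

# Assumed endpoint for a local Phoenix deployment; adjust host, port, and path as needed.
PHOENIX_GRAPHQL_URL = "http://localhost:6006/graphql"

QUERY = """
query ExampleLookup($spanId: String!) {
  serverStatus {
    insufficientStorage
  }
  defaultProjectTraceRetentionPolicy {
    name
    rule {
      ... on TraceRetentionRuleMaxDays { maxDays }
      ... on TraceRetentionRuleMaxCount { maxCount }
      ... on TraceRetentionRuleMaxDaysOrCount { maxDays maxCount }
    }
  }
  getSpanByOtelId(spanId: $spanId) {
    name
    spanKind
    statusCode
    latencyMs
    tokenCountTotal
    spanAnnotations {
      name
      label
      score
      annotatorKind
    }
  }
}
"""

response = requests.post(
    PHOENIX_GRAPHQL_URL,
    json={"query": QUERY, "variables": {"spanId": "<otel-span-id>"}},  # placeholder span ID
    timeout=30,
)
response.raise_for_status()
print(response.json())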

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Arize-ai/phoenix'
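
The same lookup can be scripted. Below is a minimal sketch using Python's requests library against the URL from the curl command above; the response shape is not documented on this page, so the JSON payload is printed as-is.

import requests  # third-party HTTP client

# Same endpoint as the curl command above.
url = "https://glama.ai/api/mcp/v1/servers/Arize-ai/phoenix"

response = requests.get(url, timeout=30)
response.raise_for_status()
print(response.json())  # payload structure is defined by the MCP directory API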

If you have feedback or need assistance with the MCP directory API, please join our Discord server.