@arizeai/phoenix-mcp
by Arize-ai

createDocumentRelevanceEvaluator.ts (3.11 kB)
import { DOCUMENT_RELEVANCE_CLASSIFICATION_EVALUATOR_CONFIG } from "../__generated__/default_templates";
import { CreateClassificationEvaluatorArgs } from "../types/evals";
import { ClassificationEvaluator } from "./ClassificationEvaluator";
import { createClassificationEvaluator } from "./createClassificationEvaluator";

export interface DocumentRelevanceEvaluatorArgs<
  RecordType extends Record<
    string,
    unknown
  > = DocumentRelevanceEvaluationRecord,
> extends Omit<
    CreateClassificationEvaluatorArgs<RecordType>,
    "promptTemplate" | "choices" | "optimizationDirection" | "name"
  > {
  optimizationDirection?: CreateClassificationEvaluatorArgs<RecordType>["optimizationDirection"];
  name?: CreateClassificationEvaluatorArgs<RecordType>["name"];
  choices?: CreateClassificationEvaluatorArgs<RecordType>["choices"];
  promptTemplate?: CreateClassificationEvaluatorArgs<RecordType>["promptTemplate"];
}

/**
 * A record to be evaluated by the document relevance evaluator.
 */
export interface DocumentRelevanceEvaluationRecord {
  input: string;
  documentText: string;
  [key: string]: unknown;
}

/**
 * Creates a document relevance evaluator function.
 *
 * This function returns an evaluator that determines whether a given document text
 * is relevant to a provided input question. The evaluator uses a classification model
 * and a prompt template to make its determination.
 *
 * @param args - The arguments for creating the document relevance evaluator.
 * @param args.model - The model to use for classification.
 * @param args.choices - The possible classification choices (defaults to DOCUMENT_RELEVANCE_CLASSIFICATION_EVALUATOR_CONFIG.choices).
 * @param args.promptTemplate - The prompt template to use (defaults to DOCUMENT_RELEVANCE_CLASSIFICATION_EVALUATOR_CONFIG.template).
 * @param args.optimizationDirection - The optimization direction (defaults to DOCUMENT_RELEVANCE_CLASSIFICATION_EVALUATOR_CONFIG.optimizationDirection).
 * @param args.name - The evaluator name (defaults to DOCUMENT_RELEVANCE_CLASSIFICATION_EVALUATOR_CONFIG.name).
 * @param args.telemetry - The telemetry to use for the evaluator.
 *
 * @returns An evaluator that takes a {@link DocumentRelevanceEvaluationRecord} and returns a classification result
 * indicating whether the document is relevant to the input question.
 *
 * @example
 * ```ts
 * const evaluator = createDocumentRelevanceEvaluator({ model: openai("gpt-4o-mini") });
 * const result = await evaluator.evaluate({
 *   input: "What is the capital of France?",
 *   documentText: "Paris is the capital and most populous city of France.",
 * });
 * console.log(result.label); // "relevant" or "unrelated"
 * ```
 */
export function createDocumentRelevanceEvaluator<
  RecordType extends Record<
    string,
    unknown
  > = DocumentRelevanceEvaluationRecord,
>(
  args: DocumentRelevanceEvaluatorArgs<RecordType>
): ClassificationEvaluator<RecordType> {
  // Fall back to the generated default config for any option the caller omits.
  const {
    choices = DOCUMENT_RELEVANCE_CLASSIFICATION_EVALUATOR_CONFIG.choices,
    promptTemplate = DOCUMENT_RELEVANCE_CLASSIFICATION_EVALUATOR_CONFIG.template,
    optimizationDirection = DOCUMENT_RELEVANCE_CLASSIFICATION_EVALUATOR_CONFIG.optimizationDirection,
    name = DOCUMENT_RELEVANCE_CLASSIFICATION_EVALUATOR_CONFIG.name,
    ...rest
  } = args;
  return createClassificationEvaluator<RecordType>({
    ...rest,
    promptTemplate,
    choices,
    optimizationDirection,
    name,
  });
}
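
Beyond the JSDoc example above, here is a minimal usage sketch showing the evaluator applied to several retrieved documents for one question, as in a RAG retrieval check. The package specifier "@arizeai/phoenix-evals" and the openai("gpt-4o-mini") helper from "@ai-sdk/openai" are assumptions not confirmed by this file; only createDocumentRelevanceEvaluator, evaluate(), and result.label come from the code above.

// A minimal sketch, not a definitive integration: the "@arizeai/phoenix-evals"
// package specifier and the "@ai-sdk/openai" provider import are assumptions;
// only createDocumentRelevanceEvaluator, evaluate(), and result.label are taken
// from the file above.
import { openai } from "@ai-sdk/openai";
import { createDocumentRelevanceEvaluator } from "@arizeai/phoenix-evals";

async function main() {
  // Uses the generated defaults for choices, prompt template, name, and
  // optimization direction; only the model is supplied here.
  const evaluator = createDocumentRelevanceEvaluator({
    model: openai("gpt-4o-mini"),
  });

  const question = "What is the capital of France?";
  const retrievedDocuments = [
    "Paris is the capital and most populous city of France.",
    "The Nile is the longest river in Africa.",
  ];

  // Classify each retrieved document against the same input question.
  for (const documentText of retrievedDocuments) {
    const result = await evaluator.evaluate({ input: question, documentText });
    console.log(`${result.label}: ${documentText}`);
  }
}

void main();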
