Prompt Auto-Optimizer MCP

by sloth-wq
integration-patterns.md
# Integration Patterns

## Overview

This guide covers various patterns for integrating GEPA with different systems, frameworks, and workflows. These patterns enable GEPA to work seamlessly with existing infrastructure and applications.

## MCP Server Integration

### Basic MCP Server Setup

```typescript
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import { CallToolRequestSchema } from '@modelcontextprotocol/sdk/types.js';
import { GepaOrchestrator } from './gepa-orchestrator.js';

class GepaMCPServer {
  private server: Server;
  private gepaOrchestrator: GepaOrchestrator;

  constructor() {
    this.server = new Server(
      { name: 'gepa-server', version: '1.0.0' },
      { capabilities: { tools: {} } }
    );
    this.gepaOrchestrator = new GepaOrchestrator();
    this.setupTools();
  }

  private setupTools(): void {
    // Evolution tools
    this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
      const { name, arguments: args } = request.params;

      switch (name) {
        case 'optimize_prompt':
          return this.handleOptimizePrompt(args);
        case 'evaluate_candidate':
          return this.handleEvaluateCandidate(args);
        case 'get_optimization_status':
          return this.handleGetOptimizationStatus(args);
        default:
          throw new Error(`Unknown tool: ${name}`);
      }
    });
  }

  private async handleOptimizePrompt(args: any) {
    const { taskDescription, seedPrompt, config } = args;

    const result = await this.gepaOrchestrator.startOptimization({
      taskDescription,
      seedPrompt,
      config
    });

    return {
      content: [
        {
          type: 'text',
          text: JSON.stringify(result, null, 2)
        }
      ]
    };
  }

  async start(): Promise<void> {
    const transport = new StdioServerTransport();
    await this.server.connect(transport);
  }
}
```

### Tool Definitions

```typescript
const GEPA_TOOLS = [
  {
    name: 'optimize_prompt',
    description: 'Optimize a prompt using genetic evolutionary algorithms',
    inputSchema: {
      type: 'object',
      properties: {
        taskDescription: {
          type: 'string',
          description: 'Description of the task for optimization'
        },
        seedPrompt: {
          type: 'string',
          description: 'Initial prompt to optimize'
        },
        config: {
          type: 'object',
          properties: {
            maxGenerations: { type: 'number', default: 10 },
            populationSize: { type: 'number', default: 20 },
            mutationRate: { type: 'number', default: 0.4 }
          }
        }
      },
      required: ['taskDescription', 'seedPrompt']
    }
  },
  {
    name: 'evaluate_candidate',
    description: 'Evaluate a specific prompt candidate',
    inputSchema: {
      type: 'object',
      properties: {
        prompt: { type: 'string' },
        taskContext: { type: 'object' },
        metrics: { type: 'array', items: { type: 'string' } }
      },
      required: ['prompt', 'taskContext']
    }
  },
  {
    name: 'get_pareto_frontier',
    description: 'Get current Pareto frontier candidates',
    inputSchema: {
      type: 'object',
      properties: {
        includeMetrics: { type: 'boolean', default: true },
        format: { type: 'string', enum: ['summary', 'detailed'], default: 'summary' }
      }
    }
  }
];
```
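The server above handles `tools/call`, but the tool definitions also need to be advertised so MCP clients can discover them. A minimal sketch of a `tools/list` handler that exposes `GEPA_TOOLS`; `registerToolListing` is a hypothetical helper, not part of the original server class:

```typescript
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js';

// Advertise the GEPA tools to MCP clients. GEPA_TOOLS is the array defined
// above; this helper would be called from the GepaMCPServer constructor
// alongside setupTools().
function registerToolListing(server: Server, tools: typeof GEPA_TOOLS): void {
  server.setRequestHandler(ListToolsRequestSchema, async () => ({
    tools
  }));
}
```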
## Web Framework Integration

### Express.js REST API

```typescript
import express from 'express';
import { GepaService } from './gepa-service.js';
import { validateOptimizationRequest } from './validators.js';
import { authMiddleware } from './middleware/auth.js';
import { rateLimitMiddleware } from './middleware/rate-limit.js';

const app = express();
const gepaService = new GepaService();

app.use(express.json());
app.use(authMiddleware);
app.use('/api/gepa', rateLimitMiddleware);

// Start optimization
app.post('/api/gepa/optimize', validateOptimizationRequest, async (req, res) => {
  try {
    const { taskDescription, seedPrompt, config } = req.body;

    const optimizationId = await gepaService.startOptimization({
      taskDescription,
      seedPrompt,
      config,
      userId: req.user.id
    });

    res.status(202).json({
      success: true,
      optimizationId,
      status: 'started',
      estimatedCompletion: Date.now() + (config.maxGenerations * 30000)
    });
  } catch (error) {
    res.status(400).json({
      success: false,
      error: error.message
    });
  }
});

// Get optimization status
app.get('/api/gepa/optimize/:id', async (req, res) => {
  try {
    const status = await gepaService.getOptimizationStatus(req.params.id);
    res.json({ success: true, ...status });
  } catch (error) {
    res.status(404).json({
      success: false,
      error: 'Optimization not found'
    });
  }
});

// Get results
app.get('/api/gepa/optimize/:id/results', async (req, res) => {
  try {
    const results = await gepaService.getOptimizationResults(req.params.id);
    res.json({ success: true, results });
  } catch (error) {
    res.status(404).json({
      success: false,
      error: 'Results not available'
    });
  }
});

// WebSocket for real-time updates
import { WebSocketServer } from 'ws';
import { createServer } from 'http';

const server = createServer(app);
const wss = new WebSocketServer({ server });

wss.on('connection', (ws, req) => {
  const optimizationId = new URL(req.url!, 'http://localhost').searchParams.get('id');

  if (optimizationId) {
    gepaService.subscribeToUpdates(optimizationId, (update) => {
      ws.send(JSON.stringify(update));
    });
  }
});
```
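For reference, a client could drive this API as sketched below. This assumes the routes above are served on `http://localhost:3000`, that the auth middleware accepts a bearer token, and that the status endpoint eventually reports `completed` or `failed`; all of those details are assumptions, only the endpoint paths and the `optimizationId` field come from the example above.

```typescript
// Hypothetical client for the REST API sketched above (Node 18+ global fetch).
const BASE = 'http://localhost:3000'; // assumed host/port

async function runOptimization(token: string): Promise<void> {
  // Kick off an optimization run.
  const res = await fetch(`${BASE}/api/gepa/optimize`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${token}` // assumed auth scheme
    },
    body: JSON.stringify({
      taskDescription: 'Summarize support tickets',
      seedPrompt: 'Summarize the following ticket:',
      config: { maxGenerations: 10 }
    })
  });
  const { optimizationId } = await res.json();

  // Poll the status endpoint until the run finishes.
  // The 'completed' / 'failed' values are assumed terminal states.
  let status = 'started';
  while (status !== 'completed' && status !== 'failed') {
    await new Promise((resolve) => setTimeout(resolve, 5000));
    const poll = await fetch(`${BASE}/api/gepa/optimize/${optimizationId}`, {
      headers: { Authorization: `Bearer ${token}` }
    });
    ({ status } = await poll.json());
  }

  console.log(`Optimization ${optimizationId} finished with status: ${status}`);
}
```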
### Next.js API Routes

```typescript
// pages/api/gepa/optimize.ts
import { NextApiRequest, NextApiResponse } from 'next';
import { GepaService } from '../../../lib/gepa-service';
import { withAuth } from '../../../lib/auth';

const gepaService = new GepaService();

async function handler(req: NextApiRequest, res: NextApiResponse) {
  if (req.method !== 'POST') {
    return res.status(405).json({ error: 'Method not allowed' });
  }

  try {
    const { taskDescription, seedPrompt, config } = req.body;

    const result = await gepaService.optimizePrompt({
      taskDescription,
      seedPrompt,
      config,
      userId: req.user.id
    });

    res.status(200).json({ success: true, data: result });
  } catch (error) {
    res.status(500).json({ success: false, error: error.message });
  }
}

export default withAuth(handler);
```

## Database Integration

### MongoDB Integration

```typescript
import { MongoClient, Collection } from 'mongodb';
import { TrajectoryStore, ExecutionTrajectory, TrajectoryFilter } from '../types/gepa.js';

export class MongoTrajectoryStore implements TrajectoryStore {
  private client: MongoClient;
  private collection: Collection<ExecutionTrajectory>;
  private dbName: string;

  constructor(connectionString: string, dbName: string) {
    this.client = new MongoClient(connectionString);
    this.dbName = dbName;
  }

  async connect(): Promise<void> {
    await this.client.connect();
    const db = this.client.db(this.dbName);
    this.collection = db.collection('trajectories');

    // Create indexes for performance
    await this.createIndexes();
  }

  async save(trajectory: ExecutionTrajectory): Promise<void> {
    await this.collection.insertOne(trajectory);
  }

  async query(filter: TrajectoryFilter): Promise<ExecutionTrajectory[]> {
    const mongoFilter = this.buildMongoFilter(filter);
    const cursor = this.collection.find(mongoFilter);

    if (filter.limit) {
      cursor.limit(filter.limit);
    }

    if (filter.sortBy) {
      cursor.sort({ [filter.sortBy]: filter.sortOrder === 'desc' ? -1 : 1 });
    }

    return cursor.toArray();
  }

  async load(id: string): Promise<ExecutionTrajectory | null> {
    return this.collection.findOne({ id });
  }

  private async createIndexes(): Promise<void> {
    await Promise.all([
      this.collection.createIndex({ promptId: 1 }),
      this.collection.createIndex({ taskId: 1 }),
      this.collection.createIndex({ startTime: 1 }),
      this.collection.createIndex({ 'finalResult.success': 1 }),
      this.collection.createIndex({ promptId: 1, startTime: -1 }) // Compound index
    ]);
  }

  private buildMongoFilter(filter: TrajectoryFilter): any {
    const mongoFilter: any = {};

    if (filter.promptId) {
      mongoFilter.promptId = filter.promptId;
    }

    if (filter.taskId) {
      mongoFilter.taskId = filter.taskId;
    }

    if (filter.timeRange) {
      mongoFilter.startTime = {
        $gte: filter.timeRange.start,
        $lte: filter.timeRange.end
      };
    }

    if (filter.successOnly) {
      mongoFilter['finalResult.success'] = true;
    }

    return mongoFilter;
  }
}
```
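A minimal usage sketch for the store above. The filter fields (`promptId`, `successOnly`, `sortBy`, `limit`) follow the class as written; the connection string, database name, and prompt id are placeholders.

```typescript
// Wire up the MongoDB-backed trajectory store and run a simple query.
const store = new MongoTrajectoryStore('mongodb://localhost:27017', 'gepa');
await store.connect();

// Fetch the 50 most recent successful trajectories for one prompt.
const trajectories = await store.query({
  promptId: 'prompt-123', // placeholder id
  successOnly: true,
  sortBy: 'startTime',
  sortOrder: 'desc',
  limit: 50
});

console.log(`Loaded ${trajectories.length} trajectories`);
```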
### PostgreSQL Integration

```typescript
import { Pool, PoolClient } from 'pg';
import { TrajectoryStore, ExecutionTrajectory, TrajectoryFilter } from '../types/gepa.js';

export class PostgreSQLTrajectoryStore implements TrajectoryStore {
  private pool: Pool;

  constructor(config: any) {
    this.pool = new Pool(config);
  }

  async initialize(): Promise<void> {
    await this.createTables();
    await this.createIndexes();
  }

  async save(trajectory: ExecutionTrajectory): Promise<void> {
    const client = await this.pool.connect();

    try {
      await client.query('BEGIN');

      // Insert trajectory
      await client.query(`
        INSERT INTO trajectories (id, prompt_id, task_id, start_time, end_time, final_result)
        VALUES ($1, $2, $3, $4, $5, $6)
      `, [
        trajectory.id,
        trajectory.promptId,
        trajectory.taskId,
        trajectory.startTime,
        trajectory.endTime,
        JSON.stringify(trajectory.finalResult)
      ]);

      // Insert steps
      for (const [index, step] of trajectory.steps.entries()) {
        await client.query(`
          INSERT INTO trajectory_steps
            (trajectory_id, step_index, action, input, output, timestamp, duration, success, error)
          VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
        `, [
          trajectory.id,
          index,
          step.action,
          step.input,
          step.output,
          step.timestamp,
          step.duration,
          step.success,
          step.error
        ]);
      }

      await client.query('COMMIT');
    } catch (error) {
      await client.query('ROLLBACK');
      throw error;
    } finally {
      client.release();
    }
  }

  async query(filter: TrajectoryFilter): Promise<ExecutionTrajectory[]> {
    const { query, params } = this.buildQuery(filter);
    const result = await this.pool.query(query, params);

    // Load steps for each trajectory
    const trajectories = await Promise.all(
      result.rows.map(row => this.loadTrajectoryWithSteps(row))
    );

    return trajectories;
  }

  private async createTables(): Promise<void> {
    await this.pool.query(`
      CREATE TABLE IF NOT EXISTS trajectories (
        id VARCHAR(255) PRIMARY KEY,
        prompt_id VARCHAR(255) NOT NULL,
        task_id VARCHAR(255) NOT NULL,
        start_time BIGINT NOT NULL,
        end_time BIGINT NOT NULL,
        final_result JSONB NOT NULL,
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
      )
    `);

    await this.pool.query(`
      CREATE TABLE IF NOT EXISTS trajectory_steps (
        id SERIAL PRIMARY KEY,
        trajectory_id VARCHAR(255) NOT NULL REFERENCES trajectories(id) ON DELETE CASCADE,
        step_index INTEGER NOT NULL,
        action VARCHAR(255) NOT NULL,
        input TEXT,
        output TEXT,
        timestamp BIGINT NOT NULL,
        duration INTEGER,
        success BOOLEAN,
        error TEXT
      )
    `);
  }
}
```
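The `buildQuery` and `loadTrajectoryWithSteps` helpers referenced by `query()` are not shown above. One possible shape for `buildQuery`, assuming the same `TrajectoryFilter` fields used by the MongoDB store; this is an illustrative sketch, not the project's actual implementation, and inside the class it would be a private method.

```typescript
import { TrajectoryFilter } from '../types/gepa.js';

// Hypothetical sketch of the query builder used by PostgreSQLTrajectoryStore.query().
function buildQuery(filter: TrajectoryFilter): { query: string; params: any[] } {
  const conditions: string[] = [];
  const params: any[] = [];

  if (filter.promptId) {
    params.push(filter.promptId);
    conditions.push(`prompt_id = $${params.length}`);
  }
  if (filter.taskId) {
    params.push(filter.taskId);
    conditions.push(`task_id = $${params.length}`);
  }
  if (filter.timeRange) {
    params.push(filter.timeRange.start, filter.timeRange.end);
    conditions.push(`start_time BETWEEN $${params.length - 1} AND $${params.length}`);
  }
  if (filter.successOnly) {
    conditions.push(`(final_result->>'success')::boolean = true`);
  }

  let query = 'SELECT * FROM trajectories';
  if (conditions.length > 0) {
    query += ` WHERE ${conditions.join(' AND ')}`;
  }
  if (filter.limit) {
    params.push(filter.limit);
    query += ` LIMIT $${params.length}`;
  }

  return { query, params };
}
```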
## Message Queue Integration

### Redis Queue Integration

```typescript
import { Queue, Worker } from 'bullmq';
import Redis from 'ioredis';
import { GepaOrchestrator } from './gepa-orchestrator.js';

export class GepaQueueService {
  private optimizationQueue: Queue;
  private evaluationQueue: Queue;
  private redis: Redis;
  private gepaOrchestrator: GepaOrchestrator;

  constructor(redisConfig: any) {
    this.redis = new Redis(redisConfig);
    this.optimizationQueue = new Queue('gepa-optimization', { connection: this.redis });
    this.evaluationQueue = new Queue('gepa-evaluation', { connection: this.redis });
    this.gepaOrchestrator = new GepaOrchestrator();
    this.setupWorkers();
  }

  async queueOptimization(request: OptimizationRequest): Promise<string> {
    const job = await this.optimizationQueue.add('optimize', request, {
      priority: request.priority || 5,
      attempts: 3,
      backoff: {
        type: 'exponential',
        delay: 2000
      }
    });

    return job.id!;
  }

  async queueEvaluation(request: EvaluationRequest): Promise<string> {
    const job = await this.evaluationQueue.add('evaluate', request, {
      priority: 10, // Higher priority for evaluations
      attempts: 5
    });

    return job.id!;
  }

  private setupWorkers(): void {
    // Optimization worker
    new Worker('gepa-optimization', async (job) => {
      const { taskDescription, seedPrompt, config } = job.data;

      try {
        // Update job progress
        await job.updateProgress(0);

        const result = await this.gepaOrchestrator.startOptimization({
          taskDescription,
          seedPrompt,
          config,
          progressCallback: (progress) => {
            job.updateProgress(progress.percentage);
          }
        });

        await job.updateProgress(100);
        return result;
      } catch (error) {
        throw new Error(`Optimization failed: ${error.message}`);
      }
    }, {
      connection: this.redis,
      concurrency: 2 // Limit concurrent optimizations
    });

    // Evaluation worker
    new Worker('gepa-evaluation', async (job) => {
      const { prompt, taskContext, metrics } = job.data;

      const result = await this.gepaOrchestrator.evaluatePrompt({
        prompt,
        taskContext,
        metrics
      });

      return result;
    }, {
      connection: this.redis,
      concurrency: 10 // Higher concurrency for evaluations
    });
  }

  async getJobStatus(jobId: string): Promise<JobStatus> {
    const optimizationJob = await this.optimizationQueue.getJob(jobId);
    const evaluationJob = await this.evaluationQueue.getJob(jobId);
    const job = optimizationJob || evaluationJob;

    if (!job) {
      throw new Error('Job not found');
    }

    return {
      id: job.id!,
      status: await job.getState(),
      progress: job.progress,
      data: job.data,
      result: job.returnvalue,
      error: job.failedReason
    };
  }
}
```
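Progress updates published by the workers above can also be consumed without polling `getJobStatus`, using BullMQ's `QueueEvents`. A minimal sketch; the queue name matches the example, while the Redis connection options are placeholders.

```typescript
import { QueueEvents } from 'bullmq';

// Listen for progress and completion events from the optimization queue.
const events = new QueueEvents('gepa-optimization', {
  connection: { host: 'localhost', port: 6379 } // assumed Redis location
});

events.on('progress', ({ jobId, data }) => {
  console.log(`Optimization ${jobId} progress:`, data);
});

events.on('completed', ({ jobId, returnvalue }) => {
  console.log(`Optimization ${jobId} finished`, returnvalue);
});

events.on('failed', ({ jobId, failedReason }) => {
  console.error(`Optimization ${jobId} failed: ${failedReason}`);
});
```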
### AWS SQS Integration

```typescript
import { SQSClient, SendMessageCommand, ReceiveMessageCommand, DeleteMessageCommand } from '@aws-sdk/client-sqs';
import { GepaProcessor } from './gepa-processor.js';

export class GepaSQSService {
  private sqsClient: SQSClient;
  private queueUrl: string;
  private processor: GepaProcessor;
  private isProcessing = false;

  constructor(awsConfig: any, queueUrl: string) {
    this.sqsClient = new SQSClient(awsConfig);
    this.queueUrl = queueUrl;
    this.processor = new GepaProcessor();
  }

  async sendOptimizationRequest(request: OptimizationRequest): Promise<void> {
    const message = {
      type: 'optimization',
      data: request,
      timestamp: Date.now(),
      id: this.generateId()
    };

    await this.sqsClient.send(new SendMessageCommand({
      QueueUrl: this.queueUrl,
      MessageBody: JSON.stringify(message),
      MessageAttributes: {
        Type: {
          DataType: 'String',
          StringValue: 'optimization'
        },
        Priority: {
          DataType: 'Number',
          StringValue: request.priority?.toString() || '5'
        }
      }
    }));
  }

  async startProcessing(): Promise<void> {
    this.isProcessing = true;

    while (this.isProcessing) {
      try {
        const messages = await this.receiveMessages();

        if (messages.length === 0) {
          await this.sleep(5000); // Wait 5 seconds before polling again
          continue;
        }

        await Promise.all(messages.map(message => this.processMessage(message)));
      } catch (error) {
        console.error('Error processing messages:', error);
        await this.sleep(10000); // Wait longer on error
      }
    }
  }

  stopProcessing(): void {
    this.isProcessing = false;
  }

  private async receiveMessages(): Promise<any[]> {
    const response = await this.sqsClient.send(new ReceiveMessageCommand({
      QueueUrl: this.queueUrl,
      MaxNumberOfMessages: 10,
      WaitTimeSeconds: 20, // Long polling
      VisibilityTimeout: 300 // 5 minutes to process
    }));

    return response.Messages || [];
  }

  private async processMessage(message: any): Promise<void> {
    try {
      const body = JSON.parse(message.Body);

      switch (body.type) {
        case 'optimization':
          await this.processor.processOptimization(body.data);
          break;
        case 'evaluation':
          await this.processor.processEvaluation(body.data);
          break;
        default:
          console.warn(`Unknown message type: ${body.type}`);
      }

      // Delete message after successful processing
      await this.sqsClient.send(new DeleteMessageCommand({
        QueueUrl: this.queueUrl,
        ReceiptHandle: message.ReceiptHandle
      }));
    } catch (error) {
      console.error('Error processing message:', error);
      // Message will be returned to queue due to visibility timeout
    }
  }
}
```

## Workflow Integration

### GitHub Actions Integration

```yaml
# .github/workflows/gepa-optimization.yml
name: GEPA Prompt Optimization

on:
  workflow_dispatch:
    inputs:
      task_description:
        description: 'Task description for optimization'
        required: true
        type: string
      seed_prompt:
        description: 'Initial prompt to optimize'
        required: true
        type: string
      max_generations:
        description: 'Maximum generations'
        required: false
        default: '10'
        type: string

jobs:
  optimize:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '18'
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Run GEPA optimization
        run: |
          node -e "
          const { GepaOrchestrator } = require('./dist/gepa-orchestrator.js');

          async function run() {
            const orchestrator = new GepaOrchestrator();

            const result = await orchestrator.startOptimization({
              taskDescription: '${{ github.event.inputs.task_description }}',
              seedPrompt: '${{ github.event.inputs.seed_prompt }}',
              config: {
                maxGenerations: parseInt('${{ github.event.inputs.max_generations }}')
              }
            });

            console.log('Optimization completed:');
            console.log(JSON.stringify(result, null, 2));

            // Save results to file
            require('fs').writeFileSync('optimization-results.json', JSON.stringify(result, null, 2));
          }

          run().catch(console.error);
          "

      - name: Upload results
        uses: actions/upload-artifact@v3
        with:
          name: optimization-results
          path: optimization-results.json

      - name: Create Issue with Results
        uses: actions/github-script@v6
        with:
          script: |
            const fs = require('fs');
            const results = JSON.parse(fs.readFileSync('optimization-results.json', 'utf8'));

            const body = `
            ## GEPA Optimization Results

            **Task**: ${{ github.event.inputs.task_description }}

            **Best Prompt** (Score: ${results.bestPrompt.averageScore.toFixed(3)}):
            \`\`\`
            ${results.bestPrompt.content}
            \`\`\`

            **Statistics**:
            - Generations: ${results.generations}
            - Convergence: ${results.convergenceAchieved}
            - Total Rollouts: ${results.totalRollouts}

            **Deployment Ready**: ${results.bestPrompt.averageScore > 0.8 ? '✅ Yes' : '❌ Needs more optimization'}
            `;

            await github.rest.issues.create({
              owner: context.repo.owner,
              repo: context.repo.repo,
              title: 'GEPA Optimization Results',
              body: body,
              labels: ['gepa', 'optimization-results']
            });
```

### Jenkins Pipeline Integration

```groovy
pipeline {
    agent any

    parameters {
        string(name: 'TASK_DESCRIPTION', description: 'Task description for optimization')
        text(name: 'SEED_PROMPT', description: 'Initial prompt to optimize')
        choice(name: 'ENVIRONMENT', choices: ['development', 'staging', 'production'], description: 'Target environment')
        booleanParam(name: 'AUTO_DEPLOY', defaultValue: false, description: 'Auto-deploy if score > 0.8')
    }

    environment {
        GEPA_API_KEY = credentials('gepa-api-key')
        NODE_VERSION = '18'
    }

    stages {
        stage('Setup') {
            steps {
                nodejs(nodeJSInstallationName: "${NODE_VERSION}") {
                    sh 'npm ci'
                    sh 'npm run build'
                }
            }
        }

        stage('Optimize Prompt') {
            steps {
                script {
                    nodejs(nodeJSInstallationName: "${NODE_VERSION}") {
                        def optimizationScript = """
                            const { GepaOrchestrator } = require('./dist/gepa-orchestrator.js');

                            async function optimize() {
                                const orchestrator = new GepaOrchestrator({
                                    environment: '${params.ENVIRONMENT}'
                                });

                                const result = await orchestrator.startOptimization({
                                    taskDescription: '${params.TASK_DESCRIPTION}',
                                    seedPrompt: \`${params.SEED_PROMPT}\`,
                                    config: {
                                        maxGenerations: 15,
                                        populationSize: 25
                                    }
                                });

                                console.log(JSON.stringify(result, null, 2));

                                // Save for later stages
                                require('fs').writeFileSync('results.json', JSON.stringify(result));

                                return result;
                            }

                            optimize().catch(process.exit);
                        """

                        sh "node -e \"${optimizationScript}\""
                    }
                }
            }
        }

        stage('Validate Results') {
            steps {
                script {
                    def results = readJSON file: 'results.json'
                    def score = results.bestPrompt.averageScore

                    echo "Optimization Score: ${score}"

                    if (score < 0.7) {
                        currentBuild.result = 'UNSTABLE'
                        echo "Warning: Low optimization score (${score})"
                    }

                    // Store results for downstream jobs
                    env.OPTIMIZATION_SCORE = score.toString()
                    env.BEST_PROMPT = results.bestPrompt.content
                }
            }
        }

        stage('Deploy') {
            when {
                expression {
                    return params.AUTO_DEPLOY && env.OPTIMIZATION_SCORE.toDouble() > 0.8
                }
            }
            steps {
                script {
                    // Deploy optimized prompt
                    sh """
                        curl -X POST ${DEPLOYMENT_ENDPOINT} \\
                            -H "Authorization: Bearer ${GEPA_API_KEY}" \\
                            -H "Content-Type: application/json" \\
                            -d '{
                                "prompt": "${env.BEST_PROMPT}",
                                "score": ${env.OPTIMIZATION_SCORE},
                                "environment": "${params.ENVIRONMENT}"
                            }'
                    """
                }
            }
        }

        stage('Notify') {
            steps {
                script {
                    def message = """
                        GEPA Optimization Complete
                        Task: ${params.TASK_DESCRIPTION}
                        Score: ${env.OPTIMIZATION_SCORE}
                        Environment: ${params.ENVIRONMENT}
                        Auto-deployed: ${params.AUTO_DEPLOY && env.OPTIMIZATION_SCORE.toDouble() > 0.8}
                        Build: ${env.BUILD_URL}
                    """

                    // Send to Slack/Teams/Discord
                    slackSend(
                        channel: '#gepa-notifications',
                        message: message,
                        color: env.OPTIMIZATION_SCORE.toDouble() > 0.8 ? 'good' : 'warning'
                    )
                }
            }
        }
    }

    post {
        always {
            archiveArtifacts artifacts: 'results.json', allowEmptyArchive: true

            publishHTML([
                allowMissing: false,
                alwaysLinkToLastBuild: true,
                keepAll: true,
                reportDir: '.',
                reportFiles: 'results.json',
                reportName: 'GEPA Results'
            ])
        }

        failure {
            emailext(
                subject: "GEPA Optimization Failed: ${params.TASK_DESCRIPTION}",
                body: "Optimization failed. Check build logs: ${env.BUILD_URL}",
                to: "${env.CHANGE_AUTHOR_EMAIL}"
            )
        }
    }
}
```

## Cloud Platform Integration

### AWS Lambda Integration

```typescript
import { Handler, APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';
import { GepaOrchestrator } from './gepa-orchestrator.js';
import { DynamoDBClient } from '@aws-sdk/client-dynamodb';
import { S3Client } from '@aws-sdk/client-s3';

const gepaOrchestrator = new GepaOrchestrator({
  dynamoClient: new DynamoDBClient({}),
  s3Client: new S3Client({}),
  isLambda: true
});

export const optimize: Handler<APIGatewayProxyEvent, APIGatewayProxyResult> = async (event) => {
  try {
    const { taskDescription, seedPrompt, config } = JSON.parse(event.body || '{}');

    // For long-running optimizations, use Step Functions
    if (config?.maxGenerations > 5) {
      return await startStepFunction(event);
    }

    // Quick optimizations can run in Lambda
    const result = await gepaOrchestrator.startOptimization({
      taskDescription,
      seedPrompt,
      config: {
        ...config,
        timeout: 14 * 60 * 1000 // Lambda timeout limit
      }
    });

    return {
      statusCode: 200,
      headers: {
        'Content-Type': 'application/json',
        'Access-Control-Allow-Origin': '*'
      },
      body: JSON.stringify({ success: true, data: result })
    };
  } catch (error) {
    console.error('Optimization error:', error);

    return {
      statusCode: 500,
      headers: {
        'Content-Type': 'application/json',
        'Access-Control-Allow-Origin': '*'
      },
      body: JSON.stringify({ success: false, error: error.message })
    };
  }
};

async function startStepFunction(event: APIGatewayProxyEvent) {
  // Implementation for AWS Step Functions integration
  // for long-running optimizations
}
```
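`startStepFunction` is left as a stub above. One way to hand long runs off to Step Functions is sketched below, assuming a state machine exists that wraps the optimization; the state machine ARN environment variable and response shape are illustrative, not part of the project.

```typescript
import { SFNClient, StartExecutionCommand } from '@aws-sdk/client-sfn';
import { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';

const sfnClient = new SFNClient({});

// Hypothetical handoff: start a Step Functions execution for long-running
// optimizations instead of running them inside this Lambda invocation.
async function startStepFunction(event: APIGatewayProxyEvent): Promise<APIGatewayProxyResult> {
  const execution = await sfnClient.send(new StartExecutionCommand({
    stateMachineArn: process.env.GEPA_STATE_MACHINE_ARN, // assumed env var
    input: event.body || '{}'
  }));

  return {
    statusCode: 202,
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      success: true,
      executionArn: execution.executionArn,
      status: 'started'
    })
  };
}
```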
### Google Cloud Functions Integration

```typescript
import { HttpFunction } from '@google-cloud/functions-framework';
import { GepaOrchestrator } from './gepa-orchestrator.js';
import { Firestore } from '@google-cloud/firestore';
import { Storage } from '@google-cloud/storage';

const gepaOrchestrator = new GepaOrchestrator({
  firestore: new Firestore(),
  storage: new Storage(),
  isCloudFunction: true
});

export const optimizePrompt: HttpFunction = async (req, res) => {
  // Enable CORS
  res.set('Access-Control-Allow-Origin', '*');
  res.set('Access-Control-Allow-Methods', 'POST');
  res.set('Access-Control-Allow-Headers', 'Content-Type');

  if (req.method === 'OPTIONS') {
    res.status(204).send('');
    return;
  }

  if (req.method !== 'POST') {
    res.status(405).send('Method Not Allowed');
    return;
  }

  try {
    const { taskDescription, seedPrompt, config } = req.body;

    const result = await gepaOrchestrator.startOptimization({
      taskDescription,
      seedPrompt,
      config
    });

    res.status(200).json({ success: true, data: result });
  } catch (error) {
    console.error('Optimization error:', error);
    res.status(500).json({ success: false, error: error.message });
  }
};
```

## Monitoring and Observability Integration

### Prometheus Metrics

```typescript
import { register, Counter, Histogram, Gauge } from 'prom-client';

export class GepaMetrics {
  private optimizationCounter = new Counter({
    name: 'gepa_optimizations_total',
    help: 'Total number of optimizations started',
    labelNames: ['status', 'user_id']
  });

  private optimizationDuration = new Histogram({
    name: 'gepa_optimization_duration_seconds',
    help: 'Duration of optimization processes',
    labelNames: ['task_category'],
    buckets: [30, 60, 120, 300, 600, 1200, 3600]
  });

  private activeOptimizations = new Gauge({
    name: 'gepa_active_optimizations',
    help: 'Number of currently running optimizations'
  });

  private candidateEvaluations = new Counter({
    name: 'gepa_candidate_evaluations_total',
    help: 'Total number of candidate evaluations',
    labelNames: ['strategy']
  });

  recordOptimizationStart(userId: string): void {
    this.optimizationCounter.inc({ status: 'started', user_id: userId });
    this.activeOptimizations.inc();
  }

  recordOptimizationComplete(userId: string, duration: number, taskCategory: string): void {
    this.optimizationCounter.inc({ status: 'completed', user_id: userId });
    this.activeOptimizations.dec();
    this.optimizationDuration.observe({ task_category: taskCategory }, duration);
  }

  recordCandidateEvaluation(strategy: string): void {
    this.candidateEvaluations.inc({ strategy });
  }

  async getMetrics(): Promise<string> {
    return register.metrics();
  }
}
```
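To make these metrics scrapeable, the registry is typically exposed over HTTP. A minimal sketch using Express; the `/metrics` path and the port are conventional choices, not mandated by the project.

```typescript
import express from 'express';
import { register } from 'prom-client';

const app = express();

// Expose the default registry so a Prometheus server can scrape it.
app.get('/metrics', async (_req, res) => {
  res.set('Content-Type', register.contentType);
  res.end(await register.metrics());
});

app.listen(9464, () => {
  console.log('GEPA metrics available at http://localhost:9464/metrics');
});
```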
### OpenTelemetry Integration

```typescript
import { trace, SpanStatusCode } from '@opentelemetry/api';
import { NodeTracerProvider } from '@opentelemetry/sdk-trace-node';
import { Resource } from '@opentelemetry/resources';
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions';
import { GepaOrchestrator } from './gepa-orchestrator.js';

const provider = new NodeTracerProvider({
  resource: new Resource({
    [SemanticResourceAttributes.SERVICE_NAME]: 'gepa-optimization',
    [SemanticResourceAttributes.SERVICE_VERSION]: '1.0.0',
  }),
});
provider.register();

const tracer = trace.getTracer('gepa');

export class TracedGepaOrchestrator extends GepaOrchestrator {
  async startOptimization(params: OptimizationParams): Promise<OptimizationResult> {
    const span = tracer.startSpan('gepa.optimization', {
      attributes: {
        'gepa.task.description': params.taskDescription,
        'gepa.config.max_generations': params.config?.maxGenerations || 10,
        'gepa.config.population_size': params.config?.populationSize || 20
      }
    });

    try {
      const result = await super.startOptimization(params);

      span.setAttributes({
        'gepa.result.generations': result.generations,
        'gepa.result.best_score': result.bestPrompt.averageScore,
        'gepa.result.convergence_achieved': result.convergenceAchieved
      });

      span.setStatus({ code: SpanStatusCode.OK });
      return result;
    } catch (error) {
      span.recordException(error);
      span.setStatus({ code: SpanStatusCode.ERROR, message: error.message });
      throw error;
    } finally {
      span.end();
    }
  }
}
```

This guide provides patterns for connecting GEPA with a variety of systems and platforms, so it can be integrated into existing workflows and infrastructure.
