
MCP Multi-Agent Orchestration Server

by ec49ca
route.ts (1.59 kB)
import { NextRequest, NextResponse } from 'next/server';

// MCP Server configuration
const MCP_SERVER_URL = process.env.MCP_SERVER_URL || 'http://localhost:8000';

export async function POST(request: NextRequest) {
  try {
    const { message, selectedDocuments, provider, model } = await request.json();

    if (!message) {
      return NextResponse.json(
        { error: 'Message is required' },
        { status: 400 }
      );
    }

    // Forward query to MCP server orchestrator endpoint
    const requestBody: any = { query: message };
    if (selectedDocuments && selectedDocuments.length > 0) {
      requestBody.selected_documents = selectedDocuments;
    }
    if (provider) {
      requestBody.provider = provider;
    }
    if (model) {
      requestBody.model = model;
    }

    const response = await fetch(`${MCP_SERVER_URL}/orchestrate`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(requestBody),
    });

    if (!response.ok) {
      const errorText = await response.text();
      throw new Error(`MCP server error: ${response.status} - ${errorText}`);
    }

    const result = await response.json();

    return NextResponse.json({
      response: result.interpreted_response || result.response || 'No response received',
      agents_used: result.agents_used || [],
      internal_results: result.internal_results,
      external_results: result.external_results,
    });
  } catch (error: any) {
    console.error('Error:', error);
    return NextResponse.json(
      { error: 'Failed to process request', message: error.message || 'Unknown error' },
      { status: 500 }
    );
  }
}
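
This route acts as a thin proxy between the Next.js frontend and the orchestrator's /orchestrate endpoint. For context, the sketch below shows how a client might call it and type the reply. The route path (/api/chat), the helper name askOrchestrator, and the value types for internal_results/external_results are assumptions for illustration, not part of the repository; the field names themselves mirror what the handler above returns.

// Hypothetical shape of the JSON returned by the route above.
// Field names come from the handler; the value types of
// internal_results/external_results are assumptions.
interface OrchestratorReply {
  response: string;
  agents_used: string[];
  internal_results?: unknown;
  external_results?: unknown;
}

// Minimal client-side sketch, assuming the route is served at /api/chat.
async function askOrchestrator(
  message: string,
  selectedDocuments?: string[]
): Promise<OrchestratorReply> {
  const res = await fetch('/api/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ message, selectedDocuments }),
  });
  if (!res.ok) {
    throw new Error(`Request failed: ${res.status}`);
  }
  return (await res.json()) as OrchestratorReply;
}

Optional provider and model fields can be passed the same way; the route only forwards them to the orchestrator when they are present.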


MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ec49ca/NLP-project-contract-comparison'
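
The same lookup can be done from TypeScript; a minimal sketch is below. The URL is taken from the curl example above, while the response is treated as opaque JSON because its schema is not documented here.

// Minimal sketch of querying the Glama MCP directory API.
// The endpoint comes from the curl example; the response shape is an assumption.
async function getServerInfo(): Promise<unknown> {
  const res = await fetch(
    'https://glama.ai/api/mcp/v1/servers/ec49ca/NLP-project-contract-comparison'
  );
  if (!res.ok) {
    throw new Error(`Directory API error: ${res.status}`);
  }
  return res.json();
}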

If you have feedback or need assistance with the MCP directory API, please join our Discord server.