Skip to main content
Glama

Formula One MCP Server

f1AgentEndpoint.ts (3.93 kB)
��// F1 Agent Endpoint for Firebase Integration // Exposes your F1 agent as an LLM endpoint that Firebase can call import { llmService } from '../services/llmService'; import { callF1ToolsRecursive } from '../utils/toolCaller'; export interface ChatMessage { role: 'user' | 'model'; content: string; } export interface ChatRequest { model: string; messages: ChatMessage[]; stream?: boolean; } export interface ChatResponse { message: { content: string; }; } /** * F1 Agent Chat Endpoint * Handles chat requests from Firebase and processes them with F1 agent */ export async function handleF1AgentChat(request: ChatRequest): Promise<ChatResponse> { try { // Extract the latest user message const userMessages = request.messages.filter(msg => msg.role === 'user'); const latestUserMessage = userMessages[userMessages.length - 1]; if (!latestUserMessage) { throw new Error('No user message found in request'); } console.log('>�� F1 Agent processing:', latestUserMessage.content); // Process with F1 agent using LLM service const queryPlan = await llmService.parseQueryIntelligently(latestUserMessage.content); // Call F1 tools (now recursive) const toolResult = await callF1ToolsRecursive(queryPlan); // Synthesize response const agentResponse = await llmService.synthesizeResponse( latestUserMessage.content, toolResult, queryPlan.tool ); console.log('' F1 Agent response:', agentResponse); return { message: { content: agentResponse } }; } catch (error) { console.error('L' F1 Agent error:', error); return { message: { content: 'L' Sorry, I encountered an error processing your F1 query. Please try again.' } }; } } // Removed duplicate functions - now using shared utility from ../utils/toolCaller

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/notsedano/f1-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.