"""FastAPI application factory with MCP integration."""
import logging
from contextlib import asynccontextmanager
from typing import AsyncGenerator
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from common.config import settings_factory
from .mcp_server import mcp
# Import tools and widgets to register them (side effects)
from .tools import benchmarking_tool # noqa: F401
from .tools import benchmarking_widget # noqa: F401
logger = logging.getLogger(__name__)
@asynccontextmanager
async def combined_lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
    """Lifespan context manager for the FastAPI app.

    On startup, logs the MCP tools and resources registered by the
    side-effect imports at module top, then yields for the lifetime of
    the application. No shutdown work is performed.

    Args:
        app: The FastAPI application instance (required by the lifespan
            protocol; unused here).
    """
    # Log registered tools/resources at startup so a misregistered tool or
    # widget is visible immediately in the logs.
    tools = await mcp.list_tools()
    # Lazy %-style args: the list comprehension result is only formatted
    # when INFO logging is enabled.
    logger.info("✓ MCP tools available: %s", [t.name for t in tools])
    resources = await mcp.list_resources()
    logger.info("✓ MCP resources available: %s", [r.name for r in resources])
    yield
def create_app() -> FastAPI:
    """Create and configure the FastAPI application.

    Returns:
        A FastAPI app with CORS middleware, a ``/health`` endpoint, and
        the MCP SSE app mounted at the root path (serving ``/sse`` and
        ``/messages/``).
    """
    config = settings_factory()

    # Configure logging FIRST so records emitted during app construction
    # (including FastAPI/middleware setup) honour the configured level and
    # format. The original configured logging after FastAPI() was built.
    logging.basicConfig(
        level=getattr(logging, config.log_level.upper()),
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    )

    # Use SSE app as the main MCP transport (works with both Claude and ChatGPT)
    # Mount at root like openai-mcp does
    mcp_app = mcp.sse_app()

    app = FastAPI(
        title="Benchmark MCP Server",
        description="MCP Server with Industry Benchmarking - Works with Claude and ChatGPT",
        lifespan=combined_lifespan,
    )
    # Expose the parsed settings to request handlers via app.state.
    app.state.config = config

    # NOTE(review): the CORS spec forbids `Access-Control-Allow-Origin: *`
    # together with credentials; Starlette works around it by echoing the
    # request Origin. Consider restricting allow_origins via config.
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    # Health check endpoint. Registered before the root mount, so it takes
    # precedence over the MCP catch-all mount below.
    @app.get("/health")
    async def health():
        return {"status": "healthy"}

    # Mount MCP SSE app at root (handles /sse and /messages/)
    app.mount("/", mcp_app)

    logger.info("✓ FastAPI + MCP Server created!")
    logger.info(" Endpoint: /sse")
    return app
# Create the module-level app instance eagerly at import time so an ASGI
# server can target it directly (e.g. `uvicorn package.module:app`).
app = create_app()
# Public API: the configured FastAPI app and the shared MCP server object.
__all__ = ["app", "mcp"]