Skip to main content
Glama

MCP Server with LLM Integration

by MelaLitho
answer_api.py — 4.6 kB
#!/usr/bin/env python3
"""Simple Answer API server using your smart search functionality."""

import logging
import os
import sys

import uvicorn
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel, Field

# Setup logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = FastAPI(title="Smart Search Answer API", version="1.0.0")


def _ensure_local_path():
    """Make sure the script directory is importable (needed for config.py)."""
    here = os.path.dirname(__file__)
    if here not in sys.path:
        sys.path.insert(0, here)


class AnswerRequest(BaseModel):
    """Request payload shared by both endpoints."""

    question: str
    db: str = "db3"  # Default to your local postgres
    include_schema: bool = True
    include_sql: bool = True
    include_semantic: bool = True


class AnswerResponse(BaseModel):
    """Response payload for /api/answer."""

    success: bool
    question: str
    database: str
    answer_markdown: str
    # default_factory gives each response its own list instance rather than
    # sharing a single mutable default.
    sql_queries: list = Field(default_factory=list)
    semantic_results: list = Field(default_factory=list)
    execution_time: float = 0.0


@app.get("/health")
async def health_check():
    """Health check endpoint"""
    return {"status": "healthy", "message": "Smart Search Answer API is running"}


def _connection_string(db: str) -> str:
    """Look up the SQLAlchemy bind for *db*, raising 400 for unknown keys.

    An unknown database name is a client mistake, not a server failure,
    so it must not surface as a generic 500.
    """
    _ensure_local_path()
    from config import Config

    try:
        return Config.SQLALCHEMY_BINDS[db]
    except KeyError:
        raise HTTPException(status_code=400, detail=f"Unknown database: {db}")


@app.post("/api/answer", response_model=AnswerResponse)
async def answer_question(request: AnswerRequest):
    """Answer natural language questions using smart search"""
    try:
        # Lazy %-args keep formatting off the hot path (PEP 282 convention).
        logger.info("Processing question: %s", request.question)

        # Imported lazily so the server can start even when the router
        # module is unavailable at import time.
        from llmDatabaseRouter import LLMDatabaseRouter

        connection_string = _connection_string(request.db)

        # Initialize router
        router = LLMDatabaseRouter(connection_string)

        # answer_question routes the request through smart search.
        result = await router.answer_question(
            question=request.question,
            include_schema=request.include_schema,
        )

        return AnswerResponse(
            success=True,
            question=request.question,
            database=request.db,
            **result,
        )
    except HTTPException:
        raise  # Preserve deliberate HTTP errors (e.g. the 400 for a bad db key).
    except Exception as e:
        # logger.exception records the full traceback, not just the message.
        logger.exception("Answer API failed")
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/api/smart-search")
async def smart_search_direct(request: AnswerRequest):
    """Direct access to smart search functionality"""
    engine = None
    try:
        logger.info("Smart search question: %s", request.question)

        # Import smart search components lazily, like the router above.
        from services.smart_search import SmartSearch
        from services.schema_service import SchemaService
        from services.sql_service import SQLService
        from services.semantic_service import SemanticService
        from services.synthesis_service import SynthesisService
        from repositories.postgres_repository import PostgresRepository
        from sqlalchemy import create_engine

        # Setup database connection. _connection_string also fixes the
        # original bug where this endpoint imported config without first
        # putting the script directory on sys.path.
        connection_string = _connection_string(request.db)
        engine = create_engine(connection_string)

        # Initialize services
        postgres_repo = PostgresRepository(engine)
        schema_service = SchemaService(postgres_repo)
        sql_service = SQLService(postgres_repo, schema_service)
        semantic_service = SemanticService(postgres_repo)
        synthesis_service = SynthesisService()

        # Initialize smart search
        smart_search = SmartSearch(
            schema_service=schema_service,
            sql_service=sql_service,
            semantic_service=semantic_service,
            synthesis_service=synthesis_service,
        )

        # Use the answer method
        result = smart_search.answer(
            question=request.question,
            include_sql=request.include_sql,
            include_semantic=request.include_semantic,
            include_schema=request.include_schema,
        )

        # Convert result to dict (result may be a plain dict or an object).
        result_dict = result.__dict__ if hasattr(result, "__dict__") else result

        return {
            "success": True,
            "question": request.question,
            "database": request.db,
            **result_dict,
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.exception("Smart search failed")
        raise HTTPException(status_code=500, detail=str(e))
    finally:
        # The engine is created per request; dispose it so its connection
        # pool does not leak across requests.
        if engine is not None:
            engine.dispose()


if __name__ == "__main__":
    print("Starting Smart Search Answer API...")
    print("API available at: http://localhost:8001")
    print("Health check: http://localhost:8001/health")
    print("Interactive docs: http://localhost:8001/docs")
    uvicorn.run(app, host="0.0.0.0", port=8001)

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/MelaLitho/MCPServer'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.