http_server.py (3.32 kB)
#!/usr/bin/env python3
"""HTTP wrapper for SpaceX STDIO MCP Server - wraps server.py for smithery.ai hosting."""

import os
import sys
import json
import logging
import subprocess
from typing import Any, Dict

from fastapi import FastAPI, Request, Response
from fastapi.middleware.cors import CORSMiddleware

# Disable logging to avoid interference
logging.basicConfig(level=logging.CRITICAL)

app = FastAPI()

# Configure CORS as required by smithery.ai
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
    expose_headers=["mcp-session-id", "mcp-protocol-version"],
)

# Store the STDIO subprocess
stdio_process = None


def start_stdio_server():
    """Start the STDIO MCP server as a subprocess."""
    global stdio_process
    stdio_process = subprocess.Popen(
        [sys.executable, "server.py"],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
        bufsize=1,
    )


async def forward_to_stdio(request_data: Dict[str, Any]) -> Dict[str, Any]:
    """Forward an MCP request to the STDIO server and return its response."""
    global stdio_process
    # (Re)start the subprocess if it has not been started or has exited
    if stdio_process is None or stdio_process.poll() is not None:
        start_stdio_server()

    try:
        # Send the JSON-RPC request as a single newline-terminated line
        request_json = json.dumps(request_data) + "\n"
        stdio_process.stdin.write(request_json)
        stdio_process.stdin.flush()

        # Read one line back as the JSON-RPC response
        response_line = stdio_process.stdout.readline()
        if not response_line:
            raise Exception("No response from STDIO server")

        return json.loads(response_line.strip())
    except Exception as e:
        logging.error(f"Error forwarding to STDIO: {e}")
        return {
            "jsonrpc": "2.0",
            "id": request_data.get("id"),
            "error": {
                "code": -32603,
                "message": f"Internal error: {str(e)}",
            },
        }


@app.post("/mcp")
async def mcp_endpoint(request: Request):
    """Handle MCP HTTP requests and forward them to the STDIO server."""
    try:
        request_data = await request.json()
        response_data = await forward_to_stdio(request_data)
        return Response(
            content=json.dumps(response_data),
            media_type="application/json",
        )
    except Exception as e:
        logging.error(f"Error in MCP endpoint: {e}")
        return Response(
            content=json.dumps({
                "jsonrpc": "2.0",
                "error": {
                    "code": -32700,
                    "message": f"Parse error: {str(e)}",
                },
            }),
            media_type="application/json",
            status_code=500,
        )


@app.get("/health")
async def health_check():
    """Health check endpoint."""
    return {"status": "ok", "server": "spacex-mcp"}


if __name__ == "__main__":
    import uvicorn

    # Start the STDIO server subprocess
    start_stdio_server()

    # Get port from environment (smithery sets PORT=8081)
    port = int(os.environ.get("PORT", 8081))

    # Start the HTTP server
    uvicorn.run(app, host="0.0.0.0", port=port)
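
For local testing, a minimal sketch of how this wrapper could be exercised is shown below. It assumes the wrapper is running on the default port 8081 and that server.py answers standard MCP JSON-RPC methods such as tools/list; the script itself is hypothetical and not part of the repository.

#!/usr/bin/env python3
"""Hypothetical local smoke test for the HTTP wrapper (illustration only)."""

import json
import urllib.request
from typing import Any, Dict, Optional

BASE_URL = "http://localhost:8081"  # assumes the default PORT used above


def call_mcp(method: str, params: Optional[Dict[str, Any]] = None, request_id: int = 1) -> Dict[str, Any]:
    """POST one JSON-RPC request to /mcp and return the parsed response."""
    payload = json.dumps({
        "jsonrpc": "2.0",
        "id": request_id,
        "method": method,
        "params": params or {},
    }).encode("utf-8")
    req = urllib.request.Request(
        f"{BASE_URL}/mcp",
        data=payload,
        headers={"Content-Type": "application/json"},
    )
    with urllib.request.urlopen(req) as resp:
        return json.loads(resp.read().decode("utf-8"))


if __name__ == "__main__":
    # Check that the wrapper is up, then ask the STDIO server for its tool list.
    with urllib.request.urlopen(f"{BASE_URL}/health") as resp:
        print(resp.read().decode("utf-8"))
    print(call_mcp("tools/list"))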

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/rftsngl/SpaceX-mcp'
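
The same lookup can be done programmatically; a small sketch using the Python standard library is shown below. It only assumes the endpoint returns JSON, the exact response schema is not documented here.

import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/rftsngl/SpaceX-mcp"

# Fetch the directory entry and pretty-print whatever JSON comes back.
with urllib.request.urlopen(URL) as resp:
    server_info = json.loads(resp.read().decode("utf-8"))

print(json.dumps(server_info, indent=2))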

If you have feedback or need assistance with the MCP directory API, please join our Discord server.