Skip to main content
Glama
ingeno
by ingeno
runtime.py1.47 kB
"""Lambda/container entry point: build an OpenAPI-backed MCP server and serve it over HTTP."""

import logging
import os
import time
from contextlib import contextmanager

import uvicorn
from awslabs.openapi_mcp_server.api.config import Config
from awslabs.openapi_mcp_server.server import create_mcp_server

import config as api_config

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


@contextmanager
def stopwatch(description: str):
    """Log the start and end of a step, including elapsed seconds.

    Args:
        description: Human-readable label for the timed step.
    """
    logger.info("start %s", description)
    # perf_counter is monotonic: the measurement cannot go negative or jump
    # if the system clock is adjusted mid-step (time.time is wall-clock).
    start = time.perf_counter()
    try:
        yield
    finally:
        elapsed = time.perf_counter() - start
        logger.info("end %s: %.3fs", description, elapsed)


def get_port_from_environment() -> int:
    """Return the TCP port to bind, taken from $PORT (default 8080)."""
    return int(os.getenv("PORT", "8080"))


if __name__ == "__main__":
    with stopwatch("config creation"):
        # Use pre-downloaded spec (cached at Docker build time for faster cold starts)
        config = Config(
            api_name=api_config.API_NAME,
            api_base_url=api_config.API_BASE_URL,
            api_spec_path="/var/task/openapi-spec.json",
            transport="http",
            auth_type=api_config.AUTH_TYPE,
            version="0.1.0",
        )

    with stopwatch("MCP server creation"):
        mcp = create_mcp_server(config)

    with stopwatch("HTTP app creation"):
        # Stateless HTTP app so each request is self-contained (Lambda-friendly).
        app = mcp.http_app(
            path="/",
            transport="http",
            stateless_http=True,
        )

    uvicorn.run(
        app,
        host="0.0.0.0",
        port=get_port_from_environment(),
        log_level="info",
    )

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ingeno/mcp-openapi-lambda'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.