
FastMCP

by Dev00355
run_server.py (1.76 kB)
#!/usr/bin/env python3
"""
CLI script to run the FastMCP server
"""

import asyncio
import logging
import sys
from pathlib import Path

import click

# Add the current directory to the Python path so sibling modules
# (config.py, mcp_server.py) resolve before they are imported
sys.path.insert(0, str(Path(__file__).parent))

from config import MCPConfig


def setup_logging(level: str = "INFO"):
    """Setup logging configuration"""
    logging.basicConfig(
        level=getattr(logging, level.upper()),
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        handlers=[
            logging.StreamHandler(sys.stderr),
            logging.FileHandler('mcp_server.log')
        ]
    )


@click.command()
@click.option('--llm-url', default=None, help='URL of the local LLM service')
@click.option('--log-level', default='INFO', help='Logging level')
@click.option('--server-name', default=None, help='MCP server name')
def main(llm_url: str, log_level: str, server_name: str):
    """Run the FastMCP server"""
    # Override config if provided
    if llm_url:
        MCPConfig.LOCAL_LLM_SERVICE_URL = llm_url
    if server_name:
        MCPConfig.SERVER_NAME = server_name

    # Setup logging
    setup_logging(log_level)
    logger = logging.getLogger(__name__)

    logger.info(f"Starting {MCPConfig.SERVER_NAME} v{MCPConfig.SERVER_VERSION}")
    logger.info(f"LLM Service URL: {MCPConfig.LOCAL_LLM_SERVICE_URL}")
    logger.info(f"Log Level: {log_level}")

    try:
        # Import and run the MCP server
        from mcp_server import main as run_mcp_server
        asyncio.run(run_mcp_server())
    except KeyboardInterrupt:
        logger.info("Server stopped by user")
    except Exception as e:
        logger.error(f"Server error: {e}")
        sys.exit(1)


if __name__ == "__main__":
    main()
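The script expects a config module in the same directory exposing the MCPConfig attributes it reads (SERVER_NAME, SERVER_VERSION, LOCAL_LLM_SERVICE_URL). The repository's actual config.py is not shown here; the sketch below is only an illustration of that shape, and the default values are placeholders, not the project's real settings.

# config.py - minimal sketch of the attributes run_server.py touches;
# the values below are illustrative placeholders, not the project's defaults
class MCPConfig:
    SERVER_NAME = "FastMCP"
    SERVER_VERSION = "0.1.0"
    LOCAL_LLM_SERVICE_URL = "http://localhost:8000"

With a config module in place, the server can be started with, for example, python run_server.py --llm-url http://localhost:8000 --log-level DEBUG; any option left out falls back to the values in MCPConfig.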

MCP directory API

We provide all the information about MCP servers via our MCP directory API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Dev00355/custom-mcp'
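The same request can be made from Python. This is a minimal sketch using only the standard library; it assumes the endpoint returns JSON describing the server, as the curl example above suggests, and does not rely on any particular response fields.

# fetch_server_info.py - minimal sketch; assumes the endpoint returns JSON
import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/Dev00355/custom-mcp"

with urllib.request.urlopen(URL, timeout=10) as resp:
    data = json.load(resp)

# Pretty-print whatever metadata the directory returns for this server
print(json.dumps(data, indent=2))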

If you have feedback or need assistance with the MCP directory API, please join our Discord server.