Skip to main content
Glama
mcp_server.py (4.34 kB)
#!/usr/bin/env python3
"""
mcp_server.py

A slightly more robust Model Context Protocol (MCP) server.

Run as a standalone process that talks to an MCP client (e.g. Claude
Desktop) over stdin/stdout using newline-delimited JSON messages.

The server:
- Performs the MCP handshake
- Registers available tools (loaded from schemas.py)
- Dispatches tool calls to the actual Python functions (in tools.py)
- Returns structured MCP tool results back to the client

This is the backend that connects your LLM to your synth controller.
"""

import sys
import json
import traceback
from typing import Dict, Any, Callable

try:
    # Normal case: the module is imported as part of its package.
    from .schemas import TOOL_SCHEMAS
    from .tools import TOOL_FUNCTIONS
except ImportError:
    # Fallback: the file is executed directly (as the shebang and the
    # __main__ guard advertise), where relative imports are unavailable.
    from schemas import TOOL_SCHEMAS  # type: ignore
    from tools import TOOL_FUNCTIONS  # type: ignore


# ------------------------------------------------------------
# Low-level utilities
# ------------------------------------------------------------

def _send_message(obj: Dict[str, Any]) -> None:
    """Write one JSON message as a single line to stdout and flush.

    The MCP client expects exactly one JSON object per line; flushing
    immediately keeps the stdio pipe from buffering replies.
    """
    sys.stdout.write(json.dumps(obj) + "\n")
    sys.stdout.flush()


def _read_message() -> Dict[str, Any]:
    """Read a single newline-delimited JSON message from stdin.

    Returns the decoded object, or {} when a line is not valid JSON
    (after reporting the problem to the client). Exits the process
    cleanly on EOF, which means the client disconnected.
    """
    line = sys.stdin.readline()
    if not line:
        # End-of-file: client disconnected.
        sys.exit(0)
    try:
        return json.loads(line)
    except json.JSONDecodeError as e:
        # Report through the same framed, flushed channel as every other
        # message; a bare print() is unflushed and can interleave badly
        # with other stdout traffic.
        _send_message({"type": "error", "message": f"Invalid JSON: {e}"})
        return {}


# ------------------------------------------------------------
# Handshake
# ------------------------------------------------------------

def handle_initialize(msg: Dict[str, Any]) -> None:
    """Respond to the MCP "initialize" message.

    Replies with an "initialized" message carrying the list of tools and
    their schemas, built from TOOL_SCHEMAS.
    """
    tools = []
    for tool_name, schema in TOOL_SCHEMAS.items():
        tools.append({
            "name": tool_name,
            "description": schema.get("description", f"{tool_name} tool"),
            "inputSchema": schema.get("inputSchema", {}),
            "outputSchema": schema.get("outputSchema", {}),
        })
    _send_message({
        "type": "initialized",
        "tools": tools,
    })


# ------------------------------------------------------------
# Tool call handling
# ------------------------------------------------------------

def handle_tool_call(msg: Dict[str, Any]) -> None:
    """Dispatch a tool call from the LLM to the matching Python function.

    Expected message format:
        {
            "type": "toolCall",
            "tool": "setParameter",
            "arguments": { ... }
        }

    Always replies with a "toolResult" message carrying either "result"
    or "error"; exceptions raised by the tool are caught and reported.
    """
    tool_name = msg.get("tool")
    args = msg.get("arguments", {})

    if tool_name not in TOOL_FUNCTIONS:
        _send_message({
            "type": "toolResult",
            "tool": tool_name,
            "error": f"Unknown tool '{tool_name}'.",
        })
        return

    # Guard against a malformed payload: **args below requires a mapping,
    # and failing early gives the client a clear message instead of a
    # confusing TypeError from the unpacking.
    if not isinstance(args, dict):
        _send_message({
            "type": "toolResult",
            "tool": tool_name,
            "error": "'arguments' must be a JSON object.",
        })
        return

    try:
        result = TOOL_FUNCTIONS[tool_name](**args)
        # Return structured JSON matching the MCP toolResult format.
        _send_message({
            "type": "toolResult",
            "tool": tool_name,
            "result": result,
        })
    except Exception as e:
        # Make any failure visible both locally (stderr traceback) and
        # to the client (structured error result).
        traceback.print_exc()
        _send_message({
            "type": "toolResult",
            "tool": tool_name,
            "error": f"Exception: {e}",
        })


# ------------------------------------------------------------
# Main event loop
# ------------------------------------------------------------

def main() -> None:
    """Main loop: read JSON messages from the client and dispatch by "type"."""
    while True:
        msg = _read_message()
        if not msg:
            continue

        msg_type = msg.get("type")

        if msg_type == "initialize":
            handle_initialize(msg)
        elif msg_type == "toolCall":
            handle_tool_call(msg)
        else:
            # Unknown or unsupported message type.
            _send_message({
                "type": "error",
                "message": f"Unsupported message type '{msg_type}'.",
            })


if __name__ == "__main__":
    main()

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/TYLERSFOSTER/MCPSynthController'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.