
Context Pods

by conorluddy
main.py.mustache • 2.07 kB
#!/usr/bin/env python3
"""
{{serverName}} - MCP Server
"""

import asyncio
import json
import sys
from typing import Any, Dict, List

from mcp.server import Server
from mcp.server.stdio import stdio_server
from mcp.types import (
    Tool,
    TextContent,
)


class {{serverName | pascalCase}}Server:
    """MCP Server for {{serverName}}"""

    def __init__(self):
        self.server = Server("{{serverName}}")
        self._setup_tools()

    def _setup_tools(self):
        """Set up server tools"""

        @self.server.list_tools()
        async def list_tools() -> List[Tool]:
            """List available tools"""
            return [
                Tool(
                    name="hello",
                    description="Say hello",
                    inputSchema={
                        "type": "object",
                        "properties": {
                            "name": {
                                "type": "string",
                                "description": "Name to greet",
                            }
                        },
                        "required": ["name"],
                    },
                )
            ]

        @self.server.call_tool()
        async def call_tool(name: str, arguments: Dict[str, Any]) -> List[TextContent]:
            """Handle tool calls"""
            if name == "hello":
                user_name = arguments.get("name", "World")
                return [
                    TextContent(
                        type="text",
                        text=f"Hello, {user_name}!",
                    )
                ]
            else:
                raise ValueError(f"Unknown tool: {name}")


async def main():
    """Main entry point"""
    server = {{serverName | pascalCase}}Server()

    # Run the server over stdio, the standard transport for local MCP servers.
    async with stdio_server() as (read_stream, write_stream):
        await server.server.run(
            read_stream,
            write_stream,
            server.server.create_initialization_options(),
        )


if __name__ == "__main__":
    asyncio.run(main())
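
The class name placeholder uses a pascalCase filter, which is not part of standard Mustache, so the Context Pods generator presumably applies that transformation itself. As a rough sketch only (not the actual generator code), and assuming the file contains just the two placeholders shown above, the substitution could be illustrated like this:

# Hypothetical rendering helper -- illustrates the substitution the template
# implies; the real Context Pods generator is not shown in this file.
import re

def pascal_case(name: str) -> str:
    """Turn e.g. 'my-weather-server' into 'MyWeatherServer'."""
    return "".join(part.capitalize() for part in re.split(r"[-_\s]+", name) if part)

def render_template(template: str, server_name: str) -> str:
    rendered = template.replace("{{serverName | pascalCase}}", pascal_case(server_name))
    return rendered.replace("{{serverName}}", server_name)

# Example: render_template(open("main.py.mustache").read(), "my-weather-server")
# yields a MyWeatherServerServer class whose Server is registered as "my-weather-server".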

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/conorluddy/ContextPods'
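
The same endpoint can be queried from Python. A minimal sketch using only the standard library, assuming the endpoint returns JSON as the curl example suggests (the exact response fields are not documented here):

# Fetch the Context Pods entry from the Glama MCP directory API.
import json
import urllib.request

url = "https://glama.ai/api/mcp/v1/servers/conorluddy/ContextPods"
with urllib.request.urlopen(url) as resp:
    data = json.load(resp)

print(json.dumps(data, indent=2))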

If you have feedback or need assistance with the MCP directory API, please join our Discord server.