
AiDD MCP Server

by skydeckai
server.py (1.57 kB)
import asyncio

import mcp.server.stdio
import mcp.types as types
from mcp.server import NotificationOptions, Server
from mcp.server.models import InitializationOptions

from .tools import TOOL_DEFINITIONS, TOOL_HANDLERS

server = Server("skydeckai-code")


@server.list_tools()
async def handle_list_tools() -> list[types.Tool]:
    """
    List available tools.
    Each tool specifies its arguments using JSON Schema validation.
    """
    return [types.Tool(**tool) for tool in TOOL_DEFINITIONS]


@server.call_tool()
async def handle_call_tool(
    name: str, arguments: dict | None
) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
    """Handle tool execution requests."""
    if not arguments:
        arguments = {}

    handler = TOOL_HANDLERS.get(name)
    if not handler:
        raise ValueError(f"Unknown tool: {name}")

    return await handler(arguments)


async def main():
    # Run the server using stdin/stdout streams
    async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
        await server.run(
            read_stream,
            write_stream,
            InitializationOptions(
                server_name="skydeckai-code",
                server_version="0.1.0",
                capabilities=server.get_capabilities(
                    notification_options=NotificationOptions(),
                    experimental_capabilities={},
                ),
            ),
        )


# This is needed if you'd like to connect to a custom client
if __name__ == "__main__":
    asyncio.run(main())
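server.py pulls TOOL_DEFINITIONS and TOOL_HANDLERS from the package's .tools module, which is not shown on this page. As a rough sketch only, assuming TOOL_DEFINITIONS is a list of dicts whose keys match the fields of types.Tool and TOOL_HANDLERS maps each tool name to an async handler, that module could be shaped like this (the read_file tool below is purely illustrative and not part of the actual project):

# Hypothetical sketch of a .tools module compatible with server.py above.
# The real tool names, schemas, and handlers live in the repository and may differ.
import mcp.types as types

TOOL_DEFINITIONS = [
    {
        "name": "read_file",  # illustrative tool name, not from the source
        "description": "Read a UTF-8 text file and return its contents.",
        "inputSchema": {  # JSON Schema describing the tool's arguments
            "type": "object",
            "properties": {"path": {"type": "string"}},
            "required": ["path"],
        },
    }
]


async def _read_file(arguments: dict) -> list[types.TextContent]:
    # Handlers receive the arguments dict and return a list of MCP content items.
    with open(arguments["path"], "r", encoding="utf-8") as f:
        return [types.TextContent(type="text", text=f.read())]


# server.py's handle_call_tool looks up the handler by tool name in this dict.
TOOL_HANDLERS = {"read_file": _read_file}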

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/skydeckai/mcp-server-aidd'
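If you prefer to call the API from code rather than curl, a minimal sketch using only the Python standard library is shown below; the response schema is not documented on this page, so the JSON is printed as-is rather than parsed into specific fields.

# Fetch the same directory entry from Python (stdlib only) and print the raw JSON.
import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/skydeckai/mcp-server-aidd"

with urllib.request.urlopen(URL) as response:
    data = json.load(response)

print(json.dumps(data, indent=2))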

If you have feedback or need assistance with the MCP directory API, please join our Discord server.