
Aidderall MCP Server

by cheezcake
server.py (4.04 kB)
# Aidderall MCP Server - Hierarchical task management for AI assistants
# Copyright (C) 2024 Briam R. <briamr@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import asyncio
import json
import logging
from typing import Any

from mcp.server import Server
from mcp.server.stdio import stdio_server
from mcp.types import TextContent

from .handlers import AidderallHandlers
from .task_manager import TaskManager

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("aidderall-mcp")


async def run_server() -> None:
    logger.info("Starting Aidderall MCP server...")

    server: Server = Server("aidderall-mcp")
    task_manager = TaskManager()
    handlers = AidderallHandlers(task_manager)

    @server.list_tools()
    async def list_tools() -> list[Any]:
        return handlers.get_tool_definitions()

    @server.call_tool()
    async def call_tool(name: str, arguments: dict) -> list[Any]:
        logger.info(f"Tool called: {name} with args: {arguments}")

        handler_map = {
            "create_new_task": lambda: handlers.handle_create_new_task(
                arguments["title"], arguments["body"]
            ),
            "extend_current_task": lambda: handlers.handle_extend_current_task(
                arguments["title"], arguments["body"]
            ),
            "get_current_task": handlers.handle_get_current_task,
            "get_big_picture": lambda: handlers.handle_get_big_picture(
                arguments.get("format", "text")
            ),
            "complete_current_task": handlers.handle_complete_current_task,
            "get_completed_tasks": lambda: handlers.handle_get_completed_tasks(
                arguments.get("order", "chronological")
            ),
            "update_current_task": lambda: handlers.handle_update_current_task(
                arguments["body"]
            ),
            "get_stack_overview": handlers.handle_get_stack_overview,
            "peek_context": lambda: handlers.handle_peek_context(
                arguments.get("include_body", False)
            ),
            "list_siblings": lambda: handlers.handle_list_siblings(
                arguments.get("include_body", False)
            ),
            "switch_focus": lambda: handlers.handle_switch_focus(arguments["task_id"]),
            "remove_task": lambda: handlers.handle_remove_task(arguments["task_id"]),
        }

        if name not in handler_map:
            raise ValueError(f"Unknown tool: {name}")

        try:
            result = await handler_map[name]()
            logger.info(f"Tool {name} completed successfully")
            # Convert dictionary result to TextContent
            return [TextContent(type="text", text=json.dumps(result, indent=2))]
        except Exception as e:
            logger.error(f"Error in tool {name}: {str(e)}")
            return [
                TextContent(type="text", text=json.dumps({"error": str(e)}, indent=2))
            ]

    logger.info("Aidderall MCP server initialized")

    async with stdio_server() as (read_stream, write_stream):
        await server.run(
            read_stream, write_stream, server.create_initialization_options()
        )


def main() -> None:
    try:
        asyncio.run(run_server())
    except KeyboardInterrupt:
        logger.info("Server stopped by user")
    except Exception as e:
        logger.error(f"Server error: {str(e)}")
        raise


if __name__ == "__main__":
    main()
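
For orientation, here is a minimal client sketch that drives this server over stdio using the official mcp Python SDK. The launch command (python -m aidderall_mcp) and the tool arguments are illustrative assumptions, not taken from the repository; adjust them to however the package is actually installed.

# Minimal client sketch using the official `mcp` Python SDK.
# Assumption: the server package is importable as `aidderall_mcp`
# and can be launched with `python -m aidderall_mcp`.
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

server_params = StdioServerParameters(
    command="python",
    args=["-m", "aidderall_mcp"],  # hypothetical launch command
)


async def demo() -> None:
    async with stdio_client(server_params) as (read_stream, write_stream):
        async with ClientSession(read_stream, write_stream) as session:
            await session.initialize()

            # Enumerate the tools registered via @server.list_tools()
            tools = await session.list_tools()
            print([tool.name for tool in tools.tools])

            # Invoke one of the tools routed through call_tool();
            # the title/body values here are illustrative.
            result = await session.call_tool(
                "create_new_task",
                {"title": "Write docs", "body": "Draft the README"},
            )
            print(result.content)


if __name__ == "__main__":
    asyncio.run(demo())

Because call_tool() serializes each handler's dictionary result with json.dumps into a single TextContent block, a client can recover the structured result with json.loads(result.content[0].text).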

MCP directory API

We provide metadata about every MCP server in the directory via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/cheezcake/aidderall_mcp'
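
The same lookup can be scripted; here is a minimal Python sketch using only the standard library (it assumes nothing about the response beyond it being JSON):

# Hypothetical stand-alone lookup of this server's directory entry.
# Assumes only that the endpoint returns JSON; field names are not
# listed here because the response schema is defined by the API itself.
import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/cheezcake/aidderall_mcp"

with urllib.request.urlopen(URL) as response:
    server_info = json.load(response)

print(json.dumps(server_info, indent=2))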

If you have feedback or need assistance with the MCP directory API, please join our Discord server.