Aidderall MCP Server

by cheezcake
test_switch_focus.py (3.48 kB)
#!/usr/bin/env python3
# Aidderall MCP Server - Hierarchical task management for AI assistants
# Copyright (C) 2024 Briam R. <briamr@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""Manual test for switch_focus functionality."""

import asyncio
import json

from src.handlers import AidderallHandlers
from src.task_manager import TaskManager


async def test_switch_focus():
    print("=== Testing Switch Focus Feature ===\n")

    manager = TaskManager()
    handlers = AidderallHandlers(manager)

    # Create a complex task structure
    print("Creating task structure...")
    await handlers.handle_create_new_task("Project A", "Working on feature A")
    result = await handlers.handle_extend_current_task("Research", "Research the API")
    research_id = result["task_id"]
    await handlers.handle_extend_current_task("Implement", "Write the code")

    # Create another independent task
    result = await handlers.handle_create_new_task("Urgent Bug", "Critical issue found")
    bug_id = result["task_id"]
    await handlers.handle_extend_current_task("Debug", "Find root cause")

    # Show current state
    print("\nCurrent focus state:")
    big_picture = await handlers.handle_get_big_picture()
    print(big_picture["structure"])

    current = await handlers.handle_get_current_task()
    print(f"\nCurrent task: {current['title']}")
    print(f"Focus path: {current['focus_path']}")

    # Switch back to research task
    print(f"\n--- Switching focus to Research task (ID: {research_id}) ---")
    switch_result = await handlers.handle_switch_focus(research_id)
    print(json.dumps(switch_result, indent=2))

    # Verify the switch
    current = await handlers.handle_get_current_task()
    print(f"\nNew current task: {current['title']}")
    print(f"New focus path: {current['focus_path']}")

    # Show the structure again
    print("\nUpdated focus state:")
    big_picture = await handlers.handle_get_big_picture()
    print(big_picture["structure"])

    # Try to switch to the same task
    print("\n--- Trying to switch to same task (Research) ---")
    switch_result = await handlers.handle_switch_focus(research_id)
    print(json.dumps(switch_result, indent=2))

    # Switch to the bug task
    print(f"\n--- Switching to Bug task (ID: {bug_id}) ---")
    switch_result = await handlers.handle_switch_focus(bug_id)
    print(json.dumps(switch_result, indent=2))

    # Complete a task and try to switch to it
    print("\n--- Completing current task and trying to switch to it ---")
    complete_result = await handlers.handle_complete_current_task()
    completed_id = complete_result["completed_task_id"]

    try:
        await handlers.handle_switch_focus(completed_id)
    except Exception as e:
        print(f"Expected error: {e}")

    print("\n=== Test Complete ===")


if __name__ == "__main__":
    asyncio.run(test_switch_focus())
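Note: this is a manual test script rather than a pytest case; it drives the handlers directly and prints the results. Since it imports from the src.handlers and src.task_manager packages and ends with an `if __name__ == "__main__"` guard, it is presumably run from the repository root (so the src package is importable) with `python test_switch_focus.py`; the exact invocation may vary with the project's layout and environment setup.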
