Skip to main content
Glama
bee4come

Plan-MCP

by bee4come
project_planner.py3.58 kB
"""Project planning tool using Gemini AI.""" from ..api.gemini_client import GeminiClient from ..models import ProjectPlan from ..prompts.system_prompts import PROJECT_PLANNER_PROMPT from ..utils.logger import logger class ProjectPlanner: """Tool for generating project plans using Gemini.""" def __init__(self, gemini_client: GeminiClient | None = None): """Initialize the project planner. Args: gemini_client: Optional Gemini client instance """ self.client = gemini_client or GeminiClient() async def create_plan( self, description: str, requirements: list[str] | None = None, constraints: list[str] | None = None, tech_stack: list[str] | None = None, ) -> ProjectPlan: """Create a project plan based on the provided information. Args: description: Project description requirements: List of specific requirements constraints: List of constraints or limitations tech_stack: Preferred technology stack Returns: Generated project plan """ logger.info("Creating project plan") # Build the prompt prompt_parts = [f"Project Description: {description}"] if requirements: prompt_parts.append("Requirements:\n" + "\n".join(f"- {req}" for req in requirements)) if constraints: prompt_parts.append("Constraints:\n" + "\n".join(f"- {con}" for con in constraints)) if tech_stack: prompt_parts.append(f"Technology Stack: {', '.join(tech_stack)}") prompt_parts.append( "\nPlease create a comprehensive project plan with phases, tasks, and estimates." ) prompt = "\n\n".join(prompt_parts) try: # Generate the plan plan = await self.client.generate_json( prompt=prompt, response_model=ProjectPlan, system_prompt=PROJECT_PLANNER_PROMPT, temperature=0.7, ) logger.info(f"Created project plan with {len(plan.phases)} phases") return plan except Exception as e: logger.error(f"Failed to create project plan: {str(e)}") raise async def refine_plan( self, current_plan: ProjectPlan, feedback: str, additional_context: str | None = None, ) -> ProjectPlan: """Refine an existing project plan based on feedback. 
Args: current_plan: The current project plan feedback: Feedback on what to change additional_context: Any additional context Returns: Refined project plan """ logger.info("Refining project plan based on feedback") prompt = f"""Current Project Plan: {current_plan.model_dump_json(indent=2)} Feedback: {feedback} {f"Additional Context: {additional_context}" if additional_context else ""} Please refine the project plan based on the feedback while maintaining its overall structure and quality.""" try: refined_plan = await self.client.generate_json( prompt=prompt, response_model=ProjectPlan, system_prompt=PROJECT_PLANNER_PROMPT, temperature=0.7, ) logger.info("Successfully refined project plan") return refined_plan except Exception as e: logger.error(f"Failed to refine project plan: {str(e)}") raise

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/bee4come/plan-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.