mcp-chain-of-thought

listTasks.ts (2.85 kB)
```typescript
/**
 * listTasks prompt generator
 * Responsible for combining templates and parameters into the final prompt
 */
import {
  loadPrompt,
  generatePrompt,
  loadPromptFromTemplate,
} from "../loader.js";
import { Task, TaskStatus } from "../../types/index.js";

/**
 * listTasks prompt parameter interface
 */
export interface ListTasksPromptParams {
  status: string;
  tasks: Record<string, Task[]>;
  allTasks: Task[];
}

/**
 * Get the complete listTasks prompt
 * @param params prompt parameters
 * @returns generated prompt
 */
export function getListTasksPrompt(params: ListTasksPromptParams): string {
  const { status, tasks, allTasks } = params;

  // If no tasks exist, show notification
  if (allTasks.length === 0) {
    const notFoundTemplate = loadPromptFromTemplate("listTasks/notFound.md");
    const statusText = status === "all" ? "any" : `any ${status}`;
    return generatePrompt(notFoundTemplate, {
      statusText: statusText,
    });
  }

  // Get counts for all statuses
  const statusCounts = Object.values(TaskStatus)
    .map((statusType) => {
      const count = tasks[statusType]?.length || 0;
      return `- **${statusType}**: ${count} tasks`;
    })
    .join("\n");

  let filterStatus = "all";
  switch (status) {
    case "pending":
      filterStatus = TaskStatus.PENDING;
      break;
    case "in_progress":
      filterStatus = TaskStatus.IN_PROGRESS;
      break;
    case "completed":
      filterStatus = TaskStatus.COMPLETED;
      break;
  }

  let taskDetails = "";
  const taskDetailsTemplate = loadPromptFromTemplate("listTasks/taskDetails.md");

  // Add detailed tasks for each status
  for (const statusType of Object.values(TaskStatus)) {
    const tasksWithStatus = tasks[statusType] || [];
    if (
      tasksWithStatus.length > 0 &&
      (filterStatus === "all" || filterStatus === statusType)
    ) {
      for (const task of tasksWithStatus) {
        let dependencies = "no dependencies";
        if (task.dependencies && task.dependencies.length > 0) {
          dependencies = task.dependencies
            .map((d) => `\`${d.taskId}\``)
            .join(", ");
        }
        taskDetails += generatePrompt(taskDetailsTemplate, {
          name: task.name,
          id: task.id,
          description: task.description,
          createAt: task.createdAt,
          complatedSummary:
            (task.summary || "").substring(0, 100) +
            ((task.summary || "").length > 100 ? "..." : ""),
          dependencies: dependencies,
          complatedAt: task.completedAt,
        });
      }
    }
  }

  const indexTemplate = loadPromptFromTemplate("listTasks/index.md");
  const prompt = generatePrompt(indexTemplate, {
    statusCount: statusCounts,
    taskDetailsTemplate: taskDetails,
  });

  // Load possible custom prompt
  return loadPrompt(prompt, "LIST_TASKS");
}
```
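For context, a caller might use the generator roughly as sketched below. This is a minimal, illustrative sketch only: the exact `Task` shape lives in `../../types/index.js`, so the example object's fields (and the cast) are assumptions based on how `getListTasksPrompt` reads them, not the project's definitive API.

```typescript
// Hypothetical caller of getListTasksPrompt; field names on the task object
// are assumptions inferred from how the generator accesses them.
import { getListTasksPrompt } from "./listTasks.js";
import { Task, TaskStatus } from "../../types/index.js";

const pendingTask = {
  id: "task-001",
  name: "Draft the API schema",
  description: "Outline request/response types for the task endpoints",
  status: TaskStatus.PENDING,
  dependencies: [],
  createdAt: new Date(),
} as unknown as Task; // cast because this illustrative object may omit optional fields

// Tasks are passed both grouped by status and as a flat list.
const prompt = getListTasksPrompt({
  status: "pending",
  tasks: { [TaskStatus.PENDING]: [pendingTask] },
  allTasks: [pendingTask],
});

console.log(prompt);
```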

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/liorfranko/mcp-chain-of-thought'
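The same lookup can be done programmatically. Below is a minimal TypeScript sketch using the built-in `fetch` API; the response shape is not documented here, so the result is treated as untyped JSON rather than parsed into a specific interface.

```typescript
// Fetch the directory entry for this server; the JSON structure is an assumption.
async function getServerInfo(): Promise<unknown> {
  const url =
    "https://glama.ai/api/mcp/v1/servers/liorfranko/mcp-chain-of-thought";
  const response = await fetch(url);
  if (!response.ok) {
    throw new Error(`Request failed with status ${response.status}`);
  }
  return response.json();
}

getServerInfo().then((server) => console.log(server));
```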

If you have feedback or need assistance with the MCP directory API, please join our Discord server.