processThought.ts (1.28 kB)
import {
  loadPrompt,
  generatePrompt,
  loadPromptFromTemplate,
} from "../loader.js";

export interface ProcessThoughtPromptParams {
  thought: string;
  thoughtNumber: number;
  totalThoughts: number;
  nextThoughtNeeded: boolean;
  stage: string;
  tags: string[];
  axioms_used: string[];
  assumptions_challenged: string[];
}

export async function getProcessThoughtPrompt(
  param: ProcessThoughtPromptParams
): Promise<string> {
  // Pick the closing fragment depending on whether another thought should follow.
  let nextThoughtNeeded = "";
  if (param.nextThoughtNeeded) {
    nextThoughtNeeded = await loadPromptFromTemplate(
      "processThought/moreThought.md"
    );
  } else {
    nextThoughtNeeded = await loadPromptFromTemplate(
      "processThought/complatedThought.md"
    );
  }

  // Fill the index template with the thought metadata; empty lists fall back
  // to readable placeholders.
  const indexTemplate = await loadPromptFromTemplate("processThought/index.md");
  const prompt = generatePrompt(indexTemplate, {
    thought: param.thought,
    thoughtNumber: param.thoughtNumber,
    totalThoughts: param.totalThoughts,
    stage: param.stage,
    tags: param.tags.join(", ") || "no tags",
    axioms_used: param.axioms_used.join(", ") || "no axioms used",
    assumptions_challenged:
      param.assumptions_challenged.join(", ") || "no assumptions challenged",
    nextThoughtNeeded,
  });

  return loadPrompt(prompt, "PROCESS_THOUGHT");
}
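For context, here is a minimal usage sketch of this generator. It is not taken from the repository: the import path, calling module, and all parameter values are hypothetical, and it assumes the template files referenced above are resolvable by the loader.

// Hypothetical caller (not part of the repository); the import path is assumed.
import { getProcessThoughtPrompt } from "./processThought.js";

async function main(): Promise<void> {
  // Illustrative values for an intermediate thought in a five-step chain.
  const prompt = await getProcessThoughtPrompt({
    thought: "Break the feature into independently testable tasks.",
    thoughtNumber: 2,
    totalThoughts: 5,
    nextThoughtNeeded: true, // selects processThought/moreThought.md
    stage: "analysis",
    tags: ["planning"],
    axioms_used: [],
    assumptions_challenged: ["all tasks fit in one sprint"],
  });
  console.log(prompt);
}

main().catch(console.error);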


MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/cjo4m06/mcp-shrimp-task-manager'
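
The same endpoint can be queried programmatically. A sketch in TypeScript, assuming a Node.js 18+ ES module environment with the global fetch API; the response is assumed to be JSON, and its exact shape is not documented here.

// Fetch the directory entry for this server and print the parsed JSON body.
const res = await fetch(
  "https://glama.ai/api/mcp/v1/servers/cjo4m06/mcp-shrimp-task-manager"
);
if (!res.ok) {
  throw new Error(`Request failed with status ${res.status}`);
}
const server = await res.json();
console.log(server);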

If you have feedback or need assistance with the MCP directory API, please join our Discord server.