refinePrompt.ts
/**
 * Refine Prompt Tool
 * Iteratively improves existing prompts based on feedback
 * Uses PromptArchitect API with local fallback
 */
import { z } from 'zod';
import {
  logger,
  apiRefinePrompt,
  isApiClientAvailable,
} from '../utils/index.js';

export const refinePromptSchema = z.object({
  prompt: z.string().min(1).describe('The current prompt to refine'),
  feedback: z.string().min(1).describe('What should be improved or changed'),
  preserveStructure: z.boolean().optional().default(true).describe('Whether to preserve the original structure'),
  targetModel: z.enum(['gpt-4', 'claude', 'gemini', 'general'])
    .optional()
    .default('general')
    .describe('Target AI model to optimize for'),
  workspaceContext: z.string().optional().describe('Project context including file structure, tech stack, dependencies, and any relevant code snippets to ensure the refined prompt aligns with the project scope'),
});

export type RefinePromptInput = z.infer<typeof refinePromptSchema>;

export async function refinePrompt(input: RefinePromptInput): Promise<{
  refinedPrompt: string;
  changes: string[];
  metadata: {
    originalWordCount: number;
    refinedWordCount: number;
    structurePreserved: boolean;
  };
}> {
  const { prompt, feedback, preserveStructure = true, targetModel, workspaceContext } = input;

  logger.info('Refining prompt', {
    promptLength: prompt.length,
    feedbackLength: feedback.length,
    preserveStructure,
    targetModel,
    hasWorkspaceContext: !!workspaceContext,
  });

  let refinedPrompt: string = '';
  let changes: string[] = [];

  // Use PromptArchitect API when a client is available
  if (isApiClientAvailable()) {
    try {
      const response = await apiRefinePrompt({
        prompt,
        feedback,
        preserveStructure,
        targetModel,
        workspaceContext,
      });
      refinedPrompt = response.refinedPrompt;
      changes = response.changes || ['Prompt refined based on feedback'];
      logger.info('Refined via PromptArchitect API');
    } catch (error) {
      logger.warn('API request failed, using fallback', {
        error: error instanceof Error ? error.message : 'Unknown error',
      });
    }
  }

  // Fallback: basic local refinement
  if (!refinedPrompt) {
    refinedPrompt = applyBasicRefinements(prompt, feedback);
    changes = ['Applied basic refinements'];
    logger.warn('Using fallback refinement');
  }

  // Calculate metadata
  const originalWordCount = prompt.split(/\s+/).length;
  const refinedWordCount = refinedPrompt.split(/\s+/).length;
  const originalHasStructure = /^#+\s|^\d+\.|^-\s|^\*\s/m.test(prompt);
  const refinedHasStructure = /^#+\s|^\d+\.|^-\s|^\*\s/m.test(refinedPrompt);
  const structurePreserved = !originalHasStructure || refinedHasStructure;

  return {
    refinedPrompt,
    changes,
    metadata: {
      originalWordCount,
      refinedWordCount,
      structurePreserved,
    },
  };
}

function applyBasicRefinements(prompt: string, feedback: string): string {
  let refined = prompt;
  const feedbackLower = feedback.toLowerCase();

  // Basic transformations based on common feedback
  if (feedbackLower.includes('more specific') || feedbackLower.includes('more detail')) {
    refined += '\n\n## Additional Requirements\n- Provide specific, detailed information\n- Include concrete examples where applicable';
  }

  if (feedbackLower.includes('shorter') || feedbackLower.includes('concise')) {
    // Try to simplify by collapsing extra blank lines
    refined = refined.replace(/\n\n+/g, '\n\n');
  }

  if (feedbackLower.includes('structure') || feedbackLower.includes('organize')) {
    if (!/^#+\s/m.test(refined)) {
      refined = '## Task\n' + refined + '\n\n## Output\nProvide a well-structured response.';
    }
  }

  if (feedbackLower.includes('example')) {
    refined += '\n\n## Example\nProvide a relevant example in your response.';
  }

  return refined;
}

export default refinePrompt;
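For context, a minimal sketch of calling the tool directly follows. The prompt and feedback values are illustrative, and the relative import path is an assumption about where this file sits in the repository; validating with refinePromptSchema.parse fills in the defaults (preserveStructure: true, targetModel: 'general') before the call.

// Usage sketch (illustrative values; import path is an assumption)
import { refinePrompt, refinePromptSchema } from './refinePrompt.js';

async function demo() {
  // Validate raw input against the zod schema so defaults are applied
  const input = refinePromptSchema.parse({
    prompt: 'Write a summary of the attached document.',
    feedback: 'Make it more specific and add an example.',
    targetModel: 'claude',
  });

  const result = await refinePrompt(input);
  console.log(result.refinedPrompt);
  console.log(result.changes);   // e.g. ['Applied basic refinements'] when the API is unavailable
  console.log(result.metadata);  // word counts and whether structure was preserved
}

demo().catch(console.error);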
