cache-manager.ts (4.62 kB)
/**
 * Cache Management Functions
 * Handles cache operations for multi-file analysis
 */
import { BasePlugin } from '../../plugins/base-plugin.js';
import { IPromptPlugin } from './types.js';
import { ResponseFactory } from '../../validation/response-factory.js';
import { withSecurity } from '../../security/integration-helpers.js';
import { PromptStages } from '../../types/prompt-stages.js';

export class CacheManager {
  private static cache: Map<string, any> = new Map();

  static clear(filePath?: string): void {
    if (filePath) {
      this.cache.delete(filePath);
    } else {
      this.cache.clear();
    }
  }

  static getStatistics(): any {
    return {
      totalEntries: this.cache.size,
      memoryUsage: this.estimateMemoryUsage(),
      files: Array.from(this.cache.keys())
    };
  }

  static getCacheSize(): number {
    return this.cache.size;
  }

  static estimateMemoryUsage(): string {
    // Rough estimate: length of the serialized cache contents, reported in kilobytes
    const size = JSON.stringify(Array.from(this.cache.entries())).length;
    return `${(size / 1024).toFixed(2)} KB`;
  }
}

export class ClearCachePlugin extends BasePlugin implements IPromptPlugin {
  name = 'clear_analysis_cache';
  category = 'system' as const;
  description = 'Clear the multi-file analysis cache for a specific file or all files';

  parameters = {
    filePath: {
      type: 'string' as const,
      description: 'Optional: specific file to clear from cache',
      required: false
    }
  };

  async execute(params: any, llmClient: any) {
    return await withSecurity(this, params, llmClient, async (secureParams) => {
      // Snapshot the cache before clearing so the response can report what was actually freed
      const entriesBefore = CacheManager.getCacheSize();
      const memoryBefore = parseFloat(CacheManager.estimateMemoryUsage());

      CacheManager.clear(secureParams.filePath);

      const entriesAfter = CacheManager.getCacheSize();
      const memoryAfter = parseFloat(CacheManager.estimateMemoryUsage());

      // Use ResponseFactory for consistent, spec-compliant output
      ResponseFactory.setStartTime();
      return ResponseFactory.createSystemResponse({
        status: 'success',
        details: {
          success: true,
          message: secureParams.filePath
            ? `Cache cleared for ${secureParams.filePath}`
            : 'All cache entries cleared',
          filesCleared: entriesBefore - entriesAfter,
          memoryFreed: `${(memoryBefore - memoryAfter).toFixed(2)} KB`
        }
      });
    });
  }

  // MODERN: 3-Stage prompt architecture (system utility - no prompting needed)
  getPromptStages(params: any): PromptStages {
    return {
      systemAndContext: 'System cache management utility',
      dataPayload: 'Cache clearing operation',
      outputInstructions: 'Clear cache and return status'
    };
  }

  // LEGACY: Backwards compatibility method
  getPrompt(params: any): string {
    const stages = this.getPromptStages(params);
    return `${stages.systemAndContext}\n\n${stages.dataPayload}\n\n${stages.outputInstructions}`;
  }
}

export class CacheStatisticsPlugin extends BasePlugin implements IPromptPlugin {
  name = 'get_cache_statistics';
  category = 'system' as const;
  description = 'Get statistics about the current analysis cache';
  parameters = {};

  async execute(params: any, llmClient: any) {
    return await withSecurity(this, params, llmClient, async (secureParams) => {
      const stats = CacheManager.getStatistics();

      // Use ResponseFactory for consistent, spec-compliant output
      ResponseFactory.setStartTime();
      return ResponseFactory.createSystemResponse({
        status: 'active',
        details: {
          totalEntries: stats.totalEntries,
          memoryUsage: stats.memoryUsage,
          files: stats.files,
          // Placeholder timestamps: cache entries do not currently record when they were created
          oldestEntry: stats.files.length > 0 ? new Date().toISOString() : 'none',
          newestEntry: stats.files.length > 0 ? new Date().toISOString() : 'none',
          hitRate: 0, // Would need actual hit tracking
          statistics: {
            byType: { 'analysis': stats.totalEntries },
            bySize: { 'small': stats.totalEntries }
          }
        }
      });
    });
  }

  // MODERN: 3-Stage prompt architecture (system utility - no prompting needed)
  getPromptStages(params: any): PromptStages {
    return {
      systemAndContext: 'System cache statistics utility',
      dataPayload: 'Cache statistics request',
      outputInstructions: 'Return cache statistics and metrics'
    };
  }

  // LEGACY: Backwards compatibility method
  getPrompt(params: any): string {
    const stages = this.getPromptStages(params);
    return `${stages.systemAndContext}\n\n${stages.dataPayload}\n\n${stages.outputInstructions}`;
  }
}

export default { CacheManager, ClearCachePlugin, CacheStatisticsPlugin };
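
Both classes are exposed as MCP tools, so the usual way to exercise them is through an MCP client rather than by importing the plugin classes directly. The sketch below is illustrative only: it assumes the official @modelcontextprotocol/sdk client, and the server entry point (node dist/index.js) is an assumption, not something this file defines. Only the tool names and the optional filePath parameter are taken from the source above.

// cache-tools-demo.ts - minimal sketch, assuming the standard MCP TypeScript client SDK
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';

async function main() {
  // ASSUMPTION: the built server entry point; adjust to however this server is actually launched
  const transport = new StdioClientTransport({ command: 'node', args: ['dist/index.js'] });
  const client = new Client({ name: 'cache-demo', version: '1.0.0' }, { capabilities: {} });
  await client.connect(transport);

  // Tool names and the optional filePath parameter come from the plugin definitions above
  const stats = await client.callTool({ name: 'get_cache_statistics', arguments: {} });
  console.log('cache statistics:', JSON.stringify(stats, null, 2));

  const cleared = await client.callTool({
    name: 'clear_analysis_cache',
    arguments: { filePath: '/path/to/analyzed/file.ts' } // omit filePath to clear everything
  });
  console.log('clear result:', JSON.stringify(cleared, null, 2));

  await client.close();
}

main().catch(console.error);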

MCP directory API

We provide all the information about MCP servers via our MCP directory API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/houtini-ai/lm'
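
The same endpoint can be queried programmatically; a minimal TypeScript equivalent of the curl command above (the response schema is not documented here, so the result is simply logged):

// Fetch the houtini-ai/lm server entry from the Glama MCP directory API (Node 18+ provides global fetch)
const response = await fetch('https://glama.ai/api/mcp/v1/servers/houtini-ai/lm');
if (!response.ok) {
  throw new Error(`Directory API request failed: ${response.status} ${response.statusText}`);
}
const server = await response.json();
console.log(server);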

If you have feedback or need assistance with the MCP directory API, please join our Discord server.