Skip to main content
Glama
cache.ts (4.98 kB)
import { getConfig } from './config.js';
import { logger } from './logger.js';

/**
 * One cache slot. `prev`/`next` hold neighbor KEYS, threading an intrusive
 * doubly-linked recency list through the entries themselves.
 */
interface CacheEntry<T> {
  data: T;
  expiresAt: number; // epoch ms after which the entry is considered stale
  prev?: string; // key of the more-recently-used neighbor
  next?: string; // key of the less-recently-used neighbor
}

/**
 * TTL + LRU cache keyed by string. `head` is the most recently used key,
 * `tail` the least. Capacity is bounded by `maxSize` (LRU eviction); a
 * periodic sweep drops expired entries. All behavior is gated on
 * `getConfig().enableCache`.
 */
class LRUCache {
  private cache = new Map<string, CacheEntry<unknown>>();
  private head: string | null = null; // most recently used key
  private tail: string | null = null; // least recently used key
  private maxSize = 1000;
  private cleanupInterval: NodeJS.Timeout | null = null;

  constructor() {
    const cleanupMs = getConfig().cacheCleanupIntervalMs ?? 60000;
    this.cleanupInterval = setInterval(() => this.cleanup(), cleanupMs);
    // Don't let the sweep timer alone keep the Node process alive.
    this.cleanupInterval.unref?.();
  }

  /** Detach `key` from the recency list, repairing neighbors and head/tail. */
  private unlink(key: string): void {
    const entry = this.cache.get(key);
    if (!entry) return;
    if (entry.prev) {
      const prev = this.cache.get(entry.prev);
      if (prev) prev.next = entry.next;
    }
    if (entry.next) {
      const next = this.cache.get(entry.next);
      if (next) next.prev = entry.prev;
    }
    if (this.head === key) this.head = entry.next ?? null;
    if (this.tail === key) this.tail = entry.prev ?? null;
    entry.prev = undefined;
    entry.next = undefined;
  }

  /** Insert an already-detached `key` at the head (most recently used). */
  private linkAtHead(key: string): void {
    const entry = this.cache.get(key);
    if (!entry) return;
    entry.prev = undefined;
    entry.next = this.head ?? undefined;
    if (this.head) {
      const oldHead = this.cache.get(this.head);
      if (oldHead) oldHead.prev = key;
    }
    this.head = key;
    if (!this.tail) this.tail = key;
  }

  /** Promote `key` to most-recently-used. No-op if already at head. */
  private moveToHead(key: string): void {
    if (this.head === key) return;
    this.unlink(key);
    this.linkAtHead(key);
  }

  /** Drop the least-recently-used entry, if any. */
  private evictLRU(): void {
    if (!this.tail) return;
    const evicted = this.tail;
    this.unlink(evicted);
    this.cache.delete(evicted);
  }

  /**
   * Return the cached value for `key`, or undefined on miss, expiry, or when
   * caching is disabled. A hit promotes the entry to most-recently-used.
   */
  get<T>(key: string): T | undefined {
    const config = getConfig();
    if (!config.enableCache) return undefined;
    const entry = this.cache.get(key);
    if (!entry) return undefined;
    if (Date.now() > entry.expiresAt) {
      // Lazily expire stale entries on read.
      this.invalidate(key);
      return undefined;
    }
    this.moveToHead(key);
    logger.debug('Cache hit', { key });
    return entry.data as T;
  }

  /**
   * Store `data` under `key` with a TTL in seconds (defaults to the
   * configured TTL). Overwriting an existing key refreshes its recency.
   */
  set<T>(key: string, data: T, ttlSeconds?: number): void {
    const config = getConfig();
    if (!config.enableCache) return;
    // NOTE(review): assumes config.cacheTtlSeconds is defined whenever
    // ttlSeconds is omitted — an undefined TTL would yield a NaN expiry
    // (entry never expires). Confirm against the config schema.
    const ttl = ttlSeconds ?? config.cacheTtlSeconds;
    if (this.cache.has(key)) {
      // BUG FIX: previously an existing key was re-linked at the head
      // WITHOUT being detached from its old position, leaving its former
      // neighbors pointing at it and corrupting the recency list. Remove
      // the old entry first so insertion starts from a clean state.
      this.unlink(key);
      this.cache.delete(key);
    } else if (this.cache.size >= this.maxSize) {
      // Only evict when an actual insertion would exceed capacity
      // (overwrites don't grow the map).
      this.evictLRU();
    }
    this.cache.set(key, { data, expiresAt: Date.now() + ttl * 1000 });
    this.linkAtHead(key);
    logger.debug('Cache set', { key, ttlSeconds: ttl });
  }

  /** Remove `key` from the cache and the recency list. No-op if absent. */
  invalidate(key: string): void {
    if (!this.cache.has(key)) return;
    this.unlink(key);
    this.cache.delete(key);
  }

  /** Remove every entry whose key starts with `prefix`. */
  invalidatePrefix(prefix: string): void {
    for (const key of this.cache.keys()) {
      if (key.startsWith(prefix)) this.invalidate(key);
    }
  }

  /** Drop all entries and reset the recency list. */
  clear(): void {
    this.cache.clear();
    this.head = null;
    this.tail = null;
  }

  /** Periodic sweep: evict every expired entry. Deleting during Map iteration is safe in JS. */
  private cleanup(): void {
    const now = Date.now();
    for (const [key, entry] of this.cache.entries()) {
      if (now > entry.expiresAt) this.invalidate(key);
    }
  }

  /** Introspection helper: current entry count and all keys. */
  getStats(): { size: number; keys: string[] } {
    return { size: this.cache.size, keys: Array.from(this.cache.keys()) };
  }

  /** Stop the background sweep timer (for shutdown/tests). */
  destroy(): void {
    if (this.cleanupInterval) {
      clearInterval(this.cleanupInterval);
      this.cleanupInterval = null;
    }
  }
}

/** Process-wide cache singleton. */
export const cache = new LRUCache();

/** Canonical cache-key builders, so callers and invalidators agree on names. */
export const CacheKeys = {
  subscriptions: () => 'azure:subscriptions',
  resourceGroups: (subId?: string) => (subId ? `azure:rg:${subId}` : 'azure:rg:default'),
  resources: (subId?: string, rg?: string) => ['azure:res', subId, rg].filter(Boolean).join(':'),
  service: (type: string, ...args: string[]) => ['azure:svc', type, ...args].filter(Boolean).join(':'),
};

/**
 * Read-through helper: return the cached value for `key`, or run `fetcher`,
 * cache its result under `key` (with optional TTL in seconds), and return it.
 * NOTE: a cached value of `undefined` is indistinguishable from a miss and
 * will re-fetch.
 */
export async function withCache<T>(key: string, fetcher: () => Promise<T>, ttl?: number): Promise<T> {
  const cached = cache.get<T>(key);
  if (cached !== undefined) return cached;
  const data = await fetcher();
  cache.set(key, data, ttl);
  return data;
}

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/vedantparmar12/Azure-_MCP'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.