/**
 * Native concurrency control (worker-pool / sliding-window pattern).
 *
 * Runs `asyncMapper` over `items` with at most `concurrency` calls in
 * flight at once. All workers pull from one shared iterator, so each item
 * is claimed exactly once and a new task starts as soon as a slot frees.
 *
 * @param items       Input values to map over.
 * @param concurrency Maximum number of mapper calls running in parallel;
 *                    must be a positive integer.
 * @param asyncMapper Async transform applied to each `(item, index)` pair.
 * @returns Results in input order. An entry whose mapper rejected is `null`
 *          (the error is logged; remaining items still run).
 * @throws {RangeError} If `concurrency` is not a positive integer — the old
 *         behavior (zero workers, silent all-null result) hid caller bugs.
 */
export async function mapConcurrent<T, R>(
  items: T[],
  concurrency: number,
  asyncMapper: (item: T, index: number) => Promise<R>
): Promise<(R | null)[]> {
  if (!Number.isInteger(concurrency) || concurrency < 1) {
    throw new RangeError(`concurrency must be a positive integer, got ${concurrency}`);
  }
  const results: (R | null)[] = new Array<R | null>(items.length).fill(null);
  // Single shared iterator: each worker's for..of atomically claims the
  // next [index, item] pair (iterator.next() is synchronous).
  const iterator = items.entries();
  const workerCount = Math.min(concurrency, items.length);
  const workers = Array.from({ length: workerCount }, async () => {
    for (const [index, item] of iterator) {
      try {
        results[index] = await asyncMapper(item, index);
      } catch (err) {
        // One failed item must not abort the other workers.
        console.error(`Error processing item ${index}:`, err);
        results[index] = null;
      }
    }
  });
  await Promise.all(workers);
  return results;
}
/** A cached fetch together with the time it was stored. */
interface CacheEntry<T> {
  // Epoch milliseconds (Date.now()) at the moment the fetch was started.
  timestamp: number;
  // The in-flight or settled promise. Caching the promise itself (not the
  // resolved value) deduplicates concurrent fetches for the same key.
  value: Promise<T>;
}
/**
 * In-memory promise cache with TTL (time to live).
 *
 * Fresh entries are returned as-is; expired entries are evicted lazily on
 * access; rejected fetches evict themselves so a later call can retry.
 */
export class RequestCache {
  private readonly cache = new Map<string, CacheEntry<unknown>>();
  private readonly ttlMs: number;

  /** @param ttlMs How long an entry stays fresh. Defaults to 1 hour. */
  constructor(ttlMs: number = 1000 * 60 * 60) {
    this.ttlMs = ttlMs;
  }

  /**
   * Get or fetch value.
   * If `key` exists and is fresh, returns the cached promise (concurrent
   * callers share one fetch). If expired or missing, runs `fetcher`,
   * caches the resulting promise, and returns it.
   */
  getOrFetch<T>(key: string, fetcher: () => Promise<T>): Promise<T> {
    const now = Date.now();
    const existing = this.cache.get(key) as CacheEntry<T> | undefined;
    if (existing) {
      if (now - existing.timestamp < this.ttlMs) {
        return existing.value;
      }
      // Expired: evict lazily before refetching.
      this.cache.delete(key);
    }
    const promise = fetcher().catch((err: unknown) => {
      // Evict on error so a later call can retry — but only if this entry
      // is still the one stored under `key`. A newer fetch may have
      // replaced it, and a stale rejection must not clobber a fresh entry.
      if (this.cache.get(key)?.value === promise) {
        this.cache.delete(key);
      }
      throw err;
    });
    this.cache.set(key, { timestamp: now, value: promise });
    return promise;
  }

  /**
   * Clear all cache entries
   */
  clear(): void {
    this.cache.clear();
  }
}
// Global singleton cache for the server process: module-level state, so
// every importer shares the same instance (default TTL: 1 hour).
export const globalCache = new RequestCache();