import { mcpDb } from './supabase.js';
import { rateLimitedMcpDb } from './rate-limited-client.js';
import { batchProcessors, BatchProcessor } from '../utils/batch-processor.js';
import { createLogger } from '../utils/logger.js';
import type { Database } from './types/database.js';
// Module-scoped logger; tags every message with this module's name.
const logger = createLogger({ module: 'batch-operations' });
// Type definitions for common tables, derived from the generated Database
// schema so they stay in sync with migrations.
type Tables = Database['public']['Tables'];
type UseCase = Tables['use_cases']['Row'];
type UseCaseInsert = Tables['use_cases']['Insert'];
// NOTE(review): `Projection` is currently unreferenced in this file; kept for
// symmetry with `ProjectionInsert` / future read paths.
type Projection = Tables['projections']['Row'];
type ProjectionInsert = Tables['projections']['Insert'];
/**
 * Batch operations for use cases.
 *
 * `insert` flushes accumulated rows as a single multi-row INSERT.
 * `update` runs one UPDATE per row (rows may touch different columns, so
 * they cannot be merged into a single statement); failed rows are logged
 * and excluded from the returned array rather than failing the whole batch.
 */
export const useCaseBatch = {
  // Batch insert processor: one INSERT ... RETURNING for the whole batch.
  insert: batchProcessors.createInsertProcessor<UseCaseInsert>(
    'use_cases',
    async (items) => {
      const { data, error } = await rateLimitedMcpDb
        .from('use_cases')
        .insert(items)
        .select();
      if (error) {
        logger.error('Batch insert failed for use_cases', error);
        throw error;
      }
      return data || [];
    }
  ),
  // Batch update processor: per-row updates issued in parallel through the
  // rate-limited client.
  update: batchProcessors.createUpdateProcessor<Partial<UseCase> & { id: string }>(
    'use_cases',
    async (items) => {
      const results = await Promise.all(
        items.map(item => {
          // Split the primary key from the columns being updated.
          const { id, ...fields } = item;
          return rateLimitedMcpDb
            .from('use_cases')
            .update(fields)
            .eq('id', id)
            .select()
            .single();
        })
      );
      // Log individual failures instead of silently dropping them; rows
      // that succeeded are still returned so partial progress is kept.
      const succeeded: UseCase[] = [];
      results.forEach((result, index) => {
        if (result.error) {
          logger.error('Batch update failed for use_cases', result.error);
        } else if (result.data) {
          succeeded.push(result.data as UseCase);
        }
      });
      return succeeded;
    }
  )
};
/**
 * Batch operations for projections.
 *
 * Mirrors `useCaseBatch.insert`: queued rows are flushed as one multi-row
 * INSERT; a database error is logged and rethrown to the caller.
 */
export const projectionBatch = {
  // Batch insert processor for the projections table.
  insert: batchProcessors.createInsertProcessor<ProjectionInsert>(
    'projections',
    async (items) => {
      const result = await rateLimitedMcpDb
        .from('projections')
        .insert(items)
        .select();
      if (result.error) {
        logger.error('Batch insert failed for projections', result.error);
        throw result.error;
      }
      return result.data ?? [];
    }
  )
};
/**
 * Batch fetch operations with optimal chunking.
 *
 * Splits large ID lists into bounded chunks so each query keeps its
 * `IN (...)` list small, then runs the chunked queries in parallel
 * through the rate-limited client.
 */
export class BatchFetcher {
  /**
   * Split `values` into consecutive slices of at most `size` elements.
   * Shared by both fetch helpers so chunking behavior stays consistent.
   */
  private static chunk(values: string[], size: number): string[][] {
    const chunks: string[][] = [];
    for (let i = 0; i < values.length; i += size) {
      chunks.push(values.slice(i, i + size));
    }
    return chunks;
  }

  /**
   * Fetch multiple records by IDs with optimal batching.
   *
   * @param table - Table name (a key of the generated `Tables` type).
   * @param ids - Primary-key values to look up; may be empty (returns []).
   * @param options.batchSize - Max IDs per query; default 100. `||` (not
   *   `??`) is deliberate so a zero batch size cannot cause an empty-slice
   *   infinite loop.
   * @param options.select - Column list for the select clause; default '*'.
   * @returns All matching rows; no particular order is guaranteed.
   * @throws Rethrows the first Supabase error from any chunked query.
   */
  static async fetchByIds<T extends keyof Tables>(
    table: T,
    ids: string[],
    options?: {
      batchSize?: number;
      select?: string;
    }
  ): Promise<Tables[T]['Row'][]> {
    const batchSize = options?.batchSize || 100;
    const select = options?.select || '*';
    logger.debug(`Batch fetching ${ids.length} records from ${table}`);
    // Fetch all chunks in parallel (rate limiting handled by the client)
    const results = await Promise.all(
      BatchFetcher.chunk(ids, batchSize).map(async (chunk) => {
        const { data, error } = await rateLimitedMcpDb
          .from(table)
          .select(select)
          .in('id', chunk);
        if (error) {
          logger.error(`Batch fetch failed for ${table}`, error);
          throw error;
        }
        // Double cast: a dynamic `select` string defeats the generated
        // row typing, so we assert the row shape here.
        return (data || []) as unknown as Tables[T]['Row'][];
      })
    );
    // Flatten per-chunk arrays into one result list
    return results.flat() as unknown as Tables[T]['Row'][];
  }

  /**
   * Fetch records related to a set of parents and group them by parent ID.
   *
   * @param table - Child table name.
   * @param parentField - Foreign-key column on `table` referencing the parent.
   * @param parentIds - Parent IDs to fetch children for.
   * @param options.batchSize - Max parent IDs per query; default 50.
   * @param options.select - Column list; default '*'. Must include
   *   `parentField`, since grouping reads it off each returned row.
   * @param options.orderBy - Optional column to order each chunk's rows by.
   * @returns Map of parent ID -> child rows. Parents with no children are
   *   absent from the map (not mapped to an empty array).
   * @throws Rethrows the first Supabase error from any chunked query.
   */
  static async fetchRelated<T extends keyof Tables>(
    table: T,
    parentField: string,
    parentIds: string[],
    options?: {
      batchSize?: number;
      select?: string;
      orderBy?: string;
    }
  ): Promise<Map<string, Tables[T]['Row'][]>> {
    const batchSize = options?.batchSize || 50;
    const select = options?.select || '*';
    logger.debug(`Batch fetching related records from ${table} for ${parentIds.length} parents`);
    // Fetch all chunks in parallel
    const results = await Promise.all(
      BatchFetcher.chunk(parentIds, batchSize).map(async (chunk) => {
        // `as any`: the dynamic table/column names defeat the generated
        // query typing once `.order()` is chained conditionally.
        let query = (rateLimitedMcpDb as any)
          .from(table)
          .select(select)
          .in(parentField, chunk);
        if (options?.orderBy) {
          query = query.order(options.orderBy as any);
        }
        const { data, error } = await query;
        if (error) {
          logger.error(`Batch fetch related failed for ${table}`, error);
          throw error;
        }
        return (data || []) as unknown as Tables[T]['Row'][];
      })
    );
    // Group rows by the parent ID read off each row
    const grouped = new Map<string, Tables[T]['Row'][]>();
    for (const records of results) {
      for (const record of records) {
        const parentId = (record as any)[parentField];
        if (!grouped.has(parentId)) {
          grouped.set(parentId, []);
        }
        grouped.get(parentId)!.push(record as Tables[T]['Row']);
      }
    }
    return grouped;
  }
}
/**
 * Example usage of the batch helpers defined in this module: batched
 * inserts, ID-based fetches, and parent-grouped related fetches.
 */
export async function exampleBatchOperations() {
  // Example 1: Batch insert use cases
  const sampleUseCases: UseCaseInsert[] = [
    {
      project_id: 'uuid-1',
      name: 'Use Case 1',
      category: 'automation',
      current_state: { volume_per_month: 1000, cost_per_transaction: 10 },
      future_state: { automation_percentage: 0.8, error_reduction: 0.9 },
      implementation: { complexity_score: 5, integration_points: 3 }
    },
    // ... more use cases
  ];
  // Each add() queues a row; the processor batches and flushes automatically.
  const inserted = await Promise.all(
    sampleUseCases.map(useCase => useCaseBatch.insert.add(useCase))
  );
  logger.info('Batch inserted use cases', { count: inserted.length });
  // Example 2: Batch fetch by IDs
  const projectIds = ['uuid-1', 'uuid-2', 'uuid-3'];
  const projectRows = await BatchFetcher.fetchByIds('projects', projectIds, {
    select: 'id, project_name, client_name, industry'
  });
  logger.info('Batch fetched projects', { count: projectRows.length });
  // Example 3: Batch fetch related records, grouped by parent project
  const relatedByProject = await BatchFetcher.fetchRelated(
    'use_cases',
    'project_id',
    projectIds,
    {
      orderBy: 'created_at'
    }
  );
  logger.info('Batch fetched related use cases', {
    projectCount: relatedByProject.size
  });
}