/**
 * Retry logic and circuit breaker implementation
 */
import { GoogleDriveMCPError, ErrorCategory, InternalError, TimeoutError } from '../types/errors.js';
import { Logger } from '../logging/logger.js';

/**
 * Retry configuration options
 */
export interface RetryConfig {
  maxRetries: number;               // maximum number of retries after the initial attempt
  baseDelay: number;                // initial backoff delay in milliseconds
  maxDelay: number;                 // upper bound on the backoff delay in milliseconds
  backoffMultiplier: number;        // exponential growth factor between retries
  jitterFactor: number;             // fraction (0-1) of the delay added as random jitter
  retryableErrors: ErrorCategory[]; // error categories that are eligible for retry
  timeoutMs?: number;               // optional per-attempt timeout in milliseconds
}

/**
 * Circuit breaker configuration
 */
export interface CircuitBreakerConfig {
  enabled: boolean;           // disable to pass operations straight through
  failureThreshold: number;   // failures required before the circuit opens
  recoveryTimeout: number;    // milliseconds to wait before probing in HALF_OPEN
  monitoringWindow: number;   // intended rolling window in milliseconds (see note below)
  minimumRequests: number;    // requests required before the breaker can open; also the successes needed to close from HALF_OPEN
}

/**
 * Circuit breaker states
 */
export enum CircuitBreakerState {
  CLOSED = 'closed',
  OPEN = 'open',
  HALF_OPEN = 'half_open'
}
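
// State transitions implemented by CircuitBreaker below:
//   CLOSED    -> OPEN       after failureThreshold failures (with at least minimumRequests seen)
//   OPEN      -> HALF_OPEN  once recoveryTimeout has elapsed since the last failure
//   HALF_OPEN -> CLOSED     after minimumRequests consecutive successes
//   HALF_OPEN -> OPEN       on any failure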

/**
 * Default retry configuration
 */
export const DEFAULT_RETRY_CONFIG: RetryConfig = {
  maxRetries: 3,
  baseDelay: 1000,
  maxDelay: 30000,
  backoffMultiplier: 2,
  jitterFactor: 0.1,
  retryableErrors: [
    ErrorCategory.NETWORK,
    ErrorCategory.API_ERROR,
    ErrorCategory.RATE_LIMIT,
    ErrorCategory.CACHE
  ],
  timeoutMs: 60000
};
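
// With these defaults, calculateDelay() below produces roughly:
//   retry 1: 1000ms, retry 2: 2000ms, retry 3: 4000ms,
// each plus up to 10% random jitter and capped at maxDelay (30s).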

/**
 * Default circuit breaker configuration
 */
export const DEFAULT_CIRCUIT_BREAKER_CONFIG: CircuitBreakerConfig = {
  enabled: true,
  failureThreshold: 5,
  recoveryTimeout: 60000,   // 1 minute
  monitoringWindow: 300000, // 5 minutes
  minimumRequests: 3
};
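
// Note: monitoringWindow is part of the config surface, but the CircuitBreaker
// implementation below does not currently evaluate failures against a rolling
// window; it counts failures until a success or a reset clears them.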

/**
 * Retry handler with exponential backoff and jitter
 */
export class RetryHandler {
  private config: RetryConfig;
  private logger: Logger;

  constructor(config: Partial<RetryConfig> = {}, logger?: Logger) {
    this.config = { ...DEFAULT_RETRY_CONFIG, ...config };
    this.logger = logger || new Logger();
  }

  /**
   * Execute operation with retry logic
   */
  async executeWithRetry<T>(
    operation: () => Promise<T>,
    context?: Record<string, any>
  ): Promise<T> {
    let lastError: GoogleDriveMCPError | undefined;
    let attempt = 0;

    while (attempt <= this.config.maxRetries) {
      try {
        // Add timeout if configured
        if (this.config.timeoutMs) {
          return await this.withTimeout(operation(), this.config.timeoutMs);
        }
        return await operation();
      } catch (error) {
        attempt++;
        lastError = error instanceof GoogleDriveMCPError
          ? error
          : new InternalError(
              error instanceof Error ? error.message : String(error),
              context
            );

        // Stop if the error is not retryable or retries are exhausted
        if (!this.isRetryable(lastError) || attempt > this.config.maxRetries) {
          this.logger.error(`Operation failed after ${attempt} attempts`, lastError, {
            context,
            finalAttempt: true
          });
          throw lastError;
        }

        // Calculate delay with exponential backoff and jitter
        const delay = this.calculateDelay(attempt);
        this.logger.warn(`Operation failed, retrying in ${delay}ms`, {
          attempt,
          maxRetries: this.config.maxRetries,
          context,
          delay,
          error: lastError.toJSON()
        });
        await this.sleep(delay);
      }
    }

    // Unreachable: every path above either returns or throws, but TypeScript
    // cannot prove it, so rethrow the last error to satisfy the return type.
    throw lastError!;
  }

  /**
   * Check if error is retryable based on configuration
   */
  private isRetryable(error: GoogleDriveMCPError): boolean {
    return error.retryable && this.config.retryableErrors.includes(error.category);
  }

  /**
   * Calculate delay with exponential backoff and jitter
   */
  private calculateDelay(attempt: number): number {
    // Exponential backoff: baseDelay * (backoffMultiplier ^ (attempt - 1))
    const exponentialDelay = this.config.baseDelay * Math.pow(this.config.backoffMultiplier, attempt - 1);
    // Apply maximum delay limit
    const cappedDelay = Math.min(exponentialDelay, this.config.maxDelay);
    // Add jitter to prevent thundering herd
    const jitter = cappedDelay * this.config.jitterFactor * Math.random();
    return Math.floor(cappedDelay + jitter);
  }

  /**
   * Add timeout to operation
   */
  private async withTimeout<T>(promise: Promise<T>, timeoutMs: number): Promise<T> {
    let timer: ReturnType<typeof setTimeout> | undefined;
    const timeoutPromise = new Promise<never>((_, reject) => {
      timer = setTimeout(() => {
        reject(new TimeoutError(`Operation timed out after ${timeoutMs}ms`));
      }, timeoutMs);
    });
    try {
      return await Promise.race([promise, timeoutPromise]);
    } finally {
      // Clear the timer so a completed operation does not leave a pending timeout
      if (timer) {
        clearTimeout(timer);
      }
    }
  }

  /**
   * Sleep for specified milliseconds
   */
  private sleep(ms: number): Promise<void> {
    return new Promise(resolve => setTimeout(resolve, ms));
  }
}
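
/*
 * Example usage (sketch; `driveClient.getFile` is a hypothetical operation,
 * not part of this module):
 *
 *   const retryHandler = new RetryHandler({ maxRetries: 5, baseDelay: 500 });
 *   const file = await retryHandler.executeWithRetry(
 *     () => driveClient.getFile(fileId),
 *     { operation: 'getFile', fileId }
 *   );
 */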

/**
 * Circuit breaker implementation
 */
export class CircuitBreaker {
  private config: CircuitBreakerConfig;
  private state: CircuitBreakerState = CircuitBreakerState.CLOSED;
  private failureCount = 0;
  private lastFailureTime = 0;
  private successCount = 0;
  private requestCount = 0;
  private logger: Logger;

  constructor(config: Partial<CircuitBreakerConfig> = {}, logger?: Logger) {
    this.config = { ...DEFAULT_CIRCUIT_BREAKER_CONFIG, ...config };
    this.logger = logger || new Logger();
  }

  /**
   * Execute operation with circuit breaker protection
   */
  async execute<T>(
    operation: () => Promise<T>,
    context?: Record<string, any>
  ): Promise<T> {
    if (!this.config.enabled) {
      return operation();
    }

    // Check circuit breaker state
    if (this.state === CircuitBreakerState.OPEN) {
      if (this.shouldAttemptReset()) {
        this.state = CircuitBreakerState.HALF_OPEN;
        this.logger.info('Circuit breaker transitioning to HALF_OPEN', { context });
      } else {
        const error = new InternalError(
          'Circuit breaker is OPEN - operation blocked to prevent cascade failures',
          { ...context, circuitBreakerState: this.state }
        );
        this.logger.warn('Circuit breaker blocked operation', {
          error: error.toJSON(),
          context
        });
        throw error;
      }
    }

    try {
      const result = await operation();
      this.onSuccess(context);
      return result;
    } catch (error) {
      this.onFailure(error, context);
      throw error;
    }
  }

  /**
   * Handle successful operation
   */
  private onSuccess(context?: Record<string, any>): void {
    this.requestCount++;
    if (this.state === CircuitBreakerState.HALF_OPEN) {
      this.successCount++;
      // If we have enough successful requests, close the circuit
      if (this.successCount >= this.config.minimumRequests) {
        this.reset();
        this.logger.info('Circuit breaker reset to CLOSED after successful recovery', { context });
      }
    } else if (this.state === CircuitBreakerState.CLOSED) {
      // Reset failure count on success
      this.failureCount = 0;
    }
  }

  /**
   * Handle failed operation
   */
  private onFailure(error: unknown, context?: Record<string, any>): void {
    this.requestCount++;
    this.failureCount++;
    this.lastFailureTime = Date.now();

    const shouldTrigger = this.shouldTriggerCircuitBreaker(error);
    this.logger.warn('Circuit breaker recorded failure', {
      failureCount: this.failureCount,
      threshold: this.config.failureThreshold,
      shouldTrigger,
      state: this.state,
      context
    });

    if (shouldTrigger && this.state === CircuitBreakerState.CLOSED) {
      this.state = CircuitBreakerState.OPEN;
      this.logger.error('Circuit breaker opened due to repeated failures', undefined, {
        failureCount: this.failureCount,
        threshold: this.config.failureThreshold,
        ...context
      });
    } else if (this.state === CircuitBreakerState.HALF_OPEN) {
      // If we fail in half-open state, go back to open
      this.state = CircuitBreakerState.OPEN;
      this.successCount = 0;
      this.logger.warn('Circuit breaker returned to OPEN from HALF_OPEN after failure', { context });
    }
  }

  /**
   * Check if circuit breaker should be triggered
   */
  private shouldTriggerCircuitBreaker(error: unknown): boolean {
    // Only trigger for certain types of errors
    if (error instanceof GoogleDriveMCPError) {
      const triggerCategories = [
        ErrorCategory.API_ERROR,
        ErrorCategory.NETWORK,
        ErrorCategory.RATE_LIMIT,
        ErrorCategory.QUOTA
      ];
      if (!triggerCategories.includes(error.category)) {
        return false;
      }
    }
    // Check if we have enough requests and failures
    return this.requestCount >= this.config.minimumRequests &&
      this.failureCount >= this.config.failureThreshold;
  }

  /**
   * Check if we should attempt to reset the circuit breaker
   */
  private shouldAttemptReset(): boolean {
    return Date.now() - this.lastFailureTime >= this.config.recoveryTimeout;
  }

  /**
   * Reset circuit breaker to closed state
   */
  private reset(): void {
    this.state = CircuitBreakerState.CLOSED;
    this.failureCount = 0;
    this.successCount = 0;
    this.requestCount = 0;
    this.lastFailureTime = 0;
  }

  /**
   * Get current circuit breaker status
   */
  getStatus(): {
    state: CircuitBreakerState;
    failureCount: number;
    requestCount: number;
    lastFailureTime: number;
  } {
    return {
      state: this.state,
      failureCount: this.failureCount,
      requestCount: this.requestCount,
      lastFailureTime: this.lastFailureTime
    };
  }

  /**
   * Manually reset circuit breaker (for testing or admin operations)
   */
  manualReset(): void {
    this.reset();
    this.logger.info('Circuit breaker manually reset');
  }
}
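
/*
 * Example usage (sketch; `fetchMetadata` is a hypothetical async operation):
 *
 *   const breaker = new CircuitBreaker({ failureThreshold: 3 });
 *   try {
 *     const metadata = await breaker.execute(() => fetchMetadata(fileId), { fileId });
 *   } catch (error) {
 *     // While OPEN, execute() rejects immediately with an InternalError
 *     // instead of invoking the operation.
 *   }
 *   breaker.getStatus(); // { state, failureCount, requestCount, lastFailureTime }
 */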

/**
 * Combined retry handler with circuit breaker
 */
export class ResilientExecutor {
  private retryHandler: RetryHandler;
  private circuitBreaker: CircuitBreaker;
  private logger: Logger;

  constructor(
    retryConfig: Partial<RetryConfig> = {},
    circuitBreakerConfig: Partial<CircuitBreakerConfig> = {},
    logger?: Logger
  ) {
    this.logger = logger || new Logger();
    this.retryHandler = new RetryHandler(retryConfig, this.logger);
    this.circuitBreaker = new CircuitBreaker(circuitBreakerConfig, this.logger);
  }

  /**
   * Execute operation with both retry logic and circuit breaker protection.
   * The circuit breaker wraps the retry handler, so an operation that
   * exhausts its retries counts as a single failure against the breaker.
   */
  async execute<T>(
    operation: () => Promise<T>,
    context?: Record<string, any>
  ): Promise<T> {
    return this.circuitBreaker.execute(async () => {
      return this.retryHandler.executeWithRetry(operation, context);
    }, context);
  }

  /**
   * Get status of both retry handler and circuit breaker
   */
  getStatus(): {
    circuitBreaker: ReturnType<CircuitBreaker['getStatus']>;
    retryConfig: RetryConfig;
  } {
    return {
      circuitBreaker: this.circuitBreaker.getStatus(),
      // Bracket access deliberately reads the handler's private config
      retryConfig: this.retryHandler['config']
    };
  }

  /**
   * Reset circuit breaker
   */
  resetCircuitBreaker(): void {
    this.circuitBreaker.manualReset();
  }
}
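
/*
 * Example: an executor with tighter settings for latency-sensitive calls
 * (sketch; the values are illustrative, not recommendations):
 *
 *   const executor = new ResilientExecutor(
 *     { maxRetries: 2, baseDelay: 250, timeoutMs: 10000 },
 *     { failureThreshold: 3, recoveryTimeout: 30000 }
 *   );
 */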
/**
* Global resilient executor instance
*/
export const globalResilientExecutor = new ResilientExecutor();
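
/*
 * Example usage of the shared instance (sketch; `listFiles` is a hypothetical
 * async operation):
 *
 *   const files = await globalResilientExecutor.execute(
 *     () => listFiles({ pageSize: 100 }),
 *     { operation: 'listFiles' }
 *   );
 */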