#!/usr/bin/env node
/**
* NDB MCP Server - Main Entry Point
*
* A Model Context Protocol server for Nutanix Database Service (NDB)
* that enables Claude Desktop and other MCP-compatible LLMs to manage
* databases, clones, snapshots, and infrastructure through natural language.
*/
// Load environment variables from .env file
import 'dotenv/config';
// import "mcps-logger/console"; // Uncomment if you want to use console logging
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import {
InitializeRequestSchema,
CallToolRequestSchema,
ErrorCode,
ListToolsRequestSchema,
McpError,
} from '@modelcontextprotocol/sdk/types.js';
import { NDBClient, createNDBClient } from './ndb_client.js';
import { advancedFilter, validateAndCollectMissingParams } from './utils.js';
import { tools } from './tools.js';
import { ProvisionDatabaseRequestSchema, RestoreDatabaseRequestSchema, UpdateTimeMachineRequestSchema, CreateCloneRequestSchema, CreateDbserverRequestSchema } from './schemas.js';
import type { ToolCallArgs } from './types.js';
import {
loadCustomInstructions,
generateSystemPrompt
} from './custom_instructions.js';
// Create the MCP server
const server = new Server(
{
name: 'ndb-mcp-server',
version: '1.4.1', // Updated version with custom instructions support
}
);
// Initialize NDB client and custom instructions
let ndbClient: NDBClient;
let customInstructionsConfig: any;
try {
ndbClient = createNDBClient();
customInstructionsConfig = loadCustomInstructions();
console.error('✅ NDB MCP Server initialized successfully');
if (customInstructionsConfig.instructions.length > 0) {
console.error(`📋 Loaded ${customInstructionsConfig.instructions.length} custom instructions`);
}
} catch (error) {
console.error('❌ Failed to initialize NDB MCP Server:', error);
process.exit(1);
}
server.setRequestHandler(InitializeRequestSchema, async (request) => {
return {
protocolVersion: "2024-11-05",
capabilities: {
tools: {},
resources: {},
prompts: {}
},
serverInfo: {
name: "ndb-mcp-server",
version: "1.4.1",
description: `NDB MCP Server for managing Nutanix Database Service (NDB) with custom instructions support.
It provides multiple tools to manage databases, clones, snapshots, and infrastructure through natural language commands.
Custom Instructions Support:
- Supports custom behavioral instructions via environment variables
Hints:
- Most requests for database information go through either list_databases or list_dbservers; the latter can also report the databases running on a specific dbserver together with related information such as the cluster.
- Use the most specific attributes first to reduce the result set.
- Combine multiple filters in the same query to narrow down the results.
- If a request or action fails, never fall back to a more generic tool; return the error message to the user and ask whether they want to fix the request or retry (for instance, if it was a transient network issue).
- This MCP server does not provide tools to configure NDB itself, such as creating users or configuring profiles. It is focused on database management, clones, snapshots, and infrastructure operations. Configure NDB using the official NDB UI or CLI.
- Avoid any request involving mass actions (creating or deleting several databases, servers, clones, etc.) to prevent accidental data loss or service disruption.
- When displaying information about any entity (databases, clones, snapshots, etc.), always include the entity name for easy identification (avoid showing only the entity ID, as it may not be meaningful).
- Time values are GMT unless specified otherwise in the value. Convert them to the local time zone before displaying.
Instructions:
- Always review the available tools and use the most efficient one(s), i.e. those that can retrieve the right information with the minimum effort (time and resources such as tokens).
- **CRITICAL**: When an error occurs, always return to the user and ask for instructions; never try to solve it on your own, since the problem might be caused by a recent change in the API that you cannot fix alone.
- If you need to make changes to the request, explain the changes to the user and ask for confirmation before proceeding.
- Follow any custom instructions provided in the systemPrompt field of responses.
`,
}
};
});
// List tools handler
server.setRequestHandler(ListToolsRequestSchema, async () => {
return { tools };
});
// Call tool handler
server.setRequestHandler(CallToolRequestSchema, async (request) => {
const { name, arguments: args } = request.params;
try {
// Execute the tool and get its response
const result = await handleToolCall(name, args as ToolCallArgs);
// Return the tool result with system prompt
return {
content: [{
type: "text",
text: JSON.stringify({
...result,
systemPrompt: generateSystemPrompt(customInstructionsConfig)
}, null, 2)
}]
};
} catch (error) {
if (error instanceof McpError) {
throw error;
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
const statusCode = (error as any).response?.status;
const responseData = (error as any).response?.data;
let detailedError = `NDB API Error: ${errorMessage}`;
if (statusCode) {
detailedError += ` (HTTP ${statusCode})`;
}
if (responseData) {
detailedError += `\nResponse: ${JSON.stringify(responseData, null, 2)}`;
}
// Return error with system prompt
return {
content: [{
type: "text",
text: JSON.stringify({
status: 'error',
message: errorMessage,
details: detailedError,
timestamp: new Date().toISOString(),
systemPrompt: generateSystemPrompt(customInstructionsConfig)
}, null, 2)
}]
};
}
});
/**
* Handle tool calls by routing to appropriate handlers
*/
async function handleToolCall(name: string, args: ToolCallArgs): Promise<any> {
switch (name) {
// Database Management
case 'list_databases':
return handleListDatabases(args);
case 'get_database':
return handleGetDatabase(args);
case 'provision_database':
return handleProvisionDatabase(args);
case 'register_database':
return handleRegisterDatabase(args);
case 'update_database':
return handleUpdateDatabase(args);
case 'deregister_database':
return handleDeregisterDatabase(args);
// Database Server Management
case 'list_dbservers':
return handleListDbservers(args);
case 'get_dbserver':
return handleGetDbserver(args);
case 'create_dbserver':
return handleCreateDbserver(args);
case 'register_dbserver':
return handleRegisterDbserver(args);
case 'delete_dbserver':
return handleDeleteDbserver(args);
// Clone Management
case 'list_clones':
return handleListClones(args);
case 'get_clone':
return handleGetClone(args);
case 'create_clone':
return handleCreateClone(args);
case 'refresh_clone':
return handleRefreshClone(args);
case 'delete_clone':
return handleDeleteClone(args);
// Time Machine Management
case 'list_time_machines':
return handleListTimeMachines(args);
case 'get_time_machine':
return handleGetTimeMachine(args);
case 'get_time_machine_capability':
return handleGetTimeMachineCapability(args);
case 'pause_time_machine':
return handlePauseTimeMachine(args);
case 'resume_time_machine':
return handleResumeTimeMachine(args);
case 'add_authorized_server':
return handleAddAuthorizedServer(args);
case 'get_authorized_server':
return handleGetAuthorizedServer(args);
case 'get_time_machine_access':
return handleGetTimeMachineAccess(args);
case 'grant_time_machine_access':
return handleGrantTimeMachineAccess(args);
// Snapshot Management
case 'list_snapshots':
return handleListSnapshots(args);
case 'get_snapshot':
return handleGetSnapshot(args);
case 'take_snapshot':
return handleTakeSnapshot(args);
case 'delete_snapshot':
return handleDeleteSnapshot(args);
// Infrastructure
case 'list_clusters':
return handleListClusters(args);
case 'get_cluster':
return handleGetCluster(args);
case 'get_cluster_by_name':
return handleGetClusterByName(args);
case 'list_profiles':
return handleListProfiles(args);
case 'get_profile':
return handleGetProfile(args);
case 'list_slas':
return handleListSlas(args);
case 'get_sla':
return handleGetSla(args);
// Operations and Monitoring
case 'list_operations':
return handleListOperations(args);
case 'get_operation':
return handleGetOperation(args);
case 'list_alerts':
return handleListAlerts(args);
case 'get_alert':
return handleGetAlert(args);
// User Management
case 'list_users':
return handleListUsers(args);
case 'get_user':
return handleGetUser(args);
case 'list_roles':
return handleListRoles(args);
case 'get_current_user':
return handleGetCurrentUser();
// Database Restore
case 'restore_database':
return handleRestoreDatabase(args);
case 'update_time_machine':
return handleUpdateTimeMachine(args);
default:
throw new McpError(ErrorCode.MethodNotFound, `Unknown tool: ${name}`);
}
}
// Generate contextual suggestions for missing parameters (profiles, time machines, clusters, dbservers, SLAs)
async function generateSuggestions(
args: any,
missingParams: any[],
context?: { caller: 'handleProvisionDatabase' | 'handleCreateClone' | 'other' }
) {
const suggestions: any = {};
for (const param of missingParams) {
switch (param.parameter) {
case 'softwareProfileId': {
// Fetch software profiles with advanced filtering
const valueType = ['versions.published', 'versions.deprecated', 'topology'];
const value = ['true', 'false', args.clustered ? 'ALL|cluster' : 'ALL|single'];
if (Array.isArray(args.requiredClusterIds) && args.requiredClusterIds.length > 0) {
valueType.push('clusterAvailability.nxClusterId', 'clusterAvailability.status');
value.push(args.requiredClusterIds.join('|'), 'ACTIVE');
}
const profiles = await handleListProfiles({
engine: args.databaseType,
type: 'Software',
valueType: valueType.join(','),
value: value.join(',')
});
suggestions.softwareProfiles = profiles.map((p: any) => ({
id: p.id,
name: p.name,
engineType: p.engineType,
dbVersion: p.dbVersion,
topology: p.topology,
clusterAvailability: p.clusterAvailability,
publishedVersions: Array.isArray(p.versions)
? p.versions.map((v: any) => ({ id: v.id, name: v.name, dbVersion: v.dbVersion }))
: []
}));
break;
}
case 'networkProfileId': {
// Build advanced filtering criteria
const valueType = ['versions.published', 'versions.deprecated', 'topology'];
const value = ['true', 'false', args.clustered ? 'cluster' : 'ALL'];
// If not clustered and nxClusterId is provided, filter by nxClusterId
if (!args.clustered && args.nxClusterId) {
valueType.push('nxClusterId');
value.push(args.nxClusterId);
}
// If clustered, include cluster ID filtering in advanced filtering
if (args.clustered && Array.isArray(args.requiredClusterIds) && args.requiredClusterIds.length > 0) {
valueType.push('versions.properties.name', 'versions.properties.value');
value.push('CLUSTER_ID_\\d', args.requiredClusterIds.join('|')); // match CLUSTER_ID_<n> property names
}
// Fetch network profiles with advanced filtering
const profiles = await handleListProfiles({
engine: args.databaseType,
type: 'Network',
valueType: valueType.join(','),
value: value.join(',')
});
suggestions.networkProfiles = profiles.map((p: any) => ({
id: p.id,
name: p.name,
engineType: p.engineType,
dbVersion: p.dbVersion,
topology: p.topology,
description: p.description,
publishedVersions: Array.isArray(p.versions)
? p.versions.map((v: any) => ({
id: v.id,
name: v.name,
dbVersion: v.dbVersion,
topology: v.topology,
description: v.description,
properties: v.properties
}))
: []
}));
break;
}
case 'dbParameterProfileId':
case 'databaseParameterProfileId': {
// Fetch DB parameter profiles with advanced filtering
const profiles = await handleListProfiles({
engine: args.databaseType,
type: 'Database_Parameter',
valueType: 'versions.published,versions.deprecated',
value: 'true,false'
});
suggestions.dbParameterProfiles = profiles.map((p: any) => ({
id: p.id,
name: p.name,
engineType: p.engineType,
dbVersion: p.dbVersion,
description: p.description,
publishedVersions: Array.isArray(p.versions)
? p.versions.filter((v: any) => v.published && !v.deprecated).map((v: any) => ({
id: v.id,
name: v.name,
dbVersion: v.dbVersion
}))
: []
}));
break;
}
case 'computeProfileId': {
// Fetch all published compute profiles
const profiles = await handleListProfiles({
type: 'Compute',
valueType: 'versions.published,versions.deprecated',
value: 'true,false'
});
suggestions.computeProfiles = profiles.map((p: any) => ({
id: p.id,
name: p.name,
description: p.description,
publishedVersions: Array.isArray(p.versions)
? p.versions.map((v: any) => ({
id: v.id,
name: v.name,
description: v.description
}))
: []
}));
break;
}
case 'timeMachineId': {
// Fetch time machine details
const timeMachineDetails = await handleListTimeMachines({
status: 'READY'
});
suggestions.timeMachines = timeMachineDetails.map((t: any) => ({
id: t.id,
name: t.name,
description: t.description,
databaseType: t.type,
}));
break;
}
case 'nxClusterId': {
const clusters = await handleListClusters({});
suggestions.nxClusters = clusters.map((c: any) => ({
id: c.id,
name: c.name,
description: c.description,
databaseType: c.type,
}));
break;
}
case 'dbserverId': {
const dbServers: any[] = [];
if (context?.caller === 'handleProvisionDatabase') {
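// Suggest dbservers able to host the new database: matching engine, zero databases,
// status UP, no dbserver cluster (null), and eraCreated=true (provisioned by NDB).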
const servers = await handleListDbservers({
loadDatabases: true,
valueType: 'databaseType,databases,status,dbserverClusterId,eraCreated',
value: args.databaseType + ',0,UP,null,true'
});
dbServers.push(...servers);
} else if (context?.caller === 'handleCreateClone') {
const servers = await handleGetAuthorizedServer({
timeMachineId: args.timeMachineId,
usable: true
});
dbServers.push(...servers);
} else {
break; // Unknown context, skip dbserver suggestions
}
suggestions.dbServers = dbServers.map((d: any) => ({
id: d.id,
name: d.name,
description: d.description,
nxClusterId: d.nxClusterId,
properties: d.properties,
ownerId: d.ownerId,
dateCreated: d.dateCreated,
dateModified: d.dateModified,
tags: d.tags,
}));
break;
}
case 'timeMachineInfo.slaId': {
const slas = await handleListSlas({});
suggestions.slaPolicies = slas;
break;
}
}
}
return suggestions;
}
///////////////////////////////
// Database Management Handlers
///////////////////////////////
async function handleListDatabases(args: any) {
const allDatabases = await ndbClient.get('/databases', null);
// Map first, then apply advanced filtering
let mapped = Array.isArray(allDatabases)
? allDatabases.map((db: any) => ({
id: db.id,
name: db.name,
description: db.description,
ownerId: db.ownerId,
dateCreated: db.dateCreated,
dateModified: db.dateModified,
clustered: db.clustered,
eraCreated: db.eraCreated,
placeholder: db.placeholder,
databaseName: db.databaseName,
type: db.type,
status: db.status,
databaseStatus: db.databaseStatus,
dbserverLogicalClusterId: db.dbserverLogicalClusterId,
timeMachineId: db.timeMachineId,
timeZone: db.timeZone,
properties: args.detailed ? db.properties : undefined,
tags: Array.isArray(db.tags) ? db.tags.map((t: any) => ({
tagId: t.tagId,
entityId: t.entityId,
entityType: t.entityType,
value: t.value,
tagName: t.tagName
})) : []
}))
: [];
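// advancedFilter applies comma-separated valueType/value pairs to the mapped list client-side.
// Illustrative example (values assumed): valueType="status,clustered", value="READY,false"
// would keep only non-clustered databases whose status is READY.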
let filtered = advancedFilter(mapped, args.valueType, args.value);
return filtered;
}
async function handleGetDatabase(args: any) {
const params: any = {
'value-type': args.valueType || 'id',
'detailed': args.detailed,
'load-dbserver-cluster': args.loadDbserverCluster
};
return await ndbClient.get(`/databases/${args.databaseId}`, params);
}
async function handleProvisionDatabase(args: any) {
// Determine input categories based on the provisioning scenario
let category = args.clustered ? 'db_server;database;cluster;node_info' : 'db_server;database';
if (!args.createDbserver) {
category = 'database';
}
// Step 1: Fetch engine-specific parameters
const inputFile = await ndbClient.get(`/app_types/${args.databaseType}/provision/input-file`, {
category
});
// Step 2: Check and collect missing resources
const missingParams = await validateAndCollectMissingParams(args, inputFile, ProvisionDatabaseRequestSchema);
if (missingParams.length > 0) {
// Return a help message with missing parameters and suggestions
return {
status: 'validation_required',
message: 'Some required parameters are missing. Please provide the following:',
missingParameters: missingParams,
suggestions: await generateSuggestions(args, missingParams, { caller: 'handleProvisionDatabase' })
};
}
// If all parameters are present, proceed with provisioning
return await executeProvisionDatabase(args, args.testMode);
}
// Function to execute provisioning with all parameters and testMode
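// With testMode=true this is a dry run: the zod-validated payload is returned under
// status 'test_mode' instead of being POSTed to /databases/provision, so the caller
// can review the exact request body first.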
async function executeProvisionDatabase(args: any, testMode?: boolean) {
// Validate args using zod schema
const result = ProvisionDatabaseRequestSchema.safeParse(args);
if (!result.success) {
return {
status: 'validation_error',
errors: result.error.errors
};
}
const data = result.data;
if (testMode) {
return {
status: 'test_mode',
message: 'Test mode enabled. This is the payload that would be sent to the API.',
payload: data
};
}
return await ndbClient.post('/databases/provision', data);
}
async function handleRegisterDatabase(args: any) {
const data = {
databaseType: args.databaseType,
databaseName: args.databaseName,
vmIp: args.vmIp,
nxClusterId: args.nxClusterId,
actionArguments: args.actionArguments || []
};
return await ndbClient.post('/databases/register', data);
}
async function handleUpdateDatabase(args: any) {
const data = {
name: args.name,
description: args.description,
tags: args.tags
};
return await ndbClient.patch(`/databases/${args.databaseId}`, data);
}
async function handleDeregisterDatabase(args: any) {
const data = {
delete: args.delete || false,
remove: args.remove || false,
deleteTimeMachine: args.deleteTimeMachine || false
};
return await ndbClient.delete(`/databases/${args.databaseId}`, data);
}
async function handleRestoreDatabase(args: any) {
// Validate args using the RestoreDatabaseRequestSchema
const result = RestoreDatabaseRequestSchema.safeParse(args);
if (!result.success) {
return {
status: 'validation_error',
errors: result.error.errors
};
}
const data = result.data;
return await ndbClient.post(`/databases/${data.databaseId}/restore`, {
userPitrTimestamp: data.userPitrTimestamp,
snapshotId: data.snapshotId,
actionArguments: data.actionArguments
});
}
///////////////////////////////////
// Database Server Management Handlers
//////////////////////////////////////
async function handleListDbservers(args: any) {
const params: any = {
'load-databases': args.loadDatabases,
'load-clones': args.loadClones
};
const fullList = await ndbClient.get('/dbservers', params);
// Map first, then apply advanced filtering
let mapped = Array.isArray(fullList)
? fullList.map((srv: any) => {
const entry: any = {
id: srv.id,
eraCreated: srv.eraCreated,
dbserverClusterId: srv.dbserverClusterId,
name: srv.name,
description: srv.description,
ipAddresses: srv.ipAddresses,
fqdns: srv.fqdns,
type: srv.type,
status: srv.status,
nxClusterId: srv.nxClusterId,
databaseType: srv.databaseType,
eraVersion: srv.eraVersion,
ownerId: srv.ownerId,
dateCreated: srv.dateCreated,
dateModified: srv.dateModified,
properties: Array.isArray(srv.properties) ? srv.properties.map((p: any) => ({
name: p.name,
value: p.value
})) : [],
tags: Array.isArray(srv.tags) ? srv.tags.map((t: any) => ({
tagId: t.tagId,
entityId: t.entityId,
entityType: t.entityType,
value: t.value,
tagName: t.tagName
})) : []
};
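// Include the nested databases list unless both loadDatabases and loadClones are explicitly false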
if (args.loadDatabases !== false || args.loadClones !== false) {
entry.databases = Array.isArray(srv.databases)
? srv.databases.map((db: any) => ({
id: db.id,
name: db.name,
description: db.description,
ownerId: db.ownerId,
dateCreated: db.dateCreated,
dateModified: db.dateModified,
clustered: db.clustered,
clone: db.clone,
databaseName: db.databaseName,
type: db.type,
status: db.status,
dbserverLogicalClusterId: db.dbserverLogicalClusterId,
timeMachineId: db.timeMachineId,
parentTimeMachineId: db.parentTimeMachineId,
timeZone: db.timeZone,
tags: Array.isArray(db.tags) ? db.tags.map((t: any) => ({
tagId: t.tagId,
entityId: t.entityId,
entityType: t.entityType,
value: t.value,
tagName: t.tagName
})) : []
}))
: [];
}
return entry;
})
: [];
let filtered = advancedFilter(mapped, args.valueType, args.value);
return filtered;
}
async function handleGetDbserver(args: any) {
const params = {
'value-type': args.valueType || 'id',
'load-databases': args.loadDatabases,
'load-clones': args.loadClones
};
return await ndbClient.get(`/dbservers/${args.dbserverId}`, params);
}
async function handleCreateDbserver(args: any) {
// Step 1: Check and collect missing resources
const missingParams = await validateAndCollectMissingParams(args, null, CreateDbserverRequestSchema);
if (missingParams.length > 0) {
// Return a help message with missing parameters and suggestions
return {
status: 'validation_required',
message: 'Some required parameters are missing. Please provide the following:',
missingParameters: missingParams,
suggestions: await generateSuggestions(args, missingParams),
};
}
// Step 2: Fetch engine-specific parameters using timeMachineId or softwareProfileId
if (args.timeMachineId) {
const timeMachineDetails = await ndbClient.get(`/tms/${args.timeMachineId}`);
const databaseEngine = timeMachineDetails.type;
args.databaseType = databaseEngine; // Ensure databaseType is set for suggestions
} else if (args.softwareProfileId) {
const softwareProfileDetails = await ndbClient.get(`/profiles/${args.softwareProfileId}`);
const databaseEngine = softwareProfileDetails.engineType;
args.databaseType = databaseEngine; // Ensure databaseType is set for suggestions
} else {
throw new McpError(ErrorCode.InvalidParams, 'Either timeMachineId or softwareProfileId must be provided.');
}
// Validate args using the CreateDbserverRequestSchema
const result = CreateDbserverRequestSchema.safeParse(args);
if (!result.success) {
return {
status: 'validation_error',
errors: result.error.errors
};
}
const data = result.data;
// Make the API call to create the database server
return await ndbClient.post('/dbservers/provision', data);
}
async function handleRegisterDbserver(args: any) {
const data = {
vmIp: args.vmIp,
nxClusterUuid: args.nxClusterUuid,
databaseType: args.databaseType,
username: args.username,
password: args.password,
actionArguments: args.actionArguments || []
};
return await ndbClient.post('/dbservers/register', data);
}
async function handleDeleteDbserver(args: any) {
const data = {
delete: args.delete || false,
remove: args.remove || false,
softRemove: false,
deleteVgs: true,
deleteVmSnapshots: true
};
return await ndbClient.delete(`/dbservers/${args.dbserverId}`, data);
}
///////////////////////////////////
// Clone Management Handlers
///////////////////////////////////
async function handleListClones(args: any) {
// Get all clones from NDB
const allClones = await ndbClient.get('/clones', null);
// Map first, then apply advanced filtering
let mapped = Array.isArray(allClones)
? allClones.map((clone: any) => ({
id: clone.id,
name: clone.name,
description: clone.description,
ownerId: clone.ownerId,
dateCreated: clone.dateCreated,
dateModified: clone.dateModified,
databaseName: clone.databaseName,
type: clone.type,
status: clone.status,
dbserverLogicalClusterId: clone.dbserverLogicalClusterId,
timeMachineId: clone.timeMachineId,
parentTimeMachineId: clone.parentTimeMachineId,
timeZone: clone.timeZone,
tags: Array.isArray(clone.tags) ? clone.tags.map((t: any) => ({
tagId: t.tagId,
entityId: t.entityId,
entityType: t.entityType,
value: t.value,
tagName: t.tagName
})) : []
}))
: [];
let filtered = advancedFilter(mapped, args.valueType, args.value);
return filtered;
}
async function handleGetClone(args: any) {
const params = {
'value-type': args.valueType || 'id',
detailed: args.detailed
};
return await ndbClient.get(`/clones/${args.cloneId}`, params);
}
async function handleCreateClone(args: any) {
// Step 1: Fetch engine-specific parameters using timeMachineId
const timeMachineDetails = await ndbClient.get(`/tms/${args.timeMachineId}`);
const databaseEngine = timeMachineDetails.type;
args.databaseType = databaseEngine; // Ensure databaseType is set for suggestions
const inputFile = await ndbClient.get(`/tms/${args.timeMachineId}/clones/input-file`, {});
// Step 2: Check and collect missing resources
const missingParams = await validateAndCollectMissingParams(args, inputFile, CreateCloneRequestSchema);
if (missingParams.length > 0) {
// Return a help message with missing parameters and suggestions
return {
status: 'validation_required',
message: 'Some required parameters are missing. Please provide the following:',
missingParameters: missingParams,
suggestions: await generateSuggestions(args, missingParams, { caller: 'handleCreateClone' }),
};
}
// Step 3: Execute the clone creation
return await ndbClient.post(`/tms/${args.timeMachineId}/clones`, args);
}
async function handleRefreshClone(args: any) {
const data = {
snapshotId: args.snapshotId,
latestSnapshot: args.latestSnapshot || false
};
return await ndbClient.post(`/clones/${args.cloneId}/refresh`, data);
}
async function handleDeleteClone(args: any) {
const data = {
delete: args.delete || false,
remove: args.remove || false,
deleteTimeMachine: args.deleteTimeMachine || false
};
return await ndbClient.delete(`/clones/${args.cloneId}`, data);
}
// Time Machine Management Handlers
async function handleListTimeMachines(args: any) {
// No params to API, always use defaults
const allTms = await ndbClient.get('/tms', null);
// Map first to the reduced schema to guarantee the presence of properties, then filter
let mapped = Array.isArray(allTms)
? allTms.map((tm: any) => ({
id: tm.id,
name: tm.name,
description: tm.description,
databaseId: tm.databaseId,
logDriveId: tm.logDriveId,
type: tm.type,
status: tm.status,
slaId: tm.slaId,
scheduleId: tm.scheduleId,
ownerId: tm.ownerId,
dateCreated: tm.dateCreated,
dateModified: tm.dateModified,
properties: Array.isArray(tm.properties)
? tm.properties.map((p: any) => ({
ref_id: p.ref_id,
name: p.name,
value: p.value,
secure: p.secure ?? false,
description: p.description
}))
: [],
zeroSla: tm.zeroSla ?? false,
slaSet: tm.slaSet ?? false,
continuousRecoveryEnabled: tm.continuousRecoveryEnabled ?? false,
snapshotableState: tm.snapshotableState ?? false,
tags: Array.isArray(tm.tags) ? tm.tags.map((t: any) => ({
tagId: t.tagId,
entityId: t.entityId,
entityType: t.entityType,
value: t.value,
tagName: t.tagName
})) : []
}))
: [];
let filtered = advancedFilter(mapped, args.valueType, args.value);
return filtered;
}
async function handleGetTimeMachine(args: any) {
const params = {
'value-type': args.valueType || 'id',
detailed: args.detailed
};
return await ndbClient.get(`/tms/${args.timeMachineId}`, params);
}
async function handleGetTimeMachineCapability(args: any) {
const params = {
'time-zone': args.timeZone || 'UTC',
'load-health': args.loadHealth || false
};
return await ndbClient.get(`/tms/${args.timeMachineId}/capability`, params);
}
async function handlePauseTimeMachine(args: any) {
const data = {
forced: args.forced || false,
reason: args.reason
};
return await ndbClient.patch(`/tms/${args.timeMachineId}/pause`, data);
}
async function handleResumeTimeMachine(args: any) {
const data = {
resetCapability: args.resetCapability || false
};
return await ndbClient.patch(`/tms/${args.timeMachineId}/resume`, data);
}
async function handleUpdateTimeMachine(args: any) {
const result = UpdateTimeMachineRequestSchema.safeParse(args);
if (!result.success) {
return {
status: 'validation_error',
errors: result.error.errors,
};
}
const { timemachine_id, ...data } = result.data;
return await ndbClient.patch(`/tms/${timemachine_id}`, data);
}
// Add Authorized Server to Time Machine
async function handleAddAuthorizedServer(args: any) {
const { timeMachineId, dbserverIds } = args;
// Validate input
if (!timeMachineId || !Array.isArray(dbserverIds) || dbserverIds.length === 0) {
throw new Error('Invalid input: timeMachineId and dbserverIds are required.');
}
// Make the API call to add authorized servers
return await ndbClient.post(`/tms/${timeMachineId}/dbservers`, dbserverIds);
}
// Get Authorized Server for Time Machine
async function handleGetAuthorizedServer(args: any) {
const { timeMachineId, usable = false } = args;
// Validate input
if (!timeMachineId) {
throw new Error('Invalid input: timeMachineId is required.');
}
// Make the API call to retrieve authorized servers
return await ndbClient.get(`/tms/${timeMachineId}/candidate-dbservers`, {
usable
});
}
// Time machine cluster access handlers
async function handleGetTimeMachineAccess(args: any) {
const { timeMachineId } = args;
// Validate input
if (!timeMachineId) {
throw new Error('Invalid input: timeMachineId is required.');
}
// Make the API call to retrieve cluster access
return await ndbClient.get(`/tms/${timeMachineId}/clusters`);
}
async function handleGrantTimeMachineAccess(args: any) {
const { timeMachineId, nxClusterId, slaId } = args;
// Validate input
if (!timeMachineId || !nxClusterId || !slaId) {
throw new Error('Invalid input: timeMachineId, nxClusterId, and slaId are required.');
}
// Prepare the request data
const data = {
nxClusterId,
slaId,
type: 'OTHER' // Assuming 'OTHER' is a valid type; adjust as necessary
};
// Make the API call to grant cluster access
return await ndbClient.post(`/tms/${timeMachineId}/clusters`, data);
}
// Snapshot Management Handlers
async function handleListSnapshots(args: any) {
// valueType keys natively supported by the API
const apiKeys = [
'type',
'status',
'protection-domain-id',
'database-node',
'snapshot-id',
'time-machine',
'latest'
];
let apiValueTypes: string[] = [];
let apiValues: string[] = [];
let localValueTypes: string[] = [];
let localValues: string[] = [];
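// Split incoming filters: keys supported by the /snapshots endpoint are sent as query
// parameters, all others are applied locally with advancedFilter after mapping.
// Illustrative example (values assumed): valueType="type,databaseNodeId",
// value="MANUAL,<node-uuid>" sends type=MANUAL to the API and filters databaseNodeId locally.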
if (args.valueType && args.value) {
const keys = args.valueType.split(',').map((k: string) => k.trim());
const values = args.value.split(',').map((v: string) => v.trim());
for (let i = 0; i < keys.length; i++) {
if (apiKeys.includes(keys[i])) {
apiValueTypes.push(keys[i]);
apiValues.push(values[i]);
} else {
localValueTypes.push(keys[i]);
localValues.push(values[i]);
}
}
}
// Build the API parameters with only the filters supported by the endpoint
const params: any = {
'database-ids': args.databaseIds,
limit: args.limit || 100
};
if (apiValueTypes.length > 0) {
params['value-type'] = apiValueTypes.join(',');
params['value'] = apiValues.join(',');
}
const snapshots = await ndbClient.get('/snapshots', params);
// Reduced mapping following the provided schema
let mapped = Array.isArray(snapshots)
? snapshots.map((s: any) => ({
id: s.id,
name: s.name,
description: s.description,
ownerId: s.ownerId,
dateCreated: s.dateCreated,
dateModified: s.dateModified,
queryCount: s.queryCount,
snapshotId: s.snapshotId,
snapshotUuid: s.snapshotUuid,
protectionDomainId: s.protectionDomainId,
timeMachineId: s.timeMachineId,
databaseNodeId: s.databaseNodeId,
appInfoVersion: s.appInfoVersion,
status: s.status,
type: s.type,
snapshotTimeStamp: s.snapshotTimeStamp,
snapshotSize: s.snapshotSize,
fromTimeStamp: s.fromTimeStamp,
toTimeStamp: s.toTimeStamp,
tags: Array.isArray(s.tags) ? s.tags.map((t: any) => ({
tagId: t.tagId,
entityId: t.entityId,
entityType: t.entityType,
value: t.value,
tagName: t.tagName
})) : []
}))
: [];
// Only local advanced filtering on valueType not supported by the API
let filtered =
localValueTypes.length > 0
? advancedFilter(mapped, localValueTypes.join(','), localValues.join(','))
: mapped;
return filtered;
}
async function handleGetSnapshot(args: any) {
const params = {
'time-zone': args.timeZone || 'UTC'
};
return await ndbClient.get(`/snapshots/${args.snapshotId}`, params);
}
async function handleTakeSnapshot(args: any) {
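// When expireInDays is provided the request carries an LCM expiry config; the body then
// looks like (illustrative values): { name: "pre-upgrade",
//   lcmConfig: { snapshotLCMConfig: { expiryDetails: { expireInDays: 7 } } } }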
const data = {
name: args.name,
lcmConfig: args.expireInDays ? {
snapshotLCMConfig: {
expiryDetails: {
expireInDays: args.expireInDays
}
}
} : undefined
};
return await ndbClient.post(`/tms/${args.timeMachineId}/snapshots`, data);
}
async function handleDeleteSnapshot(args: any) {
return await ndbClient.delete(`/snapshots/${args.snapshotId}`);
}
// Infrastructure Handlers
async function handleListClusters(args: any) {
// API call without parameters
const allClusters = await ndbClient.get('/clusters', null);
// Reduced mapping following the requested schema
let mapped = Array.isArray(allClusters)
? allClusters.map((c: any) => ({
id: c.id,
name: c.name,
uniqueName: c.uniqueName,
ipAddresses: Array.isArray(c.ipAddresses) ? c.ipAddresses : [],
fqdns: Array.isArray(c.fqdns) ? c.fqdns : [],
description: c.description,
cloudType: c.cloudType,
dateCreated: c.dateCreated,
dateModified: c.dateModified,
ownerId: c.ownerId,
status: c.status,
version: c.version,
hypervisorType: c.hypervisorType,
hypervisorVersion: c.hypervisorVersion
}))
: [];
let filtered = advancedFilter(mapped, args.valueType, args.value);
return filtered;
}
async function handleGetCluster(args: any) {
return await ndbClient.get(`/clusters/${args.clusterId}`);
}
async function handleGetClusterByName(args: any) {
if (!args.clusterName) {
throw new Error('clusterName is required');
}
return await ndbClient.get(`/clusters/name/${encodeURIComponent(args.clusterName)}`);
}
async function handleListProfiles(args: any) {
const params = {
engine: args.engine,
type: args.type
};
let profiles = await ndbClient.get('/profiles', params);
// Map first, then apply advanced filtering
let mapped = Array.isArray(profiles)
? profiles.map((p: any) => ({
id: p.id,
name: p.name,
description: p.description,
dateCreated: p.dateCreated,
dateModified: p.dateModified,
owner: p.owner,
engineType: p.engineType,
type: p.type,
nxClusterId: p.nxClusterId,
topology: p.topology,
dbVersion: p.dbVersion,
systemProfile: p.systemProfile ?? false,
latestVersion: p.latestVersion,
latestVersionId: p.latestVersionId,
versions: Array.isArray(p.versions)
? p.versions.map((v: any) => ({
id: v.id,
name: v.name,
description: v.description,
dateCreated: v.dateCreated,
dateModified: v.dateModified,
owner: v.owner,
engineType: v.engineType,
type: v.type,
nxClusterId: v.nxClusterId,
topology: v.topology,
dbVersion: v.dbVersion,
systemProfile: v.systemProfile ?? false,
assocDbServers: Array.isArray(v.assocDbServers) ? v.assocDbServers : [],
assocDatabases: Array.isArray(v.assocDatabases) ? v.assocDatabases : [],
version: v.version,
profileId: v.profileId,
published: v.published ?? false,
deprecated: v.deprecated ?? false,
properties: Array.isArray(v.properties) ? v.properties.map((prop: any) => ({
name: prop.name,
value: prop.value
})) : [],
}))
: [],
clusterAvailability: Array.isArray(p.clusterAvailability)
? p.clusterAvailability.map((ca: any) => ({
nxClusterId: ca.nxClusterId,
status: ca.status
})) : []
}))
: [];
let filtered = advancedFilter(mapped, args.valueType, args.value);
return filtered;
}
async function handleGetProfile(args: any) {
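// When byName is true, args.profileId is treated as the profile name and sent as the
// "name" query parameter; otherwise it is sent as "id".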
const params = {
id: args.byName ? undefined : args.profileId,
name: args.byName ? args.profileId : undefined,
engine: args.engine,
type: args.type
};
// Filter out undefined values
const filteredParams = Object.fromEntries(
Object.entries(params).filter(([_, value]) => value !== undefined)
);
const profiles = await ndbClient.get('/profiles', filteredParams);
// Always return the full object(s) as received from the API
if (Array.isArray(profiles)) {
if (profiles.length === 1) {
return profiles[0]; // full details, including versions
} else if (profiles.length === 0) {
throw new McpError(ErrorCode.InvalidRequest, `Profile not found: ${args.profileId}`);
} else {
// Multiple matches - return all for user to choose, with full details
return profiles;
}
}
return profiles; // full details if not an array
}
async function handleListSlas(args: any) {
return await ndbClient.get('/slas');
}
async function handleGetSla(args: any) {
const endpoint = args.byName ? `/slas/name/${args.slaId}` : `/slas/${args.slaId}`;
return await ndbClient.get(endpoint);
}
// Operations and Monitoring Handlers
async function handleListOperations(args: any) {
const params: any = {};
if (args.dbserverId) params['dbserver-id'] = args.dbserverId;
if (args['eraServer'] !== undefined) params['era-server'] = args['eraServer'];
if (args.ip) params['ip'] = args.ip;
if (args.clientId) params['client-id'] = args.clientId;
if (args.status) params['status'] = args.status;
if (args.type) params['type'] = args.type;
if (args.hideSubops !== undefined) params['hide-subops'] = args.hideSubops;
if (args.systemTriggered !== undefined) params['system-triggered'] = args.systemTriggered;
if (args.userTriggered !== undefined) params['user-triggered'] = args.userTriggered;
if (args.scheduled !== undefined) params['scheduled'] = args.scheduled;
if (args.dateSubmitted) params['date-submitted'] = args.dateSubmitted;
if (args.fromTime) params['from-time'] = args.fromTime;
if (args.toTime) params['to-time'] = args.toTime;
if (args.days) params['days'] = args.days;
if (args.entityId) params['entity-id'] = args.entityId;
if (args.entityName) params['entity-name'] = args.entityName;
if (args.entityType) params['entity-type'] = args.entityType;
if (args.timeZone) params['time-zone'] = args.timeZone;
if (args.descending !== undefined) params['descending'] = args.descending;
if (args.operationId) params['operation-id'] = args.operationId;
if (args.timestamp) params['timestamp'] = args.timestamp;
if (args.limit) params['limit'] = args.limit;
return await ndbClient.get('/operations/short-info', params);
}
async function handleGetOperation(args: any) {
const params = {
'time-zone': args.timeZone || 'UTC'
};
return await ndbClient.get(`/operations/${args.operationId}`, params);
}
async function handleListAlerts(args: any) {
const params = {
resolved: args.resolved,
timeInterval: args.timeInterval
};
const apiResult = await ndbClient.get('/alerts', params);
// If the result has an 'entities' array, use it; otherwise, use the result directly
const alerts = Array.isArray(apiResult?.entities) ? apiResult.entities : apiResult;
// Reduced mapping (adapt to typical NDB alert fields)
let mapped = Array.isArray(alerts)
? alerts.map((a: any) => ({
id: a.id,
name: a.name,
description: a.description,
status: a.status,
severity: a.severity,
type: a.type,
entityType: a.entityType,
entityId: a.entityId,
entityName: a.entityName,
dateCreated: a.dateCreated,
dateModified: a.dateModified,
resolved: a.resolved,
resolution: a.resolution,
acknowledged: a.acknowledged,
acknowledgedBy: a.acknowledgedBy,
acknowledgedAt: a.acknowledgedAt
}))
: [];
let filtered = advancedFilter(mapped, args.valueType, args.value);
return filtered;
}
// Get full details for a single alert by alertId
async function handleGetAlert(args: any) {
return await ndbClient.get(`/alerts/${args.alertId}`);
}
// List all NDB users with advanced filtering using valueType/value
async function handleListUsers(args: any) {
const users = await ndbClient.get('/users');
// Reduced mapping (keep the main fields)
let mapped = Array.isArray(users)
? users.map((user: any) => ({
id: user.id,
username: user.username,
email: user.email,
isExternalAuth: user.isExternalAuth,
passwordExpired: user.passwordExpired,
roles: user.roles
}))
: [];
// Advanced filtering (supports booleans, arrays, etc.)
let filtered = advancedFilter(mapped, args.valueType, args.value);
return filtered;
}
// Get full details for a single user by userId
async function handleGetUser(args: any) {
return await ndbClient.get(`/users/${args.userId}`);
}
// List all roles
async function handleListRoles(args: any) {
// No args needed for this endpoint
return await ndbClient.get('/roles', null);
}
// Handler for /users/me endpoint
async function handleGetCurrentUser() {
return await ndbClient.get('/users/me');
}
// Start the server
async function main() {
const transport = new StdioServerTransport();
await server.connect(transport);
console.error('🚀 NDB MCP Server running on stdio');
}
main().catch((error) => {
console.error('❌ Failed to start server:', error);
process.exit(1);
});