
MCP Database Server

index.ts (7.93 kB)
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import {
  CallToolRequestSchema,
  ListToolsRequestSchema,
} from "@modelcontextprotocol/sdk/types.js";
import Database from "better-sqlite3";
import { parse } from "csv-parse/sync";
import * as fs from "fs";
import * as path from "path";

// Open (or create) the SQLite database; the location is configurable via DB_PATH.
const DB_PATH = process.env.DB_PATH || "./data.db";
const db = new Database(DB_PATH);

// Bootstrap a sample table so the database is usable immediately.
db.exec(`
  CREATE TABLE IF NOT EXISTS sample_data (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT,
    value TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
  )
`);

const server = new Server(
  {
    name: "mcp-database-server",
    version: "1.0.0",
  },
  {
    capabilities: {
      tools: {},
    },
  }
);

// Advertise the tools this server exposes.
server.setRequestHandler(ListToolsRequestSchema, async () => {
  return {
    tools: [
      {
        name: "query_database",
        description:
          "Execute a SQL query on the SQLite database and return results. Supports SELECT, INSERT, UPDATE, DELETE operations.",
        inputSchema: {
          type: "object",
          properties: {
            query: {
              type: "string",
              description: "The SQL query to execute",
            },
            params: {
              type: "array",
              items: { type: "string" },
              description: "Optional parameters for prepared statements",
            },
          },
          required: ["query"],
        },
      },
      {
        name: "import_csv",
        description:
          "Import data from a CSV file into a database table. Creates the table if it doesn't exist based on CSV headers.",
        inputSchema: {
          type: "object",
          properties: {
            file_path: {
              type: "string",
              description: "Path to the CSV file to import",
            },
            table_name: {
              type: "string",
              description: "Name of the table to import data into",
            },
            delimiter: {
              type: "string",
              description: "CSV delimiter character (default: comma)",
              default: ",",
            },
          },
          required: ["file_path", "table_name"],
        },
      },
      {
        name: "list_tables",
        description: "List all tables in the database with their schemas",
        inputSchema: {
          type: "object",
          properties: {},
        },
      },
    ],
  };
});

// Dispatch incoming tool calls.
server.setRequestHandler(CallToolRequestSchema, async (request) => {
  const { name, arguments: args } = request.params;

  try {
    switch (name) {
      case "query_database": {
        const { query, params = [] } = args as {
          query: string;
          params?: string[];
        };

        // SELECT queries return rows; other statements return change metadata.
        const trimmedQuery = query.trim().toUpperCase();
        const isSelect = trimmedQuery.startsWith("SELECT");

        if (isSelect) {
          const stmt = db.prepare(query);
          const results = stmt.all(...params);
          return {
            content: [
              {
                type: "text",
                text: JSON.stringify(results, null, 2),
              },
            ],
          };
        } else {
          const stmt = db.prepare(query);
          const result = stmt.run(...params);
          return {
            content: [
              {
                type: "text",
                text: JSON.stringify(
                  {
                    changes: result.changes,
                    lastInsertRowid: result.lastInsertRowid,
                  },
                  null,
                  2
                ),
              },
            ],
          };
        }
      }

      case "import_csv": {
        const { file_path, table_name, delimiter = "," } = args as {
          file_path: string;
          table_name: string;
          delimiter?: string;
        };

        const absolutePath = path.resolve(file_path);
        if (!fs.existsSync(absolutePath)) {
          return {
            content: [
              {
                type: "text",
                text: `Error: File not found: ${absolutePath}`,
              },
            ],
            isError: true,
          };
        }

        const fileContent = fs.readFileSync(absolutePath, "utf-8");
        const records = parse(fileContent, {
          columns: true,
          skip_empty_lines: true,
          delimiter,
        }) as Record<string, string>[];

        if (records.length === 0) {
          return {
            content: [
              {
                type: "text",
                text: "Error: CSV file is empty or has no data rows",
              },
            ],
            isError: true,
          };
        }

        // Sanitize identifiers before interpolating them into SQL.
        const columns = Object.keys(records[0]);
        const safeTableName = table_name.replace(/[^a-zA-Z0-9_]/g, "_");
        const safeColumns = columns.map((col) =>
          col.replace(/[^a-zA-Z0-9_]/g, "_")
        );

        const createTableSQL = `
          CREATE TABLE IF NOT EXISTS ${safeTableName} (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            ${safeColumns.map((col) => `${col} TEXT`).join(",\n            ")},
            imported_at DATETIME DEFAULT CURRENT_TIMESTAMP
          )
        `;
        db.exec(createTableSQL);

        const insertSQL = `
          INSERT INTO ${safeTableName} (${safeColumns.join(", ")})
          VALUES (${safeColumns.map(() => "?").join(", ")})
        `;
        const insertStmt = db.prepare(insertSQL);

        // Insert all rows inside a single transaction for speed and atomicity.
        const insertMany = db.transaction((rows: Record<string, string>[]) => {
          for (const row of rows) {
            const values = columns.map((col) => row[col] || null);
            insertStmt.run(...values);
          }
        });

        insertMany(records);

        return {
          content: [
            {
              type: "text",
              text: JSON.stringify(
                {
                  success: true,
                  table: safeTableName,
                  rowsImported: records.length,
                  columns: safeColumns,
                },
                null,
                2
              ),
            },
          ],
        };
      }

      case "list_tables": {
        const tables = db
          .prepare(
            `SELECT name FROM sqlite_master
             WHERE type='table' AND name NOT LIKE 'sqlite_%'`
          )
          .all() as { name: string }[];

        // Describe each user table via PRAGMA table_info.
        const tableSchemas = tables.map((table) => {
          const schema = db
            .prepare(`PRAGMA table_info(${table.name})`)
            .all() as {
            name: string;
            type: string;
            notnull: number;
            pk: number;
          }[];

          return {
            table: table.name,
            columns: schema.map((col) => ({
              name: col.name,
              type: col.type,
              notNull: col.notnull === 1,
              primaryKey: col.pk === 1,
            })),
          };
        });

        return {
          content: [
            {
              type: "text",
              text: JSON.stringify(tableSchemas, null, 2),
            },
          ],
        };
      }

      default:
        return {
          content: [
            {
              type: "text",
              text: `Unknown tool: ${name}`,
            },
          ],
          isError: true,
        };
    }
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    return {
      content: [
        {
          type: "text",
          text: `Error: ${errorMessage}`,
        },
      ],
      isError: true,
    };
  }
});

async function main() {
  const transport = new StdioServerTransport();
  await server.connect(transport);
  console.error("MCP Database Server running on stdio");
}

main().catch(console.error);
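
For reference, here is a minimal client-side sketch (not part of this repository) showing how the tools above could be invoked over stdio using the MCP TypeScript SDK. The compiled server path (build/index.js), the client name, and the CSV file/table/query used in the calls are illustrative assumptions, not taken from this project.

// Hypothetical usage sketch: connect to the server over stdio and call its tools.
// Paths, names, and sample data below are assumptions for illustration only.
import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";

async function demo() {
  // Launch the compiled server as a child process (assumed build output path).
  const transport = new StdioClientTransport({
    command: "node",
    args: ["build/index.js"],
  });

  const client = new Client(
    { name: "example-client", version: "1.0.0" },
    { capabilities: {} }
  );
  await client.connect(transport);

  // Import a CSV file into a new table (file path and table name are hypothetical).
  await client.callTool({
    name: "import_csv",
    arguments: { file_path: "./users.csv", table_name: "users" },
  });

  // Run a parameterized SELECT against the imported table.
  const result = await client.callTool({
    name: "query_database",
    arguments: {
      query: "SELECT * FROM users WHERE name = ? LIMIT 5",
      params: ["Alice"],
    },
  });
  console.log(result.content);

  await client.close();
}

demo().catch(console.error);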

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/jakubbuskiewicz/mcp-test'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.