
Couchbase MCP Server for LLMs

by Aniket310101
query.ts (1.28 kB)
// query.ts — registers Couchbase tools on an MCP server.
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import { Cluster, Bucket } from 'couchbase';
import { z } from 'zod';

export function registerQueryTools(server: McpServer, cluster: Cluster, bucket: Bucket) {
  // Tool: execute an arbitrary N1QL query against the cluster and return the rows as JSON text.
  server.tool(
    'query',
    'Execute N1QL queries on Couchbase',
    {
      query: z.string().describe('The N1QL query to execute'),
    },
    async ({ query }) => {
      try {
        const result = await cluster.query(query);
        return {
          content: [{ type: 'text', text: JSON.stringify(result.rows, null, 2) }],
          isError: false,
        };
      } catch (error: any) {
        return {
          content: [{ type: 'text', text: `Query failed: ${error.message}` }],
          isError: true,
        };
      }
    }
  );

  // Tool: list all buckets available on the cluster.
  server.tool(
    'listBuckets',
    'List all available buckets',
    {},
    async () => {
      try {
        const buckets = await cluster.buckets().getAllBuckets();
        return {
          content: [{ type: 'text', text: JSON.stringify(buckets, null, 2) }],
          isError: false,
        };
      } catch (error: any) {
        return {
          content: [{ type: 'text', text: `Failed to list buckets: ${error.message}` }],
          isError: true,
        };
      }
    }
  );
}
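The file above only registers the tools; it does not create the MCP server or open the Couchbase connection. A minimal sketch of how it might be wired up is shown below, assuming a stdio transport and connection details supplied via environment variables. The entry-point file name, environment variable names, and default values are illustrative, not taken from the repository.

// index.ts (hypothetical entry point; env variable names and defaults are assumptions)
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
import { connect } from 'couchbase';
import { registerQueryTools } from './query.js';

async function main() {
  // Connect to Couchbase; credentials are read from the environment in this sketch.
  const cluster = await connect(process.env.COUCHBASE_CONNSTR ?? 'couchbase://localhost', {
    username: process.env.COUCHBASE_USERNAME ?? 'Administrator',
    password: process.env.COUCHBASE_PASSWORD ?? 'password',
  });
  const bucket = cluster.bucket(process.env.COUCHBASE_BUCKET ?? 'default');

  // Create the MCP server and register the Couchbase tools from query.ts.
  const server = new McpServer({ name: 'couchbase-mcp', version: '1.0.0' });
  registerQueryTools(server, cluster, bucket);

  // Expose the server over stdio so an MCP client (e.g. an LLM host) can call the tools.
  await server.connect(new StdioServerTransport());
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});

Keeping tool registration in a separate module leaves the connection lifecycle in one place and allows further tool groups to be added with additional register* calls.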

MCP directory API

Glama provides all the information about MCP servers via its MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Aniket310101/MCP-Server-Couchbase'
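The same endpoint can be called programmatically. Below is a minimal TypeScript sketch using the global fetch available in Node.js 18+; no assumptions are made about the response shape beyond it being JSON.

// Fetch the directory entry for this server from the Glama MCP API.
async function getServerInfo() {
  const url = 'https://glama.ai/api/mcp/v1/servers/Aniket310101/MCP-Server-Couchbase';
  const response = await fetch(url);
  if (!response.ok) {
    throw new Error(`Request failed: ${response.status} ${response.statusText}`);
  }
  return response.json();
}

getServerInfo().then((info) => console.log(JSON.stringify(info, null, 2)));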

If you have feedback or need assistance with the MCP directory API, please join the Glama Discord server.