Skip to main content
Glama
dbhub.toml.example (10.3 kB)
# DBHub Multi-Database Configuration Example
# Save this file as 'dbhub.toml' in your project directory
# Or use '--config=path/to/your/config.toml' to specify a custom location

# Define multiple database sources using [[sources]] array syntax
# Each source requires a unique 'id' field for identification

## Example 1: PostgreSQL with DSN (recommended)
[[sources]]
id = "prod_pg"          # Required: Unique identifier
dsn = "postgres://user:password@localhost:5432/production?sslmode=require" # Required: Connection string
readonly = false        # Optional: Limit to read-only operations (default: false)
max_rows = 1000         # Optional: Maximum rows to return per query (default: unlimited)
connection_timeout = 30 # Optional: Connection timeout in seconds (default: driver-specific)

## Example 2: MySQL with individual parameters
[[sources]]
id = "staging_mysql"    # Required: Unique identifier
type = "mysql"          # Required when using individual parameters (not DSN)
host = "localhost"      # Required when using individual parameters
port = 3306             # Optional: Uses default if not specified (MySQL default: 3306)
database = "staging"    # Required: Database name
user = "root"           # Required: Database username
password = "secret"     # Required: Database password
readonly = false        # Optional: Limit to read-only operations (default: false)
max_rows = 500          # Optional: Maximum rows to return per query (default: unlimited)
connection_timeout = 60 # Optional: Connection timeout in seconds (useful for high-latency connections)

## Example 3: MariaDB with SSH tunnel
[[sources]]
id = "remote_mariadb"                              # Required: Unique identifier
dsn = "mariadb://dbuser:dbpass@10.0.0.5:3306/mydb" # Required: Connection string (target DB behind SSH tunnel)
ssh_host = "bastion.example.com"                   # Optional: SSH server hostname (required if using SSH tunnel)
ssh_port = 22                                      # Optional: SSH server port (default: 22)
ssh_user = "ubuntu"                                # Optional: SSH username (required if using SSH tunnel)
ssh_key = "~/.ssh/id_rsa"                          # Optional: Path to private key file (use ssh_key OR ssh_password)
# ssh_passphrase = "key_passphrase"                # Optional: Passphrase for encrypted private key
# ssh_password = "ssh_password"                    # Optional: SSH password (use instead of ssh_key for password auth)

## Example 4: SQL Server with timeouts
[[sources]]
id = "analytics_sqlserver"       # Required: Unique identifier
type = "sqlserver"               # Required when using individual parameters
host = "sqlserver.example.com"   # Required when using individual parameters
port = 1433                      # Optional: Uses default if not specified (SQL Server default: 1433)
database = "analytics"           # Required: Database name
user = "sa"                      # Required: Database username
password = "YourStrong@Passw0rd" # Required: Database password
max_rows = 2000                  # Optional: Maximum rows to return per query (default: unlimited)
connection_timeout = 30          # Optional: Connection establishment timeout in seconds (default: 15s)
request_timeout = 120            # Optional: Query execution timeout in seconds (SQL Server only, default: 15s)
# instanceName = "INSTANCE1"     # Optional: SQL Server named instance (e.g., SERVER\INSTANCE1)

## Example 5: SQLite local file
[[sources]]
id = "local_sqlite"               # Required: Unique identifier
type = "sqlite"                   # Required when using individual parameters
database = "/path/to/database.db" # Required: Path to SQLite database file
readonly = true                   # Optional: Limit to read-only operations (default: false)

## Example 6: SQLite in-memory (for testing)
[[sources]]
id = "test_db"             # Required: Unique identifier
dsn = "sqlite:///:memory:" # Required: Connection string (in-memory database)

# Connection Parameters Reference:
# -------------------------------
#
# REQUIRED FIELDS:
# ----------------
# - id: Unique identifier for this database source (string)
#
# AND one of:
#   Option A: DSN (connection string)
#   - dsn: Complete connection string (e.g., "postgres://user:pass@host:port/db")
#
#   Option B: Individual connection parameters
#   - type: Database type (postgres, mysql, mariadb, sqlserver, sqlite) [REQUIRED]
#   - database: Database name or file path [REQUIRED]
#   For network databases (postgres, mysql, mariadb, sqlserver):
#   - host: Database host [REQUIRED]
#   - user: Database username [REQUIRED]
#   - password: Database password [REQUIRED]
#   For SQLite: only database path is needed
#
# OPTIONAL FIELDS:
# ----------------
# Connection parameters:
# - port: Database port (default: 5432 for postgres, 3306 for mysql/mariadb, 1433 for sqlserver)
# - instanceName: SQL Server named instance (sqlserver only, e.g., "INSTANCE1")
#
# Execution options:
# - readonly: Limit to read-only operations (default: false)
# - max_rows: Maximum rows to return per query (default: unlimited, e.g., 1000)
# - connection_timeout: Connection timeout in seconds (default: driver-specific, e.g., 30, 60)
# - request_timeout: Query execution timeout in seconds (SQL Server only, default: 15, e.g., 120)
#
# SSH tunnel options (all optional, but if using SSH tunnel, ssh_host, ssh_user required):
# - ssh_host: SSH server hostname or IP
# - ssh_port: SSH server port (default: 22)
# - ssh_user: SSH username
# - ssh_key: Path to private key file (use ssh_key OR ssh_password, not both)
# - ssh_password: SSH password (use instead of ssh_key for password authentication)
# - ssh_passphrase: Passphrase for encrypted private key

# Default Port Numbers:
# - PostgreSQL: 5432
# - MySQL/MariaDB: 3306
# - SQL Server: 1433
# - SQLite: N/A (file-based)

# Usage Notes:
# ------------
# 1. The first [[sources]] entry is the default database
# 2. Access specific sources in MCP tools using the source_id parameter
# 3. Paths starting with ~/ will be expanded to your home directory
# 4. Passwords in DSN strings will be redacted in logs automatically
# 5. For security, consider using environment variables for sensitive data

# Custom Tools
# ------------
# Define custom SQL-based tools that are automatically registered as MCP tools.
# Each tool wraps a parameterized SQL query with validation and type checking.
## Example 1: Simple query with single parameter
[[tools]]
name = "get_user_by_email"
description = "Retrieve user details by email address from the production database"
source = "prod_pg" # Required: must reference a valid source ID
statement = "SELECT id, name, email, created_at FROM users WHERE email = $1"

[[tools.parameters]]
name = "email"
type = "string"
description = "The email address to search for"
required = true

## Example 2: Query with multiple parameters and default value
[[tools]]
name = "search_orders"
description = "Search orders by customer ID and optional status filter"
source = "prod_pg"
statement = "SELECT * FROM orders WHERE customer_id = $1 AND ($2::text IS NULL OR status = $2) ORDER BY created_at DESC"

[[tools.parameters]]
name = "customer_id"
type = "integer"
description = "The customer ID to search orders for"
required = true

[[tools.parameters]]
name = "status"
type = "string"
description = "Optional order status filter (pending, completed, cancelled)"
required = false
allowed_values = ["pending", "completed", "cancelled"]

## Example 3: MySQL tool with positional parameters
# [[tools]]
# name = "get_product_inventory"
# description = "Get current inventory level for a product"
# source = "staging_mysql" # References a MySQL source
# statement = "SELECT product_id, quantity, last_updated FROM inventory WHERE product_id = ?"
#
# [[tools.parameters]]
# name = "product_id"
# type = "integer"
# description = "The product ID to check inventory for"
# required = true

## Example 4: Analytics query with multiple integer parameters
# [[tools]]
# name = "daily_revenue"
# description = "Get total revenue for a specific date range"
# source = "prod_pg" # Must be an existing [[sources]] id; $1/$2 placeholders require a PostgreSQL source (Validation Rules 3 and 5)
# statement = "SELECT DATE(created_at) as date, SUM(amount) as revenue FROM orders WHERE DATE(created_at) BETWEEN $1 AND $2 GROUP BY DATE(created_at)"
#
# [[tools.parameters]]
# name = "start_date"
# type = "string"
# description = "Start date in YYYY-MM-DD format"
# required = true
#
# [[tools.parameters]]
# name = "end_date"
# type = "string"
# description = "End date in YYYY-MM-DD format"
# required = true

# Custom Tool Field Reference:
# ----------------------------
#
# REQUIRED FIELDS:
# ----------------
# - name: Unique tool identifier (string)
# - description: Natural language description of what the tool does
# - source: ID of the database source to execute against (must exist in [[sources]])
# - statement: SQL query with parameter placeholders
#   * PostgreSQL: Use $1, $2, $3 for parameters
#   * MySQL/MariaDB: Use ?, ?, ? for parameters
#   * SQL Server: Use @p1, @p2, @p3 for parameters
#   * SQLite: Use ?, ?, ? for parameters
#
# PARAMETER FIELDS (for [[tools.parameters]]):
# ---------------------------------------------
# - name: Parameter identifier (string)
# - type: Parameter type (string, integer, float, boolean, array)
# - description: Natural language description for the AI agent
# - required: Whether parameter is required (default: true)
# - default: Default value if not provided (makes parameter optional)
# - allowed_values: Array of valid values (creates enum validation)
#
# VALIDATION RULES:
# -----------------
# 1. Tool names cannot conflict with built-in tools (execute_sql, search_objects)
# 2. Tool names must be unique across all custom tools
# 3. Source ID must reference an existing [[sources]] entry
# 4. Parameter count in SQL must match number of [[tools.parameters]] entries
# 5. Parameter syntax must match the connector type:
#    - PostgreSQL requires $1, $2, etc.
#    - MySQL/MariaDB/SQLite require ?, ?, etc.
#    - SQL Server requires @p1, @p2, etc.
#
# SECURITY:
# ---------
# - Custom tools inherit the source's readonly and max_rows settings
# - Only parameterized queries are supported (no template interpolation)
# - All parameters are validated with Zod schemas at runtime
# - SQL injection prevention through native DB parameter binding
#
# USAGE:
# ------
# After defining tools in this file, they are automatically registered
# when the server starts. They appear in the MCP client's tool list alongside
# built-in tools like execute_sql and search_objects.

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/bytebase/dbhub'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.