index.ts (1.32 kB)
#!/usr/bin/env node
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import { createMCPServer } from './server.js';
import { runStdioServer } from './transports/stdio.js';
import { runHttpServer } from './transports/http.js';
import { homedir } from 'os';
import { join } from 'path';

// Server configuration: database location and Ollama embedding settings,
// each overridable via environment variables.
const config = {
  name: 'vec-memory-mcp',
  version: '1.0.0',
  memory: {
    database: {
      dbPath: process.env.MEMORY_DB_PATH || join(homedir(), '.vec-memory-mcp.db'),
      enableWAL: true
    },
    ollama: {
      baseUrl: process.env.OLLAMA_BASE_URL || 'http://localhost:11434',
      model: process.env.OLLAMA_MODEL || 'nomic-embed-text'
    }
  }
};

async function main() {
  // Parse CLI flags: --http switches from stdio to HTTP transport, --port sets the HTTP port.
  const argv = await yargs(hideBin(process.argv))
    .option('http', {
      type: 'boolean',
      default: false,
      description: 'Run as HTTP server for remote access (enables memory sharing across instances)'
    })
    .option('port', {
      type: 'number',
      default: 3000,
      description: 'Port for HTTP server (only used with --http)'
    })
    .help()
    .parse();

  try {
    const server = await createMCPServer(config);
    if (argv.http) {
      await runHttpServer(server, argv.port);
    } else {
      await runStdioServer(server);
    }
  } catch (error) {
    // Startup failed; exit non-zero. Nothing is printed because stdout is
    // reserved for the MCP stdio protocol.
    process.exit(1);
  }
}

main();
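
The transport modules imported above are not shown on this page. As a rough sketch of what ./transports/stdio.js could look like, assuming the server object is a Server from the official @modelcontextprotocol/sdk (the module paths and everything in this snippet are assumptions, not the project's actual code):

// Hypothetical sketch of ./transports/stdio.js -- not the project's source.
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';

// Connect the MCP server to stdin/stdout so a local client (e.g. an editor or
// agent runtime) can spawn this process and speak MCP over pipes.
export async function runStdioServer(server: Server): Promise<void> {
  const transport = new StdioServerTransport();
  await server.connect(transport);
  // connect() resolves once the transport has started; the process stays
  // alive for as long as the client keeps stdin open.
}

The --http path would presumably follow the same connect() pattern with an HTTP-based transport, but the details depend on ./transports/http.js, which is not shown here.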


MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/dlasky/mcp-memory-vec'
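
The same endpoint can be queried from TypeScript. This is a minimal sketch assuming the endpoint returns JSON; the response shape is not documented here:

// Fetch this server's directory entry from the Glama MCP API.
const response = await fetch('https://glama.ai/api/mcp/v1/servers/dlasky/mcp-memory-vec');
if (!response.ok) {
  throw new Error(`Request failed with status ${response.status}`);
}
const entry = await response.json();
console.log(entry);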

If you have feedback or need assistance with the MCP directory API, please join our Discord server.