We provide all the information about MCP servers via our MCP API:
curl -X GET 'https://glama.ai/api/mcp/v1/servers/danielsimonjr/memory-mcp'
If you have feedback or need assistance with the MCP directory API, please join our Discord server.
{
  "name": "chunking-for-files",
  "version": "1.0.0",
  "description": "Split and merge large files for editing within context limits",
  "main": "./dist/chunking-for-files.js",
  "bin": {
    "chunking-for-files": "./dist/chunking-for-files.js"
  },
  "files": [
    "dist"
  ],
  "scripts": {
    "build": "npm run build:ts && npm run build:exe",
    "build:ts": "tsc",
    "build:exe": "npx @yao-pkg/pkg . --target node22-win-x64 --output chunking-for-files.exe",
    "start": "node dist/chunking-for-files.js"
  },
  "pkg": {
    "scripts": ["dist/**/*.js"],
    "targets": ["node22-win-x64"]
  },
  "keywords": [
    "markdown",
    "chunker",
    "chunking",
    "split",
    "merge",
    "llm",
    "context"
  ],
  "author": "DeepThinking MCP",
  "license": "MIT",
  "devDependencies": {
    "@types/node": "^22",
    "@yao-pkg/pkg": "^6.11.0",
    "typescript": "^5.6.2"
  },
  "engines": {
    "node": ">=18.0.0"
  }
}