get_chunk_count
Calculate the number of content chunks for file reading operations. Use before reading to determine required chunk requests based on file size and parameters.
Instructions
Get the total number of chunks that will be returned for a read_context request. Use this tool FIRST before reading content to determine how many chunks you need to request. The parameters should match what you'll use in read_context.
Input Schema
Table / JSON Schema
| Name | Required | Description | Default |
|---|---|---|---|
| path | Yes | Path to file or directory | |
| encoding | No | File encoding (e.g., utf8, ascii, latin1) | utf8 |
| maxSize | No | Maximum file size in bytes. Files larger than this will be chunked. | 1048576 |
| recursive | No | Whether to read directories recursively (includes subdirectories) | true |
| fileTypes | No | File extension(s) to include WITHOUT dots (e.g. ["ts", "js", "py"] or just "ts"). Empty/undefined means all files. | [] |
Implementation Reference
- src/index.ts:764-784 (handler) The primary handler function that implements the logic for the 'get_chunk_count' MCP tool. It extracts parameters from args, reads file contents info via readContent, computes total chunks using getTotalChunks, and returns a standardized JSON response.

  /**
   * Handler for the 'get_chunk_count' tool.
   * Destructures the same parameters that read_context accepts, so callers can
   * pass identical arguments to both tools (as the tool description instructs).
   */
  private async handleGetChunkCount(args: any) {
    const { path: filePath, encoding = 'utf8', maxSize, recursive = true, fileTypes } = args;
    try {
      // Collect the content of every matching file, honoring the
      // maxSize / recursive / fileTypes filters (shared with read_context).
      const filesInfo = await this.readContent(filePath, encoding as BufferEncoding, maxSize, recursive, fileTypes);
      // Chunk count derived from the combined formatted content length.
      const totalChunks = this.getTotalChunks(filesInfo);
      // Response also echoes the configured chunk size so the client knows
      // how the total was computed.
      return this.createJsonResponse({ totalChunks, chunkSize: this.config.chunkSize });
    } catch (error) {
      // Wrap filesystem errors with operation context before rethrowing.
      throw this.handleFileOperationError(error, 'get chunk count', filePath);
    }
  }
- src/index.ts:341-374 (registration) Registers the 'get_chunk_count' tool in the MCP server capabilities, providing its description and input schema definition.

  // Capability registration: tool description plus the JSON Schema used to
  // validate the tool's input arguments.
  get_chunk_count: {
    description: 'RUN ME ONE TIME BEFORE READING CONTENT\nGet the total number of chunks that will be returned for a read_context request.\nUse this tool FIRST before reading content to determine how many chunks you need to request.\nThe parameters should match what you\'ll use in read_context.',
    inputSchema: {
      type: 'object',
      properties: {
        path: { type: 'string', description: 'Path to file or directory' },
        encoding: { type: 'string', description: 'File encoding (e.g., utf8, ascii, latin1)', default: 'utf8' },
        // 1048576 bytes = 1 MiB default size ceiling before a file is skipped/chunked.
        maxSize: { type: 'number', description: 'Maximum file size in bytes. Files larger than this will be chunked.', default: 1048576 },
        recursive: { type: 'boolean', description: 'Whether to read directories recursively (includes subdirectories)', default: true },
        // Accepts either a single extension string or an array of extensions;
        // the empty-array default means "no filtering".
        fileTypes: { type: ['array', 'string'], items: { type: 'string' }, description: 'File extension(s) to include WITHOUT dots (e.g. ["ts", "js", "py"] or just "ts"). Empty/undefined means all files.', default: [] }
      },
      required: ['path']
    }
  },
- src/index.ts:925-933 (helper) Helper function called by the handler to compute the total number of chunks required based on the combined length of all matching file contents (formatted with file headers).

  /**
   * Computes how many fixed-size chunks are needed to return all matched content.
   * Measures length on a "File: <path>\n<content>\n" framing per file —
   * presumably the same framing read_context uses when emitting chunks,
   * so the count matches what the reader will actually receive (TODO confirm
   * against the read_context handler).
   */
  private getTotalChunks(filesInfo: FilesInfo): number {
    let totalContentLength = 0;
    for (const fileInfo of Object.values(filesInfo)) {
      totalContentLength += `File: ${fileInfo.path}\n${fileInfo.content}\n`.length;
    }
    // Round up: a final partial chunk still counts as one chunk.
    return Math.ceil(totalContentLength / this.config.chunkSize);
  }
- src/index.ts:789-903 (helper) Core helper function that gathers and caches content from matching files or directories based on path, recursive flag, fileTypes filter, maxSize limit, and default ignore patterns. Used by both get_chunk_count and read_context handlers.

  /**
   * Reads one file, or all matching files under a directory, into a FilesInfo
   * map keyed by absolute/glob-resolved path. Results carry content, an MD5
   * hash, size, and mtime. A per-file mtime-keyed cache avoids rereads of
   * unchanged files.
   *
   * NOTE(review): oversize handling differs by mode — a single file over
   * maxSize THROWS FileOperationError, while an oversize file found during a
   * directory walk is silently SKIPPED.
   */
  private async readContent(
    filePath: string,
    encoding: BufferEncoding = 'utf8',
    maxSize?: number,
    recursive: boolean = true,
    fileTypes?: string[] | string
  ): Promise<FilesInfo> {
    const filesInfo: FilesInfo = {};
    const absolutePath = path.resolve(filePath);
    // Normalize fileTypes to a lowercase, dot-stripped string[] (or undefined
    // for "no filtering"); accepts a bare string or an array.
    const cleanFileTypes = Array.isArray(fileTypes)
      ? fileTypes.map(ext => ext.toLowerCase().replace(/^\./, ''))
      : fileTypes
        ? [fileTypes.toLowerCase().replace(/^\./, '')]
        : undefined;
    await this.loggingService.debug('Reading content with file type filtering', {
      cleanFileTypes,
      absolutePath,
      operation: 'read_content'
    });
    // Handle single file
    if ((await fs.stat(absolutePath)).isFile()) {
      // Extension filter applies to single files too — a filtered-out file
      // yields an empty result rather than an error.
      if (cleanFileTypes && !cleanFileTypes.some(ext => absolutePath.toLowerCase().endsWith(`.${ext}`))) {
        return filesInfo;
      }
      const stat = await fs.stat(absolutePath);
      if (maxSize && stat.size > maxSize) {
        throw new FileOperationError(
          FileErrorCode.FILE_TOO_LARGE,
          `File ${absolutePath} exceeds maximum size limit of ${maxSize} bytes`,
          absolutePath
        );
      }
      // Check cache first; mtimeMs equality is the freshness test.
      const cached = this.fileContentCache.get(absolutePath);
      let content: string;
      if (cached && cached.lastModified === stat.mtimeMs) {
        content = cached.content;
      } else {
        content = await fs.readFile(absolutePath, encoding);
        this.fileContentCache.set(absolutePath, { content, lastModified: stat.mtimeMs });
      }
      // MD5 here is a change-detection fingerprint, not a security control.
      const hash = createHash('md5').update(content).digest('hex');
      filesInfo[absolutePath] = { path: absolutePath, content, hash, size: stat.size, lastModified: stat.mtimeMs };
      return filesInfo;
    }
    // Handle directory: use POSIX join for glob (glob patterns need forward
    // slashes even on Windows, hence the sep conversion).
    const pattern = recursive ? '**/*' : '*';
    const globPattern = path.posix.join(absolutePath.split(path.sep).join(path.posix.sep), pattern);
    const files = await this.globPromise(globPattern, {
      ignore: DEFAULT_IGNORE_PATTERNS,
      nodir: true,
      dot: false,     // dotfiles are excluded from directory walks
      cache: true,
      follow: false   // do not follow symlinks
    });
    // Read all matches concurrently; per-file failures are logged and skipped
    // so one unreadable file does not abort the whole collection.
    await Promise.all(files.map(async (file) => {
      if (cleanFileTypes && !cleanFileTypes.some(ext => file.toLowerCase().endsWith(`.${ext}`))) {
        return;
      }
      try {
        const stat = await fs.stat(file);
        // Oversize files are skipped (contrast with the single-file throw above).
        if (maxSize && stat.size > maxSize) {
          return;
        }
        // Check cache first
        const cached = this.fileContentCache.get(file);
        let content: string;
        if (cached && cached.lastModified === stat.mtimeMs) {
          content = cached.content;
        } else {
          content = await fs.readFile(file, encoding);
          this.fileContentCache.set(file, { content, lastModified: stat.mtimeMs });
        }
        const hash = createHash('md5').update(content).digest('hex');
        filesInfo[file] = { path: file, content, hash, size: stat.size, lastModified: stat.mtimeMs };
      } catch (error) {
        await this.loggingService.error('Error reading file for info collection', error as Error, {
          filePath: file,
          operation: 'get_files_info'
        });
      }
    }));
    return filesInfo;
  }
- src/index.ts:1604-1626 (registration) Tool dispatch logic in CallToolRequestSchema handler that routes 'get_chunk_count' calls to the handleGetChunkCount function.

  // Central tool router: maps the requested tool name to its handler method.
  switch (request.params.name) {
    case 'list_context_files': return await this.handleListFiles(request.params.arguments);
    case 'read_context': return await this.handleReadFile(request.params.arguments);
    case 'search_context': return await this.handleSearchFiles(request.params.arguments);
    // 'get_chunk_count' routes to the handler documented above.
    case 'get_chunk_count': return await this.handleGetChunkCount(request.params.arguments);
    case 'set_profile': return await this.handleSetProfile(request.params.arguments);
    case 'get_profile_context': return await this.handleGetProfileContext(request.params.arguments);
    case 'generate_outline': return await this.handleGenerateOutline(request.params.arguments);
    case 'getFiles': return await this.handleGetFiles(request.params.arguments);
    default:
      // Unknown tool names surface as an MCP MethodNotFound error.
      throw new McpError(
        ErrorCode.MethodNotFound,
        `Unknown tool: ${request.params.name}`
      );
  }