Skip to main content
Glama

MCP Smart Filesystem Server

by lofcz
pagination.ts (3.06 kB)
/** * File pagination utilities for reading large files in chunks */ // Get configuration from environment variables export const LINES_PER_CHUNK = parseInt(process.env.MCP_LINES_PER_PAGE || '500', 10); export const DEFAULT_MAX_RESULTS = parseInt(process.env.MCP_MAX_SEARCH_RESULTS || '100', 10); export interface PaginatedFileResult { path: string; content: string; startLine: number; endLine: number; totalLines: number; hasMore: boolean; nextStartLine?: number; hint?: string; } /** * Read a chunk of lines from file content * @param content Full file content * @param startLine Starting line number (0-indexed) * @param linesPerChunk Number of lines to read * @returns Paginated result */ export function paginateFileContent( filePath: string, content: string, startLine: number = 0, linesPerChunk: number = LINES_PER_CHUNK ): PaginatedFileResult { const lines = content.split('\n'); const totalLines = lines.length; // Validate startLine if (startLine < 0) { startLine = 0; } if (startLine >= totalLines) { return { path: filePath, content: '', startLine, endLine: startLine, totalLines, hasMore: false, hint: `Start line ${startLine} is beyond file end (total lines: ${totalLines})` }; } const endLine = Math.min(startLine + linesPerChunk - 1, totalLines - 1); const hasMore = endLine < totalLines - 1; const chunkLines = lines.slice(startLine, endLine + 1); const chunkContent = chunkLines.join('\n'); const result: PaginatedFileResult = { path: filePath, content: chunkContent, startLine, endLine, totalLines, hasMore }; if (hasMore) { result.nextStartLine = endLine + 1; result.hint = `To read next chunk, call read_file with start_line=${result.nextStartLine}`; } return result; } /** * Determine if file should be paginated based on size * @param totalLines Total number of lines in file * @param threshold Threshold for pagination * @returns true if file should be paginated */ export function shouldPaginate(totalLines: number, threshold: number = LINES_PER_CHUNK): boolean { return 
totalLines > threshold; } /** * Generate reading suggestions for large files * @param totalLines Total number of lines * @param linesPerChunk Lines per chunk * @returns Array of suggestion strings */ export function generateReadingSuggestions(totalLines: number, linesPerChunk: number = LINES_PER_CHUNK): string[] { if (totalLines <= linesPerChunk) { return ['File is small enough to read in one request']; } const numChunks = Math.ceil(totalLines / linesPerChunk); const suggestions: string[] = [ `File is large (${totalLines} lines). Suggested approach:`, `1. Use search_code or search_in_file to find specific sections first`, `2. Read in chunks: read_file with start_line=0, then ${linesPerChunk}, then ${linesPerChunk * 2}`, `3. Each chunk reads ~${linesPerChunk} lines`, `4. Total chunks needed: ${numChunks}` ]; return suggestions; }

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/lofcz/mcp-filesystem-smart'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.