github.ts • 35.5 kB
import { Octokit } from '@octokit/rest';
import { GitHubRepoInfo, FileNode, DependencyInfo, RepositoryAnalysis } from '../types/index.js';
export class GitHubService {
// Octokit client used for every GitHub REST call in this service.
private octokit: Octokit;
// True when an API token was found in the environment at construction time.
private isAuthenticated: boolean;
// In-memory response cache keyed by "method:serialized-params".
private cache: Map<string, any> = new Map();
// Time-to-live for cache entries.
private cacheTimeout: number = 5 * 60 * 1000; // 5 minutes
/**
 * Builds the Octokit client from GITHUB_TOKEN or GITHUB_API_KEY.
 * Falls back to the unauthenticated public API (much lower rate limits)
 * when neither variable is set, and warns about it once.
 */
constructor() {
const token = process.env.GITHUB_TOKEN || process.env.GITHUB_API_KEY;
// Initialize with or without authentication
this.octokit = new Octokit({
auth: token,
});
this.isAuthenticated = !!token;
if (!this.isAuthenticated) {
console.warn('GitHub token not provided. Using public API with rate limits.');
}
}
/**
 * Builds a cache key from a method name and its parameters.
 * NOTE(review): JSON.stringify is key-order sensitive; callers pass object
 * literals with stable key order, so this is adequate here.
 */
private getCacheKey(method: string, params: any): string {
  const serialized = JSON.stringify(params);
  return `${method}:${serialized}`;
}
/**
 * Returns the cached value for `key` when present and still fresh
 * (younger than cacheTimeout), otherwise null.
 *
 * Fix: expired entries are now deleted on read; previously they stayed in
 * the map forever, so the cache grew without bound.
 */
private getCachedResult<T>(key: string): T | null {
  const cached = this.cache.get(key);
  if (!cached) {
    return null;
  }
  if (Date.now() - cached.timestamp < this.cacheTimeout) {
    return cached.data;
  }
  // Entry is stale: drop it so its memory can be reclaimed.
  this.cache.delete(key);
  return null;
}
/** Stores a value in the cache, stamped with the current time for TTL checks. */
private setCachedResult<T>(key: string, data: T): void {
  const entry = { data, timestamp: Date.now() };
  this.cache.set(key, entry);
}
/**
 * Runs `operation`, retrying failures up to `maxRetries` times with
 * exponential backoff (delay * 2^attempt).
 *
 * Rate-limit responses (HTTP 403 with x-ratelimit-remaining "0") get special
 * treatment: when the advertised reset is less than 5 minutes away, we sleep
 * until then and retry. Longer resets fall through to normal retry handling
 * so the caller sees the error promptly.
 *
 * Fix: parseInt now uses an explicit radix, and the wait is guarded with
 * Number.isFinite so a missing/garbled x-ratelimit-reset header cannot
 * produce a NaN-based sleep decision.
 *
 * @param operation async thunk to execute
 * @param maxRetries maximum number of attempts
 * @param delay base backoff delay in milliseconds
 */
private async withRetry<T>(
  operation: () => Promise<T>,
  maxRetries: number = 3,
  delay: number = 1000
): Promise<T> {
  for (let i = 0; i < maxRetries; i++) {
    try {
      return await operation();
    } catch (error: any) {
      if (error.status === 403 && error.response?.headers?.['x-ratelimit-remaining'] === '0') {
        const resetTime = parseInt(error.response.headers['x-ratelimit-reset'], 10) * 1000;
        const waitTime = resetTime - Date.now();
        // Only wait for resets under 5 minutes; note the continue still
        // consumes one loop iteration (an attempt), as before.
        if (Number.isFinite(waitTime) && waitTime > 0 && waitTime < 5 * 60 * 1000) {
          console.log(`Rate limit exceeded. Waiting ${Math.ceil(waitTime / 1000)} seconds...`);
          await new Promise(resolve => setTimeout(resolve, waitTime));
          continue;
        }
      }
      if (i === maxRetries - 1) {
        throw error;
      }
      // Exponential backoff before the next attempt.
      await new Promise(resolve => setTimeout(resolve, delay * Math.pow(2, i)));
    }
  }
  throw new Error('Max retries exceeded');
}
/**
 * Produces a full analysis bundle for a repository URL: metadata, declared
 * dependencies, architecture, code quality and refactoring potential.
 * Steps run sequentially; later analyses reuse the fetched repo info.
 */
async analyzeRepository(url: string): Promise<RepositoryAnalysis> {
  const info = await this.getRepositoryInfo(url);
  const dependencies = await this.analyzeDependencies(url);
  const architecture = await this.analyzeArchitecture(url, info);
  const codeQuality = await this.analyzeCodeQuality(url, info);
  const refactoringPotential = await this.analyzeRefactoringPotential(url, info);
  return { info, dependencies, architecture, codeQuality, refactoringPotential };
}
/**
 * Fetches repository metadata, language breakdown, the full file tree and a
 * curated set of key file contents. Results are cached for cacheTimeout ms.
 *
 * When the tree API fails (e.g. rate limiting), falls back to fetching just
 * README/package.json. The total line count is an estimate extrapolated from
 * the files actually downloaded.
 *
 * @throws Error for missing/inaccessible repos, rate limiting, or other API failures
 */
async getRepositoryInfo(url: string): Promise<GitHubRepoInfo> {
const { owner, repo } = this.parseGitHubUrl(url);
const cacheKey = this.getCacheKey('getRepositoryInfo', { owner, repo });
// Check cache first
const cached = this.getCachedResult<GitHubRepoInfo>(cacheKey);
if (cached) {
return cached;
}
try {
// Get repository info with retry logic
const { data: repoData } = await this.withRetry(() =>
this.octokit.rest.repos.get({ owner, repo })
);
// Get languages with retry logic
const { data: languages } = await this.withRetry(() =>
this.octokit.rest.repos.listLanguages({ owner, repo })
);
// Get file tree with retry logic and fallback for rate limits
let fileTree: FileNode[] = [];
let fileCount = 0;
let keyFiles: Record<string, string> = {};
try {
// Recursive tree of the default branch (branch name used as tree-ish).
const { data: treeData } = await this.withRetry(() =>
this.octokit.rest.git.getTree({
owner,
repo,
tree_sha: repoData.default_branch,
recursive: 'true',
})
);
fileTree = this.buildFileTree(treeData.tree);
// Only blob entries are files; tree entries are directories.
fileCount = treeData.tree.filter(item => item.type === 'blob').length;
// Fetch key files for comprehensive analysis
console.log(`Fetching key files for ${repoData.name}...`);
keyFiles = await this.getKeyRepositoryFiles(url, fileTree);
console.log(`Fetched ${Object.keys(keyFiles).length} key files`);
} catch (treeError: any) {
// If we hit rate limits on tree API, try to get basic structure
console.warn('Failed to fetch full file tree, falling back to basic analysis');
// Try to get at least README and package.json
try {
const basicFiles = ['README.md', 'README.txt', 'README', 'package.json'];
for (const fileName of basicFiles) {
try {
const content = await this.getFileContent(url, fileName);
keyFiles[fileName] = content;
} catch (fileError) {
// Skip files that don't exist
}
}
} catch (fallbackError) {
console.warn('Failed to fetch basic files, continuing with minimal info');
}
}
// Calculate actual line count from fetched files
const actualLineCount = Object.values(keyFiles).reduce((total, content) => {
return total + content.split('\n').length;
}, 0);
// Estimate total line count: average lines per fetched file extrapolated
// to the full file count (floored at 10 files so tiny samples don't
// collapse the estimate), or a rough 50-lines-per-file guess when nothing
// could be fetched.
const estimatedLineCount = actualLineCount > 0
? Math.floor((actualLineCount / Math.max(1, Object.keys(keyFiles).length)) * Math.max(fileCount, 10))
: Math.floor(Math.max(fileCount, 10) * 50);
const result: GitHubRepoInfo = {
name: repoData.name,
description: repoData.description,
owner: repoData.owner.login,
stars: repoData.stargazers_count,
language: repoData.language,
languages,
fileCount,
lineCount: estimatedLineCount,
fileTree,
keyFiles,
license: repoData.license?.name,
defaultBranch: repoData.default_branch,
createdAt: repoData.created_at,
updatedAt: repoData.updated_at,
};
// Cache the result
this.setCachedResult(cacheKey, result);
return result;
} catch (error: any) {
if (error.status === 404) {
throw new Error('Repository not found or not accessible');
}
if (error.status === 403 && error.message.includes('rate limit')) {
throw new Error(`GitHub API rate limit exceeded. Please provide a GitHub token for higher limits. Error: ${error.message}`);
}
throw new Error(`Failed to fetch repository: ${error.message}`);
}
}
/**
 * Fetches the repository's full file tree, optionally narrowed to a subtree.
 *
 * Fix: both API calls now go through withRetry, matching the retry behavior
 * of the other API helpers (getRepositoryInfo, getFileContent); previously
 * this method alone made raw calls and failed immediately on transient errors.
 *
 * @param url GitHub repository URL
 * @param path optional slash-separated path; when given, only that subtree's children are returned
 * @throws Error wrapping any underlying API failure
 */
async getFileTree(url: string, path?: string): Promise<FileNode[]> {
  const { owner, repo } = this.parseGitHubUrl(url);
  try {
    const { data: repoData } = await this.withRetry(() =>
      this.octokit.rest.repos.get({ owner, repo })
    );
    const { data: treeData } = await this.withRetry(() =>
      this.octokit.rest.git.getTree({
        owner,
        repo,
        tree_sha: repoData.default_branch,
        recursive: 'true',
      })
    );
    const fileTree = this.buildFileTree(treeData.tree);
    if (path) {
      return this.filterTreeByPath(fileTree, path);
    }
    return fileTree;
  } catch (error: any) {
    throw new Error(`Failed to fetch file tree: ${error.message}`);
  }
}
/**
 * Fetches a single file's contents (base64-decoded to UTF-8) via the GitHub
 * contents API, with caching and retry.
 *
 * @throws Error "File not found: ..." on 404, a rate-limit message on 403,
 *         or a generic wrapper for other failures (including directory paths,
 *         whose responses carry no `content` field).
 */
async getFileContent(url: string, filePath: string): Promise<string> {
const { owner, repo } = this.parseGitHubUrl(url);
const cacheKey = this.getCacheKey('getFileContent', { owner, repo, filePath });
// Check cache first
const cached = this.getCachedResult<string>(cacheKey);
if (cached) {
return cached;
}
try {
const { data } = await this.withRetry(() =>
this.octokit.rest.repos.getContent({
owner,
repo,
path: filePath,
})
);
// Files come back base64-encoded; directories/submodules have no
// `content` field and fall through to the throw below.
if ('content' in data) {
const content = Buffer.from(data.content, 'base64').toString('utf-8');
this.setCachedResult(cacheKey, content);
return content;
}
throw new Error('File content not available');
} catch (error: any) {
if (error.status === 404) {
throw new Error(`File not found: ${filePath}`);
}
if (error.status === 403 && error.message.includes('rate limit')) {
throw new Error(`GitHub API rate limit exceeded. Please provide a GitHub token for higher limits. Error: ${error.message}`);
}
throw new Error(`Failed to fetch file content: ${error.message}`);
}
}
/** Convenience wrapper: resolves the file tree, then downloads the key files from it. */
async getKeyFiles(url: string): Promise<Record<string, string>> {
  const tree = await this.getFileTree(url);
  return this.getKeyRepositoryFiles(url, tree);
}
async analyzeDependencies(url: string): Promise<DependencyInfo[]> {
const dependencies: DependencyInfo[] = [];
try {
// Check for package.json
try {
const packageJson = await this.getFileContent(url, 'package.json');
const pkg = JSON.parse(packageJson);
// Add regular dependencies
if (pkg.dependencies) {
for (const [name, version] of Object.entries(pkg.dependencies)) {
dependencies.push({
name,
version: version as string,
type: 'dependency',
source: 'package.json',
});
}
}
// Add dev dependencies
if (pkg.devDependencies) {
for (const [name, version] of Object.entries(pkg.devDependencies)) {
dependencies.push({
name,
version: version as string,
type: 'devDependency',
source: 'package.json',
});
}
}
// Add peer dependencies
if (pkg.peerDependencies) {
for (const [name, version] of Object.entries(pkg.peerDependencies)) {
dependencies.push({
name,
version: version as string,
type: 'peerDependency',
source: 'package.json',
});
}
}
} catch (error) {
// package.json not found, continue with other dependency files
}
// Check for requirements.txt
try {
const requirementsTxt = await this.getFileContent(url, 'requirements.txt');
const lines = requirementsTxt.split('\n').filter(line => line.trim() && !line.startsWith('#'));
for (const line of lines) {
const match = line.match(/^([^=><]+)([=><]=?.*)?$/);
if (match) {
dependencies.push({
name: match[1].trim(),
version: match[2] || '*',
type: 'dependency',
source: 'requirements.txt',
});
}
}
} catch (error) {
// requirements.txt not found
}
// Add more dependency file parsers as needed (Gemfile, Cargo.toml, etc.)
} catch (error: any) {
console.error('Error analyzing dependencies:', error.message);
}
return dependencies;
}
private async analyzeArchitecture(url: string, info: GitHubRepoInfo): Promise<any> {
// Analyze architecture patterns based on file structure and content
const patterns: string[] = [];
const frameworks: string[] = [];
// Detect frameworks based on dependencies and file patterns
const keyFiles = info.keyFiles;
// Check for React
if (keyFiles['package.json']?.includes('react')) {
frameworks.push('React');
}
// Check for Vue
if (keyFiles['package.json']?.includes('vue')) {
frameworks.push('Vue');
}
// Check for Angular
if (keyFiles['package.json']?.includes('@angular')) {
frameworks.push('Angular');
}
// Check for Express
if (keyFiles['package.json']?.includes('express')) {
frameworks.push('Express');
}
// Detect patterns based on file structure
const fileTree = info.fileTree;
const folders = this.extractFolders(fileTree);
// Check for MVC pattern
if (folders.includes('models') && folders.includes('views') && folders.includes('controllers')) {
patterns.push('MVC');
}
// Check for component-based architecture
if (folders.includes('components')) {
patterns.push('Component-based');
}
// Check for layered architecture
if (folders.includes('services') && folders.includes('models')) {
patterns.push('Layered');
}
return {
patterns,
frameworks,
structure: this.analyzeProjectStructure(fileTree),
entryPoints: this.findEntryPoints(keyFiles),
configFiles: this.findConfigFiles(keyFiles),
testFiles: this.findTestFiles(fileTree),
documentationFiles: this.findDocumentationFiles(keyFiles),
};
}
/**
 * Lightweight code-quality snapshot over the fetched key files: flags
 * oversized files, TODO/FIXME markers and files with many long lines, then
 * aggregates complexity/maintainability/duplication metrics.
 */
private async analyzeCodeQuality(url: string, info: GitHubRepoInfo): Promise<any> {
  const keyFiles = info.keyFiles;
  const codeSmells: string[] = [];
  for (const [filePath, content] of Object.entries(keyFiles)) {
    if (content.length > 10000) {
      codeSmells.push(`Large file: ${filePath}`);
    }
    if (content.includes('TODO') || content.includes('FIXME')) {
      codeSmells.push(`TODO/FIXME found in: ${filePath}`);
    }
    // Flag files where more than five lines exceed 120 characters.
    const longLineCount = content.split('\n').reduce(
      (count, line) => (line.length > 120 ? count + 1 : count),
      0
    );
    if (longLineCount > 5) {
      codeSmells.push(`Long lines in: ${filePath}`);
    }
  }
  return {
    complexity: this.calculateComplexity(keyFiles),
    maintainability: this.calculateMaintainability(keyFiles),
    duplicateCode: this.detectDuplicateCode(keyFiles),
    codeSmells,
  };
}
/**
 * Heuristically identifies refactoring opportunities in the fetched key
 * files: components that could be extracted, utility modules that could be
 * reused, and simple modernization candidates (var-only files).
 * All detection is name/substring based — no parsing.
 */
private async analyzeRefactoringPotential(url: string, info: GitHubRepoInfo): Promise<any> {
const keyFiles = info.keyFiles;
const extractableComponents: any[] = [];
const reusableUtilities: any[] = [];
const modernizationOpportunities: any[] = [];
// Analyze files for refactoring potential
for (const [filePath, content] of Object.entries(keyFiles)) {
// Look for extractable components (file-path heuristic only)
if (filePath.includes('component') || filePath.includes('Component')) {
extractableComponents.push({
name: this.extractComponentName(filePath),
path: filePath,
type: 'component',
dependencies: this.extractDependencies(content),
complexity: this.calculateFileComplexity(content),
reusabilityScore: this.calculateReusabilityScore(content),
description: this.extractDescription(content),
});
}
// Look for utility functions (file-path heuristic only)
if (filePath.includes('util') || filePath.includes('helper')) {
reusableUtilities.push({
name: this.extractUtilityName(filePath),
path: filePath,
functions: this.extractFunctions(content),
description: this.extractDescription(content),
dependencies: this.extractDependencies(content),
});
}
// Look for modernization opportunities: only flags files that use `var `
// and contain no `const `/`let ` at all.
if (content.includes('var ') && !content.includes('const ') && !content.includes('let ')) {
modernizationOpportunities.push({
type: 'syntax',
description: 'Use const/let instead of var',
files: [filePath],
suggestion: 'Replace var declarations with const/let',
impact: 'low',
});
}
}
return {
extractableComponents,
reusableUtilities,
configurationFiles: this.findConfigFiles(keyFiles),
boilerplateCode: this.findBoilerplateCode(keyFiles),
modernizationOpportunities,
};
}
/**
 * Extracts owner and repo from a GitHub URL.
 *
 * Fix: a trailing ".git" suffix is stripped and query/fragment characters no
 * longer leak into the repo name, so URLs like
 * "https://github.com/foo/bar.git" or ".../bar?tab=readme" resolve correctly.
 *
 * @throws Error when the URL contains no github.com/{owner}/{repo} path
 */
private parseGitHubUrl(url: string): { owner: string; repo: string } {
  const match = url.match(/github\.com\/([^\/]+)\/([^\/?#]+)/);
  if (!match) {
    throw new Error('Invalid GitHub URL format');
  }
  const owner = match[1];
  const repo = match[2].replace(/\.git$/, '');
  return { owner, repo };
}
/**
 * Converts the flat git tree listing (paths like "src/a/b.ts") into a nested
 * FileNode hierarchy, creating intermediate directory nodes on demand.
 *
 * Fixes: sorting now operates on a copy so the caller's array is not
 * reordered as a side effect, and the unused `pathMap` local is removed.
 */
private buildFileTree(gitTree: any[]): FileNode[] {
  const tree: FileNode[] = [];
  // Sort a copy by path so siblings end up in deterministic order.
  const sortedTree = [...gitTree].sort((a, b) => a.path.localeCompare(b.path));
  for (const item of sortedTree) {
    const pathParts = item.path.split('/');
    let currentLevel = tree;
    let currentPath = '';
    for (let i = 0; i < pathParts.length; i++) {
      const part = pathParts[i];
      currentPath = currentPath ? `${currentPath}/${part}` : part;
      let existingItem = currentLevel.find(node => node.name === part);
      if (!existingItem) {
        // Only the terminal segment of a blob entry is a file; every other
        // segment along the way is a directory.
        const isFile = i === pathParts.length - 1 && item.type === 'blob';
        existingItem = {
          name: part,
          path: currentPath,
          type: isFile ? 'file' : 'directory',
          children: isFile ? undefined : [],
          size: isFile ? item.size : undefined,
          // NOTE(review): directories created implicitly inherit the sha of
          // whichever entry created them first — confirm this is intended.
          sha: item.sha,
        };
        currentLevel.push(existingItem);
      }
      if (existingItem.children) {
        currentLevel = existingItem.children;
      }
    }
  }
  return tree;
}
/**
 * Walks the tree along the slash-separated path and returns the children at
 * that location, or an empty array when the path does not resolve to a node
 * with children (missing path, or a file).
 */
private filterTreeByPath(tree: FileNode[], path: string): FileNode[] {
  let level = tree;
  for (const segment of path.split('/')) {
    const next = level.find(node => node.name === segment);
    if (!next?.children) {
      return [];
    }
    level = next.children;
  }
  return level;
}
/**
 * Selects and downloads a representative subset of repository files for
 * analysis: well-known manifests/configs first (priority 3), then source
 * code (priority 2), then everything else, smaller files first, subject to a
 * rough total-size budget and file-count limits. Unfetchable files are
 * skipped silently.
 */
private async getKeyRepositoryFiles(url: string, fileTree: FileNode[]): Promise<Record<string, string>> {
const keyFiles: Record<string, string> = {};
// Priority files to include for comprehensive analysis
const priorityPatterns = [
/^README\.md$/i,
/^README\.txt$/i,
/^CONTRIBUTING\.md$/i,
/^LICENSE$/i,
/^package\.json$/i,
/^pyproject\.toml$/i,
/^requirements\.txt$/i,
/^Cargo\.toml$/i,
/^go\.mod$/i,
/^pom\.xml$/i,
/^build\.gradle$/i,
/^Dockerfile$/i,
/^docker-compose\.yml$/i,
/^\.gitignore$/i,
/^tsconfig\.json$/i,
/^webpack\.config\./i,
/^vite\.config\./i,
/^next\.config\./i,
/^tailwind\.config\./i,
];
// Get all files, prioritizing smaller ones and key configuration files
const allFiles: Array<{
path: string;
size: number;
priority: number;
isSmall: boolean;
}> = [];
// Recursively flatten the tree into candidate entries with a priority tag:
// 3 = known manifest/config name, 2 = source code extension, 1 = other.
const collectFiles = (nodes: FileNode[], currentPath = '') => {
for (const node of nodes) {
if (node.type === 'file') {
const filePath = currentPath ? `${currentPath}/${node.name}` : node.name;
const isHighPriority = priorityPatterns.some(pattern => pattern.test(node.name));
const isSmallFile = (node.size || 0) < 10000; // Files under 10KB
const isCodeFile = /\.(js|ts|jsx|tsx|py|java|cpp|c|h|go|rs|php|rb|swift|kt|dart)$/i.test(node.name);
allFiles.push({
path: filePath,
size: node.size || 0,
priority: isHighPriority ? 3 : (isCodeFile ? 2 : 1),
isSmall: isSmallFile
});
} else if (node.children) {
const newPath = currentPath ? `${currentPath}/${node.name}` : node.name;
collectFiles(node.children, newPath);
}
}
};
collectFiles(fileTree);
// Sort by priority, then by size (smaller first)
allFiles.sort((a, b) => {
if (a.priority !== b.priority) return b.priority - a.priority;
return a.size - b.size;
});
// Fetch files up to ~500KB total content to stay within reasonable limits
let totalSize = 0;
const maxTotalSize = 500000; // 500KB
for (const file of allFiles) {
// NOTE(review): the budget check uses the tree-reported blob size but
// totalSize accumulates decoded content length below — these can drift
// slightly; confirm acceptable.
if (totalSize + file.size > maxTotalSize && Object.keys(keyFiles).length > 10) {
break; // Stop if we've reached size limit and have enough files
}
try {
const content = await this.getFileContent(url, file.path);
keyFiles[file.path] = content;
totalSize += content.length;
// Always include high priority files regardless of size constraints.
// Because the list is sorted by priority, this break can only fire after
// every priority-3 file has already been fetched.
if (file.priority < 3 && Object.keys(keyFiles).length > 20) {
break; // Limit to ~20 files for non-priority files
}
} catch (error) {
// Skip files that can't be fetched (binary, too large, etc.)
continue;
}
}
return keyFiles;
}
// Helper methods for analysis
/** Collects the names of every directory node in the tree (pre-order, recursive). */
private extractFolders(tree: FileNode[]): string[] {
  const result: string[] = [];
  const walk = (nodes: FileNode[]): void => {
    for (const node of nodes) {
      if (node.type !== 'directory') continue;
      result.push(node.name);
      if (node.children) {
        walk(node.children);
      }
    }
  };
  walk(tree);
  return result;
}
/**
 * Classifies the project layout (monorepo / multi-package / single-package)
 * and maps conventional folder roles to the folder names actually present.
 *
 * Fix: each role now picks the first matching candidate. Previously a "lib"
 * folder unconditionally overwrote an existing "src" mapping, unlike every
 * other role which used a first-match ternary.
 */
private analyzeProjectStructure(fileTree: FileNode[]): any {
  const folders = this.extractFolders(fileTree);
  // Determine project type from well-known layout markers.
  let type = 'single-package';
  if (folders.includes('packages') || folders.includes('apps')) {
    type = 'monorepo';
  } else if (folders.includes('lib') && folders.includes('dist')) {
    type = 'multi-package';
  }
  // Map folder roles, preferring the first (most conventional) candidate.
  const folderMapping: Record<string, string> = {};
  const pickFirst = (role: string, candidates: string[]): void => {
    const found = candidates.find(name => folders.includes(name));
    if (found) {
      folderMapping[role] = found;
    }
  };
  pickFirst('src', ['src', 'lib']);
  pickFirst('tests', ['test', 'tests']);
  pickFirst('docs', ['docs', 'documentation']);
  pickFirst('config', ['config', 'configs']);
  pickFirst('build', ['build', 'dist']);
  pickFirst('public', ['public', 'static']);
  return {
    type,
    folders: folderMapping,
  };
}
/** Returns fetched file paths that look like application entry points. */
private findEntryPoints(keyFiles: Record<string, string>): string[] {
  const markers = ['index.', 'main.', 'app.'];
  return Object.keys(keyFiles).filter(filePath =>
    markers.some(marker => filePath.includes(marker))
  );
}
/** Returns fetched file paths that look like build/tool configuration. */
private findConfigFiles(keyFiles: Record<string, string>): string[] {
  const markers = [
    'config', '.config.', 'webpack', 'vite',
    'tsconfig', 'babel', 'eslint', 'prettier',
  ];
  return Object.keys(keyFiles).filter(filePath =>
    markers.some(marker => filePath.includes(marker))
  );
}
/**
 * Collects paths of files that appear to be tests: the file name contains
 * ".test." / ".spec.", or some path segment is a test directory.
 *
 * Fix: directory detection now compares whole path segments instead of raw
 * substrings — `path.includes('test')` used to false-positive on paths like
 * "contest/entry.ts" or "latest.ts".
 */
private findTestFiles(fileTree: FileNode[]): string[] {
  const testDirs = new Set(['test', 'tests', '__tests__', 'spec', 'specs']);
  const testFiles: string[] = [];
  const traverse = (nodes: FileNode[]): void => {
    for (const node of nodes) {
      if (node.type === 'file') {
        const inTestDir = node.path.split('/').some(segment => testDirs.has(segment));
        if (node.name.includes('.test.') || node.name.includes('.spec.') || inTestDir) {
          testFiles.push(node.path);
        }
      } else if (node.children) {
        traverse(node.children);
      }
    }
  };
  traverse(fileTree);
  return testFiles;
}
/** Returns fetched file paths that look like documentation. */
private findDocumentationFiles(keyFiles: Record<string, string>): string[] {
  const markers = ['README', 'CHANGELOG', 'CONTRIBUTING', '.md', 'docs/', 'documentation/'];
  return Object.keys(keyFiles).filter(filePath =>
    markers.some(marker => filePath.includes(marker))
  );
}
/** Average per-file complexity across all fetched key files (0 when none). */
private calculateComplexity(keyFiles: Record<string, string>): number {
  const contents = Object.values(keyFiles);
  if (contents.length === 0) {
    return 0;
  }
  const total = contents.reduce(
    (sum, content) => sum + this.calculateFileComplexity(content),
    0
  );
  return total / contents.length;
}
/**
 * Heuristic cyclomatic-style complexity for one file: 1 (base path) plus the
 * count of branch keywords and short-circuit/ternary operators.
 *
 * Fixes: the bare ":" counter is removed — a colon appears in every TS type
 * annotation, object literal and case label, which grossly inflated scores
 * ("?" alone approximates ternary/optional logic). The per-pattern try/catch
 * is also dropped: the patterns are regex literals and String.match on them
 * cannot throw.
 */
private calculateFileComplexity(content: string): number {
  const complexityPatterns: RegExp[] = [
    /\bif\b/g,
    /\belse\b/g,
    /\bfor\b/g,
    /\bwhile\b/g,
    /\bswitch\b/g,
    /\bcase\b/g,
    /\bcatch\b/g,
    /\bthrow\b/g,
    /&&/g,
    /\|\|/g,
    /\?/g,
    /\breturn\b/g,
  ];
  let complexity = 1; // base complexity for the linear path
  for (const pattern of complexityPatterns) {
    const matches = content.match(pattern);
    if (matches) {
      complexity += matches.length;
    }
  }
  return complexity;
}
/**
 * Maintainability heuristic: starts at 100 and subtracts penalties for long
 * files (>500 lines), long lines (>120 chars) and a comment ratio under 10%.
 * Clamped to a minimum of 0.
 */
private calculateMaintainability(keyFiles: Record<string, string>): number {
  let score = 100;
  for (const content of Object.values(keyFiles)) {
    const lines = content.split('\n');
    if (lines.length > 500) {
      score -= 10; // long-file penalty
    }
    const longLineCount = lines.filter(line => line.length > 120).length;
    score -= longLineCount * 0.1;
    const commentCount = lines.filter(line => {
      const trimmed = line.trim();
      return trimmed.startsWith('//') || trimmed.startsWith('/*');
    }).length;
    if (commentCount / lines.length < 0.1) {
      score -= 5; // sparsely-commented-file penalty
    }
  }
  return Math.max(0, score);
}
/**
 * Counts distinct 3-line windows (of substantial length) that occur more
 * than once across the fetched files — a rough duplicate-code indicator.
 */
private detectDuplicateCode(keyFiles: Record<string, string>): number {
  const blockCounts = new Map<string, number>();
  for (const content of Object.values(keyFiles)) {
    const lines = content.split('\n');
    // Slide a 3-line window over each file.
    for (let start = 0; start + 3 <= lines.length; start++) {
      const block = lines.slice(start, start + 3).join('\n').trim();
      if (block.length > 50) { // ignore short/trivial windows
        blockCounts.set(block, (blockCounts.get(block) ?? 0) + 1);
      }
    }
  }
  let duplicates = 0;
  for (const count of blockCounts.values()) {
    if (count > 1) duplicates++;
  }
  return duplicates;
}
/** Derives a component name from a path: last segment minus the JS/TS extension. */
private extractComponentName(filePath: string): string {
  const fileName = filePath.split('/').pop() ?? filePath;
  return fileName.replace(/\.(js|ts|jsx|tsx)$/, '');
}
/** Derives a utility-module name from a path: last segment minus the .js/.ts extension. */
private extractUtilityName(filePath: string): string {
  const fileName = filePath.split('/').pop() ?? filePath;
  return fileName.replace(/\.(js|ts)$/, '');
}
/** Lists module specifiers referenced via ES `import ... from` or CommonJS `require(...)`. */
private extractDependencies(content: string): string[] {
  const dependencies: string[] = [];
  const patterns = [
    /import\s+.*?\s+from\s+['"]([^'"]+)['"]/g,
    /require\(['"]([^'"]+)['"]\)/g,
  ];
  for (const pattern of patterns) {
    for (const match of content.matchAll(pattern)) {
      dependencies.push(match[1]);
    }
  }
  return dependencies;
}
/**
 * Extracts likely function names: `function foo`, `const foo = (`, and
 * object/interface method shapes like `foo: (`.
 */
private extractFunctions(content: string): string[] {
  const names: string[] = [];
  const pattern = /function\s+(\w+)|const\s+(\w+)\s*=\s*\(|(\w+)\s*:\s*\(/g;
  for (const match of content.matchAll(pattern)) {
    const name = match[1] ?? match[2] ?? match[3];
    if (name) {
      names.push(name);
    }
  }
  return names;
}
/**
 * Pulls a short description from the first JSDoc summary line, falling back
 * to the first line comment, then to a fixed placeholder.
 */
private extractDescription(content: string): string {
  const jsdocMatch = content.match(/\/\*\*\s*\n\s*\*\s*([^*]+)/);
  if (jsdocMatch) {
    return jsdocMatch[1].trim();
  }
  const lineCommentMatch = content.match(/\/\/\s*(.+)/);
  return lineCommentMatch ? lineCommentMatch[1].trim() : 'No description available';
}
/**
 * Scores (0–100) how reusable a file's code looks: bonuses for pure-looking
 * functions, TypeScript type declarations and documentation; a penalty for
 * React coupling. All checks are substring heuristics.
 */
private calculateReusabilityScore(content: string): number {
  let score = 50; // neutral baseline
  const looksPure =
    content.includes('function') &&
    !content.includes('this.') &&
    !content.includes('document.');
  if (looksPure) {
    score += 20;
  }
  if (content.includes('interface') || content.includes('type ')) {
    score += 15;
  }
  if (content.includes('/**') || content.includes('//')) {
    score += 10;
  }
  if (content.includes('import') && content.includes('react')) {
    score -= 10;
  }
  return Math.min(100, Math.max(0, score));
}
/** Flags files containing well-known boilerplate shapes (React, Express, test suites). */
private findBoilerplateCode(keyFiles: Record<string, string>): string[] {
  const boilerplate: string[] = [];
  // Each entry: label + predicate over a file's content, checked in order.
  const checks: Array<[string, (content: string) => boolean]> = [
    ['React component boilerplate', c => c.includes('export default') && c.includes('import React')],
    ['Express setup boilerplate', c => c.includes('app.use(') && c.includes('express')],
    ['Test boilerplate', c => c.includes('describe(') && c.includes('it(')],
  ];
  for (const [filePath, content] of Object.entries(keyFiles)) {
    for (const [label, matches] of checks) {
      if (matches(content)) {
        boilerplate.push(`${label} in ${filePath}`);
      }
    }
  }
  return boilerplate;
}
// Additional methods for MCP tool handlers
/**
 * Case-insensitive substring search across the repository's key files.
 *
 * Fix: the context window is now symmetric (up to 3 lines before and after
 * the match). Previously a 1-based line number was used in 0-based slice
 * arithmetic, yielding 2 lines before and 3 after.
 *
 * @param query text matched case-insensitively per line
 * @param options options.include_context — when truthy, each hit carries surrounding lines
 */
async searchInRepository(url: string, query: string, options: any = {}): Promise<any> {
  const keyFiles = await this.getKeyFiles(url);
  const needle = query.toLowerCase();
  const searchResults = [];
  for (const [filePath, content] of Object.entries(keyFiles)) {
    const lines = content.split('\n');
    for (let index = 0; index < lines.length; index++) {
      const line = lines[index];
      if (!line.toLowerCase().includes(needle)) continue;
      searchResults.push({
        file: filePath,
        line: index + 1, // report 1-based line numbers
        content: line.trim(),
        context: options.include_context
          ? lines.slice(Math.max(0, index - 3), index + 4)
          : [],
        type: 'exact',
      });
    }
  }
  return {
    query,
    results: searchResults,
    totalMatches: searchResults.length,
    filesSearched: Object.keys(keyFiles).length,
    searchTime: Date.now(),
  };
}
/**
 * Builds a coarse structural summary (functions, imports, complexity) from
 * the fetched key files, optionally restricted to specific file paths.
 * Extraction is regex-based, so function entries carry placeholder
 * positions/parameters.
 */
async analyzeCodeStructure(url: string, file_paths?: string[], options: any = {}): Promise<any> {
const keyFiles = await this.getKeyFiles(url);
const codeStructure: any = {
functions: [],
classes: [],
imports: [],
exports: [],
complexity: {
cyclomatic: 0,
cognitive: 0,
maintainability: 0,
},
};
for (const [filePath, content] of Object.entries(keyFiles)) {
// When a file_paths filter is given, skip files not listed in it.
if (file_paths && !file_paths.includes(filePath)) continue;
// Extract functions
const functions = this.extractFunctions(content);
codeStructure.functions.push(...functions.map(func => ({
name: func,
signature: `function ${func}()`,
startLine: 0, // placeholder: regex extraction tracks no positions
endLine: 0,
complexity: 1,
parameters: [],
documentation: '',
})));
// Extract imports
const imports = this.extractDependencies(content);
codeStructure.imports.push(...imports.map(imp => ({
source: imp,
imports: [],
type: 'import',
isExternal: !imp.startsWith('.'), // relative specifiers count as internal
})));
// Calculate complexity
codeStructure.complexity.cyclomatic += this.calculateFileComplexity(content);
}
// NOTE(review): maintainability is computed over ALL key files even when
// file_paths filtered the loop above — confirm intended.
codeStructure.complexity.maintainability = this.calculateMaintainability(keyFiles);
return codeStructure;
}
/**
 * Aggregates repository-level metrics from the key files and repo info.
 * NOTE(review): "cognitive" currently reuses the cyclomatic calculation, and
 * class/internal/circular counts are hard-coded placeholders — confirm
 * before relying on them.
 */
async calculateMetrics(url: string, options: any = {}): Promise<any> {
const keyFiles = await this.getKeyFiles(url);
const repoInfo = await this.getRepositoryInfo(url);
return {
complexity: {
cyclomatic: this.calculateComplexity(keyFiles),
cognitive: this.calculateComplexity(keyFiles),
maintainability: this.calculateMaintainability(keyFiles),
},
quality: {
score: this.calculateMaintainability(keyFiles),
issues: [],
},
size: {
lines: repoInfo.lineCount,
files: repoInfo.fileCount,
// Function names counted across the concatenation of all fetched files.
functions: this.extractFunctions(Object.values(keyFiles).join('\n')).length,
classes: 0,
},
dependencies: {
external: (await this.analyzeDependencies(url)).length,
internal: 0,
circular: [],
},
};
}
/** Public wrapper around the private architecture analysis: resolves repo info first. */
async analyzeArchitecturePublic(url: string, options: any = {}): Promise<any> {
  const info = await this.getRepositoryInfo(url);
  return this.analyzeArchitecture(url, info);
}
/**
 * Analyzes each implementation's repository in turn and bundles the results
 * together with the requested comparison criteria. Analyses run one at a
 * time, which keeps concurrent API usage low.
 */
async compareRepositories(implementations: any[], comparison_criteria: string[], options: any = {}): Promise<any> {
  const comparisons = [];
  for (const impl of implementations) {
    comparisons.push({
      name: impl.name,
      url: impl.url,
      analysis: await this.analyzeRepository(impl.url),
    });
  }
  return {
    implementations: comparisons,
    criteria: comparison_criteria,
    summary: 'Repository comparison completed',
  };
}
/**
 * Runs the requested validation types and summarizes overall quality from
 * the maintainability score and detected code smells.
 * NOTE(review): each per-type validation is a stub that always reports
 * "passed" with a fixed score of 80 — confirm before relying on it.
 */
async validateCodeQuality(url: string, validation_types: string[], options: any = {}): Promise<any> {
  const repoInfo = await this.getRepositoryInfo(url);
  const codeQuality = await this.analyzeCodeQuality(url, repoInfo);
  const validations = validation_types.map(type => ({
    type,
    status: 'passed',
    issues: [],
    score: 80,
  }));
  return {
    validations,
    overall: {
      score: codeQuality.maintainability,
      issues: codeQuality.codeSmells,
      recommendations: ['Add more tests', 'Improve documentation'],
    },
  };
}
/**
 * Reports current GitHub API rate-limit status, or an error payload when the
 * rate-limit endpoint itself cannot be reached. Never throws.
 */
async checkApiLimits(): Promise<any> {
  const base = { authenticated: this.isAuthenticated };
  try {
    const { data } = await this.octokit.rest.rateLimit.get();
    const { core, search } = data.resources;
    return { status: 'healthy', core, search, ...base };
  } catch (error: any) {
    return { status: 'error', error: error.message, ...base };
  }
}
}