Smart Code Search MCP Server

complexity_analyzer.py (12.6 kB)

"""Cyclomatic and Cognitive Complexity Analyzer"""
import ast
import re
from pathlib import Path
from typing import List, Dict, Any, Optional, Union

from .base_metrics import BaseMetricsAnalyzer, FileMetrics, FunctionMetrics, ClassMetrics


class ComplexityAnalyzer(BaseMetricsAnalyzer):
    """Analyzes cyclomatic and cognitive complexity of code"""

    def __init__(self, language: str = "python"):
        """Initialize complexity analyzer"""
        super().__init__(language)
        self.current_nesting = 0
        self.max_nesting = 0

    def analyze_file(self, file_path: Path, content: str) -> FileMetrics:
        """
        Analyze complexity metrics for a file

        Args:
            file_path: Path to the file
            content: File content

        Returns:
            FileMetrics with complexity information
        """
        metrics = FileMetrics(file_path=str(file_path))

        # Count lines
        line_counts = self.count_lines(content)
        metrics.lines_of_code = line_counts['total']
        metrics.source_lines_of_code = line_counts['code']
        metrics.comment_lines = line_counts['comment']
        metrics.blank_lines = line_counts['blank']

        if self.language == "python":
            self._analyze_python(content, metrics)
        elif self.language in ["javascript", "typescript"]:
            self._analyze_javascript(content, metrics)

        # Calculate aggregate metrics
        if metrics.functions:
            metrics.cyclomatic_complexity = sum(
                f.cyclomatic_complexity for f in metrics.functions
            )
            metrics.cognitive_complexity = sum(
                f.cognitive_complexity for f in metrics.functions
            )
            metrics.max_nesting_depth = max(
                f.nesting_depth for f in metrics.functions
            )

        # Detect issues
        metrics.issues = self.detect_issues(metrics)

        return metrics

    def _analyze_python(self, content: str, metrics: FileMetrics):
        """Analyze Python code using AST"""
        try:
            tree = ast.parse(content)

            # Visit all nodes
            for node in ast.walk(tree):
                if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
                    func_metrics = self._analyze_python_function(node, content)
                    metrics.functions.append(func_metrics)
                elif isinstance(node, ast.ClassDef):
                    class_metrics = self._analyze_python_class(node, content)
                    metrics.classes.append(class_metrics)
        except SyntaxError:
            # If AST parsing fails, fall back to basic analysis
            metrics.cyclomatic_complexity = self.calculate_cyclomatic_complexity(content)
            metrics.cognitive_complexity = self.calculate_cognitive_complexity(content)

    def _analyze_python_function(self, node: Union[ast.FunctionDef, ast.AsyncFunctionDef],
                                 content: str) -> FunctionMetrics:
        """Analyze a Python function node"""
        func = FunctionMetrics(
            name=node.name,
            line_number=node.lineno,
            parameters=len(node.args.args)
        )

        # Reset nesting tracking
        self.current_nesting = 0
        self.max_nesting = 0

        # Calculate cyclomatic complexity
        func.cyclomatic_complexity = self._calculate_python_cyclomatic(node)

        # Calculate cognitive complexity
        func.cognitive_complexity = self._calculate_python_cognitive(node)

        # Count lines
        if hasattr(node, 'end_lineno'):
            func.lines_of_code = node.end_lineno - node.lineno + 1

        # Count return statements
        func.return_points = sum(
            1 for n in ast.walk(node) if isinstance(n, ast.Return)
        )

        # Record max nesting depth
        func.nesting_depth = self.max_nesting

        return func

    def _analyze_python_class(self, node: ast.ClassDef, content: str) -> ClassMetrics:
        """Analyze a Python class node"""
        cls = ClassMetrics(
            name=node.name,
            line_number=node.lineno
        )

        # Count methods and fields
        for item in node.body:
            if isinstance(item, (ast.FunctionDef, ast.AsyncFunctionDef)):
                cls.methods_count += 1
                # Add method complexity to weighted methods
                method_complexity = self._calculate_python_cyclomatic(item)
                cls.weighted_methods += method_complexity
            elif isinstance(item, ast.Assign):
                cls.fields_count += len(item.targets)

        # Count inheritance depth (simplified - just direct bases)
        cls.inheritance_depth = len(node.bases)

        return cls

    def _calculate_python_cyclomatic(self, node: ast.AST) -> int:
        """
        Calculate cyclomatic complexity for a Python AST node

        Cyclomatic complexity = 1 + number of decision points
        """
        complexity = 1

        for child in ast.walk(node):
            # Decision points
            if isinstance(child, (ast.If, ast.While, ast.For, ast.AsyncFor)):
                complexity += 1
            elif isinstance(child, ast.ExceptHandler):
                complexity += 1
            elif isinstance(child, ast.With):
                complexity += 1
            elif isinstance(child, ast.Assert):
                complexity += 1
            elif isinstance(child, ast.comprehension):
                complexity += 1
            # Boolean operators add complexity
            elif isinstance(child, ast.BoolOp):
                # Each 'and'/'or' adds a branch
                complexity += len(child.values) - 1

        return complexity

    def _calculate_python_cognitive(self, node: ast.AST, nesting: int = 0) -> int:
        """
        Calculate cognitive complexity for a Python AST node

        Cognitive complexity considers:
        - Nesting depth
        - Breaks in linear flow
        - Logical operators
        """
        complexity = 0

        # Track max nesting
        if nesting > self.max_nesting:
            self.max_nesting = nesting

        for child in node.body if hasattr(node, 'body') else []:
            if isinstance(child, ast.If):
                # If statements add 1 + nesting level
                complexity += 1 + nesting
                # Analyze if body
                complexity += self._calculate_python_cognitive(child, nesting + 1)
                # Analyze else body
                if child.orelse:
                    if len(child.orelse) == 1 and isinstance(child.orelse[0], ast.If):
                        # elif doesn't increase nesting
                        complexity += self._calculate_python_cognitive(child.orelse[0], nesting)
                    else:
                        complexity += 1  # else adds 1
                        for else_child in child.orelse:
                            complexity += self._calculate_python_cognitive(else_child, nesting + 1)
            elif isinstance(child, (ast.For, ast.AsyncFor, ast.While)):
                # Loops add 1 + nesting level
                complexity += 1 + nesting
                complexity += self._calculate_python_cognitive(child, nesting + 1)
            elif isinstance(child, ast.ExceptHandler):
                # Exception handling adds complexity
                complexity += 1 + nesting
                for handler_child in child.body:
                    complexity += self._calculate_python_cognitive(handler_child, nesting + 1)
            elif isinstance(child, ast.With):
                # Context managers add slight complexity
                complexity += 1
                complexity += self._calculate_python_cognitive(child, nesting)
            elif isinstance(child, ast.BoolOp):
                # Boolean operators add complexity
                complexity += len(child.values) - 1
            elif hasattr(child, 'body'):
                # Recursively analyze nested structures
                complexity += self._calculate_python_cognitive(child, nesting)

        return complexity

    def _analyze_javascript(self, content: str, metrics: FileMetrics):
        """Analyze JavaScript/TypeScript code using regex patterns"""
        # Basic pattern-based analysis for JS/TS
        # In production, would use a proper parser like Esprima or Tree-sitter

        # Find functions
        function_pattern = r'(?:function\s+(\w+)|(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?(?:function|\([^)]*\)\s*=>))'
        for match in re.finditer(function_pattern, content):
            func_name = match.group(1) or match.group(2) or "anonymous"
            line_num = content[:match.start()].count('\n') + 1

            func = FunctionMetrics(
                name=func_name,
                line_number=line_num
            )

            # Simple complexity estimation for JS
            func.cyclomatic_complexity = self._estimate_js_complexity(content, match.start())
            func.cognitive_complexity = func.cyclomatic_complexity  # Simplified

            metrics.functions.append(func)

        # Find classes
        class_pattern = r'class\s+(\w+)'
        for match in re.finditer(class_pattern, content):
            class_name = match.group(1)
            line_num = content[:match.start()].count('\n') + 1

            cls = ClassMetrics(
                name=class_name,
                line_number=line_num
            )
            metrics.classes.append(cls)

    def _estimate_js_complexity(self, content: str, start_pos: int) -> int:
        """Estimate complexity for a JavaScript function"""
        # Find the function body by matching braces (simplified)
        brace_count = 0
        in_function = False
        func_content = ""

        for i in range(start_pos, len(content)):
            char = content[i]
            if char == '{':
                brace_count += 1
                in_function = True
            elif char == '}':
                brace_count -= 1
                if brace_count == 0 and in_function:
                    func_content = content[start_pos:i + 1]
                    break

        if not func_content:
            return 1

        # Count decision points
        complexity = 1

        # Control flow keywords ('else if' is already counted via 'if')
        keywords = ['if', 'while', 'for', 'switch', 'catch', 'case']
        for keyword in keywords:
            complexity += len(re.findall(r'\b' + keyword + r'\b', func_content))

        # Logical operators
        complexity += len(re.findall(r'&&|\|\|', func_content))

        # Ternary operators
        complexity += len(re.findall(r'\?[^:]*:', func_content))

        return complexity

    def calculate_cyclomatic_complexity(self, code: str) -> int:
        """
        Calculate cyclomatic complexity for a code string

        Args:
            code: Source code

        Returns:
            Cyclomatic complexity score
        """
        if self.language == "python":
            try:
                tree = ast.parse(code)
                return self._calculate_python_cyclomatic(tree)
            except SyntaxError:
                return 1
        else:
            # Simplified for other languages
            return self._estimate_js_complexity(code, 0)

    def calculate_cognitive_complexity(self, code: str) -> int:
        """
        Calculate cognitive complexity for a code string

        Args:
            code: Source code

        Returns:
            Cognitive complexity score
        """
        if self.language == "python":
            try:
                tree = ast.parse(code)
                return self._calculate_python_cognitive(tree)
            except SyntaxError:
                return 1
        else:
            # Simplified for other languages
            return self._estimate_js_complexity(code, 0)
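
For orientation, a minimal usage sketch follows. It assumes the module sits in a package (named metrics here purely for illustration) alongside base_metrics so the relative import resolves, and that count_lines and detect_issues are provided by BaseMetricsAnalyzer as used above.

# Usage sketch; the package name "metrics" is hypothetical.
from pathlib import Path

from metrics.complexity_analyzer import ComplexityAnalyzer

path = Path("example.py")
source = path.read_text(encoding="utf-8")

analyzer = ComplexityAnalyzer(language="python")
file_metrics = analyzer.analyze_file(path, source)

# File-level aggregates, then per-function breakdown
print(f"cyclomatic: {file_metrics.cyclomatic_complexity}, "
      f"cognitive: {file_metrics.cognitive_complexity}")
for fn in file_metrics.functions:
    print(f"  {fn.name} (line {fn.line_number}): "
          f"CC={fn.cyclomatic_complexity}, cognitive={fn.cognitive_complexity}")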

MCP directory API

We provide all the information about MCP servers via our MCP API. For example, to retrieve this server's entry:

curl -X GET 'https://glama.ai/api/mcp/v1/servers/stevenjjobson/scs-mcp'
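
The same request can be made programmatically; here is a minimal sketch using Python's requests library (it assumes the endpoint returns JSON, which is not documented here).

import requests

# Fetch this server's entry from the MCP directory API
response = requests.get("https://glama.ai/api/mcp/v1/servers/stevenjjobson/scs-mcp")
response.raise_for_status()
print(response.json())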

If you have feedback or need assistance with the MCP directory API, please join our Discord server.