analyze_complexity
Measures code complexity for a specific file within a registered project. Returns detailed metrics, including line counts, comment ratio, function and class counts, and cyclomatic complexity, to support codebase understanding and maintainability.
Instructions
Analyze code complexity.

Args:
- project: Project name
- file_path: Path to the file

Returns:
- Complexity metrics
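For orientation, the metrics dictionary returned by the tool contains the keys shown below (taken from the core implementation's return statement further down this page); the values here are purely illustrative.

```python
# Illustrative shape of the returned metrics; values are hypothetical,
# keys match the return statement of analyze_code_complexity.
example_metrics = {
    "line_count": 240,
    "code_lines": 180,
    "empty_lines": 35,
    "comment_lines": 25,
    "comment_ratio": 0.104,       # comment_lines / line_count
    "function_count": 12,
    "class_count": 2,
    "avg_function_lines": 15.0,   # code_lines / function_count, rounded to 2 decimals
    "cyclomatic_complexity": 27,  # 1 + number of decision-point nodes
    "language": "python",
}
```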
Input Schema
| Name | Required | Description | Default |
|---|---|---|---|
| file_path | Yes | Path to the file | |
| project | Yes | Project name | |
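The snippet below is a minimal sketch of invoking the tool from the MCP Python client SDK over stdio. The server launch command, project name, and file path are assumptions for illustration, not taken from this page.

```python
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

# Assumption: the server is launched with this command; adjust to your install.
server_params = StdioServerParameters(command="mcp-server-tree-sitter")


async def main() -> None:
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            # "my_project" and "src/app.py" are hypothetical examples.
            result = await session.call_tool(
                "analyze_complexity",
                arguments={"project": "my_project", "file_path": "src/app.py"},
            )
            print(result.content)


asyncio.run(main())
```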
Implementation Reference
- src/mcp_server_tree_sitter/tools/registration.py:514-531 (registration): MCP tool handler for 'analyze_complexity', registered via the @mcp_server.tool() decorator in the central register_tools function. This decorated function is called when the tool is invoked, delegating to the core analysis function.

```python
@mcp_server.tool()
def analyze_complexity(project: str, file_path: str) -> Dict[str, Any]:
    """Analyze code complexity.

    Args:
        project: Project name
        file_path: Path to the file

    Returns:
        Complexity metrics
    """
    from ..tools.analysis import analyze_code_complexity

    return analyze_code_complexity(
        project_registry.get_project(project),
        file_path,
        language_registry,
    )
```
- Core implementation of code complexity analysis, calculating metrics like line counts, comments, functions, classes, and cyclomatic complexity using the Tree-sitter AST.

```python
def analyze_code_complexity(
    project: Any,
    file_path: str,
    language_registry: Any,
) -> Dict[str, Any]:
    """
    Analyze code complexity.

    Args:
        project: Project object
        file_path: Path to the file relative to project root
        language_registry: Language registry object

    Returns:
        Complexity metrics
    """
    abs_path = project.get_file_path(file_path)

    try:
        validate_file_access(abs_path, project.root_path)
    except SecurityError as e:
        raise SecurityError(f"Access denied: {e}") from e

    language = language_registry.language_for_file(file_path)
    if not language:
        raise ValueError(f"Could not detect language for {file_path}")

    # Parse file
    try:
        # Get language object
        language_obj = language_registry.get_language(language)
        safe_lang = ensure_language(language_obj)

        # Parse with cached tree
        tree, source_bytes = parse_with_cached_tree(abs_path, language, safe_lang)

        # Calculate basic metrics
        # Read lines from file using utility
        lines = read_text_file(abs_path)
        line_count = len(lines)
        empty_lines = sum(1 for line in lines if line.strip() == "")

        comment_lines = 0
        # Language-specific comment detection using utility
        comment_prefix = get_comment_prefix(language)
        if comment_prefix:
            # Count comments for text lines
            comment_lines = sum(1 for line in lines if line.strip().startswith(comment_prefix))

        # Get function and class definitions, excluding methods from count
        symbols = extract_symbols(
            project,
            file_path,
            language_registry,
            ["functions", "classes"],
            exclude_class_methods=True,
        )
        function_count = len(symbols.get("functions", []))
        class_count = len(symbols.get("classes", []))

        # Calculate cyclomatic complexity using AST
        complexity_nodes = {
            "python": [
                "if_statement",
                "for_statement",
                "while_statement",
                "try_statement",
            ],
            "javascript": [
                "if_statement",
                "for_statement",
                "while_statement",
                "try_statement",
            ],
            "typescript": [
                "if_statement",
                "for_statement",
                "while_statement",
                "try_statement",
            ],
            # Add more languages...
        }

        cyclomatic_complexity = 1  # Base complexity

        if language in complexity_nodes:
            # Count decision points
            decision_types = complexity_nodes[language]

            def count_nodes(node: Any, types: List[str]) -> int:
                safe_node = ensure_node(node)
                count = 0
                if safe_node.type in types:
                    count += 1
                for child in safe_node.children:
                    count += count_nodes(child, types)
                return count

            cyclomatic_complexity += count_nodes(tree.root_node, decision_types)

        # Calculate maintainability metrics
        code_lines = line_count - empty_lines - comment_lines
        comment_ratio = comment_lines / line_count if line_count > 0 else 0

        # Estimate average function length
        avg_func_lines = float(code_lines / function_count if function_count > 0 else code_lines)

        return {
            "line_count": line_count,
            "code_lines": code_lines,
            "empty_lines": empty_lines,
            "comment_lines": comment_lines,
            "comment_ratio": comment_ratio,
            "function_count": function_count,
            "class_count": class_count,
            "avg_function_lines": round(avg_func_lines, 2),
            "cyclomatic_complexity": cyclomatic_complexity,
            "language": language,
        }
    except Exception as e:
        raise ValueError(f"Error analyzing complexity in {file_path}: {e}") from e
```
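To make the cyclomatic-complexity step concrete, here is a minimal, self-contained sketch of the same decision-point counting over a tree of nodes. The `Node` class below is a stand-in for Tree-sitter's node objects (which expose `type` and `children` in the same way) and is not part of this project's code; the example tree and numbers are hypothetical.

```python
from dataclasses import dataclass, field
from typing import List


@dataclass
class Node:
    """Stand-in for a Tree-sitter AST node (exposes `type` and `children`)."""
    type: str
    children: List["Node"] = field(default_factory=list)


# Decision-point node types for Python, mirroring the table in the implementation.
DECISION_TYPES = ["if_statement", "for_statement", "while_statement", "try_statement"]


def count_decision_points(node: Node, types: List[str]) -> int:
    """Recursively count nodes whose type marks a branch in control flow."""
    count = 1 if node.type in types else 0
    for child in node.children:
        count += count_decision_points(child, types)
    return count


# Hypothetical tree: a function containing an `if` with a nested `for`.
tree = Node("module", [
    Node("function_definition", [
        Node("if_statement", [
            Node("for_statement"),
        ]),
    ]),
])

# Base complexity of 1 plus one unit per decision point, as in analyze_code_complexity.
cyclomatic_complexity = 1 + count_decision_points(tree, DECISION_TYPES)
print(cyclomatic_complexity)  # 3
```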