# get_merge_request_details
Retrieve comprehensive information about a specific GitLab merge request, including its status, changes, and discussions, to facilitate review and management.
## Instructions
Get detailed information about a specific merge request
## Input Schema
| Name | Required | Description | Default |
|---|---|---|---|
| `merge_request_iid` | Yes | Internal ID of the merge request | — |
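
A minimal sketch of a direct invocation, with hypothetical values for the GitLab URL, project ID, and access token (the MCP server normally supplies these from its configuration, as shown under Implementation Reference below); only `merge_request_iid` comes from the tool arguments:

```python
import asyncio

from tools import get_merge_request_details


async def main():
    contents = await get_merge_request_details(
        gitlab_url="https://gitlab.example.com",  # hypothetical instance URL
        project_id="123",                         # hypothetical project ID
        access_token="glpat-...",                 # placeholder personal access token
        args={"merge_request_iid": 42},           # matches the input schema above
    )
    print(contents[0].text)  # the handler returns a single TextContent report


asyncio.run(main())
```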
## Implementation Reference
- **Primary tool handler** — fetches MR details, pipeline, changes, and reviews in parallel via API helpers, analyzes merge readiness, calculates change stats, and generates a comprehensive Markdown report with status icons, action items, and quick-action links.

  ```python
  async def get_merge_request_details(gitlab_url, project_id, access_token, args):
      logging.info(f"get_merge_request_details called with args: {args}")
      mr_iid = args["merge_request_iid"]

      tasks = [
          api_get_merge_request_details(gitlab_url, project_id, access_token, mr_iid),
          get_merge_request_pipeline(gitlab_url, project_id, access_token, mr_iid),
          get_merge_request_changes(gitlab_url, project_id, access_token, mr_iid),
          get_merge_request_reviews(gitlab_url, project_id, access_token, mr_iid),
      ]
      try:
          details_result, pipeline_result, changes_result, reviews_result = await asyncio.gather(*tasks)
      except Exception as e:
          logging.error(f"Error in parallel API calls: {e}")
          raise Exception(f"Error fetching merge request data: {e}")

      mr_status, mr_data, mr_error = details_result
      pipeline_status, pipeline_data, pipeline_error = pipeline_result
      changes_status, changes_data, changes_error = changes_result

      if mr_status != 200:
          logging.error(f"Error fetching merge request details: {mr_status} - {mr_error}")
          raise Exception(f"Error fetching merge request details: {mr_status} - {mr_error}")

      state_icon = "✅" if mr_data["state"] == "merged" else "🔄" if mr_data["state"] == "opened" else "❌"
      result = f"# {state_icon} Merge Request !{mr_data['iid']}: {mr_data['title']}\n\n"

      result += "## 📋 Overview\n"
      result += f"**👤 Author**: {mr_data['author']['name']} (@{mr_data['author']['username']})\n"
      result += f"**📊 Status**: {mr_data['state']} ({get_state_explanation(mr_data['state'])})\n"
      result += f"**🏷️ Priority**: {get_mr_priority(mr_data)}\n"
      result += f"**📅 Created**: {format_date(mr_data['created_at'])}\n"
      result += f"**🔄 Updated**: {format_date(mr_data['updated_at'])}\n"
      result += f"**🌿 Branches**: `{mr_data['source_branch']}` → `{mr_data['target_branch']}`\n"

      if pipeline_status == 200 and pipeline_data:
          pipeline_icon = get_pipeline_status_icon(pipeline_data.get("status"))
          result += f"**🔧 Pipeline**: {pipeline_icon} {pipeline_data.get('status', 'unknown')}\n"
          if pipeline_data.get("web_url"):
              result += f"  *[View Pipeline]({pipeline_data['web_url']})*\n"
      elif mr_data.get("pipeline"):
          pipeline_status = mr_data["pipeline"].get("status")
          pipeline_icon = get_pipeline_status_icon(pipeline_status)
          result += f"**🔧 Pipeline**: {pipeline_icon} {pipeline_status or 'unknown'}\n"

      if changes_status == 200:
          change_stats = calculate_change_stats(changes_data)
          result += f"**📈 Changes**: {change_stats}\n"

      readiness = analyze_mr_readiness(mr_data, pipeline_data)
      result += f"**🚦 Merge Status**: {readiness}\n"

      if mr_data.get("labels"):
          labels_str = ", ".join(f"`{label}`" for label in mr_data["labels"])
          result += f"**🏷️ Labels**: {labels_str}\n"
      if mr_data.get("draft") or mr_data.get("work_in_progress"):
          result += "**⚠️ Status**: 🚧 Draft/Work in Progress\n"
      if mr_data.get("has_conflicts"):
          result += "**⚠️ Warning**: 💥 Has merge conflicts\n"

      result += f"**🔗 URL**: {mr_data['web_url']}\n\n"

      if mr_data.get("description"):
          result += "## 📝 Description\n"
          result += f"{mr_data['description']}\n\n"

      result += "## 🔧 Technical Details\n"
      if mr_data.get("merge_commit_sha"):
          result += f"**📦 Merge Commit**: `{mr_data['merge_commit_sha'][:8]}`\n"
      if mr_data.get("squash_commit_sha"):
          result += f"**🔀 Squash Commit**: `{mr_data['squash_commit_sha'][:8]}`\n"

      merge_options = []
      if mr_data.get("squash"):
          merge_options.append("🔀 Squash commits")
      if mr_data.get("remove_source_branch"):
          merge_options.append("🗑️ Remove source branch")
      if mr_data.get("force_remove_source_branch"):
          merge_options.append("🗑️ Force remove source branch")
      if merge_options:
          result += f"**⚙️ Merge Options**: {', '.join(merge_options)}\n"

      if mr_data.get("assignees"):
          assignees = ", ".join(f"@{user['username']}" for user in mr_data["assignees"])
          result += f"**👥 Assignees**: {assignees}\n"
      if mr_data.get("reviewers"):
          reviewers = ", ".join(f"@{user['username']}" for user in mr_data["reviewers"])
          result += f"**👀 Reviewers**: {reviewers}\n"
      if mr_data.get("milestone"):
          result += f"**🎯 Milestone**: {mr_data['milestone']['title']}\n"
      result += "\n"

      if reviews_result and "discussions" in reviews_result:
          discussions_status, discussions, _ = reviews_result["discussions"]
          approvals_status, approvals, _ = reviews_result["approvals"]
          result += "## 💬 Reviews Summary\n"
          if discussions_status == 200 and discussions:
              total_discussions = len(discussions)
              resolved_count = sum(1 for d in discussions if d.get("resolved"))
              unresolved_count = total_discussions - resolved_count
              result += (
                  f"**Discussions**: {total_discussions} total, "
                  f"{resolved_count} resolved, {unresolved_count} unresolved\n"
              )
              if unresolved_count > 0:
                  result += f"⚠️ **{unresolved_count} unresolved discussion{'s' if unresolved_count > 1 else ''}**\n"
          if approvals_status == 200 and approvals:
              approved_by = approvals.get("approved_by", [])
              approvals_left = approvals.get("approvals_left", 0)
              if approved_by:
                  result += f"**Approvals**: ✅ {len(approved_by)} approval{'s' if len(approved_by) > 1 else ''}\n"
              if approvals_left > 0:
                  result += f"**Needed**: ⏳ {approvals_left} more approval{'s' if approvals_left > 1 else ''}\n"
          result += "\n"

      result += "## 📋 Action Items\n"
      action_items = []
      if mr_data.get("draft") or mr_data.get("work_in_progress"):
          action_items.append("🚧 Remove draft/WIP status")
      if mr_data.get("has_conflicts"):
          action_items.append("⚠️ Resolve merge conflicts")
      if pipeline_status == 200 and pipeline_data and pipeline_data.get("status") == "failed":
          action_items.append("❌ Fix failing pipeline")
      elif pipeline_status == 200 and pipeline_data and pipeline_data.get("status") == "running":
          action_items.append("🔄 Wait for pipeline completion")
      if reviews_result and "discussions" in reviews_result:
          discussions_status, discussions, _ = reviews_result["discussions"]
          approvals_status, approvals, _ = reviews_result["approvals"]
          if discussions_status == 200 and discussions:
              unresolved_count = sum(1 for d in discussions if not d.get("resolved"))
              if unresolved_count > 0:
                  plural = "s" if unresolved_count > 1 else ""
                  action_items.append(f"💬 Resolve {unresolved_count} pending discussion{plural}")
          if approvals_status == 200 and approvals and approvals.get("approvals_left", 0) > 0:
              approvals_left = approvals["approvals_left"]
              plural = "s" if approvals_left > 1 else ""
              action_items.append(f"👥 Obtain {approvals_left} more approval{plural}")
      if mr_data["state"] == "opened" and not action_items:
          action_items.append("✅ Ready to merge!")

      if action_items:
          for item in action_items:
              result += f"• {item}\n"
      else:
          result += "✅ No action items identified\n"

      result += "\n## 🚀 Quick Actions\n"
      if mr_data["state"] == "opened":
          result += f"• [📝 Edit MR]({mr_data['web_url']}/edit)\n"
          result += f"• [💬 Add Comment]({mr_data['web_url']}#note_form)\n"
      result += f"• [🔍 View Changes]({mr_data['web_url']}/diffs)\n"
      if pipeline_data and pipeline_data.get("web_url"):
          result += f"• [🔧 View Pipeline]({pipeline_data['web_url']})\n"

      return [TextContent(type="text", text=result)]
  ```
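
  The handler leans on formatting helpers (`calculate_change_stats`, `analyze_mr_readiness`, `get_pipeline_status_icon`, and friends) that are not shown here. As a rough illustration only, a change-stat helper over the GitLab `/changes` payload could look like the sketch below; the repository's actual implementation may differ:

  ```python
  # Illustrative sketch, not the repo's code. Assumes changes_data is the body of
  # GET /projects/:id/merge_requests/:iid/changes, whose "changes" list carries a
  # unified "diff" string per modified file.
  def calculate_change_stats(changes_data):
      changes = (changes_data or {}).get("changes", [])
      added = removed = 0
      for change in changes:
          for line in change.get("diff", "").splitlines():
              if line.startswith("+") and not line.startswith("+++"):
                  added += 1
              elif line.startswith("-") and not line.startswith("---"):
                  removed += 1
      return f"{len(changes)} files changed, +{added} / -{removed} lines"
  ```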
- **main.py:85-100 (schema)** — Input schema definition for the tool: requires `merge_request_iid` as a positive integer.

  ```python
  Tool(
      name="get_merge_request_details",
      description=("Get detailed information about a specific " "merge request"),
      inputSchema={
          "type": "object",
          "properties": {
              "merge_request_iid": {
                  "type": "integer",
                  "minimum": 1,
                  "description": ("Internal ID of the merge request"),
              }
          },
          "required": ["merge_request_iid"],
          "additionalProperties": False,
      },
  ),
  ```
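
  For illustration only (not part of the repo), the same constraints can be checked with the third-party `jsonschema` package, which shows what the schema accepts and rejects:

  ```python
  from jsonschema import ValidationError, validate

  schema = {
      "type": "object",
      "properties": {
          "merge_request_iid": {"type": "integer", "minimum": 1},
      },
      "required": ["merge_request_iid"],
      "additionalProperties": False,
  }

  validate(instance={"merge_request_iid": 42}, schema=schema)      # passes
  try:
      validate(instance={"merge_request_iid": 0}, schema=schema)   # violates "minimum"
  except ValidationError as err:
      print(err.message)
  ```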
- **main.py:308-311 (registration)** — Tool dispatch in the MCP server's `call_tool` handler: routes `get_merge_request_details` calls to the tool function with config parameters.

  ```python
  elif name == "get_merge_request_details":
      return await get_merge_request_details(
          self.config["gitlab_url"], self.config["project_id"],
          self.config["access_token"], arguments
      )
  ```
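
  The surrounding handler is not shown in this excerpt; a rough, hypothetical sketch of how such an `elif` chain typically sits inside the server's dispatch method (the other tool name is taken from tools/__init__.py, the structure itself is illustrative):

  ```python
  async def call_tool(self, name: str, arguments: dict):
      if name == "list_merge_requests":
          return await list_merge_requests(
              self.config["gitlab_url"], self.config["project_id"],
              self.config["access_token"], arguments
          )
      elif name == "get_merge_request_details":
          return await get_merge_request_details(
              self.config["gitlab_url"], self.config["project_id"],
              self.config["access_token"], arguments
          )
      raise ValueError(f"Unknown tool: {name}")
  ```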
- **gitlab_api.py:103-109 (helper)** — Core API helper: performs an HTTP GET against the GitLab API endpoint for raw merge request details (imported into the tool module as `api_get_merge_request_details`).

  ```python
  async def get_merge_request_details(gitlab_url, project_id, access_token, mr_iid):
      url = f"{gitlab_url}/api/v4/projects/{project_id}/merge_requests/{mr_iid}"
      headers = _headers(access_token)
      async with aiohttp.ClientSession() as session:
          async with session.get(url, headers=headers) as response:
              return (response.status, await response.json(), await response.text())
  ```
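
  The helper returns a `(status, parsed_json, raw_text)` tuple instead of raising on non-200 responses, so callers branch on the status code themselves. A minimal usage sketch with placeholder values:

  ```python
  # Hypothetical direct use of the helper; URL, project ID, and token are placeholders.
  import asyncio

  from gitlab_api import get_merge_request_details as api_get_merge_request_details

  status, body, raw = asyncio.run(
      api_get_merge_request_details("https://gitlab.example.com", "123", "glpat-...", 42)
  )
  if status == 200:
      print(body["title"], body["state"])
  else:
      print(f"GitLab API returned {status}: {raw}")
  ```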
- **tools/__init__.py:11-22 (registration)** — Package-level import and export (`__all__`) of the tool handler for use in main.py.

  ```python
  from .get_merge_request_details import get_merge_request_details
  from .get_merge_request_pipeline import get_merge_request_pipeline
  from .get_merge_request_reviews import get_merge_request_reviews
  from .get_merge_request_test_report import get_merge_request_test_report
  from .get_pipeline_test_summary import get_pipeline_test_summary
  from .list_merge_requests import list_merge_requests
  from .reply_to_review_comment import create_review_comment, reply_to_review_comment, resolve_review_discussion

  __all__ = [
      "list_merge_requests",
      "get_merge_request_reviews",
      "get_merge_request_details",
  ```