get_branch_merge_requests

Retrieve all merge requests associated with a specific branch to track development progress and review pending code changes.

Instructions

Get all merge requests for a specific branch

Input Schema

Name        | Required | Description        | Default
branch_name | Yes      | Name of the branch |

Input Schema (JSON Schema)

{ "properties": { "branch_name": { "description": "Name of the branch", "type": "string" } }, "required": [ "branch_name" ], "type": "object" }

Implementation Reference

  • Main tool handler: fetches merge requests for a branch via the GitLab API, enriches them with pipeline and changes data using parallel fetches, analyzes readiness and priority, and returns a richly formatted Markdown summary with action items.
    async def get_branch_merge_requests(gitlab_url, project_id, access_token, args):
        logging.info(f"get_branch_merge_requests called with args: {args}")
        branch_name = args["branch_name"]
        status, data, error = await api_get_branch_merge_requests(gitlab_url, project_id, access_token, branch_name)
        if status != 200:
            logging.error(f"Error fetching branch merge requests: {status} - {error}")
            raise Exception(f"Error fetching branch merge requests: {status} - {error}")

        result = f"# 🌿 Merge Requests for branch: **{branch_name}**\n"
        result += f"*Found {len(data)} merge request{'s' if len(data) != 1 else ''}*\n\n"

        if not data:
            result += "📭 No merge requests found for this branch.\n"
            result += "💡 **Tip**: Create a merge request to start the review process.\n"
            return [TextContent(type="text", text=result)]

        enhanced_data_tasks = []
        for mr in data:
            task = get_enhanced_mr_data(gitlab_url, project_id, access_token, mr["iid"])
            enhanced_data_tasks.append(task)
        try:
            enhanced_results = await asyncio.gather(*enhanced_data_tasks)
        except Exception as e:
            logging.warning(f"Error in parallel enhanced data fetch: {e}")
            enhanced_results = [(None, None)] * len(data)

        for i, mr in enumerate(data):
            pipeline_data, changes_data = enhanced_results[i]
            if mr["state"] == "merged":
                state_icon = "✅"
            elif mr["state"] == "opened":
                state_icon = "🔄"
            else:
                state_icon = "❌"
            result += f"## {state_icon} !{mr['iid']}: {mr['title']}\n"
            author_name = mr["author"]["name"]
            author_username = mr["author"]["username"]
            result += f"**👤 Author**: {author_name} (@{author_username})\n"
            result += f"**📊 Status**: {mr['state']} "
            result += f"({get_state_explanation(mr['state'])})\n"
            priority = get_mr_priority(mr)
            readiness = analyze_mr_readiness(mr, pipeline_data)
            result += f"**🏷️ Priority**: {priority}\n"
            result += f"**🚦 Merge Status**: {readiness}\n"
            result += f"**📅 Created**: {format_date(mr['created_at'])}\n"
            result += f"**🔄 Updated**: {format_date(mr['updated_at'])}\n"
            source_branch = mr["source_branch"]
            target_branch = mr["target_branch"]
            result += f"**🌿 Branches**: `{source_branch}` → `{target_branch}`\n"
            if pipeline_data:
                pipeline_status = pipeline_data.get("status")
                pipeline_icon = get_pipeline_status_icon(pipeline_status)
                result += f"**🔧 Pipeline**: {pipeline_icon} {pipeline_status}\n"
                if pipeline_data.get("web_url"):
                    result += f" *[View Pipeline]({pipeline_data['web_url']})*\n"
            elif mr.get("pipeline"):
                pipeline_status = mr["pipeline"].get("status")
                pipeline_icon = get_pipeline_status_icon(pipeline_status)
                result += f"**🔧 Pipeline**: {pipeline_icon} {pipeline_status or 'unknown'}\n"
            if changes_data:
                change_stats = calculate_change_stats(changes_data)
                result += f"**📈 Changes**: {change_stats}\n"
            if mr.get("labels"):
                labels_str = ", ".join(f"`{label}`" for label in mr["labels"])
                result += f"**🏷️ Labels**: {labels_str}\n"
            if mr.get("draft") or mr.get("work_in_progress"):
                result += "**⚠️ Status**: 🚧 Draft/Work in Progress\n"
            if mr.get("has_conflicts"):
                result += "**⚠️ Warning**: 🔄 Has merge conflicts\n"
            result += f"**🔗 Actions**: [View MR]({mr['web_url']})"
            if mr["state"] == "opened":
                result += f" | [Review & Approve]({mr['web_url']})"
            result += "\n"
            result += "\n---\n\n"

        result += "## 📊 Summary\n"
        state_counts = {}
        for mr in data:
            state = mr["state"]
            state_counts[state] = state_counts.get(state, 0) + 1
        result += "**State Breakdown**:\n"
        for state, count in state_counts.items():
            icon = "✅" if state == "merged" else "🔄" if state == "opened" else "❌"
            result += f" • {icon} {state.title()}: {count}\n"

        result += "\n**🎯 Action Items**:\n"
        opened_mrs = [mr for mr in data if mr["state"] == "opened"]
        if opened_mrs:
            has_conflicts = sum(1 for mr in opened_mrs if mr.get("has_conflicts"))
            drafts = sum(1 for mr in opened_mrs if mr.get("draft") or mr.get("work_in_progress"))
            if has_conflicts:
                result += f" • 🔄 {has_conflicts} MR{'s' if has_conflicts > 1 else ''} with merge conflicts\n"
            if drafts:
                result += f" • 🚧 {drafts} draft MR{'s' if drafts > 1 else ''} in progress\n"
            ready_count = len(opened_mrs) - has_conflicts - drafts
            if ready_count > 0:
                result += f" • ✅ {ready_count} MR{'s' if ready_count > 1 else ''} ready for review\n"
        else:
            result += " • 🎉 No open merge requests - branch is clean!\n"

        return [TextContent(type="text", text=result)]
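  • The handler above also calls several formatting helpers that are not reproduced on this page (get_state_explanation, get_mr_priority, analyze_mr_readiness, get_pipeline_status_icon, calculate_change_stats, format_date). The sketch below illustrates what the icon and change-stat helpers could look like, assuming the standard GitLab merge request "changes" payload; it is not the server's actual implementation.
    # Illustrative sketches only -- the real helpers live elsewhere in the server code.
    from datetime import datetime


    def get_pipeline_status_icon(status):
        """Map a GitLab pipeline status string to an icon for the Markdown summary."""
        icons = {"success": "✅", "failed": "❌", "running": "🔄", "pending": "⏳"}
        return icons.get(status, "❓")


    def calculate_change_stats(changes_data):
        """Summarize file count and added/removed lines from an MR 'changes' payload."""
        added = removed = 0
        for change in changes_data.get("changes", []):
            for line in change.get("diff", "").splitlines():
                if line.startswith("+") and not line.startswith("+++"):
                    added += 1
                elif line.startswith("-") and not line.startswith("---"):
                    removed += 1
        files = len(changes_data.get("changes", []))
        return f"{files} files, +{added}/-{removed} lines"


    def format_date(iso_timestamp):
        """Render GitLab's ISO 8601 timestamps in a shorter human-readable form."""
        return datetime.fromisoformat(iso_timestamp.replace("Z", "+00:00")).strftime("%Y-%m-%d %H:%M")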
  • Tool schema definition, including input validation for the 'branch_name' parameter.
    Tool(
        name="get_branch_merge_requests",
        description="Get all merge requests for a specific branch",
        inputSchema={
            "type": "object",
            "properties": {
                "branch_name": {"type": "string", "description": "Name of the branch"}
            },
            "required": ["branch_name"],
            "additionalProperties": False,
        },
    ),
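  • This Tool definition is what the server advertises to clients. Below is a minimal sketch of how it would typically be returned from a list_tools handler using the low-level mcp.server API; the server's actual wiring may differ.
    from mcp.server import Server
    from mcp.types import Tool

    server = Server("gitlab-mcp-server")


    @server.list_tools()
    async def list_tools() -> list[Tool]:
        # Advertise the tool and its input schema to connected MCP clients.
        return [
            Tool(
                name="get_branch_merge_requests",
                description="Get all merge requests for a specific branch",
                inputSchema={
                    "type": "object",
                    "properties": {
                        "branch_name": {"type": "string", "description": "Name of the branch"}
                    },
                    "required": ["branch_name"],
                    "additionalProperties": False,
                },
            )
        ]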
  • main.py:327-330 (registration)
    Tool dispatch registration in the call_tool handler, injecting config and calling the handler function.
    elif name == "get_branch_merge_requests": return await get_branch_merge_requests( self.config["gitlab_url"], self.config["project_id"], self.config["access_token"], arguments )
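  • The config dict injected here (gitlab_url, project_id, access_token) has to be populated when the server starts. A hypothetical loader is sketched below; the environment variable names are assumptions, not the server's documented configuration.
    import os


    def load_config() -> dict:
        """Hypothetical config loader; the real server may use different variable names or a config file."""
        return {
            "gitlab_url": os.environ.get("GITLAB_URL", "https://gitlab.com"),
            "project_id": os.environ["GITLAB_PROJECT_ID"],
            "access_token": os.environ["GITLAB_ACCESS_TOKEN"],
        }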
  • Low-level GitLab API helper that queries merge requests filtered by source_branch.
    async def get_branch_merge_requests(gitlab_url, project_id, access_token, branch_name):
        """Get merge requests for a specific branch"""
        params = {"source_branch": branch_name, "state": "all", "per_page": 100}
        url = f"{gitlab_url}/api/v4/projects/{project_id}/merge_requests"
        headers = _headers(access_token)
        async with aiohttp.ClientSession() as session:
            async with session.get(url, headers=headers, params=params) as response:
                return (response.status, await response.json(), await response.text())
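  • Note that this helper authenticates via a `_headers` function that is not shown on this page, and `per_page=100` means only the first 100 matching merge requests are returned (no pagination). For a GitLab personal access token, `_headers` would typically look like this illustrative sketch:
    def _headers(access_token):
        """Illustrative sketch: GitLab accepts personal access tokens in the PRIVATE-TOKEN header."""
        return {"PRIVATE-TOKEN": access_token}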
  • Helper function to fetch pipeline and changes data in parallel for each MR to enhance the display.
    async def get_enhanced_mr_data(gitlab_url, project_id, access_token, mr_iid):
        """Get enhanced data for a single MR using parallel API calls"""
        try:
            pipeline_task = get_merge_request_pipeline(gitlab_url, project_id, access_token, mr_iid)
            changes_task = get_merge_request_changes(gitlab_url, project_id, access_token, mr_iid)
            pipeline_result, changes_result = await asyncio.gather(
                pipeline_task, changes_task, return_exceptions=True
            )
            if isinstance(pipeline_result, Exception):
                pipeline_data = None
                logging.warning(f"Pipeline fetch failed for MR {mr_iid}: {pipeline_result}")
            else:
                pipeline_status, pipeline_data, _ = pipeline_result
                if pipeline_status != 200:
                    pipeline_data = None
            if isinstance(changes_result, Exception):
                changes_data = None
                logging.warning(f"Changes fetch failed for MR {mr_iid}: {changes_result}")
            else:
                changes_status, changes_data, _ = changes_result
                if changes_status != 200:
                    changes_data = None
            return pipeline_data, changes_data
        except Exception as e:
            logging.warning(f"Error fetching enhanced data for MR {mr_iid}: {e}")
            return None, None
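  • The two fetchers awaited here, get_merge_request_pipeline and get_merge_request_changes, are also not reproduced on this page. The sketch below shows how they could wrap the standard GitLab v4 endpoints, following the same (status, json, text) return convention used above; the server's actual implementation may differ.
    import aiohttp


    async def get_merge_request_changes(gitlab_url, project_id, access_token, mr_iid):
        """Sketch: fetch the MR diff payload via GET /merge_requests/:iid/changes."""
        url = f"{gitlab_url}/api/v4/projects/{project_id}/merge_requests/{mr_iid}/changes"
        async with aiohttp.ClientSession() as session:
            async with session.get(url, headers=_headers(access_token)) as response:
                return (response.status, await response.json(), await response.text())


    async def get_merge_request_pipeline(gitlab_url, project_id, access_token, mr_iid):
        """Sketch: fetch the most recent pipeline via GET /merge_requests/:iid/pipelines."""
        url = f"{gitlab_url}/api/v4/projects/{project_id}/merge_requests/{mr_iid}/pipelines"
        async with aiohttp.ClientSession() as session:
            async with session.get(url, headers=_headers(access_token)) as response:
                pipelines = await response.json()
                latest = pipelines[0] if isinstance(pipelines, list) and pipelines else None
                return (response.status, latest, await response.text())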
