# post_dag_run
Trigger an Apache Airflow DAG by its DAG ID, creating a new DAG run to start workflow execution.
## Instructions
Trigger a DAG by ID
## Input Schema
| Name | Required | Description | Default |
|---|---|---|---|
| dag_id | Yes | ID of the DAG to trigger (string). | |
| dag_run_id | No | Run ID for the new DAG run (string); Airflow generates one if omitted. | `None` |
| data_interval_end | No | End of the data interval (datetime). | `None` |
| data_interval_start | No | Start of the data interval (datetime). | `None` |
| end_date | No | End timestamp of the DAG run (datetime). | `None` |
| execution_date | No | Execution timestamp (datetime); deprecated in the Airflow API in favor of `logical_date`. | `None` |
| external_trigger | No | Whether the run is externally triggered (boolean). | `None` |
| last_scheduling_decision | No | Timestamp of the scheduler's last decision for this run (datetime). | `None` |
| logical_date | No | Logical date of the DAG run (datetime). | `None` |
| note | No | Free-text note to attach to the DAG run (string). | `None` |
| run_type | No | Run type, e.g. `manual`, `scheduled`, `backfill` (string). | `None` |
| start_date | No | Start timestamp of the DAG run (datetime). | `None` |
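
Only `dag_id` is required; every other field defaults to `None`. As a minimal sketch, here is one way to call the tool from the official `mcp` Python client over stdio. The launch command and the `example_dag` DAG ID are placeholder assumptions, not part of this repository:

```python
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

# Placeholder launch command; substitute however you actually start this server.
server = StdioServerParameters(command="python", args=["-m", "src.main", "--apis", "dagrun"])


async def trigger() -> None:
    async with stdio_client(server) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            # "example_dag" is a hypothetical DAG ID; only dag_id is required.
            result = await session.call_tool(
                "post_dag_run",
                arguments={"dag_id": "example_dag", "note": "triggered via MCP"},
            )
            print(result.content)


asyncio.run(trigger())
```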
## Implementation Reference
- `src/airflow/dagrun.py:36-67` (handler): the async handler implementing the `post_dag_run` tool logic. It creates a `DAGRun` object from the input parameters and triggers it via the Airflow `DAGRunApi`:

```python
async def post_dag_run(
    dag_id: str,
    dag_run_id: Optional[str] = None,
    data_interval_end: Optional[datetime] = None,
    data_interval_start: Optional[datetime] = None,
    end_date: Optional[datetime] = None,
    execution_date: Optional[datetime] = None,
    external_trigger: Optional[bool] = None,
    last_scheduling_decision: Optional[datetime] = None,
    logical_date: Optional[datetime] = None,
    note: Optional[str] = None,
    run_type: Optional[str] = None,
    start_date: Optional[datetime] = None,
    # state: Optional[str] = None,  # TODO: add state
) -> List[Union[types.TextContent, types.ImageContent, types.EmbeddedResource]]:
    dag_run = DAGRun(
        dag_id=dag_id,
        dag_run_id=dag_run_id,
        data_interval_end=data_interval_end,
        data_interval_start=data_interval_start,
        end_date=end_date,
        execution_date=execution_date,
        external_trigger=external_trigger,
        last_scheduling_decision=last_scheduling_decision,
        logical_date=logical_date,
        note=note,
        run_type=run_type,
        start_date=start_date,
        state=None,
    )
    response = dag_run_api.post_dag_run(dag_id=dag_id, dag_run=dag_run)
    return [types.TextContent(type="text", text=str(response.to_dict()))]
```
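
The handler assumes a module-level `dag_run_api` client. A sketch of how such a client is typically constructed with the `apache-airflow-client` package follows; the host and credentials are placeholders, and the actual module may wire this up differently (for example from environment variables):

```python
from airflow_client.client import ApiClient, Configuration
from airflow_client.client.api.dag_run_api import DAGRunApi

# Placeholder connection details; the real module may source these elsewhere.
configuration = Configuration(
    host="http://localhost:8080/api/v1",
    username="airflow",
    password="airflow",
)
api_client = ApiClient(configuration)
dag_run_api = DAGRunApi(api_client)
```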
- `src/airflow/dagrun.py:17-29` (registration): `get_all_functions()` returns `(function, name, description, is_read_only)` tuples for `post_dag_run` and the other DAG-run tools; `src/main.py` consumes these tuples to register each tool with the MCP app via `app.add_tool()`:

```python
def get_all_functions() -> list[tuple[Callable, str, str, bool]]:
    """Return list of (function, name, description, is_read_only) tuples for registration."""
    return [
        (post_dag_run, "post_dag_run", "Trigger a DAG by ID", False),
        (get_dag_runs, "get_dag_runs", "Get DAG runs by ID", True),
        (get_dag_runs_batch, "get_dag_runs_batch", "List DAG runs (batch)", True),
        (get_dag_run, "get_dag_run", "Get a DAG run by DAG ID and DAG run ID", True),
        (update_dag_run_state, "update_dag_run_state", "Update a DAG run state by DAG ID and DAG run ID", False),
        (delete_dag_run, "delete_dag_run", "Delete a DAG run by DAG ID and DAG run ID", False),
        (clear_dag_run, "clear_dag_run", "Clear a DAG run", False),
        (set_dag_run_note, "set_dag_run_note", "Update the DagRun note", False),
        (get_upstream_dataset_events, "get_upstream_dataset_events", "Get dataset events for a DAG run", True),
    ]
```
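
Since each tuple carries an `is_read_only` flag, a consumer can partition the DAG-run tools without any Airflow-specific knowledge. A small illustrative sketch:

```python
from src.airflow.dagrun import get_all_functions

# Split tool names by the is_read_only flag carried in each tuple.
tools = get_all_functions()
read_only = [name for _, name, _, is_read_only in tools if is_read_only]
mutating = [name for _, name, _, is_read_only in tools if not is_read_only]

print("read-only:", read_only)  # e.g. get_dag_runs, get_dag_run, ...
print("mutating: ", mutating)   # post_dag_run is here, since it creates a run
```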
- `src/main.py:8-98` (registration): central registration logic that maps each `APIType` to its module's `get_all_functions` and adds the resulting tools, including `post_dag_run`, to the MCP server via `app.add_tool()`. The excerpt starts at line 8, so `click`, `logging`, and the config/connection/DAG imports referenced below sit above it in the file:

```python
from src.airflow.dagrun import get_all_functions as get_dagrun_functions
from src.airflow.dagstats import get_all_functions as get_dagstats_functions
from src.airflow.dataset import get_all_functions as get_dataset_functions
from src.airflow.eventlog import get_all_functions as get_eventlog_functions
from src.airflow.importerror import get_all_functions as get_importerror_functions
from src.airflow.monitoring import get_all_functions as get_monitoring_functions
from src.airflow.plugin import get_all_functions as get_plugin_functions
from src.airflow.pool import get_all_functions as get_pool_functions
from src.airflow.provider import get_all_functions as get_provider_functions
from src.airflow.taskinstance import get_all_functions as get_taskinstance_functions
from src.airflow.variable import get_all_functions as get_variable_functions
from src.airflow.xcom import get_all_functions as get_xcom_functions
from src.enums import APIType

APITYPE_TO_FUNCTIONS = {
    APIType.CONFIG: get_config_functions,
    APIType.CONNECTION: get_connection_functions,
    APIType.DAG: get_dag_functions,
    APIType.DAGRUN: get_dagrun_functions,
    APIType.DAGSTATS: get_dagstats_functions,
    APIType.DATASET: get_dataset_functions,
    APIType.EVENTLOG: get_eventlog_functions,
    APIType.IMPORTERROR: get_importerror_functions,
    APIType.MONITORING: get_monitoring_functions,
    APIType.PLUGIN: get_plugin_functions,
    APIType.POOL: get_pool_functions,
    APIType.PROVIDER: get_provider_functions,
    APIType.TASKINSTANCE: get_taskinstance_functions,
    APIType.VARIABLE: get_variable_functions,
    APIType.XCOM: get_xcom_functions,
}


def filter_functions_for_read_only(functions: list[tuple]) -> list[tuple]:
    """
    Filter functions to only include read-only operations.

    Args:
        functions: List of (func, name, description, is_read_only) tuples

    Returns:
        List of (func, name, description, is_read_only) tuples with only read-only functions
    """
    return [
        (func, name, description, is_read_only)
        for func, name, description, is_read_only in functions
        if is_read_only
    ]


@click.command()
@click.option(
    "--transport",
    type=click.Choice(["stdio", "sse"]),
    default="stdio",
    help="Transport type",
)
@click.option(
    "--apis",
    type=click.Choice([api.value for api in APIType]),
    default=[api.value for api in APIType],
    multiple=True,
    help="APIs to run, default is all",
)
@click.option(
    "--read-only",
    is_flag=True,
    help="Only expose read-only tools (GET operations, no CREATE/UPDATE/DELETE)",
)
def main(transport: str, apis: list[str], read_only: bool) -> None:
    from src.server import app

    for api in apis:
        logging.debug(f"Adding API: {api}")
        get_function = APITYPE_TO_FUNCTIONS[APIType(api)]
        try:
            functions = get_function()
        except NotImplementedError:
            continue

        # Filter functions for read-only mode if requested
        if read_only:
            functions = filter_functions_for_read_only(functions)

        for func, name, description, *_ in functions:
            app.add_tool(func, name=name, description=description)

    if transport == "sse":
        logging.debug("Starting MCP server for Apache Airflow with SSE transport")
        app.run(transport="sse")
    else:
        logging.debug("Starting MCP server for Apache Airflow with stdio transport")
        app.run(transport="stdio")
```
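
To inspect the flags this CLI accepts without starting a server, the command can be exercised through click's test runner; `--help` exits before any tools are registered or `app.run()` is reached. Starting the server itself would look something like `python -m src.main --apis dagrun --read-only`, assuming the module is invoked that way (the actual entry point is outside this excerpt):

```python
from click.testing import CliRunner

from src.main import main

# --help renders the option list and exits without registering tools
# or starting a transport loop.
runner = CliRunner()
result = runner.invoke(main, ["--help"])
print(result.output)
```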