Skip to main content
Glama

MCP Server Airflow Token

post_dag_run

Trigger an Apache Airflow DAG by ID to execute data workflows, supporting custom scheduling parameters and external run initiation.

Instructions

Trigger a DAG by ID

Input Schema

| Name                     | Required | Description | Default |
|--------------------------|----------|-------------|---------|
| dag_id                   | Yes      |             |         |
| dag_run_id               | No       |             |         |
| data_interval_end        | No       |             |         |
| data_interval_start      | No       |             |         |
| end_date                 | No       |             |         |
| execution_date           | No       |             |         |
| external_trigger         | No       |             |         |
| last_scheduling_decision | No       |             |         |
| logical_date             | No       |             |         |
| note                     | No       |             |         |
| run_type                 | No       |             |         |
| start_date               | No       |             |         |

Input Schema (JSON Schema)

{ "properties": { "dag_id": { "title": "Dag Id", "type": "string" }, "dag_run_id": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "title": "Dag Run Id" }, "data_interval_end": { "anyOf": [ { "format": "date-time", "type": "string" }, { "type": "null" } ], "default": null, "title": "Data Interval End" }, "data_interval_start": { "anyOf": [ { "format": "date-time", "type": "string" }, { "type": "null" } ], "default": null, "title": "Data Interval Start" }, "end_date": { "anyOf": [ { "format": "date-time", "type": "string" }, { "type": "null" } ], "default": null, "title": "End Date" }, "execution_date": { "anyOf": [ { "format": "date-time", "type": "string" }, { "type": "null" } ], "default": null, "title": "Execution Date" }, "external_trigger": { "anyOf": [ { "type": "boolean" }, { "type": "null" } ], "default": null, "title": "External Trigger" }, "last_scheduling_decision": { "anyOf": [ { "format": "date-time", "type": "string" }, { "type": "null" } ], "default": null, "title": "Last Scheduling Decision" }, "logical_date": { "anyOf": [ { "format": "date-time", "type": "string" }, { "type": "null" } ], "default": null, "title": "Logical Date" }, "note": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "title": "Note" }, "run_type": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "title": "Run Type" }, "start_date": { "anyOf": [ { "format": "date-time", "type": "string" }, { "type": "null" } ], "default": null, "title": "Start Date" } }, "required": [ "dag_id" ], "type": "object" }

Implementation Reference

  • The async handler function that implements the post_dag_run tool logic: constructs a DAGRun from input params and calls the Airflow API to trigger it.
    async def post_dag_run(
        dag_id: str,
        dag_run_id: Optional[str] = None,
        data_interval_end: Optional[datetime] = None,
        data_interval_start: Optional[datetime] = None,
        end_date: Optional[datetime] = None,
        execution_date: Optional[datetime] = None,
        external_trigger: Optional[bool] = None,
        last_scheduling_decision: Optional[datetime] = None,
        logical_date: Optional[datetime] = None,
        note: Optional[str] = None,
        run_type: Optional[str] = None,
        start_date: Optional[datetime] = None,
        # state: Optional[str] = None,  # TODO: add state
    ) -> List[Union[types.TextContent, types.ImageContent, types.EmbeddedResource]]:
        """Trigger a DAG run by DAG ID.

        Builds a DAGRun payload from the given parameters, submits it to the
        Airflow DAG-run API, and returns the API response rendered as text
        content. The run state is always left unset (``None``) on creation.
        """
        run_payload = DAGRun(
            dag_id=dag_id,
            dag_run_id=dag_run_id,
            data_interval_end=data_interval_end,
            data_interval_start=data_interval_start,
            end_date=end_date,
            execution_date=execution_date,
            external_trigger=external_trigger,
            last_scheduling_decision=last_scheduling_decision,
            logical_date=logical_date,
            note=note,
            run_type=run_type,
            start_date=start_date,
            state=None,
        )
        api_response = dag_run_api.post_dag_run(dag_id=dag_id, dag_run=run_payload)
        return [types.TextContent(type="text", text=str(api_response.to_dict()))]
  • Module-level registration of DAG run tools, including the post_dag_run tool with its name, description, and read-only flag.
    def get_all_functions() -> list[tuple[Callable, str, str, bool]]:
        """Return list of (function, name, description, is_read_only) tuples for registration."""
        # Named flags make the read-only column self-describing.
        READ_ONLY = True
        MUTATING = False
        return [
            (post_dag_run, "post_dag_run", "Trigger a DAG by ID", MUTATING),
            (get_dag_runs, "get_dag_runs", "Get DAG runs by ID", READ_ONLY),
            (get_dag_runs_batch, "get_dag_runs_batch", "List DAG runs (batch)", READ_ONLY),
            (get_dag_run, "get_dag_run", "Get a DAG run by DAG ID and DAG run ID", READ_ONLY),
            (update_dag_run_state, "update_dag_run_state", "Update a DAG run state by DAG ID and DAG run ID", MUTATING),
            (delete_dag_run, "delete_dag_run", "Delete a DAG run by DAG ID and DAG run ID", MUTATING),
            (clear_dag_run, "clear_dag_run", "Clear a DAG run", MUTATING),
            (set_dag_run_note, "set_dag_run_note", "Update the DagRun note", MUTATING),
            (get_upstream_dataset_events, "get_upstream_dataset_events", "Get dataset events for a DAG run", READ_ONLY),
        ]
  • src/main.py:78-92 (registration)
    Top-level registration loop that imports and adds tools from all modules, including dagrun.py's post_dag_run via app.add_tool.
    for api in apis:
        logging.debug(f"Adding API: {api}")
        loader = APITYPE_TO_FUNCTIONS[APIType(api)]
        try:
            tool_entries = loader()
        except NotImplementedError:
            # This API type has no tool implementations yet; skip it.
            continue
        # In read-only mode, drop every tool whose registration flag marks it
        # as mutating before anything is exposed to the server.
        if read_only:
            tool_entries = filter_functions_for_read_only(tool_entries)
        for tool_func, tool_name, tool_description, *_ in tool_entries:
            app.add_tool(tool_func, name=tool_name, description=tool_description)

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/nikhil-ganage/mcp-server-airflow-token'

If you have feedback or need assistance with the MCP directory API, please join our Discord server