{
"tools": [
{
"name": "create_folder",
"description": "Create a new folder/directory locally",
"inputSchema": {
"type": "object",
"properties": {
"path": {
"type": "string",
"description": "Path to the folder to create"
}
},
"required": ["path"]
}
},
{
"name": "create_py_file",
"description": "Create a new Python file with specified content",
"inputSchema": {
"type": "object",
"properties": {
"path": {
"type": "string",
"description": "Path to the Python file to create (must end with .py)"
},
"content": {
"type": "string",
"description": "Content to write to the file"
}
},
"required": ["path"]
}
},
{
"name": "edit_file",
"description": "Edit an existing file by overwriting its content",
"inputSchema": {
"type": "object",
"properties": {
"path": {
"type": "string",
"description": "Path to the file to edit"
},
"content": {
"type": "string",
"description": "New content to write to the file"
}
},
"required": ["path", "content"]
}
},
{
"name": "submit_code",
"description": "Submit Python code to a Databricks cluster for execution",
"inputSchema": {
"type": "object",
"properties": {
"code": {
"type": "string",
"description": "Python code to execute on Databricks"
},
"cluster_id": {
"type": "string",
"description": "Target Databricks cluster ID"
}
},
"required": ["code", "cluster_id"]
}
},
{
"name": "create_job",
"description": "Create a new Databricks job",
"inputSchema": {
"type": "object",
"properties": {
"job_config": {
"type": "object",
"description": "Job configuration including name, cluster settings, and notebook path"
}
},
"required": ["job_config"]
}
},
{
"name": "run_job",
"description": "Run an existing Databricks job",
"inputSchema": {
"type": "object",
"properties": {
"job_id": {
"type": "string",
"description": "ID of the job to run"
}
},
"required": ["job_id"]
}
},
{
"name": "create_dlt_pipeline",
"description": "Create a Delta Live Tables pipeline",
"inputSchema": {
"type": "object",
"properties": {
"pipeline_config": {
"type": "object",
"description": "DLT pipeline configuration including name, storage, and notebook path"
}
},
"required": ["pipeline_config"]
}
},
{
"name": "get_job_error",
"description": "Get error details for a failed Databricks job run",
"inputSchema": {
"type": "object",
"properties": {
"run_id": {
"type": "string",
"description": "Run ID to check for errors"
}
},
"required": ["run_id"]
}
},
{
"name": "check_job_status",
"description": "Check the status of a Databricks job run",
"inputSchema": {
"type": "object",
"properties": {
"job_id": {
"type": "string",
"description": "Job ID"
},
"run_id": {
"type": "string",
"description": "Run ID"
}
},
"required": ["job_id", "run_id"]
}
}
]
}