mcp_config_dataset_management.json
{
  "server": {
    "url": "/api"
  },
  "authentication": [
    {
      "schema_parameters": {
        "scheme": "bearer"
      },
      "type": "http",
      "value": "BEARER_TOKEN"
    }
  ],
  "operations": [
    {
      "description": "Retrieves a list of datasets for a given project",
      "name": "get_datasets"
    },
    {
      "description": "Downloads data from public repositories",
      "name": "import_public_dataset"
    },
    {
      "description": "Registers a dataset in the system as a destination for uploaded files",
      "name": "upload_dataset"
    },
    {
      "description": "Deletes a dataset; its files are retained according to the project's retention policy",
      "name": "delete_dataset"
    },
    {
      "description": "Gets detailed information about a dataset",
      "name": "get_dataset"
    },
    {
      "description": "Updates a dataset's metadata",
      "name": "update_dataset"
    },
    {
      "description": "Gets a listing of files, charts, and other assets available for the dataset",
      "name": "get_dataset_manifest"
    },
    {
      "description": "Reruns sample ingest for a dataset",
      "name": "ingest_samples"
    },
    {
      "description": "Regenerates the dataset file listing",
      "name": "regenerate_manifest"
    },
    {
      "description": "Reruns data transforms and web optimization",
      "name": "rerun_transform"
    },
    {
      "description": "Retrieves a list of samples associated with a dataset, along with their metadata",
      "name": "get_dataset_samples"
    },
    {
      "description": "Gets the tasks submitted by a workflow execution",
      "name": "get_tasks_for_execution"
    },
    {
      "description": "Gets the log output from an individual task",
      "name": "get_task_logs"
    },
    {
      "description": "Gets the dataset listing for a share",
      "name": "get_shared_datasets"
    },
    {
      "description": "Moves a dataset to a different project. The underlying S3 data is not transferred and must be moved manually; the user is expected to also move all datasets in the dataset's lineage.",
      "name": "move_dataset"
    }
  ]
}
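
For context, here is a minimal sketch of how a client might invoke one of these operations over HTTP, assuming the operations wrap REST endpoints beneath the configured "/api" base URL and authenticate with the bearer scheme declared above. The host, the "/datasets" path, and the "project_id" parameter are illustrative assumptions, not part of the config itself.

import os

import requests

# Wiring follows the config above: the API lives under "/api" and uses
# HTTP bearer authentication. The host, endpoint path, and parameter
# name below are assumptions for illustration only.
BASE_URL = "https://example-host/api"
TOKEN = os.environ["BEARER_TOKEN"]


def get_datasets(project_id: str) -> list[dict]:
    """Call the (assumed) endpoint behind the get_datasets operation."""
    resp = requests.get(
        f"{BASE_URL}/datasets",  # assumed path
        params={"project_id": project_id},  # assumed parameter name
        headers={"Authorization": f"Bearer {TOKEN}"},  # bearer scheme from the config
        timeout=30,
    )
    resp.raise_for_status()
    return resp.json()


if __name__ == "__main__":
    for dataset in get_datasets("my-project"):
        print(dataset)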