{
"actorSpecification": 1,
"name": "ollama-mcp-server",
"title": "Python MCP Server",
"description": "Python Model Context Protocol (MCP) server using FastMCP.",
"version": "0.0",
"buildTag": "latest",
"usesStandbyMode": true,
"meta": {
"templateId": "python-mcp-empty",
"generatedBy": "<FILL-IN-MODEL>"
},
"input": {
"title": "Actor input schema",
"description": "This is Actor input schema",
"type": "object",
"schemaVersion": 1,
"properties": {},
"required": []
},
"dockerfile": "../Dockerfile",
"webServerMcpPath": "/mcp",
"storages": {
"dataset": "./dataset_schema.json"
},
"output": {
"actorOutputSchemaVersion": 1,
"title": "Output schema of the Ollama MCP Server",
"description": "MCP server endpoint and interaction logs",
"properties": {
"mcpEndpoint": {
"type": "string",
"title": "MCP Endpoint",
"description": "URL to access the MCP server endpoint",
"template": "{{run.containerUrl}}/mcp"
},
"overview": {
"type": "string",
"title": "Overview",
"description": "All MCP tool call interactions and results",
"template": "{{links.apiDefaultDatasetUrl}}/items?view=overview"
},
"models": {
"type": "string",
"title": "Models",
"description": "List of available Ollama models",
"template": "{{links.apiDefaultDatasetUrl}}/items?view=models"
},
"generations": {
"type": "string",
"title": "Generations",
"description": "Text generation results from prompts",
"template": "{{links.apiDefaultDatasetUrl}}/items?view=generations"
},
"chats": {
"type": "string",
"title": "Chats",
"description": "Chat conversation results",
"template": "{{links.apiDefaultDatasetUrl}}/items?view=chats"
}
}
}
}