Skip to main content
Glama

mcp-server-ollama-deep-researcher

MIT License
13
  • Apple
  • Linux
pyproject.toml (428 B)
# Build backend: hatchling builds a wheel from the src/ layout below.
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "ollama-deep-researcher"
version = "1.0.0"
description = "MCP server for deep research using Ollama LLMs"
requires-python = ">=3.9"
# PEP 508 requirement strings, one per line, sorted alphabetically.
dependencies = [
    "langchain-core>=0.1.22",
    "langchain-ollama>=0.0.1",
    "langgraph>=0.0.20",
    "pplx>=0.0.1",
    "tavily-python>=0.5.1",
]

[tool.hatch.build.targets.wheel]
packages = ["src"]

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Cam10001110101/mcp-server-ollama-deep-researcher'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.