Skip to main content
Glama
pyproject.toml (1.38 kB)
[project]
name = "omnimcp"
version = "0.1.0"
description = "Semantic router for MCP ecosystems - Discover and execute tools across multiple MCP servers without context bloat"
readme = "README.md"
license = { text = "MIT" }
authors = [
    { name = "BA Ibrahima", email = "ibrahima.elmokhtar@gmail.com" },
]
requires-python = ">=3.12"
keywords = ["mcp", "model-context-protocol", "semantic-search", "llm", "ai-agents", "tool-routing", "omnimcp"]
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.12",
    "Topic :: Software Development :: Libraries :: Python Modules",
]
dependencies = [
    "click>=8.3.1",
    "fastmcp>=2.13.1",
    "mcp>=1.22.0",
    "openai>=2.8.1",
    "pydantic>=2.12.4",
    "pydantic-settings>=2.12.0",
    "pyzmq>=27.1.0",
    "qdrant-client>=1.16.0",
    "tenacity>=9.1.2",
    "tiktoken>=0.12.0",
]

[project.urls]
Homepage = "https://github.com/milkymap/omnimcp"
Repository = "https://github.com/milkymap/omnimcp"
Issues = "https://github.com/milkymap/omnimcp/issues"

[project.scripts]
# Console entry point: `omnimcp` invokes omnimcp.main
omnimcp = "omnimcp:main"

[build-system]
# uv's native build backend; pinned to a minor range for reproducible builds
requires = ["uv_build>=0.8.17,<0.9.0"]
build-backend = "uv_build"

# PEP 735 dependency groups (dev-only tooling, not installed with the package)
[dependency-groups]
dev = [
    "pytest>=9.0.1",
    "pytest-asyncio>=1.3.0",
]

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/milkymap/omnimcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.