Skip to main content
Glama

LocalMCP

by leolech14
MIT License
pyproject.toml (1.8 kB)
# Packaging metadata for the localmcp distribution (PEP 621 / setuptools).

[build-system]
requires = ["setuptools>=64", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "localmcp"
version = "0.1.0"
description = "Advanced MCP-Based AI Agent System with Intelligent Tool Orchestration"
readme = "README.md"
authors = [
    {name = "LocalMCP Contributors"},
]
license = {text = "MIT"}
requires-python = ">=3.11"
classifiers = [
    "Development Status :: 3 - Alpha",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Topic :: Software Development :: Libraries :: Python Modules",
    "Topic :: Scientific/Engineering :: Artificial Intelligence",
]
# Runtime dependencies, sorted alphabetically (PEP 508 specifiers).
dependencies = [
    "anthropic>=0.8.0",
    "click>=8.1.0",
    "faiss-cpu>=1.7.4",
    "fastapi>=0.104.0",
    "openai>=1.3.0",
    "prometheus-client>=0.19.0",
    "pydantic>=2.4.0",
    "redis>=5.0.0",
    "rich>=13.7.0",
    "sentence-transformers>=2.2.2",
    "structlog>=23.2.0",
    "uvicorn>=0.24.0",
    "websockets>=12.0",
]

[project.optional-dependencies]
# Tooling for local development and CI; install with `pip install .[dev]`.
dev = [
    "black>=23.11.0",
    "flake8>=6.1.0",
    "mypy>=1.7.0",
    "pre-commit>=3.5.0",
    "pytest>=7.4.0",
    "pytest-asyncio>=0.21.0",
    "pytest-cov>=4.1.0",
]

[project.scripts]
# Console entry point: `localmcp` invokes localmcp.cli:main.
localmcp = "localmcp.cli:main"

[project.urls]
Homepage = "https://github.com/leolech14/LocalMCP"
Issues = "https://github.com/leolech14/LocalMCP/issues"
Documentation = "https://github.com/leolech14/LocalMCP/blob/main/README.md"

[tool.black]
line-length = 88
target-version = ['py311']

[tool.mypy]
python_version = "3.11"
warn_return_any = true
warn_unused_configs = true
# Several runtime deps ship without type stubs; suppress the resulting errors.
ignore_missing_imports = true

[tool.pytest.ini_options]
testpaths = ["tests"]
# Auto mode: async test functions are collected without per-test markers.
asyncio_mode = "auto"

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/leolech14/LocalMCP'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.