[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "llama4-maverick-mcp"
version = "0.9.0"
description = "Python MCP server for Llama 4 Maverick - Bridge Llama models with Claude Desktop via Ollama"
readme = "README.md"
authors = [
{ name = "Yobie Benjamin", email = "yobie@example.com" }
]
license = { text = "MIT" }
requires-python = ">=3.10"  # the mcp SDK requires Python 3.10+
dependencies = [
"mcp>=0.1.0",
"ollama>=0.3.0",
"httpx>=0.24.0",
"pydantic>=2.0.0",
"python-dotenv>=1.0.0",
"asyncio>=3.4.3",
"aiohttp>=3.9.0",
"rich>=13.0.0",
"structlog>=24.0.0",
"typer>=0.9.0",
"uvloop>=0.19.0; platform_system != 'Windows'",
]
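# uvloop swaps in a faster asyncio event loop; the environment marker above skips it on Windows, where uvloop is not supported.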

[project.optional-dependencies]
dev = [
"pytest>=7.4.0",
"pytest-asyncio>=0.21.0",
"pytest-cov>=4.1.0",
"black>=23.0.0",
"ruff>=0.1.0",
"mypy>=1.5.0",
"pre-commit>=3.5.0",
]
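# Install the dev toolchain with: pip install -e ".[dev]"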

[project.scripts]
llama4-mcp = "llama4_maverick_mcp.server:main"
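# Installs a llama4-mcp console command; assumes llama4_maverick_mcp/server.py defines a main() callable.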

[tool.ruff]
line-length = 100
target-version = "py310"

[tool.ruff.lint]
select = ["E", "F", "I", "N", "W", "B", "UP"]
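# Rule prefixes: E/W = pycodestyle, F = pyflakes, I = isort, N = pep8-naming, B = flake8-bugbear, UP = pyupgrade.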

[tool.black]
line-length = 100
target-version = ["py310"]

[tool.mypy]
python_version = "3.10"
warn_return_any = true
warn_unused_configs = true
ignore_missing_imports = true

[tool.pytest.ini_options]
testpaths = ["tests"]
asyncio_mode = "auto"
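# "auto" mode lets pytest-asyncio collect async def tests without per-test @pytest.mark.asyncio markers.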