pyproject.toml
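
# Package metadata (PEP 621).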
[project]
name = "outsource-mcp"
version = "0.1.0"
description = "MCP server for outsourcing tasks to AI agents"
readme = "README.md"
requires-python = ">=3.11"
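# Runtime dependencies: the agno agent framework, the MCP server SDK, and
# client SDKs for each model provider the server can route requests to.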
dependencies = [
    "agno>=1.5.5",
    "aiohttp>=3.12.2",
    "anthropic>=0.52.0",
    "azure-ai-inference>=1.0.0b9",
    "azure-ai-ml>=1.27.1",
    "boto3>=1.38.24",
    "cerebras-cloud-sdk>=1.35.0",
    "cohere>=5.15.0",
    "deepseek>=1.0.0",
    "google-genai>=1.16.1",
    "google-generativeai>=0.8.5",
    "groq>=0.25.0",
    "ibm-watsonx-ai>=1.3.20",
    "litellm>=1.71.1",
    "llama-api-client>=0.1.1",
    "mcp[cli]>=1.9.1",
    "mistralai>=1.8.0",
    "ollama>=0.4.9",
    "openai>=1.82.0",
    "together>=1.5.8",
]
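
# Console entry point: running `outsource-mcp` invokes main() in the server module.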
[project.scripts]
outsource-mcp = "server:main"
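
# Build the distribution with hatchling.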
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
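
# Flat layout: include the modules in the project root in the built wheel.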
[tool.hatch.build.targets.wheel]
packages = ["."]
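
# pytest-asyncio: collect async tests without explicit markers, and give
# async fixtures a function-scoped event loop by default.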
[tool.pytest.ini_options]
asyncio_mode = "auto"
asyncio_default_fixture_loop_scope = "function"
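
# Development-only tooling, declared as a PEP 735 dependency group
# (e.g. installed via `uv sync`).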
[dependency-groups]
dev = [
    "black>=25.1.0",
    "mypy>=1.15.0",
    "pytest>=8.3.5",
    "pytest-asyncio>=1.0.0",
    "ruff>=0.11.11",
]