pyproject.toml
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "mcp-omnienv-nix"
version = "0.0.1"
description = "MCP server that spins up Nix-powered ephemeral environments for multiple languages."
readme = "README.md"
requires-python = ">=3.11"
license = {text = "MIT"}
authors = [
    {name = "Erik Parawell"}
]
dependencies = [
    "fastmcp>=2.11.0",
    "requests>=2.32.0",
]

[project.optional-dependencies]
dev = [
    "pytest>=8.3",
]

[tool.pytest.ini_options]
pythonpath = ["."]
testpaths = ["tests"]
markers = [
    "integration: runs nix shell to verify real package availability"
]

[project.scripts]
mcp-omnienv-nix = "mcp_omnienv_nix.server:main"

[tool.hatch.metadata]
allow-direct-references = true
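
The [project.scripts] table points the mcp-omnienv-nix console command at mcp_omnienv_nix.server:main, and the fastmcp dependency indicates the server is built on FastMCP. Below is a minimal sketch of what such an entry point could look like, assuming a single tool that runs a command inside an ephemeral nix shell; the tool name, its parameters, and the exact nix invocation are illustrative assumptions, not the project's actual code.

    # Hypothetical sketch of mcp_omnienv_nix/server.py; only the FastMCP
    # entry-point pattern is standard, the tool itself is an assumption.
    import subprocess

    from fastmcp import FastMCP

    mcp = FastMCP("mcp-omnienv-nix")


    @mcp.tool()
    def run_in_nix_shell(packages: list[str], command: str) -> str:
        """Run a shell command inside an ephemeral `nix shell` with the given packages."""
        # e.g. nix shell nixpkgs#python3 --command sh -c "python3 --version"
        args = [
            "nix", "shell",
            *(f"nixpkgs#{p}" for p in packages),
            "--command", "sh", "-c", command,
        ]
        result = subprocess.run(args, capture_output=True, text=True, timeout=300)
        return result.stdout or result.stderr


    def main() -> None:
        # Entry point referenced by [project.scripts] in pyproject.toml.
        mcp.run()


    if __name__ == "__main__":
        main()

Once the package is installed, the same server would start via the mcp-omnienv-nix console script defined above.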

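The pytest configuration declares an integration marker for tests that invoke a real nix shell to verify package availability. A hedged sketch of such a test follows; the file name and the package being checked are hypothetical.

    # tests/test_integration.py (hypothetical): runs a real `nix shell`,
    # so it carries the marker declared in pyproject.toml.
    import shutil
    import subprocess

    import pytest


    @pytest.mark.integration
    @pytest.mark.skipif(shutil.which("nix") is None, reason="nix is not installed")
    def test_python_available_in_nix_shell():
        result = subprocess.run(
            ["nix", "shell", "nixpkgs#python3", "--command", "python3", "--version"],
            capture_output=True, text=True, timeout=300,
        )
        assert result.returncode == 0
        assert result.stdout.startswith("Python 3")

Such tests can be selected with pytest -m integration or excluded with pytest -m "not integration".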
MCP directory API

We provide all the information about MCP servers via our MCP directory API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/StealthBadger747/mcp-omnienv-nix'
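
The same endpoint can be queried from Python with requests; the sketch below assumes the endpoint returns JSON, whose exact shape is not shown here.

    # Fetch this server's directory entry from the Glama MCP API.
    # The structure of the returned JSON is an assumption; inspect it yourself.
    import requests

    resp = requests.get(
        "https://glama.ai/api/mcp/v1/servers/StealthBadger747/mcp-omnienv-nix",
        timeout=10,
    )
    resp.raise_for_status()
    print(resp.json())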

If you have feedback or need assistance with the MCP directory API, please join our Discord server.