
Shell MCP Server

by odysseus0
pyproject.toml
[project]
name = "mcp-server-shell"
version = "0.1.0"
description = "A Model Context Protocol server providing shell command execution capabilities for LLMs"
readme = "README.md"
requires-python = ">=3.10"
keywords = ["shell", "command", "mcp", "llm"]
license = { text = "MIT" }
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
]
dependencies = [
    "mcp>=1.0.0",
    "pydantic>=2.0.0",
]

[project.scripts]
mcp-server-shell = "mcp_server_shell.server:serve"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.uv]
dev-dependencies = [
    "pyright>=1.1.389",
    "pytest>=8.3.3",
    "ruff>=0.8.1",
]
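
The [project.scripts] table maps the mcp-server-shell command to a serve function in mcp_server_shell.server. The package source is not shown on this page, so the following is only a minimal sketch of what that entry point could look like, assuming the low-level MCP Python SDK (Server and stdio_server) and a hypothetical execute_command tool name; it is not the project's actual implementation.

# Hypothetical sketch of mcp_server_shell/server.py (illustrative only).
import asyncio
import subprocess

from mcp.server import Server
from mcp.server.stdio import stdio_server
from mcp.types import TextContent, Tool


def serve() -> None:
    server = Server("mcp-server-shell")

    @server.list_tools()
    async def list_tools() -> list[Tool]:
        # Advertise a single shell-execution tool; the name and schema
        # here are assumptions, not taken from the package.
        return [
            Tool(
                name="execute_command",
                description="Run a shell command and return its output",
                inputSchema={
                    "type": "object",
                    "properties": {"command": {"type": "string"}},
                    "required": ["command"],
                },
            )
        ]

    @server.call_tool()
    async def call_tool(name: str, arguments: dict) -> list[TextContent]:
        # Execute the requested command and return combined stdout/stderr.
        result = subprocess.run(
            arguments["command"], shell=True, capture_output=True, text=True
        )
        return [TextContent(type="text", text=result.stdout + result.stderr)]

    async def run() -> None:
        # Serve the MCP protocol over stdio, the usual transport for
        # locally launched MCP servers.
        async with stdio_server() as (read_stream, write_stream):
            await server.run(
                read_stream, write_stream, server.create_initialization_options()
            )

    asyncio.run(run())

With the package installed, an MCP client would typically launch the mcp-server-shell command as a subprocess and talk to it over stdio.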

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/odysseus0/mcp-server-shell'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.