Skip to main content
Glama

mcp4mcp

by hmatt1
.replit1.06 kB
# Replit run configuration for the mcp4mcp server.
entrypoint = "main.py"
modules = ["python-3.11"]

[nix]
channel = "stable-24_05"
packages = ["libxcrypt"]

[unitTest]
language = "python3"

[gitHubImport]
requiredFiles = [".replit", "replit.nix"]

[deployment]
run = ["python3", "main.py"]
deploymentTarget = "cloudrun"

[workflows]
runButton = "Start mcp4mcp Server"

[[workflows.workflow]]
name = "Start mcp4mcp Server"
author = 44620470
mode = "sequential"

[[workflows.workflow.tasks]]
task = "shell.exec"
args = "python server.py"

[[workflows.workflow]]
name = "Test Suite"
author = 44620470
mode = "sequential"

[[workflows.workflow.tasks]]
task = "shell.exec"
# NOTE(review): was "1>&1", a no-op (stdout redirected to itself) that left
# stderr on the console; "2>&1" merges stderr into out.txt, matching the
# "diag" workflow below.
args = "python -m pytest tests/ -v --tb=short > out.txt 2>&1"

[[workflows.workflow]]
name = "LLM"
mode = "sequential"
author = 44620470

[[workflows.workflow.tasks]]
task = "shell.exec"
args = "python llm.py"

[[workflows.workflow]]
name = "diag"
mode = "sequential"
author = 44620470

[[workflows.workflow.tasks]]
task = "shell.exec"
# Capture both stdout and stderr of the diagnostic run.
args = "python run_diagnostic.py > out.txt 2>&1"

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/hmatt1/mcp4mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.