doctor.sh
#!/bin/bash
echo "=== RLM MCP Doctor ==="
EXIT_CODE=0
# 1. Check Env Vars
echo -n "Checking API keys... "
if [ -n "$OPENAI_API_KEY" ] || [ -n "$OPENROUTER_API_KEY" ]; then
    echo "✓ Found (OpenAI or OpenRouter)"
else
    echo "⚠ No API keys found in environment (only local providers will work)"
fi
# 2. Run Smoke Test
echo "Running smoke test..."
if command -v uv &> /dev/null; then
    uv run scripts/mcp_smoketest.py
else
    python3 scripts/mcp_smoketest.py
fi
SMOKE_STATUS=$?

if [ "$SMOKE_STATUS" -eq 0 ]; then
    echo "✓ Smoke test passed"
else
    echo "✗ Smoke test failed"
    EXIT_CODE=1
fi
# 3. Docker check
echo -n "Checking Docker... "
if command -v docker &> /dev/null; then
    if docker info &> /dev/null; then
        echo "✓ Running"
    else
        echo "⚠ Docker daemon not responding"
    fi
else
    echo "⚠ Docker not found (local execution only)"
fi
if [ $EXIT_CODE -eq 0 ]; then
    echo ""
    echo "=== Everything looks good! ==="
else
    echo ""
    echo "=== Some issues found. ==="
fi
exit $EXIT_CODE
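
A minimal usage sketch, assuming doctor.sh sits at the repository root (the relative path scripts/mcp_smoketest.py implies it is run from there); the exported key is a hypothetical placeholder and is optional:

export OPENAI_API_KEY=sk-...   # or OPENROUTER_API_KEY; optional, only local providers work without one
chmod +x doctor.sh             # only needed once
./doctor.sh
echo "doctor exit code: $?"    # 0 when all checks pass, 1 if the smoke test failed

The API key and Docker checks only print warnings; the exit code reflects the smoke test alone, so the script can gate CI or setup automation on that result.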