Context Engineering MCP Platform

start_context_engineering.sh • 1.11 kB
#!/bin/bash
echo "🧠 Starting the Context Engineering system..."

# Install dependencies
echo "📦 Installing dependencies..."
pip install -r requirements.txt

# Check environment variables
if [ -z "$GEMINI_API_KEY" ]; then
    echo "⚠️ GEMINI_API_KEY environment variable is not set"
    echo "   Set GEMINI_API_KEY in a .env file or as an environment variable"
    exit 1
fi
echo "✅ Environment variables verified"

# Create the templates directory
mkdir -p templates

echo "🌐 Starting the Context Engineering API server..."
echo ""
echo "📍 Access:"
echo "   - Dashboard: http://localhost:9001"
echo "   - API documentation: http://localhost:9001/docs"
echo "   - WebSocket: ws://localhost:9001/ws"
echo ""
echo "🔧 Available features:"
echo "   ✨ Context analysis and optimization"
echo "   📋 Prompt template management"
echo "   🎨 Multimodal support"
echo "   🔗 RAG integration"
echo "   📊 Real-time visualization"
echo ""
python context_api.py
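
Once the script has started the server, the endpoints it prints can be sanity-checked from Python. The sketch below is a minimal reachability check, assuming the server from start_context_engineering.sh is running locally on port 9001 as advertised by the script; it is not part of the repository and uses only the standard library.

# Minimal reachability check for the endpoints printed by the startup script.
# Assumes (not verified here) that the server is already running on localhost:9001.
import urllib.request
import urllib.error

ENDPOINTS = [
    "http://localhost:9001",       # dashboard
    "http://localhost:9001/docs",  # API documentation
]

for url in ENDPOINTS:
    try:
        with urllib.request.urlopen(url, timeout=5) as resp:
            print(f"{url} -> HTTP {resp.status}")
    except (urllib.error.URLError, OSError) as exc:
        print(f"{url} -> unreachable ({exc})")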

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ShunsukeHayashi/context_engineering_MCP'
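
The same record can be fetched programmatically. The following Python sketch assumes only that the endpoint returns JSON; the exact response schema is not documented here, so it simply pretty-prints whatever comes back.

# Fetch the same MCP directory record as the curl command above.
import json
import urllib.request

URL = ("https://glama.ai/api/mcp/v1/servers/"
       "ShunsukeHayashi/context_engineering_MCP")

with urllib.request.urlopen(URL, timeout=10) as resp:
    data = json.load(resp)

print(json.dumps(data, indent=2))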

If you have feedback or need assistance with the MCP directory API, please join our Discord server.