setup-ollama.sh
#!/bin/bash
echo "š Setting up Ollama models for MCP Server..."
# Check if Ollama is running
echo "1. Checking Ollama status..."
if curl -s http://localhost:11434/api/tags > /dev/null; then
    echo "✅ Ollama is running"
else
    echo "❌ Ollama is not running. Please start it first:"
    echo "   docker-compose up -d ollama"
    exit 1
fi
# Check existing models
echo -e "\n2. Checking existing models..."
MODELS=$(curl -s http://localhost:11434/api/tags)
echo "Current models: $MODELS"
# Install Llama2 if not present
echo -e "\n3. Installing Llama2 model..."
if echo "$MODELS" | grep -q "llama2"; then
echo "ā
Llama2 already installed"
else
echo "š„ Installing Llama2 (this may take a few minutes)..."
curl -X POST http://localhost:11434/api/pull -d '{"name": "llama2:7b"}'
fi
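# Optional sanity check (a minimal sketch, not part of the original flow):
# re-query /api/tags after the pull and fail loudly if the model still is not
# listed. Assumes the pull above ran in the foreground and that the installed
# model is reported under a "llama2" tag.
if ! curl -s http://localhost:11434/api/tags | grep -q "llama2"; then
    echo "❌ Llama2 does not appear in /api/tags - the pull may have failed."
    exit 1
fi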
# Verify installation
echo -e "\n4. Verifying installation..."
curl -s http://localhost:11434/api/tags | jq .
echo -e "\nā
Ollama setup complete!"
echo "You can now test your MCP server with:"
echo "curl -X POST http://localhost:3000/mcp/tools/call -H 'Content-Type: application/json' -d '{\"name\": \"generate_text\", \"arguments\": {\"prompt\": \"Hello, world!\"}}'"