#!/bin/bash
# MCP Memory System - One-Click Local Installation
# Fully self-hosted memory system for AI assistants
set -e
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
CYAN='\033[0;36m'
GRAY='\033[0;37m'
NC='\033[0m' # No Color
FORCE=${FORCE:-false}
VERBOSE=${VERBOSE:-false}
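# Example invocation (filename illustrative; FORCE and VERBOSE are read from the environment):
#   FORCE=true VERBOSE=true bash install-local.sh
# VERBOSE is not referenced elsewhere in this script; as an assumed convenience,
# enable shell tracing when it is set so every command is echoed as it runs.
if [ "$VERBOSE" = "true" ]; then
set -x
fi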
echo -e "${CYAN}๐ง MCP Memory System - Local Installation${NC}"
echo -e "${CYAN}=========================================${NC}"
# Check if Docker is installed and running
check_docker() {
echo -n -e "๐ณ Checking Docker..."
if command -v docker >/dev/null 2>&1 && docker info >/dev/null 2>&1; then
echo -e " ${GREEN}โ
${NC}"
return 0
else
echo -e " ${RED}โ${NC}"
echo -e "${RED}Docker is not installed or not running. Please install Docker and start it.${NC}"
echo -e "${YELLOW}Download: https://www.docker.com/products/docker-desktop${NC}"
return 1
fi
}
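# Optional sketch (not wired into main() by default): the steps below shell out to
# docker-compose, so an explicit availability check can fail fast with a clearer
# message. Call it alongside check_docker if you find it useful.
check_docker_compose() {
echo -n -e "🐳 Checking docker-compose..."
if command -v docker-compose >/dev/null 2>&1; then
echo -e " ${GREEN}✅${NC}"
return 0
else
echo -e " ${RED}❌${NC}"
echo -e "${RED}docker-compose was not found on PATH. Install it or use the Docker Compose plugin.${NC}"
return 1
fi
}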
# Create environment file
create_env_file() {
echo -n -e "โ๏ธ Creating environment configuration..."
cat > .env.local << 'EOF'
# MCP Memory System Configuration
LLM_PROVIDER=basic
OLLAMA_HOST=http://ollama:11434
OLLAMA_MODEL=phi3:mini
POSTGRES_HOST=postgres
POSTGRES_PORT=5432
POSTGRES_DB=postgres
POSTGRES_USER=postgres
POSTGRES_PASSWORD=postgres
POSTGRES_COLLECTION_NAME=memories
HOST=0.0.0.0
PORT=3000
EOF
echo -e " ${GREEN}โ
${NC}"
}
# Pull and start services
start_services() {
echo -e "${YELLOW}๐ Starting services...${NC}"
# Build and start containers
if docker-compose -f docker-compose.local.yml up -d --build; then
echo -e "${GREEN}✅ Services started successfully!${NC}"
else
echo -e "${RED}❌ Failed to start services${NC}"
exit 1
fi
}
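# If startup misbehaves, container state and logs can be inspected manually with
# standard docker-compose commands, e.g.:
#   docker-compose -f docker-compose.local.yml ps
#   docker-compose -f docker-compose.local.yml logs -f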
# Download and setup Ollama models
initialize_ollama_models() {
echo -e "${YELLOW}๐ค Setting up AI models...${NC}"
# Wait for Ollama to be ready
echo -n -e " Waiting for Ollama service..."
max_attempts=30
attempt=0
while [ $attempt -lt $max_attempts ]; do
sleep 2
attempt=$((attempt + 1))
if curl -s http://localhost:11434/ >/dev/null 2>&1; then
echo -e " ${GREEN}โ
${NC}"
break
fi
if [ $attempt -ge $max_attempts ]; then
echo -e " ${RED}โ Timeout${NC}"
echo -e "${RED}Ollama service failed to start. Check Docker logs: docker logs mem0-ollama${NC}"
exit 1
fi
echo -n "."
done
# Pull required models
echo -n -e " Downloading phi3:mini model (LLM)..."
if docker exec mem0-ollama ollama pull phi3:mini >/dev/null 2>&1; then
echo -e " ${GREEN}โ
${NC}"
else
echo -e " ${RED}โ${NC}"
fi
echo -n -e " Downloading nomic-embed-text model (Embeddings)..."
if docker exec mem0-ollama ollama pull nomic-embed-text >/dev/null 2>&1; then
echo -e " ${GREEN}โ
${NC}"
else
echo -e " ${RED}โ${NC}"
fi
}
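# To confirm the pulls succeeded, the Ollama CLI inside the container can list the
# installed models:
#   docker exec mem0-ollama ollama list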
# Test the installation
test_installation() {
echo -e "${YELLOW}๐งช Testing installation...${NC}"
# Wait for MCP server to be ready
echo -n -e " Waiting for MCP server..."
max_attempts=20
attempt=0
while [ $attempt -lt $max_attempts ]; do
sleep 3
attempt=$((attempt + 1))
if curl -s http://localhost:3000/ >/dev/null 2>&1; then
echo -e " ${GREEN}โ
${NC}"
return 0
fi
if [ $attempt -ge $max_attempts ]; then
echo -e " ${RED}โ Timeout${NC}"
echo -e "${RED}MCP server failed to start. Check logs: docker logs mem0-mcp-server${NC}"
return 1
fi
echo -n "."
done
}
# Generate Claude Desktop configuration
generate_claude_config() {
echo -n -e "๐ Generating Claude Desktop configuration..."
# Determine Claude config path based on OS
if [[ "$OSTYPE" == "darwin"* ]]; then
# macOS
claude_config_path="$HOME/Library/Application Support/Claude/claude_desktop_config.json"
else
# Linux
claude_config_path="$HOME/.config/claude/claude_desktop_config.json"
fi
claude_dir=$(dirname "$claude_config_path")
# Create Claude config directory if it doesn't exist
mkdir -p "$claude_dir"
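# Back up any existing Claude Desktop config before it is overwritten below
# (defensive step added here; adjust or drop if a plain overwrite is preferred).
if [ -f "$claude_config_path" ]; then
cp "$claude_config_path" "${claude_config_path}.bak"
fi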
# Get absolute path to server.py (assumes the script is run from the repository root)
server_path=$(realpath "src/server.py")
# Write the MCP server configuration (overwrites the existing file)
cat > "$claude_config_path" << EOF
{
"mcpServers": {
"mem0-local": {
"command": "python",
"args": ["$server_path"],
"env": {
"LLM_PROVIDER": "basic",
"OLLAMA_HOST": "http://localhost:11434",
"OLLAMA_MODEL": "phi3:mini",
"POSTGRES_HOST": "localhost",
"POSTGRES_PORT": "5432",
"POSTGRES_DB": "postgres",
"POSTGRES_USER": "postgres",
"POSTGRES_PASSWORD": "postgres",
"POSTGRES_COLLECTION_NAME": "memories"
}
}
}
}
EOF
echo -e " ${GREEN}โ
${NC}"
echo -e " ${GRAY}Config saved to: $claude_config_path${NC}"
}
# Main installation process
main() {
echo ""
# Check prerequisites
if ! check_docker; then
exit 1
fi
# Stop any existing services (skipped when FORCE=true)
if [ "$FORCE" != "true" ]; then
echo -n -e "๐ Stopping existing services..."
docker-compose -f docker-compose.local.yml down >/dev/null 2>&1 || true
echo -e " ${GREEN}โ
${NC}"
fi
# Create environment configuration
create_env_file
# Start services
start_services
# Setup AI models
initialize_ollama_models
# Test installation
if test_installation; then
echo -e "${GREEN}โ
Installation successful!${NC}"
else
echo -e "${RED}โ Installation failed during testing${NC}"
exit 1
fi
# Generate Claude Desktop configuration
generate_claude_config
echo ""
echo -e "${GREEN}๐ MCP Memory System is ready!${NC}"
echo -e "${GREEN}=========================================${NC}"
echo -e "${NC}Services running on:${NC}"
echo -e " ${GRAY}โข MCP Server: http://localhost:3000${NC}"
echo -e " ${GRAY}โข PostgreSQL: localhost:5432${NC}"
echo -e " ${GRAY}โข Ollama: http://localhost:11434${NC}"
echo ""
echo -e "${NC}Next steps:${NC}"
echo -e " ${GRAY}1. Restart Claude Desktop to load the new MCP server${NC}"
echo -e " ${GRAY}2. Test with: /mcp${NC}"
echo -e " ${GRAY}3. Try: 'Remember that I like working on AI projects'${NC}"
echo ""
echo -e "${NC}Management commands:${NC}"
echo -e " ${GRAY}โข View logs: docker-compose -f docker-compose.local.yml logs${NC}"
echo -e " ${GRAY}โข Stop: docker-compose -f docker-compose.local.yml down${NC}"
echo -e " ${GRAY}โข Restart: docker-compose -f docker-compose.local.yml restart${NC}"
}
# Make script executable if not already
chmod +x "$0"
# Run installation
main "$@"