#!/usr/bin/env python3
"""
Verify AI configuration is set up correctly.
"""
import os
import subprocess
import requests
import json
def verify_ai_configuration():
    """Verify the DP-MCP AI configuration status.

    Runs five diagnostic checks and prints a human-readable report:
      1. Presence and content of the ``.env.ai`` configuration file.
      2. Reachability of the local Ollama service and its installed models.
      3. Reachability of the MCP server endpoint.
      4. A static reference table of common model download sizes.
      5. Free disk space on the root filesystem (via ``df -h /``).

    Returns early (printing an error) if ``.env.ai`` is missing. All network
    and subprocess failures are reported, never raised.
    """
    print("🔍 DP-MCP AI Configuration Verification")
    print("=" * 50)

    # 1. Check .env.ai file
    print("1️⃣ Configuration File Check:")
    if os.path.exists('.env.ai'):
        print("   ✅ .env.ai file exists")
        # Read the whole file once, then close it before inspecting content.
        with open('.env.ai', 'r') as f:
            content = f.read()
        # Placeholder keys shipped with the template must be replaced by the
        # user before cloud models can be used.
        if 'XXXX-REPLACE-WITH-YOUR-ACTUAL' in content:
            print("   ⚠️ Placeholder API keys detected - replace with real keys for cloud models")
        else:
            print("   ✅ API keys appear to be configured")
        if 'OLLAMA_BASE_URL=http://localhost:11434' in content:
            print("   ✅ Ollama configuration found")
    else:
        print("   ❌ .env.ai file not found")
        # Without the config file the remaining checks are meaningless.
        return

    # 2. Check Ollama local model server
    print("\n2️⃣ Ollama Service Check:")
    try:
        response = requests.get('http://localhost:11434/api/tags', timeout=5)
        if response.status_code == 200:
            models = response.json().get('models', [])
            print(f"   ✅ Ollama running with {len(models)} models:")
            for model in models:
                # Ollama reports sizes in bytes; show GiB for readability.
                size_gb = model['size'] / (1024 ** 3)
                print(f"      • {model['name']}: {size_gb:.1f} GB")
        else:
            print(f"   ❌ Ollama API error: {response.status_code}")
    except Exception as e:
        # Connection refused / timeout — service likely not started.
        print(f"   ❌ Ollama not accessible: {e}")
        print("   💡 Try: ollama serve")

    # 3. Check MCP Server
    print("\n3️⃣ MCP Server Check:")
    try:
        response = requests.get('http://127.0.0.1:8888/mcp/', timeout=5)
        # Any HTTP response means the server process is up; status is informational.
        print(f"   ✅ MCP Server running (HTTP {response.status_code})")
    except Exception as e:
        print(f"   ❌ MCP Server not accessible: {e}")
        print("   💡 Try: uv run python src/dp_mcp/server.py --ai-env production --debug")

    # 4. Model size summary (static reference, not queried live)
    print("\n4️⃣ Model Size Reference:")
    model_sizes = {
        'phi3': '2.2 GB',
        'mistral': '4.1 GB',
        'llama2': '3.8 GB',
        'codellama': '3.8 GB',
        'llama2:13b': '7.3 GB',
    }
    for model, size in model_sizes.items():
        print(f"   • {model}: {size}")

    # 5. Disk space check — parse the 'Avail' column of `df -h /`.
    print("\n5️⃣ Disk Space Check:")
    try:
        result = subprocess.run(['df', '-h', '/'], capture_output=True, text=True)
        lines = result.stdout.strip().split('\n')
        if len(lines) >= 2:
            data = lines[1].split()
            # Column 3 is 'Avail' in standard df output; guard short rows.
            available = data[3] if len(data) > 3 else "Unknown"
            print(f"   Available space: {available}")
            # Only compare when df reported gigabytes (e.g. "42G").
            if 'G' in available:
                available_gb = float(available.replace('G', ''))
                if available_gb > 10:
                    print("   ✅ Sufficient space for AI models")
                else:
                    print("   ⚠️ Low disk space - consider cleanup")
    except Exception:
        # df missing (e.g. Windows) or unparsable output — non-fatal.
        print("   ❌ Could not check disk space")

    print("\n🎯 Configuration Summary:")
    print("   • .env.ai: ✅ Ready")
    print("   • Local Models: Ready via Ollama")
    print("   • Cloud Models: Configure API keys to enable")
    print("   • MCP Server: Ready for AI tools")
    print("\n🚀 Next Steps:")
    print("   1. Replace API key placeholders in .env.ai (optional)")
    print("   2. Install more Ollama models: ollama pull mistral")
    print("   3. Test AI tools via MCP protocol")
    print("   4. Use natural language queries in your applications")
# Script entry point: run the verification report only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    verify_ai_configuration()