DP-MCP Server

by devraj21
check_model_sizes.py (1.97 kB)
#!/usr/bin/env python3
"""Check Ollama model sizes for commonly available models."""

import subprocess


def check_model_sizes():
    """Print approximate sizes for common Ollama models and list installed ones."""
    print("📊 Ollama Model Size Information")
    print("=" * 50)

    # Common models and their approximate download sizes
    model_info = {
        "phi3": {"size": "2.2 GB", "params": "3.8B", "description": "Fast, efficient for general tasks"},
        "mistral": {"size": "4.1 GB", "params": "7B", "description": "Balanced performance and quality"},
        "llama2": {"size": "3.8 GB", "params": "7B", "description": "Meta's general-purpose model"},
        "codellama": {"size": "3.8 GB", "params": "7B", "description": "Specialized for code generation"},
        "llama2:13b": {"size": "7.3 GB", "params": "13B", "description": "Higher quality, slower"},
        "mistral:7b": {"size": "4.1 GB", "params": "7B", "description": "Default Mistral variant"},
    }

    print("Available models for download:")
    print()
    for model, info in model_info.items():
        print(f"• {model}")
        print(f"  Size: {info['size']}")
        print(f"  Parameters: {info['params']}")
        print(f"  Use case: {info['description']}")
        print()

    # Ask the Ollama CLI which models are already installed
    print("Currently installed:")
    try:
        result = subprocess.run(["ollama", "list"], capture_output=True, text=True)
        if result.returncode == 0:
            print(result.stdout)
        else:
            print("Could not list installed models")
    except FileNotFoundError:
        print("Ollama CLI not found; is Ollama installed and on your PATH?")
    except Exception as e:
        print(f"Error checking installed models: {e}")

    print("Recommendations:")
    print("• phi3 (2.2GB) - Best for development/testing")
    print("• mistral (4.1GB) - Best balance of size/performance")
    print("• llama2 (3.8GB) - Good general purpose model")
    print("• codellama (3.8GB) - Best for code-related queries")


if __name__ == "__main__":
    check_model_sizes()
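The sizes in the table above are hardcoded estimates. If an Ollama server is running locally, the actual on-disk sizes of installed models can be read from its REST API instead of parsing CLI output. A minimal sketch, assuming the server is reachable at its default address (http://localhost:11434) and that the /api/tags endpoint returns a "models" list whose entries carry "name" and "size" (bytes) fields:

#!/usr/bin/env python3
"""Report installed Ollama model sizes via the local REST API."""

import json
import urllib.request

# Default Ollama server address; adjust if yours runs elsewhere.
OLLAMA_TAGS_URL = "http://localhost:11434/api/tags"


def installed_model_sizes():
    """Return {model_name: size_in_bytes} for locally installed models."""
    with urllib.request.urlopen(OLLAMA_TAGS_URL, timeout=5) as resp:
        data = json.load(resp)
    # Each entry reports the model's on-disk size in bytes.
    return {m["name"]: m["size"] for m in data.get("models", [])}


if __name__ == "__main__":
    for name, size in installed_model_sizes().items():
        print(f"{name}: {size / 1e9:.1f} GB")

Reading from the API keeps the numbers in sync with what is actually installed, at the cost of requiring the server to be running when the script executes.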

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/devraj21/dp-mcp'
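The same lookup can be scripted. A minimal Python sketch of the call above, assuming the endpoint returns JSON (the response fields are not documented here, so the example simply pretty-prints whatever the API returns):

import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/devraj21/dp-mcp"

with urllib.request.urlopen(URL, timeout=10) as resp:
    server_info = json.load(resp)

# Pretty-print the server record returned by the directory API.
print(json.dumps(server_info, indent=2))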

If you have feedback or need assistance with the MCP directory API, please join our Discord server.