#!/usr/bin/env python3
"""
CodeGenMCP - Local Code Generation MCP Server
This server wraps local Ollama LLM calls to generate code without passing
code through JSON parameters. It handles all the complexity internally.
Architecture:
- Conversation LLM (qwen2.5:14b) orchestrates and calls this server
- This server internally calls Ollama with a code model
- Generated code is written directly to files
- Returns simple success/error responses
"""
from mcp.server.fastmcp import FastMCP
from pathlib import Path
from typing import Optional, Dict, Any
from pydantic import Field
import ollama
import os
import sys
# Single FastMCP application instance; the @mcp.tool() functions below
# register themselves on it, and __main__ starts it via mcp.run().
mcp = FastMCP("CodeGenMCP")
# Use the same model as the main JARVIS system; override via the LLM_MODEL
# environment variable (empty string falls back to the default too).
LLM_MODEL = os.getenv("LLM_MODEL") or "qwen2.5-coder:32b"
# Get the SuperMCP directory (this file lives in <SuperMCP>/CodeGenMCP/).
SUPERMCP_DIR = Path(__file__).parent.parent
# Skeleton for every generated server; .format() fills the three placeholders.
# NOTE: this text is written verbatim as the generated server.py, so the
# body of its `if __name__ == "__main__":` must be indented here — otherwise
# every generated server would be a SyntaxError.
MCP_SERVER_TEMPLATE = '''#!/usr/bin/env python3
"""
{server_name} - {description}
Auto-generated by CodeGenMCP
"""
from mcp.server.fastmcp import FastMCP
from typing import Optional, Dict, Any
from pydantic import Field
mcp = FastMCP("{server_name}")
{tools_code}
if __name__ == "__main__":
    mcp.run()
'''
@mcp.tool()
def create_mcp_server(
    server_name: str = Field(description="Name of the MCP server (e.g., 'WeatherMCP')"),
    description: str = Field(description="What the server does"),
    tools_description: str = Field(description="Describe the tools needed and their parameters")
) -> Dict[str, Any]:
    """
    Create a new MCP server by generating code with the local LLM.

    Steps:
    1. Reads the EchoMCP server as a reference example for the prompt.
    2. Calls local Ollama (LLM_MODEL) to generate the @mcp.tool() functions.
    3. Writes the complete server to <SUPERMCP_DIR>/<server_name>/server.py.
    4. Returns the path, a code preview, and suggested follow-up steps.

    Example:
        create_mcp_server(
            server_name="WeatherMCP",
            description="Provides weather information",
            tools_description="get_weather(city, country) - returns temperature and conditions"
        )

    Returns a dict with "success"; on success also "server_path",
    "code_preview" and "next_steps", on failure an "error" message.
    """
    try:
        # server_name becomes a directory name under SUPERMCP_DIR: restrict it
        # to letters/digits/underscores so inputs like "../../etc" cannot
        # escape the SuperMCP tree (path traversal) or create unusable names.
        if not server_name or not server_name.replace("_", "").isalnum():
            return {
                "success": False,
                "error": f"Invalid server_name {server_name!r}: use only letters, digits and underscores"
            }
        # Read EchoMCP as a template reference (best effort: generation still
        # runs without it, the model just gets a weaker prompt).
        echo_path = SUPERMCP_DIR / "EchoMCP" / "server.py"
        if echo_path.exists():
            template_code = echo_path.read_text()
        else:
            template_code = "# Template not available"
        # Build prompt for code generation
        prompt = f"""You are creating a FastMCP server. Here's a reference example:
```python
{template_code}
```
Now create tools for a new MCP server:
Server Name: {server_name}
Description: {description}
Tools Needed: {tools_description}
Generate ONLY the tools section (the @mcp.tool() decorated functions).
Each tool must:
1. Use @mcp.tool() decorator
2. Have type hints and Field() descriptions for all parameters
3. Return Dict[str, Any]
4. Include try/except error handling
5. Return {{"success": True, "result": ...}} on success or {{"success": False, "error": "..."}} on error
Generate the Python code for the tools now:"""
        # Call local Ollama for code generation
        response = ollama.chat(
            model=LLM_MODEL,
            messages=[
                {
                    'role': 'system',
                    'content': 'You are an expert Python developer. Generate clean, working FastMCP server code. Output ONLY the code, no explanations.'
                },
                {
                    'role': 'user',
                    'content': prompt
                }
            ],
            options={
                'temperature': 0.2,  # Lower temperature for more consistent code
            }
        )
        generated_tools = response['message']['content']
        # Strip a surrounding markdown code fence the model may have added,
        # preferring the explicit ```python form.
        if '```python' in generated_tools:
            generated_tools = generated_tools.split('```python')[1].split('```')[0].strip()
        elif '```' in generated_tools:
            generated_tools = generated_tools.split('```')[1].split('```')[0].strip()
        # Splice the generated tools into the full server skeleton.
        server_code = MCP_SERVER_TEMPLATE.format(
            server_name=server_name,
            description=description,
            tools_code=generated_tools
        )
        # Create directory and write file
        server_dir = SUPERMCP_DIR / server_name
        server_dir.mkdir(exist_ok=True)
        server_file = server_dir / "server.py"
        server_file.write_text(server_code, encoding='utf-8')
        # Create a requirements.txt stub only if one doesn't exist already.
        reqs_file = server_dir / "requirements.txt"
        if not reqs_file.exists():
            reqs_file.write_text("# Add your dependencies here\n", encoding='utf-8')
        return {
            "success": True,
            "message": f"Created {server_name} successfully!",
            "server_path": str(server_file),
            "code_preview": server_code[:400] + "..." if len(server_code) > 400 else server_code,
            "next_steps": [
                f"1. Review code: {server_file}",
                f"2. Test: python {server_file}",
                "3. Call reload_servers() to make it available to JARVIS",
                # Quote the name so the example is a valid call expression.
                f"4. Inspect: inspect_server('{server_name}')"
            ]
        }
    except Exception as e:
        # Never raise out of the tool: report the failure (and which model was
        # in use) back to the orchestrating LLM as structured data.
        return {
            "success": False,
            "error": f"Failed to create MCP server: {str(e)}",
            "model_used": LLM_MODEL
        }
@mcp.tool()
def generate_python_function(
    function_description: str = Field(description="What the function should do"),
    function_name: str = Field(description="Name of the function"),
    parameters: Optional[str] = Field(default=None, description="Parameters description (e.g., 'x: int, y: int')")
) -> Dict[str, Any]:
    """
    Generate a single Python function via the local Ollama model.

    The generated source is returned as a string inside the result dict;
    nothing is written to disk — the caller decides where to save it.
    """
    try:
        # Assemble the generation request for the code model.
        request_text = f"""Generate a Python function:
Name: {function_name}
Purpose: {function_description}
Parameters: {parameters or 'none'}
Requirements:
- Include type hints
- Add docstring
- Include error handling if appropriate
- Return appropriate values
Generate only the function code:"""
        reply = ollama.chat(
            model=LLM_MODEL,
            messages=[
                {'role': 'system', 'content': 'You are an expert Python developer. Generate clean, well-documented code.'},
                {'role': 'user', 'content': request_text},
            ],
            options={'temperature': 0.2},  # low temperature -> stable output
        )
        source = reply['message']['content']
        # Peel off a surrounding markdown fence, preferring the ```python form.
        for fence in ('```python', '```'):
            if fence in source:
                source = source.split(fence)[1].split('```')[0].strip()
                break
        return {
            "success": True,
            "code": source,
            "function_name": function_name,
        }
    except Exception as exc:
        # Report failure as structured data rather than raising to the caller.
        return {
            "success": False,
            "error": str(exc),
        }
@mcp.tool()
def list_available_templates() -> Dict[str, Any]:
    """
    List all available MCP server templates that can be used as references.

    A template is any direct subdirectory of SUPERMCP_DIR that contains a
    server.py. Entries are sorted by directory name so the listing is
    deterministic (Path.iterdir() order is filesystem-dependent).

    Returns {"success": True, "templates": [{"name", "path"}, ...], "count"}
    or {"success": False, "error": ...}.
    """
    try:
        templates = [
            {"name": item.name, "path": str(item / "server.py")}
            for item in sorted(SUPERMCP_DIR.iterdir())
            if item.is_dir() and (item / "server.py").exists()
        ]
        return {
            "success": True,
            "templates": templates,
            "count": len(templates)
        }
    except Exception as e:
        # e.g. SUPERMCP_DIR missing or unreadable.
        return {
            "success": False,
            "error": str(e)
        }
if __name__ == "__main__":
    # Startup banner (plain string — the original used an f-string with no
    # placeholders), then hand control to FastMCP; run() blocks serving MCP.
    print("CodeGenMCP Server starting...")
    print(f"Using Model: {LLM_MODEL}")
    print(f"SuperMCP Directory: {SUPERMCP_DIR}")
    mcp.run()