# mcp_deepseek_agent — MCP DeepSeek Agent Server
# by spences10
from fastapi import FastAPI, HTTPException, Response, Header
from typing import Optional, Dict, Any
import httpx
import uvicorn
import os
from mcp_deepseek_agent.config import Config, load_config
from mcp_deepseek_agent.message_converter import MessageConverter, MCPMessage
import logging
# Set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = FastAPI(title="MCP DeepSeek Server", version="1.0.0")

# Global state, shared by all request handlers below:
# - config: server/model settings loaded once at import time
# - converter: translates MCP messages to/from the Ollama request schema
config = load_config()
converter = MessageConverter(backend="ollama")
async def handle_message(message: MCPMessage, mcp_version: Optional[str] = "1.0") -> MCPMessage:
    """Forward an MCP message to the Ollama backend and return its reply.

    Args:
        message: Incoming MCP message to relay.
        mcp_version: Protocol version (from the X-MCP-Version header);
            currently accepted but unused.

    Returns:
        The Ollama response converted back into an MCPMessage.

    Raises:
        HTTPException: 504 on backend timeout, 502 on an Ollama HTTP error
            status, 500 on any other failure.
    """
    try:
        logger.info(f"Received message: {message}")
        # Combine the MCP payload with the configured generation parameters
        # into Ollama's /api/generate request schema.
        ollama_request = converter.mcp_to_ollama(message, {
            "temperature": config.temperature,
            "top_p": config.top_p,
            "max_tokens": config.max_tokens,
            "repeat_penalty": config.repeat_penalty,
            "num_ctx": config.context_window
        })
        async with httpx.AsyncClient() as client:
            response = await client.post(
                f"{config.api_url}/api/generate",
                json=ollama_request,
                timeout=config.timeout,
                headers={"Content-Type": "application/json"}
            )
            response.raise_for_status()
            return converter.ollama_to_mcp(response.json())
    except httpx.TimeoutException:
        # BUG FIX: httpx exposes TimeoutException, not TimeoutError; the old
        # except clause raised AttributeError whenever a timeout actually
        # occurred, turning a 504 into an unhandled 500.
        raise HTTPException(status_code=504, detail="Request to Ollama timed out")
    except httpx.HTTPStatusError as e:
        raise HTTPException(status_code=502, detail=f"Ollama error: {str(e)}")
    except Exception as e:
        logger.error(f"Error handling message: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
@app.post("/v1/messages")
async def messages(
    message: MCPMessage,
    mcp_version: Optional[str] = Header(None, alias="X-MCP-Version")
) -> MCPMessage:
    """Handle incoming messages according to MCP protocol.

    The optional protocol version is read from the X-MCP-Version request
    header and passed through to the shared message handler.
    """
    return await handle_message(message, mcp_version)
@app.get("/healthz")
async def healthcheck() -> Response:
    """Health check endpoint required by MCP.

    Probes Ollama's /api/tags endpoint. Returns HTTP 200 when the backend
    is reachable; raises HTTPException(503) otherwise.
    """
    try:
        # Check Ollama connection
        async with httpx.AsyncClient() as client:
            # Bounded timeout so a hung backend makes the probe fail fast
            # instead of hanging the health check itself (the original
            # request had no timeout at all).
            response = await client.get(f"{config.api_url}/api/tags", timeout=5.0)
            response.raise_for_status()
            return Response(status_code=200)
    except Exception:
        raise HTTPException(status_code=503, detail="Ollama service unavailable")
@app.get("/info")
async def info() -> Dict[str, Any]:
    """Return server metadata and capabilities according to the MCP spec."""
    capabilities = {
        "chat": True,
        # Neither function calling nor streaming is implemented yet.
        "function_calling": False,
        "streaming": False,
    }
    chat_tool = {
        "name": "chat",
        "description": "Chat with Deepseek model",
        "auto_approve": True,
    }
    generation_settings = {
        "temperature": config.temperature,
        "top_p": config.top_p,
        "context_window": config.context_window,
    }
    return {
        "name": "mcp-deepseek-agent",
        "version": "1.0.0",
        "protocol_version": "1.0",
        "model": config.model_name,
        "capabilities": capabilities,
        "tools": [chat_tool],
        "config": generation_settings,
    }
def main():
    """Entry point for the MCP server.

    Reads MCP_HOST / MCP_PORT from the environment (defaulting to
    localhost:8080) and launches the ASGI app under uvicorn.
    """
    env = os.environ
    host = env.get("MCP_HOST", "localhost")
    port = int(env.get("MCP_PORT", "8080"))
    print(f"Starting MCP server with {config.model_name}...")
    uvicorn.run(app, host=host, port=port)


if __name__ == "__main__":
    main()