Skip to main content
Glama

MCP Server with Docker

server.py (1.65 kB)
import json
import os

from mcp.server.fastmcp import FastMCP

# Create an MCP server.
mcp = FastMCP(
    name="Knowledge Base",
    host="0.0.0.0",  # only used for SSE transport; 0.0.0.0 binds ALL interfaces, not just localhost
    port=8050,  # only used for SSE transport (set this to any port)
)


@mcp.tool()
def get_knowledge_base() -> str:
    """Retrieve the entire knowledge base as a formatted string.

    Reads ``data/kb.json`` (located next to this file) and formats its
    contents as numbered Q&A pairs. On any failure a human-readable
    ``"Error: ..."`` string is returned instead of raising, so the tool
    never propagates an exception to the MCP client.

    Returns:
        A formatted string containing all Q&A pairs from the knowledge base.
    """
    try:
        kb_path = os.path.join(os.path.dirname(__file__), "data", "kb.json")
        # Explicit encoding: without it, open() uses the platform default,
        # which breaks non-ASCII knowledge-base content on some systems.
        with open(kb_path, "r", encoding="utf-8") as f:
            kb_data = json.load(f)

        # Collect parts and join once, instead of quadratic `+=` in a loop.
        parts = ["Here is the retrieved knowledge base:\n\n"]
        if isinstance(kb_data, list):
            for i, item in enumerate(kb_data, 1):
                if isinstance(item, dict):
                    question = item.get("question", "Unknown question")
                    answer = item.get("answer", "Unknown answer")
                else:
                    # Non-dict entries are rendered verbatim under a
                    # synthetic "Item N" heading.
                    question = f"Item {i}"
                    answer = str(item)
                parts.append(f"Q{i}: {question}\n")
                parts.append(f"A{i}: {answer}\n\n")
        else:
            # Non-list top-level JSON: dump it pretty-printed as-is.
            parts.append(
                f"Knowledge base content: {json.dumps(kb_data, indent=2)}\n\n"
            )
        return "".join(parts)
    except FileNotFoundError:
        return "Error: Knowledge base file not found"
    except json.JSONDecodeError:
        return "Error: Invalid JSON in knowledge base file"
    except Exception as e:
        # Last-resort catch: surface the error text to the client rather
        # than crashing the tool call.
        return f"Error: {str(e)}"


# Run the server.
if __name__ == "__main__":
    mcp.run(transport="sse")

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/TomasRodriguez2002/MCP'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.