#!/usr/bin/env python3
"""
Test script for FastMCP server
"""
import asyncio
import sys


async def test_mcp_server():
"""Test the MCP server functionality"""
print("๐งช Testing FastMCP Server...")
try:
# Import the MCP server
from mcp_server import mcp
from config import MCPConfig
print(f"โ Server Name: {MCPConfig.SERVER_NAME}")
print(f"โ Server Version: {MCPConfig.SERVER_VERSION}")
print(f"โ LLM Service URL: {MCPConfig.LOCAL_LLM_SERVICE_URL}")

        # Test tool listing
        tools = await mcp.list_tools()
        print(f"✅ Available Tools: {len(tools)}")
        for tool in tools:
            print(f" - {tool.name}: {tool.description}")

        # Test prompt listing
        prompts = await mcp.list_prompts()
        print(f"✅ Available Prompts: {len(prompts)}")
        for prompt in prompts:
            print(f" - {prompt.name}: {prompt.description}")

        # Test resource listing
        resources = await mcp.list_resources()
        print(f"✅ Available Resources: {len(resources)}")
        for resource in resources:
            print(f" - {resource.name}: {resource.description}")
print("\n๐ FastMCP Server test completed successfully!")
print("\n๐ Next steps:")
print("1. Start your local LLM service on port 5001")
print("2. Run the MCP server: python run_server.py")
print("3. Connect with an MCP client (Claude Desktop, MCP Inspector)")
    except ImportError as e:
        print(f"❌ Import error: {e}")
        print("Make sure to install dependencies: pip install -r requirements.txt")
        sys.exit(1)
    except Exception as e:
        print(f"❌ Test failed: {e}")
        sys.exit(1)


if __name__ == "__main__":
    asyncio.run(test_mcp_server())