Skip to main content
Glama

FastMCP

by Dev00355
test_mcp.py • 1.91 kB
#!/usr/bin/env python3
"""Test script for FastMCP server."""
import asyncio
import json
import sys
from io import StringIO


async def test_mcp_server():
    """Smoke-test the FastMCP server.

    Imports the server, prints its configuration, and enumerates its
    tools, prompts, and resources. Exits the process with status 1 on
    import failure (missing dependencies) or any other error.
    """
    print("🧪 Testing FastMCP Server...")
    try:
        # Import the MCP server; fails fast if dependencies are missing.
        from mcp_server import mcp
        from config import MCPConfig

        print(f"✅ Server Name: {MCPConfig.SERVER_NAME}")
        print(f"✅ Server Version: {MCPConfig.SERVER_VERSION}")
        print(f"✅ LLM Service URL: {MCPConfig.LOCAL_LLM_SERVICE_URL}")

        # BUG FIX: the original wrote `await mcp.list_tools()()` — the extra
        # call operator is applied to the coroutine object returned by
        # list_tools(), which is not callable and raises TypeError. These
        # FastMCP methods are async; await each exactly once.
        tools = await mcp.list_tools()
        print(f"✅ Available Tools: {len(tools)}")
        for tool in tools:
            print(f"   - {tool.name}: {tool.description}")

        prompts = await mcp.list_prompts()
        print(f"✅ Available Prompts: {len(prompts)}")
        for prompt in prompts:
            print(f"   - {prompt.name}: {prompt.description}")

        resources = await mcp.list_resources()
        print(f"✅ Available Resources: {len(resources)}")
        for resource in resources:
            print(f"   - {resource.name}: {resource.description}")

        print("\n🎉 FastMCP Server test completed successfully!")
        print("\n📋 Next steps:")
        print("1. Start your local LLM service on port 5001")
        print("2. Run the MCP server: python run_server.py")
        print("3. Connect with an MCP client (Claude Desktop, MCP Inspector)")
    except ImportError as e:
        print(f"❌ Import error: {e}")
        print("Make sure to install dependencies: pip install -r requirements.txt")
        sys.exit(1)
    except Exception as e:
        # Top-level boundary of a standalone test script: report and exit
        # non-zero so CI / callers see the failure.
        print(f"❌ Test failed: {e}")
        sys.exit(1)


if __name__ == "__main__":
    asyncio.run(test_mcp_server())

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Dev00355/custom-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.