We provide all the information about MCP servers via our MCP API.
curl -X GET 'https://glama.ai/api/mcp/v1/servers/0xsaju/mcp-server'
If you have feedback or need assistance with the MCP directory API, please join our Discord server.
test_server.py • 1.5 KiB
#!/usr/bin/env python3
"""
Test script for the MCP Local LLM Server
"""
import asyncio
import json
import sys
from mcp_server import MCPLocalLLMServer
async def test_server():
    """Smoke-test the MCP Local LLM Server's listing handlers.

    Instantiates ``MCPLocalLLMServer`` and, best-effort, invokes its
    registered ``tools`` and ``resources`` list handlers, printing what
    each returns. Each probe catches its own exception so one failure
    does not abort the remaining checks.
    """
    # NOTE(review): the original status strings were mojibake-corrupted
    # emoji (e.g. "π§ͺ", "β"); the exact originals are unrecoverable, so
    # they are replaced with ASCII markers that render on any terminal.
    print("[TEST] Testing MCP Local LLM Server...")
    server = MCPLocalLLMServer()

    # Probe the tools listing handler, if one is registered.
    # NOTE(review): relies on the SDK exposing `tools_handler` as an
    # attribute on the inner server object — confirm against mcp_server.py.
    print("\n[TEST] Testing list_tools...")
    try:
        if server.server.tools_handler:
            tools_result = await server.server.tools_handler()
            print(f"[OK] Found {len(tools_result.tools)} tools:")
            for tool in tools_result.tools:
                print(f"  - {tool.name}: {tool.description}")
    except Exception as e:
        # Best-effort probe: report and continue to the next check.
        print(f"[ERROR] Error testing tools: {e}")

    # Probe the resources listing handler, if one is registered.
    print("\n[TEST] Testing list_resources...")
    try:
        if server.server.resources_handler:
            resources_result = await server.server.resources_handler()
            print(f"[OK] Found {len(resources_result.resources)} resources:")
            for resource in resources_result.resources:
                print(f"  - {resource.uri}: {resource.name}")
    except Exception as e:
        # Best-effort probe: report and continue.
        print(f"[ERROR] Error testing resources: {e}")

    print("\n[DONE] Basic server tests completed!")
    print("\nTo test with actual LLM:")
    print("1. Run: python mcp_server.py")
    print("2. Send JSON-RPC requests via stdin")
    print("3. Or integrate with Claude Desktop using the provided config")
def _main() -> None:
    """Script entry point: drive the async test routine to completion."""
    asyncio.run(test_server())


if __name__ == "__main__":
    _main()