
Crawl4AI MCP Server

test_mcp_tools.py (6.65 kB)
#!/usr/bin/env python3 """ MCP Tools Functional Testing Script This script tests all tools in the Crawl4AI MCP server to validate they work correctly through the MCP protocol. """ import asyncio import json import sys from fastmcp import Client from crawl4ai_mcp_server import mcp async def test_server_status(): """Test the server_status tool.""" print("šŸ” Testing server_status tool...") try: async with Client(mcp) as client: result = await client.call_tool("server_status", {}) if result and hasattr(result, 'content') and result.content: # Handle both list and single content formats content = result.content[0] if isinstance(result.content, list) else result.content data = json.loads(content.text) print(f"āœ… server_status: {data.get('status', 'unknown')}") print(f" Server: {data.get('server_name', 'unknown')}") print(f" Capabilities: {len(data.get('capabilities', []))}") return True else: print("āŒ server_status: No result returned") return False except Exception as e: print(f"āŒ server_status: {str(e)}") return False async def test_get_page_structure(): """Test the get_page_structure tool.""" print("\nšŸ” Testing get_page_structure tool...") try: async with Client(mcp) as client: # Test with a simple, reliable webpage test_url = "https://httpbin.org/html" result = await client.call_tool("get_page_structure", { "url": test_url, "format": "html" }) if result and hasattr(result, 'content') and result.content: content_item = result.content[0] if isinstance(result.content, list) else result.content content = content_item.text if "html" in content.lower() and len(content) > 100: print(f"āœ… get_page_structure: Retrieved {len(content)} characters") print(f" URL: {test_url}") print(" Format: HTML") return True else: print("āŒ get_page_structure: Invalid content received") return False else: print("āŒ get_page_structure: No result returned") return False except Exception as e: print(f"āŒ get_page_structure: {str(e)}") return False async def test_crawl_with_schema(): """Test the crawl_with_schema tool.""" print("\nšŸ” Testing crawl_with_schema tool...") try: async with Client(mcp) as client: # Test with a simple schema test_url = "https://httpbin.org/html" test_schema = json.dumps({ "title": "h1", "body": "p" }) result = await client.call_tool("crawl_with_schema", { "url": test_url, "extraction_schema": test_schema }) if result and hasattr(result, 'content') and result.content: content_item = result.content[0] if isinstance(result.content, list) else result.content response = json.loads(content_item.text) # Check for either success=True OR extracted_data field (empty data is valid) if response.get("success") or "extracted_data" in response: print("āœ… crawl_with_schema: Schema extraction completed") print(f" URL: {test_url}") print(f" Schema fields: {list(json.loads(test_schema).keys())}") extracted = response.get('extracted_data', {}) if extracted: print(f" Extracted data: {extracted}") else: print(f" Result: {response.get('message', 'No data matched schema')}") return True else: print("āŒ crawl_with_schema: Extraction failed") return False else: print("āŒ crawl_with_schema: No result returned") return False except Exception as e: print(f"āŒ crawl_with_schema: {str(e)}") return False async def test_take_screenshot(): """Test the take_screenshot tool.""" print("\nšŸ” Testing take_screenshot tool...") try: async with Client(mcp) as client: # Test with a simple webpage test_url = "https://httpbin.org/html" result = await client.call_tool("take_screenshot", { "url": test_url }) if result 
and hasattr(result, 'content') and result.content: content_item = result.content[0] if isinstance(result.content, list) else result.content response = json.loads(content_item.text) if response.get("success") and "screenshot_data" in response: screenshot_size = len(response.get("screenshot_data", "")) print("āœ… take_screenshot: Screenshot captured successfully") print(f" URL: {test_url}") print(f" Format: {response.get('format', 'unknown')}") print(f" Data size: {screenshot_size} characters") return True else: print("āŒ take_screenshot: Screenshot capture failed") return False else: print("āŒ take_screenshot: No result returned") return False except Exception as e: print(f"āŒ take_screenshot: {str(e)}") return False async def main(): """Run all tool tests.""" print("šŸš€ Starting Crawl4AI MCP Server Tool Testing") print("=" * 50) tests = [ test_server_status, test_get_page_structure, test_crawl_with_schema, test_take_screenshot ] passed = 0 total = len(tests) for test in tests: success = await test() if success: passed += 1 print("\n" + "=" * 50) print(f"šŸŽÆ Test Results: {passed}/{total} tests passed") if passed == total: print("āœ… All MCP tools are working correctly!") return True else: print("āŒ Some MCP tools have issues that need attention") return False if __name__ == "__main__": success = asyncio.run(main()) sys.exit(0 if success else 1)
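
To debug a single tool without running the whole suite, the individual test coroutines can be invoked directly. A minimal sketch, assuming the repository root is on PYTHONPATH so that test_mcp_tools.py and crawl4ai_mcp_server import cleanly (the wrapper script below is hypothetical, not part of the repository):

# Run one test coroutine in isolation; test_server_status comes from
# test_mcp_tools.py above, and the exit-code convention mirrors its main().
import asyncio
import sys

from test_mcp_tools import test_server_status

if __name__ == "__main__":
    ok = asyncio.run(test_server_status())
    sys.exit(0 if ok else 1)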

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Nexus-Digital-Automations/crawl4ai-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.