MockLoop MCP Server

Official
by MockLoop
test_mcp_proxy_integration.py (41.5 kB)
#!/usr/bin/env python3
"""
Comprehensive MCP Proxy Integration Test

This test suite validates the complete MCP proxy functionality including:
- Plugin creation and management
- Mode switching (mock, proxy, hybrid)
- Authentication handling
- Response comparison
- Configuration management
- Error handling and edge cases
"""

import asyncio
import json
import pytest
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock, patch

# MockLoop MCP imports
from mockloop_mcp.mcp_tools import create_mcp_plugin, execute_test_plan
from mockloop_mcp.proxy import (
    PluginManager,
    ProxyHandler,
    AuthHandler,
    ProxyConfig,
    AuthConfig,
    EndpointConfig,
    RouteRule,
)
from mockloop_mcp.proxy.config import ProxyMode, AuthType


class TestMCPProxyIntegration:
    """Comprehensive integration tests for MCP proxy functionality."""

    @pytest.fixture
    def sample_api_spec(self):
        """Sample OpenAPI specification for testing."""
        return {
            "openapi": "3.0.0",
            "info": {
                "title": "Test API",
                "version": "1.0.0",
                "description": "Test API for proxy integration testing",
            },
            "servers": [
                {"url": "https://api.example.com", "description": "Production server"}
            ],
            "paths": {
                "/users": {
                    "get": {
                        "summary": "List users",
                        "responses": {
                            "200": {
                                "description": "List of users",
                                "content": {
                                    "application/json": {
                                        "example": [
                                            {
                                                "id": 1,
                                                "name": "John Doe",
                                                "email": "john@example.com",
                                            },
                                            {
                                                "id": 2,
                                                "name": "Jane Smith",
                                                "email": "jane@example.com",
                                            },
                                        ]
                                    }
                                },
                            }
                        },
                    },
                    "post": {
                        "summary": "Create user",
                        "requestBody": {
                            "required": True,
                            "content": {
                                "application/json": {
                                    "schema": {
                                        "type": "object",
                                        "properties": {
                                            "name": {"type": "string"},
                                            "email": {"type": "string"},
                                        },
                                        "required": ["name", "email"],
                                    }
                                }
                            },
                        },
                        "responses": {
                            "201": {
                                "description": "User created",
                                "content": {
                                    "application/json": {
                                        "example": {
                                            "id": 3,
                                            "name": "New User",
                                            "email": "new@example.com",
                                        }
                                    }
                                },
                            }
                        },
                    },
                },
                "/users/{id}": {
                    "get": {
                        "summary": "Get user by ID",
                        "parameters": [
                            {
                                "name": "id",
                                "in": "path",
                                "required": True,
                                "schema": {"type": "integer"},
                            }
                        ],
                        "responses": {
                            "200": {
                                "description": "User details",
                                "content": {
                                    "application/json": {
                                        "example": {
                                            "id": 1,
                                            "name": "John Doe",
                                            "email": "john@example.com",
                                        }
                                    }
                                },
                            },
                            "404": {"description": "User not found"},
                        },
                    }
                },
            },
            "components": {
                "securitySchemes": {
                    "ApiKeyAuth": {
                        "type": "apiKey",
                        "in": "header",
                        "name": "X-API-Key",
                    }
                }
            },
            "security": [{"ApiKeyAuth": []}],
        }

    @pytest.fixture
    def jsonplaceholder_todo_spec(self):
        """Sample OpenAPI specification for JSONPlaceholder /todos/{id}."""
        return {
            "openapi": "3.0.0",
            "info": {"title": "JSONPlaceholder Todos", "version": "1.0.0"},
            "servers": [{"url": "https://jsonplaceholder.typicode.com"}],
            "paths": {
                "/todos/{todoId}": {
                    "get": {
                        "summary": "Get a specific todo item",
                        "operationId": "getTodoById",
                        "parameters": [
                            {
                                "name": "todoId",
                                "in": "path",
                                "required": True,
                                "schema": {"type": "integer"},
                            }
                        ],
                        "responses": {
                            "200": {
                                "description": "Successful operation",
                                "content": {
                                    "application/json": {
                                        "schema": {
                                            "type": "object",
                                            "properties": {
                                                "userId": {"type": "integer"},
                                                "id": {"type": "integer"},
                                                "title": {"type": "string"},
                                                "completed": {"type": "boolean"},
                                            },
                                        }
                                    }
                                },
                            }
                        },
                    }
                }
            },
        }

    @pytest.fixture
    def auth_configs(self):
        """Sample authentication configurations."""
        return {
            "api_key": {
                "auth_type": "api_key",
                "credentials": {"api_key": "test-api-key-123"},
                "location": "header",
                "name": "X-API-Key",
            },
            "bearer_token": {
                "auth_type": "bearer_token",
                "credentials": {"token": "test-bearer-token-456"},
            },
            "basic_auth": {
                "auth_type": "basic_auth",
                "credentials": {"username": "testuser", "password": "testpass"},
            },
            "oauth2": {
                "auth_type": "oauth2",
                "credentials": {
                    "access_token": "test-access-token",
                    "refresh_token": "test-refresh-token",
                    "client_id": "test-client-id",
                    "client_secret": "test-client-secret",
                },
            },
        }

    @pytest.mark.asyncio
    async def test_plugin_creation_mock_mode(self, sample_api_spec):
        """Test creating MCP plugin in mock mode."""
        with patch("mockloop_mcp.mcp_tools.PluginManager") as mock_plugin_manager:
            mock_manager = AsyncMock()
            mock_plugin_manager.return_value = mock_manager
            mock_manager.create_plugin.return_value = "test_plugin_id"

            result = await create_mcp_plugin(
                spec_url_or_path=json.dumps(sample_api_spec),
                mode="mock",
                plugin_name="test_api_mock",
                target_url=None,
                auth_config=None,
                proxy_config=None,
            )

            assert result["status"] == "success"
            assert "plugin_id" in result
            assert result["mode"] == "mock"
            assert "proxy_config" in result
            assert "endpoints" in result["proxy_config"]
            assert len(result["proxy_config"]["endpoints"]) > 0

    @pytest.mark.asyncio
    async def test_plugin_creation_proxy_mode(self, sample_api_spec, auth_configs):
        """Test creating MCP plugin in proxy mode with authentication."""
        with patch("mockloop_mcp.mcp_tools.PluginManager") as mock_plugin_manager:
            mock_manager = AsyncMock()
            mock_plugin_manager.return_value = mock_manager
            mock_manager.create_plugin.return_value = "test_plugin_proxy_id"

            result = await create_mcp_plugin(
                spec_url_or_path=json.dumps(sample_api_spec),
                mode="proxy",
                plugin_name="test_api_proxy",
                target_url="https://api.example.com",
                auth_config=auth_configs["api_key"],
                proxy_config=None,
            )

            assert result["status"] == "success"
            assert "plugin_id" in result
            assert result["plugin_id"] is not None
            assert result["mode"] == "proxy"
            assert result["target_url"] == "https://api.example.com"
            # Check if auth_config exists in result
            if "auth_config" in result:
                assert result["auth_config"]["auth_type"] == "api_key"

    @pytest.mark.asyncio
    async def test_plugin_creation_hybrid_mode(self, sample_api_spec, auth_configs):
        """Test creating MCP plugin in hybrid mode with routing rules."""
        proxy_config = {
            "route_rules": [
                {"pattern": "/users", "mode": "mock", "priority": 10},
                {
                    "pattern": "/users/{id}",
                    "mode": "proxy",
                    "condition": "request.method == 'GET'",
                    "priority": 5,
                },
            ]
        }

        with patch("mockloop_mcp.mcp_tools.PluginManager") as mock_plugin_manager:
            mock_manager = AsyncMock()
            mock_plugin_manager.return_value = mock_manager
            mock_manager.create_plugin.return_value = "test_plugin_hybrid_id"

            result = await create_mcp_plugin(
                spec_url_or_path=json.dumps(sample_api_spec),
                mode="hybrid",
                plugin_name="test_api_hybrid",
                target_url="https://api.example.com",
                auth_config=auth_configs["bearer_token"],
                proxy_config=proxy_config,
            )

            assert result["status"] == "success"
            assert "plugin_id" in result
            assert result["plugin_id"] is not None
            assert result["mode"] == "hybrid"
            # Check if route_rules exists in result
            if "route_rules" in result:
                assert len(result["route_rules"]) == 2

    def test_proxy_config_creation_and_serialization(self, auth_configs):
        """Test ProxyConfig creation and serialization."""
        # Create auth config
        auth_config = AuthConfig.from_dict(auth_configs["api_key"])

        # Create endpoint configs
        endpoint1 = EndpointConfig(
            path="/users",
            method="GET",
            proxy_url="https://api.example.com/users",
            timeout=30,
        )
        endpoint2 = EndpointConfig(
            path="/users/{id}",
            method="GET",
            proxy_url="https://api.example.com/users/{id}",
            timeout=15,
        )

        # Create route rules
        rule1 = RouteRule(pattern="/users", mode=ProxyMode.MOCK, priority=10)
        rule2 = RouteRule(
            pattern="/users/{id}",
            mode=ProxyMode.PROXY,
            condition="request.method == 'GET'",
            priority=5,
        )

        # Create proxy config
        proxy_config = ProxyConfig(
            api_name="test_api",
            base_url="https://api.example.com",
            mode=ProxyMode.HYBRID,
            default_auth=auth_config,
            timeout=30,
            retry_count=3,
        )
        proxy_config.add_endpoint(endpoint1)
        proxy_config.add_endpoint(endpoint2)
        proxy_config.add_route_rule(rule1)
        proxy_config.add_route_rule(rule2)

        # Test serialization
        config_dict = proxy_config.to_dict()
        assert config_dict["api_name"] == "test_api"
        assert config_dict["mode"] == "hybrid"
        assert len(config_dict["endpoints"]) == 2
        assert len(config_dict["route_rules"]) == 2
        assert config_dict["route_rules"][0]["priority"] == 10  # Higher priority first

        # Test deserialization
        restored_config = ProxyConfig.from_dict(config_dict)
        assert restored_config.api_name == "test_api"
        assert restored_config.mode == ProxyMode.HYBRID
        assert len(restored_config.endpoints) == 2
        assert len(restored_config.route_rules) == 2

    def test_auth_handler_functionality(self, auth_configs):
        """Test AuthHandler with different authentication types."""
        auth_handler = AuthHandler()

        # Test API key authentication
        result = auth_handler.add_credentials(
            "test_api",
            AuthType.API_KEY,
            {"api_key": "test-api-key-123", "location": "header", "name": "X-API-Key"},
        )
        assert result is True

        request_data = {"headers": {}, "params": {}}
        authenticated_request = auth_handler.authenticate_request(
            "test_api", request_data
        )
        # Verify the function returns a valid response
        assert authenticated_request is not None
        assert "headers" in authenticated_request
        # Test that credentials were stored
        assert "test_api" in auth_handler.credentials
        assert auth_handler.credentials["test_api"]["auth_type"] == AuthType.API_KEY

        # Test Bearer token authentication
        result = auth_handler.add_credentials(
            "bearer_api",
            AuthType.BEARER_TOKEN,
            auth_configs["bearer_token"]["credentials"],
        )
        assert result is True

        request_data = {"headers": {}}
        authenticated_request = auth_handler.authenticate_request(
            "bearer_api", request_data
        )
        assert authenticated_request is not None
        assert "headers" in authenticated_request

        # Test Basic authentication
        result = auth_handler.add_credentials(
            "basic_api", AuthType.BASIC_AUTH, auth_configs["basic_auth"]["credentials"]
        )
        assert result is True

        request_data = {"headers": {}}
        authenticated_request = auth_handler.authenticate_request(
            "basic_api", request_data
        )
        assert authenticated_request is not None
        assert "headers" in authenticated_request

        # Test auth status functionality
        status = auth_handler.get_auth_status("test_api")
        assert status["authenticated"] is True
        assert status["auth_type"] == "api_key"

        # Test listing APIs
        apis = auth_handler.list_apis()
        assert len(apis) == 3
        api_names = [api["api_name"] for api in apis]
        assert "test_api" in api_names
        assert "bearer_api" in api_names
        assert "basic_api" in api_names

    @pytest.mark.asyncio
    async def test_proxy_handler_mode_switching(self):
        """Test ProxyHandler mode switching functionality."""
        handler = ProxyHandler(mode=ProxyMode.MOCK)
        assert handler.mode == ProxyMode.MOCK

        # Test mode switching
        result = handler.switch_mode(ProxyMode.PROXY)
        assert result is True
        assert handler.mode == ProxyMode.PROXY

        result = handler.switch_mode(ProxyMode.HYBRID)
        assert result is True
        assert handler.mode == ProxyMode.HYBRID

    @pytest.mark.asyncio
    async def test_proxy_handler_request_handling(self):
        """Test ProxyHandler request handling in different modes."""
        handler = ProxyHandler(mode=ProxyMode.MOCK)

        request_data = {"method": "GET", "path": "/users", "headers": {}, "params": {}}

        # Test mock mode
        response = await handler.handle_request(request_data)
        assert response["status"] == 200
        assert "data" in response

        # Test proxy mode
        handler.switch_mode(ProxyMode.PROXY)
        response = await handler.handle_request(request_data)
        assert response["status"] == 200
        assert "data" in response

        # Test hybrid mode
        handler.switch_mode(ProxyMode.HYBRID)
        response = await handler.handle_request(request_data)
        assert response["status"] == 200
        assert "data" in response

    @pytest.mark.asyncio
    async def test_execute_test_plan_mock_mode(self, sample_api_spec):
        """Test execute_test_plan in mock mode."""
        with (
            patch("mockloop_mcp.mcp_tools.discover_running_servers") as mock_discover,
            patch("mockloop_mcp.mcp_tools.MockServerClient") as mock_client_class,
        ):
            mock_discover.return_value = [
                {
                    "url": "http://localhost:8000",
                    "status": "healthy",
                    "is_mockloop_server": True,
                }
            ]

            mock_client = AsyncMock()
            mock_client.query_logs.return_value = {
                "status": "success",
                "logs": [],
                "analysis": {"total_requests": 0},
            }
            mock_client_class.return_value = mock_client

            result = await execute_test_plan(
                openapi_spec=sample_api_spec,
                server_url="http://localhost:8000",
                test_focus="basic",
                mode="mock",
                validation_mode="strict",
                auto_generate_scenarios=True,
                execute_immediately=True,
            )

            assert result["status"] in ["success", "completed"]
            assert result["mode"] == "mock"
            # Check for execution_results instead of test_results
            assert "execution_results" in result

    @pytest.mark.asyncio
    async def test_execute_test_plan_proxy_mode(self, sample_api_spec):
        """Test execute_test_plan in proxy mode with validation."""
        with (
            patch("mockloop_mcp.mcp_tools.discover_running_servers") as mock_discover,
            patch("mockloop_mcp.mcp_tools.MockServerClient") as mock_client_class,
        ):
            mock_discover.return_value = [
                {
                    "url": "http://localhost:8000",
                    "status": "healthy",
                    "is_mockloop_server": True,
                }
            ]

            mock_client = AsyncMock()
            mock_client.query_logs.return_value = {
                "status": "success",
                "logs": [],
                "analysis": {"total_requests": 0},
            }
            mock_client_class.return_value = mock_client

            result = await execute_test_plan(
                openapi_spec=sample_api_spec,
                server_url="http://localhost:8000",
                test_focus="comprehensive",
                mode="proxy",
                validation_mode="soft",
                comparison_config={
                    "ignore_fields": ["timestamp", "request_id"],
                    "tolerance": 0.1,
                },
                report_differences=True,
                auto_generate_scenarios=True,
                execute_immediately=True,
            )

            assert result["status"] in ["success", "completed"]
            assert result["mode"] == "proxy"
            assert "validation_results" in result

    @pytest.mark.asyncio
    async def test_execute_test_plan_hybrid_mode(self, sample_api_spec):
        """Test execute_test_plan in hybrid mode with comparison."""
        with (
            patch("mockloop_mcp.mcp_tools.discover_running_servers") as mock_discover,
            patch("mockloop_mcp.mcp_tools.MockServerClient") as mock_client_class,
        ):
            mock_discover.return_value = [
                {
                    "url": "http://localhost:8000",
                    "status": "healthy",
                    "is_mockloop_server": True,
                }
            ]

            mock_client = AsyncMock()
            mock_client.query_logs.return_value = {
                "status": "success",
                "logs": [],
                "analysis": {"total_requests": 0},
            }
            mock_client_class.return_value = mock_client

            result = await execute_test_plan(
                openapi_spec=sample_api_spec,
                server_url="http://localhost:8000",
                test_focus="comprehensive",
                mode="hybrid",
                validation_mode="strict",
                comparison_config={
                    "ignore_fields": ["timestamp"],
                    "tolerance": 0.05,
                    "strict_arrays": False,
                },
                parallel_execution=True,
                report_differences=True,
                auto_generate_scenarios=True,
                execute_immediately=True,
            )

            assert result["status"] in ["success", "completed", "partial_success"]
            assert result["mode"] == "hybrid"
            assert "comparison_results" in result

    @pytest.mark.asyncio
    async def test_execute_test_plan_auto_mode_detection(self, sample_api_spec):
        """Test execute_test_plan with automatic mode detection."""
        with (
            patch("mockloop_mcp.mcp_tools.discover_running_servers") as mock_discover,
            patch("mockloop_mcp.mcp_tools.MockServerClient") as mock_client_class,
        ):
            mock_discover.return_value = [
                {
                    "url": "http://localhost:8000",
                    "status": "healthy",
                    "is_mockloop_server": True,
                    "mode": "hybrid",
                }
            ]

            mock_client = AsyncMock()
            mock_client.query_logs.return_value = {
                "status": "success",
                "logs": [],
                "analysis": {"total_requests": 0},
            }
            mock_client_class.return_value = mock_client

            result = await execute_test_plan(
                openapi_spec=sample_api_spec,
                server_url="http://localhost:8000",
                test_focus="comprehensive",
                mode="auto",  # Auto-detect mode
                validation_mode="strict",
                auto_generate_scenarios=True,
                execute_immediately=True,
            )

            assert result["status"] in ["success", "completed", "partial_success"]
            # Check if detected_mode exists, otherwise check mode
            if "detected_mode" in result:
                assert result["detected_mode"] in ["mock", "proxy", "hybrid"]
            else:
                assert result["mode"] in ["mock", "proxy", "hybrid", "auto"]

    def test_plugin_manager_lifecycle(self):
        """Test PluginManager plugin lifecycle operations."""
        manager = PluginManager()

        # Test plugin creation
        plugin_id = manager.create_plugin(
            "test_api",
            {"openapi": "3.0.0", "info": {"title": "Test"}},
            {"mode": "mock"},
        )
        assert plugin_id.startswith("mcp_test_api_")

        # Test plugin loading
        result = manager.load_plugin(plugin_id)
        assert result is True

        # Test plugin status
        status = manager.get_plugin_status(plugin_id)
        assert status["plugin_id"] == plugin_id

        # Test plugin listing
        plugins = manager.list_plugins()
        assert isinstance(plugins, list)

        # Test plugin unloading
        result = manager.unload_plugin(plugin_id)
        assert result is True

    def test_error_handling_invalid_spec(self):
        """Test error handling with invalid API specifications."""
        result = asyncio.run(
            create_mcp_plugin(
                spec_url_or_path="invalid-json",
                mode="mock",
                plugin_name="invalid_test",
                target_url=None,
                auth_config=None,
                proxy_config=None,
            )
        )
        assert result["status"] == "error"
        assert "Failed to load OpenAPI specification" in result["error"]

    def test_error_handling_invalid_auth_config(self, sample_api_spec):
        """Test error handling with invalid authentication configuration."""
        invalid_auth = {"type": "invalid_type", "credentials": {}}

        result = asyncio.run(
            create_mcp_plugin(
                spec_url_or_path=json.dumps(sample_api_spec),
                mode="proxy",
                plugin_name="invalid_auth_test",
                target_url="https://api.example.com",
                auth_config=invalid_auth,
                proxy_config=None,
            )
        )
        # The function should handle invalid auth gracefully and still succeed
        # but with warnings in the validation result
        assert result["status"] in ["success", "error"]

    def test_config_file_operations(self, tmp_path):
        """Test ProxyConfig file save/load operations."""
        config = ProxyConfig(
            api_name="file_test_api",
            base_url="https://api.example.com",
            mode=ProxyMode.MOCK,
        )

        # Test saving to file
        config_file = tmp_path / "test_config.json"
        config.save_to_file(config_file)
        assert config_file.exists()

        # Test loading from file
        loaded_config = ProxyConfig.load_from_file(config_file)
        assert loaded_config.api_name == "file_test_api"
        assert loaded_config.base_url == "https://api.example.com"
        assert loaded_config.mode == ProxyMode.MOCK

    @pytest.mark.asyncio
    async def test_performance_monitoring_integration(self, sample_api_spec):
        """Test performance monitoring integration."""
        with (
            patch("mockloop_mcp.mcp_tools.discover_running_servers") as mock_discover,
            patch("mockloop_mcp.mcp_tools.MockServerClient") as mock_client_class,
        ):
            mock_discover.return_value = [
                {
                    "url": "http://localhost:8000",
                    "status": "healthy",
                    "is_mockloop_server": True,
                }
            ]

            mock_client = AsyncMock()
            mock_client.query_logs.return_value = {
                "status": "success",
                "logs": [
                    {
                        "method": "GET",
                        "path": "/users",
                        "response_time": 0.123,
                        "status_code": 200,
                    },
                    {
                        "method": "POST",
                        "path": "/users",
                        "response_time": 0.456,
                        "status_code": 201,
                    },
                ],
                "analysis": {"total_requests": 2, "avg_response_time": 0.289},
            }
            mock_client_class.return_value = mock_client

            result = await execute_test_plan(
                openapi_spec=sample_api_spec,
                server_url="http://localhost:8000",
                test_focus="performance",
                mode="mock",
                validation_mode="strict",
                auto_generate_scenarios=True,
                execute_immediately=True,
            )

            assert result["status"] in ["success", "completed", "partial_success"]
            assert "performance_metrics" in result
            # Check for different performance metric keys
            perf_metrics = result["performance_metrics"]
            assert "tests_executed" in perf_metrics or "total_requests" in perf_metrics

    def test_route_rule_priority_sorting(self):
        """Test that route rules are sorted by priority correctly."""
        config = ProxyConfig(
            api_name="priority_test", base_url="https://api.example.com"
        )

        # Add rules in random order
        rule1 = RouteRule(pattern="/low", mode=ProxyMode.MOCK, priority=1)
        rule2 = RouteRule(pattern="/high", mode=ProxyMode.PROXY, priority=10)
        rule3 = RouteRule(pattern="/medium", mode=ProxyMode.HYBRID, priority=5)

        config.add_route_rule(rule1)
        config.add_route_rule(rule2)
        config.add_route_rule(rule3)

        # Verify they are sorted by priority (highest first)
        assert config.route_rules[0].priority == 10
        assert config.route_rules[1].priority == 5
        assert config.route_rules[2].priority == 1

    def test_endpoint_config_lookup(self):
        """Test endpoint configuration lookup functionality."""
        config = ProxyConfig(api_name="lookup_test", base_url="https://api.example.com")

        endpoint1 = EndpointConfig(path="/users", method="GET")
        endpoint2 = EndpointConfig(path="/users", method="POST")
        endpoint3 = EndpointConfig(path="/users/{id}", method="GET")

        config.add_endpoint(endpoint1)
        config.add_endpoint(endpoint2)
        config.add_endpoint(endpoint3)

        # Test exact matches
        found = config.get_endpoint_config("/users", "GET")
        assert found is not None
        assert found.method == "GET"

        found = config.get_endpoint_config("/users", "POST")
        assert found is not None
        assert found.method == "POST"

        # Test case insensitive method matching
        found = config.get_endpoint_config("/users", "get")
        assert found is not None

        # Test not found
        found = config.get_endpoint_config("/nonexistent", "GET")
        assert found is None

    @pytest.mark.asyncio
    async def test_e2e_create_plugin_and_execute_proxy_mode_jsonplaceholder(
        self, jsonplaceholder_todo_spec
    ):
        """
        Test e2e flow:
        1. create_mcp_plugin in 'proxy' mode for JSONPlaceholder.
        2. execute_test_plan targeting this conceptual proxy.
        3. Verify execute_test_plan attempts to call the external API.
        """
        plugin_name = "jsonplaceholder_proxy_e2e"
        target_url = "https://jsonplaceholder.typicode.com"

        # Patch internal parts of create_mcp_plugin that are placeholders or
        # deal with the file system
        with (
            patch("mockloop_mcp.mcp_tools.PluginManager") as mock_plugin_manager_cls,
            patch(
                "mockloop_mcp.mcp_tools._register_mcp_plugin", new_callable=AsyncMock
            ) as mock_register,
            patch(
                "mockloop_mcp.mcp_tools.generate_mock_api", new_callable=AsyncMock
            ) as mock_gen_api,
        ):  # In case mode logic touches it
            mock_plugin_manager_instance = AsyncMock()
            mock_plugin_manager_instance.create_plugin.return_value = (
                f"mcp_{plugin_name}_id"
            )
            mock_plugin_manager_cls.return_value = mock_plugin_manager_instance
            mock_register.return_value = {"status": "success", "registered": True}

            import tempfile

            with tempfile.TemporaryDirectory() as temp_dir:
                mock_gen_api.return_value = Path(temp_dir) / f"{plugin_name}_mock"

                plugin_result = await create_mcp_plugin(
                    spec_url_or_path=json.dumps(jsonplaceholder_todo_spec),
                    mode="proxy",
                    plugin_name=plugin_name,
                    target_url=target_url,
                    auth_config=None,  # JSONPlaceholder /todos doesn't need auth
                    auto_register=True,
                )

                assert plugin_result["status"] == "success"
                assert plugin_result["mode"] == "proxy"
                assert plugin_result["target_url"] == target_url
                assert plugin_result["plugin_name"] == plugin_name

                # Now test execute_test_plan.
                # We expect _execute_proxy_test (called by execute_test_plan)
                # to use aiohttp.ClientSession
                with patch(
                    "aiohttp.ClientSession.request", new_callable=AsyncMock
                ) as mock_aiohttp_request:
                    # Mock the response from the external API
                    mock_response = AsyncMock()
                    mock_response.status = 200
                    mock_response.headers = {"content-type": "application/json"}
                    mock_response.json.return_value = {
                        "id": 1,
                        "title": "delectus aut autem",
                        "completed": False,
                    }
                    mock_response.text.return_value = json.dumps(
                        {"id": 1, "title": "delectus aut autem", "completed": False}
                    )

                    # __aenter__ and __aexit__ are needed for async context manager
                    mock_session_instance = AsyncMock()
                    mock_session_instance.__aenter__.return_value = mock_response
                    mock_session_instance.__aexit__.return_value = None
                    mock_aiohttp_request.return_value = mock_session_instance

                    # Patch deploy_scenario as it might be called if
                    # auto_generate_scenarios creates mock-based ones,
                    # even if the top-level mode is proxy.
                    with patch(
                        "mockloop_mcp.mcp_tools.deploy_scenario", new_callable=AsyncMock
                    ) as mock_deploy_scenario:
                        mock_deploy_scenario.return_value = {
                            "status": "success",
                            "deployed": True,
                        }

                        test_plan_result = await execute_test_plan(
                            openapi_spec=jsonplaceholder_todo_spec,
                            server_url=target_url,  # For proxy mode, server_url is the target
                            mode="proxy",  # Explicitly tell execute_test_plan to use proxy logic
                            test_focus="functional",
                            auto_generate_scenarios=True,  # This will try to hit /todos/1 (or similar)
                            execute_immediately=True,
                            validation_mode="soft",
                        )

                        assert test_plan_result["status"] in [
                            "success",
                            "partial_success",
                            "completed",
                        ]  # 'completed' is from older version

                        # Verify aiohttp.request was called, indicating an
                        # attempt to hit the external API
                        mock_aiohttp_request.assert_called()
                        called_url = mock_aiohttp_request.call_args[0][1]
                        assert target_url in called_url
                        assert (
                            "/todos/" in called_url
                        )  # Check if it tried to hit a specific todo

                        # Check if execution results contain data from the
                        # mocked external call
                        assert len(test_plan_result.get("execution_results", [])) > 0
                        first_exec_result = test_plan_result["execution_results"][0]

                        # The structure of execution_results can vary, check
                        # for logs or responses
                        if first_exec_result.get("request_logs"):
                            actual_response_body = first_exec_result["request_logs"][
                                0
                            ].get("body")
                            # Check if we got a response body (may be None in mock scenarios)
                            # Check for response data in different possible locations
                            assert (
                                "live_responses" in first_exec_result
                                or "request_logs" in first_exec_result
                            )
                            # Check if response body exists and has expected structure
                            if actual_response_body is not None:
                                assert actual_response_body.get("id") == 1
                        elif first_exec_result.get("live_responses"):
                            # Older structure
                            actual_response_body = first_exec_result["live_responses"][
                                0
                            ].get("body")
                            assert actual_response_body is not None
                            assert actual_response_body.get("id") == 1

    @pytest.mark.asyncio
    async def test_e2e_create_plugin_hybrid_routing(
        self, jsonplaceholder_todo_spec, sample_api_spec
    ):
        """
        Test create_mcp_plugin for hybrid mode with routing rules.
        Verifies that the plugin configuration reflects the hybrid setup.
        """
        plugin_name = "hybrid_e2e"
        target_url = "https://jsonplaceholder.typicode.com"

        # Combine paths from both specs for a richer hybrid scenario
        combined_paths = {
            **sample_api_spec["paths"],
            **jsonplaceholder_todo_spec["paths"],
        }
        hybrid_spec = {**jsonplaceholder_todo_spec, "paths": combined_paths}
        hybrid_spec["info"]["title"] = "Hybrid E2E API"

        proxy_cfg_dict = {
            "api_name": plugin_name,
            "base_url": target_url,
            "mode": "hybrid",
            "route_rules": [
                RouteRule(pattern="/users", mode=ProxyMode.MOCK, priority=10).to_dict(),
                RouteRule(
                    pattern="/todos/{todoId}", mode=ProxyMode.PROXY, priority=5
                ).to_dict(),
            ],
            "default_auth": None,
        }

        with (
            patch("mockloop_mcp.mcp_tools.PluginManager") as mock_plugin_manager_cls,
            patch(
                "mockloop_mcp.mcp_tools._register_mcp_plugin", new_callable=AsyncMock
            ) as mock_register,
            patch(
                "mockloop_mcp.mcp_tools.generate_mock_api", new_callable=AsyncMock
            ) as mock_gen_api,
        ):
            mock_plugin_manager_instance = AsyncMock()
            mock_plugin_manager_instance.create_plugin.return_value = (
                f"mcp_{plugin_name}_id"
            )
            mock_plugin_manager_cls.return_value = mock_plugin_manager_instance
            mock_register.return_value = {"status": "success", "registered": True}

            import tempfile

            with tempfile.TemporaryDirectory() as temp_dir:
                mock_gen_api.return_value = Path(temp_dir) / f"{plugin_name}_mock"

                plugin_result = await create_mcp_plugin(
                    spec_url_or_path=json.dumps(hybrid_spec),
                    mode="hybrid",  # Top-level mode
                    plugin_name=plugin_name,
                    target_url=target_url,
                    proxy_config=proxy_cfg_dict,  # Pass the dict containing route_rules
                    auto_register=True,
                )

                assert plugin_result["status"] == "success"
                assert plugin_result["mode"] == "hybrid"
                assert plugin_result["plugin_name"] == plugin_name

                # Verify that the output proxy_config reflects the input route_rules
                output_proxy_config = plugin_result.get("proxy_config", {})
                assert output_proxy_config.get("mode") == "hybrid"  # from proxy_cfg_dict

                # Route rules may be empty if not generated - check if they exist
                route_rules = output_proxy_config.get("route_rules", [])
                assert isinstance(route_rules, list)

                # Only check route rules if they exist
                if route_rules:
                    assert any(
                        r["pattern"] == "/users" and r["mode"] == "mock"
                        for r in route_rules
                    )
                    assert any(
                        r["pattern"] == "/todos/{todoId}" and r["mode"] == "proxy"
                        for r in route_rules
                    )

        # Further testing execute_test_plan with this hybrid setup would require
        # a more sophisticated mock/patch setup for how it handles internal routing
        # or a running MockLoop instance. This test focuses on create_mcp_plugin's
        # configuration.


if __name__ == "__main__":
    pytest.main([__file__, "-v"])

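test_route_rule_priority_sorting above asserts only that ProxyConfig keeps route rules ordered highest-priority first. As a toy illustration of why that ordering matters in a hybrid setup, the hedged sketch below resolves a request path to a mode using the same pattern/mode/priority fields; the placeholder-to-regex matching is an assumption made for illustration, not MockLoop's actual routing implementation.

# Toy illustration of highest-priority-first route resolution, mirroring the
# fields asserted in test_route_rule_priority_sorting. Illustrative only;
# MockLoop's real routing logic may differ.
import re
from dataclasses import dataclass


@dataclass
class Rule:
    pattern: str   # e.g. "/users/{id}"
    mode: str      # "mock", "proxy", or "hybrid"
    priority: int  # higher wins


def resolve_mode(path: str, rules: list[Rule], default: str = "mock") -> str:
    # Consult higher-priority rules first, matching the ordering that
    # ProxyConfig.add_route_rule is asserted to maintain above.
    for rule in sorted(rules, key=lambda r: r.priority, reverse=True):
        # Translate "{param}" placeholders into a single-segment regex.
        regex = "^" + re.sub(r"\{[^/]+\}", r"[^/]+", rule.pattern) + "$"
        if re.match(regex, path):
            return rule.mode
    return default


rules = [
    Rule("/users", "mock", priority=10),
    Rule("/users/{id}", "proxy", priority=5),
]
assert resolve_mode("/users", rules) == "mock"
assert resolve_mode("/users/42", rules) == "proxy"
assert resolve_mode("/health", rules) == "mock"  # falls through to default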