"""Tests for the main Percepta MCP server."""
import asyncio
from unittest.mock import AsyncMock, Mock, patch

import pytest

from src.percepta_mcp.config import (
    Settings,
    AIProviderConfig,
    BrowserConfig,
    MCPServerConfig,
    MonitoringConfig,
)
from src.percepta_mcp.server import PerceptaMCPServer, main
from src.percepta_mcp.ai_router import AIResponse
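
# NOTE: the async `server` fixture and the @pytest.mark.asyncio tests below
# assume pytest-asyncio is installed and configured (asyncio_mode = "auto",
# or the fixture declared with pytest_asyncio.fixture in strict mode).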


@pytest.fixture
def mock_settings():
    """Mock settings for testing."""
    return Settings(
        ai_providers=[
            AIProviderConfig(
                name="test-openai",
                type="openai",
                api_key="test-key",
                model="gpt-3.5-turbo",
                priority=1,
                enabled=True,
            )
        ],
        default_provider="test-openai",
        browser=BrowserConfig(
            headless=True,
            timeout=30000,
            viewport_width=1920,
            viewport_height=1080,
            user_agent="test-agent",
        ),
        mcp_server=MCPServerConfig(),
        monitoring=MonitoringConfig(log_level="INFO"),
    )


@pytest.fixture
async def server(mock_settings):
    """Create a test server instance."""
    with patch('src.percepta_mcp.server.get_ai_router') as mock_router:
        mock_router.return_value = AsyncMock()
        with patch('src.percepta_mcp.server.BrowserAutomation') as mock_browser:
            mock_browser.return_value = AsyncMock()
            with patch('src.percepta_mcp.server.VisualAnalysis') as mock_visual:
                mock_visual.return_value = AsyncMock()
                with patch('src.percepta_mcp.server.WebScraper') as mock_scraper:
                    mock_scraper.return_value = AsyncMock()
                    server = PerceptaMCPServer(mock_settings)
                    yield server
                    # Cleanup
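                    # (the patches above are still active during teardown, so
                    # server.stop() hits the AsyncMock components rather than
                    # real browser or scraper resources)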
                    try:
                        await server.stop()
                    except Exception:
                        pass


@pytest.fixture
def mock_ai_response():
    """Mock AI response for testing."""
    return AIResponse(
        content="Test response",
        provider="openai",
        model="gpt-3.5-turbo",
        tokens_used=30,
        cost=0.001,
        response_time=1.5,
        error=None,
    )


class TestPerceptaMCPServerInit:
    """Test server initialization."""

    def test_init_with_settings(self, mock_settings):
        """Test server initialization with custom settings."""
        with patch('src.percepta_mcp.server.get_ai_router') as mock_router:
            mock_router.return_value = AsyncMock()
            with patch('src.percepta_mcp.server.BrowserAutomation') as mock_browser:
                with patch('src.percepta_mcp.server.VisualAnalysis') as mock_visual:
                    with patch('src.percepta_mcp.server.WebScraper') as mock_scraper:
                        server = PerceptaMCPServer(mock_settings)

                        assert server.settings == mock_settings
                        assert server.server is not None
                        assert server.ai_router is not None
                        assert server.browser_automation is not None
                        assert server.visual_analysis is not None
                        assert server.web_scraper is not None

                        # Verify components initialized with settings
                        mock_router.assert_called_once_with(mock_settings)
                        mock_browser.assert_called_once_with(mock_settings)
                        mock_visual.assert_called_once_with(mock_settings)
                        mock_scraper.assert_called_once_with(mock_settings)

    def test_init_with_default_settings(self):
        """Test server initialization with default settings."""
        with patch('src.percepta_mcp.server.get_default_settings') as mock_default:
            mock_default.return_value = Mock()
            with patch('src.percepta_mcp.server.get_ai_router') as mock_router:
                mock_router.return_value = AsyncMock()
                with patch('src.percepta_mcp.server.BrowserAutomation'):
                    with patch('src.percepta_mcp.server.VisualAnalysis'):
                        with patch('src.percepta_mcp.server.WebScraper'):
                            server = PerceptaMCPServer()

                            mock_default.assert_called_once()
                            assert server.settings == mock_default.return_value


class TestToolExecution:
    """Test tool execution functionality."""

    @pytest.mark.asyncio
    async def test_execute_browser_navigate(self, server):
        """Test browser navigation tool execution."""
        server.browser_automation.navigate.return_value = {
            "success": True,
            "url": "https://example.com"
        }

        result = await server._execute_tool("browser_navigate", {"url": "https://example.com"})

        assert result["success"] is True
        assert result["url"] == "https://example.com"
        server.browser_automation.navigate.assert_called_once_with(url="https://example.com")

    @pytest.mark.asyncio
    async def test_execute_browser_click(self, server):
        """Test browser click tool execution."""
        server.browser_automation.click.return_value = {"success": True}

        result = await server._execute_tool("browser_click", {"selector": "#button"})

        assert result["success"] is True
        server.browser_automation.click.assert_called_once_with(selector="#button")

    @pytest.mark.asyncio
    async def test_execute_visual_analyze(self, server):
        """Test visual analysis tool execution."""
        server.visual_analysis.analyze_image.return_value = {
            "analysis": "This is a test image"
        }

        result = await server._execute_tool("analyze_image", {
            "image_path": "/test/image.jpg",
            "prompt": "What do you see?"
        })

        assert "analysis" in result
        server.visual_analysis.analyze_image.assert_called_once()

    @pytest.mark.asyncio
    async def test_execute_web_scrape(self, server):
        """Test web scraping tool execution."""
        server.web_scraper.scrape.return_value = {
            "content": "Test content"
        }

        result = await server._execute_tool("scrape_website", {"url": "https://example.com"})

        assert "content" in result
        server.web_scraper.scrape.assert_called_once_with(url="https://example.com")

    @pytest.mark.asyncio
    async def test_execute_ai_analyze(self, server, mock_ai_response):
        """Test AI analysis tool execution."""
        server.ai_router.generate.return_value = mock_ai_response

        result = await server._execute_tool("ai_analyze", {
            "content": "Test content",
            "task": "Analyze this"
        })

        assert result["analysis"] == "Test response"
        assert result["provider"] == "openai"

    @pytest.mark.asyncio
    async def test_execute_unknown_tool(self, server):
        """Test execution of unknown tool."""
        with pytest.raises(ValueError, match="Unknown tool"):
            await server._execute_tool("unknown_tool", {})


class TestAIHelperMethods:
    """Test AI helper methods."""

    @pytest.mark.asyncio
    async def test_ai_analyze(self, server, mock_ai_response):
        """Test AI analysis helper method."""
        server.ai_router.generate.return_value = mock_ai_response

        result = await server._ai_analyze(
            content="Test content",
            task="Analyze this",
            model="gpt-4"
        )

        assert result["analysis"] == "Test response"
        assert result["provider"] == "openai"
        assert result["model"] == "gpt-3.5-turbo"

    @pytest.mark.asyncio
    async def test_ai_generate(self, server, mock_ai_response):
        """Test AI generation helper method."""
        server.ai_router.generate.return_value = mock_ai_response

        result = await server._ai_generate(
            prompt="Generate something",
            model="gpt-4"
        )

        assert result["generated_content"] == "Test response"
        assert result["provider"] == "openai"

    @pytest.mark.asyncio
    async def test_ai_chat(self, server, mock_ai_response):
        """Test AI chat helper method."""
        server.ai_router.generate.return_value = mock_ai_response

        result = await server._ai_chat(
            message="Hello",
            conversation_id="test-123",
            system_prompt="You are helpful"
        )

        assert result["reply"] == "Test response"
        assert result["conversation_id"] == "test-123"


class TestServerLifecycle:
    """Test server startup and shutdown."""

    @pytest.mark.asyncio
    async def test_start_server(self, server):
        """Test server startup."""
        await server.start()
        # Should complete without error

    @pytest.mark.asyncio
    async def test_stop_server(self, server):
        """Test server shutdown."""
        await server.stop()

        # Verify cleanup calls
        server.browser_automation.close.assert_called_once()
        server.web_scraper.close.assert_called_once()

    @pytest.mark.asyncio
    async def test_stop_server_with_error(self, server):
        """Test server shutdown with cleanup errors."""
        server.browser_automation.close.side_effect = Exception("Cleanup error")

        # Should not raise exception even if cleanup fails
        await server.stop()

        server.browser_automation.close.assert_called_once()
        server.web_scraper.close.assert_called_once()

    def test_get_server(self, server):
        """Test getting the MCP server instance."""
        mcp_server = server.get_server()
        assert mcp_server == server.server


class TestMainFunction:
    """Test the main entry point function."""

    @pytest.mark.asyncio
    async def test_main_with_default_config(self):
        """Test main function with default configuration."""
        with patch('sys.argv', ['server.py']):
            with patch('src.percepta_mcp.server.get_default_settings') as mock_default:
                mock_default.return_value = Mock()
                with patch('src.percepta_mcp.server.PerceptaMCPServer') as mock_server_class:
                    mock_server = AsyncMock()
                    mock_server_class.return_value = mock_server
                    with patch('src.percepta_mcp.server.stdio_server') as mock_stdio:
                        # Mock stdio_server context manager
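                        # (patch() returns a MagicMock whose __aenter__/__aexit__ are
                        # AsyncMocks on Python 3.8+, so it can be entered with
                        # "async with"; the test assumes main() uses stdio_server() that way)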
                        mock_read_stream = AsyncMock()
                        mock_write_stream = AsyncMock()
                        mock_stdio.return_value.__aenter__.return_value = (mock_read_stream, mock_write_stream)

                        # Mock the server run method to exit immediately
                        mock_server.server.run.side_effect = KeyboardInterrupt()

                        with pytest.raises(KeyboardInterrupt):
                            await main()

                        mock_default.assert_called_once()
                        mock_server.start.assert_called_once()
                        mock_server.stop.assert_called_once()


class TestErrorHandling:
    """Test error handling scenarios."""

    @pytest.mark.asyncio
    async def test_tool_execution_error(self, server):
        """Test error handling during tool execution."""
        server.browser_automation.navigate.side_effect = Exception("Browser error")

        with pytest.raises(Exception, match="Browser error"):
            await server._execute_tool("browser_navigate", {"url": "https://example.com"})

    @pytest.mark.asyncio
    async def test_concurrent_tool_execution(self, server):
        """Test handling of concurrent tool execution."""
        server.browser_automation.navigate.return_value = {"success": True}
        server.visual_analysis.analyze_image.return_value = {"analysis": "test"}

        # Execute multiple tools concurrently
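        # (the AsyncMock-backed calls resolve immediately, so this is mainly a
        # smoke test that _execute_tool tolerates overlapping calls)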
        tasks = [
            server._execute_tool("browser_navigate", {"url": "https://test1.com"}),
            server._execute_tool("browser_navigate", {"url": "https://test2.com"}),
            server._execute_tool("analyze_image", {"image_path": "/test.jpg", "prompt": "test"})
        ]
        results = await asyncio.gather(*tasks)

        assert len(results) == 3
        assert all(isinstance(result, dict) for result in results)


if __name__ == "__main__":
    pytest.main([__file__, "-v"])