"""
Tests for the test generator module.
"""
import pytest
from unittest.mock import AsyncMock, Mock, patch
from src.percepta_mcp.tools.test_generator import AutomatedTestGenerator
from src.percepta_mcp.config import Settings
from src.percepta_mcp.ai_router import AIResponse
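
# The suite pins down the AutomatedTestGenerator contract as inferred from its
# callers, not from the implementation: generate_test_case() and
# generate_and_execute_test() return dicts keyed by "success"/"error"/"test_case"
# (plus "execution" for the combined flow); execute_test_case() returns
# "success"/"exit_code"/"output"/"errors" for a run, or "error"/"results" when
# the test file is missing.
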
@pytest.fixture
def mock_settings():
"""Create mock settings."""
return Mock(spec=Settings)
@pytest.fixture
def mock_ai_router():
"""Create mock AI router."""
router = AsyncMock()
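    # AsyncMock attributes are already awaitable AsyncMock children; the explicit
    # assignment below just documents which method the tests rely on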
router.generate = AsyncMock()
return router
@pytest.fixture
def mock_browser_automation():
"""Create mock browser automation."""
browser = AsyncMock()
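    # Only the browser methods the generator is expected to call are stubbed here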
browser.navigate = AsyncMock()
browser.get_page_info = AsyncMock()
browser.extract_text = AsyncMock()
return browser
@pytest.fixture
def test_generator(mock_settings, mock_ai_router, mock_browser_automation):
"""Create AutomatedTestGenerator instance."""
return AutomatedTestGenerator(mock_settings, mock_ai_router, mock_browser_automation)
class TestTestGenerator:
"""Test cases for AutomatedTestGenerator class."""
    def test_init(self, mock_settings):
"""Test AutomatedTestGenerator initialization."""
generator = AutomatedTestGenerator(mock_settings)
assert generator.settings == mock_settings
assert generator.ai_router is None
assert generator.browser_automation is not None
assert generator.test_results_dir.exists()
@pytest.mark.asyncio
async def test_generate_test_case_success(self, test_generator, mock_ai_router, mock_browser_automation):
"""Test successful test case generation."""
# Setup mocks
mock_browser_automation.navigate.return_value = {"success": True}
mock_browser_automation.get_page_info.return_value = {
"title": "Test Page",
"url": "https://example.com"
}
mock_browser_automation.extract_text.return_value = "Sample page content"
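        # A successful AIResponse: the generated script plus provider/usage metadata, error=None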
        mock_ai_response = AIResponse(
            content="""import pytest
from playwright.async_api import async_playwright

async def test_navigation():
    async with async_playwright() as p:
        browser = await p.chromium.launch()
        page = await browser.new_page()
        await page.goto("https://example.com")
        # title() is a coroutine in Playwright's async API, so it must be awaited
        assert await page.title() == "Test Page"
        await browser.close()
""",
provider="test-provider",
model="test-model",
tokens_used=150,
cost=0.001,
response_time=1.2,
error=None
)
mock_ai_router.generate.return_value = mock_ai_response
# Execute test
result = await test_generator.generate_test_case(
url="https://example.com",
description="Test navigation to homepage",
test_type="navigation"
)
# Verify results
assert result["success"] is True
assert "test_case" in result
test_case = result["test_case"]
assert test_case["url"] == "https://example.com"
assert test_case["description"] == "Test navigation to homepage"
assert test_case["type"] == "navigation"
assert "script" in test_case
assert "import pytest" in test_case["script"]
# Verify mocks were called
mock_browser_automation.navigate.assert_called_once_with("https://example.com")
mock_ai_router.generate.assert_called_once()
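        # AsyncMock also records awaits, so assert_awaited_once() would be an even stricter check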
@pytest.mark.asyncio
async def test_generate_test_case_navigation_failure(self, test_generator, mock_browser_automation):
"""Test test case generation when navigation fails."""
# Setup mock to fail navigation
mock_browser_automation.navigate.return_value = {
"success": False,
"error": "Failed to load page"
}
# Execute test
result = await test_generator.generate_test_case(
url="https://invalid-url.com",
description="Test invalid URL"
)
# Verify failure
assert result["success"] is False
assert "Failed to navigate to URL" in result["error"]
assert result["test_case"] is None
@pytest.mark.asyncio
async def test_generate_test_case_ai_error(self, test_generator, mock_ai_router, mock_browser_automation):
"""Test test case generation when AI generation fails."""
# Setup successful navigation
mock_browser_automation.navigate.return_value = {"success": True}
mock_browser_automation.get_page_info.return_value = {"title": "Test", "url": "https://example.com"}
mock_browser_automation.extract_text.return_value = "content"
# Setup AI to fail
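        # (an AIResponse whose error field is set is how provider failure surfaces)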
mock_ai_response = AIResponse(
content="",
provider="test-provider",
model="test-model",
tokens_used=0,
cost=0.0,
response_time=0.0,
error="AI generation failed"
)
mock_ai_router.generate.return_value = mock_ai_response
# Execute test
result = await test_generator.generate_test_case(
url="https://example.com",
description="Test with AI failure"
)
# Verify failure
assert result["success"] is False
assert "AI generation error" in result["error"]
assert result["test_case"] is None
@pytest.mark.asyncio
async def test_generate_test_case_no_ai_router(self, mock_settings, mock_browser_automation):
"""Test test case generation without AI router."""
generator = AutomatedTestGenerator(mock_settings, None, mock_browser_automation)
result = await generator.generate_test_case(
url="https://example.com",
description="Test without AI"
)
assert result["success"] is False
assert "AI router not initialized" in result["error"]
assert result["test_case"] is None
@pytest.mark.asyncio
async def test_generate_test_case_with_elements(self, test_generator, mock_ai_router, mock_browser_automation):
"""Test test case generation with specific elements."""
# Setup mocks
mock_browser_automation.navigate.return_value = {"success": True}
mock_browser_automation.get_page_info.return_value = {"title": "Form Page", "url": "https://example.com/form"}
mock_browser_automation.extract_text.return_value = "Form content"
mock_ai_response = AIResponse(
content="# Test script with elements",
provider="test-provider",
model="test-model",
tokens_used=100,
cost=0.001,
response_time=1.0,
error=None
)
mock_ai_router.generate.return_value = mock_ai_response
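        # Element hints that should appear verbatim in the prompt sent to the AI router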
elements = [
{"description": "Username field", "selector": "#username"},
{"description": "Password field", "selector": "#password"}
]
# Execute test
result = await test_generator.generate_test_case(
url="https://example.com/form",
description="Test form submission",
test_type="form",
elements=elements
)
# Verify success
assert result["success"] is True
assert result["test_case"]["type"] == "form"
# Check that AI was called with elements in prompt
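        # call_args[0] is the positional-argument tuple, so [0][0] is the prompt string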
call_args = mock_ai_router.generate.call_args[0][0]
assert "Username field" in call_args
assert "#username" in call_args
@pytest.mark.asyncio
async def test_execute_test_case_success(self, test_generator):
"""Test successful test case execution."""
# Create a temporary test file
test_content = """
import pytest
def test_simple():
assert True
"""
test_file = test_generator.test_results_dir / "test_temp.py"
test_file.write_text(test_content)
try:
# Mock subprocess execution
with patch('asyncio.create_subprocess_shell') as mock_subprocess:
mock_process = AsyncMock()
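                # communicate() on an asyncio subprocess resolves to a (stdout, stderr) bytes tuple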
mock_process.communicate.return_value = (b"PASSED", b"")
mock_process.returncode = 0
mock_subprocess.return_value = mock_process
# Execute test
result = await test_generator.execute_test_case(test_file)
# Verify results
assert result["success"] is True
assert result["exit_code"] == 0
assert "PASSED" in result["output"]
assert result["errors"] is None
finally:
# Cleanup
if test_file.exists():
test_file.unlink()
@pytest.mark.asyncio
    async def test_execute_test_case_failure(self, test_generator):
        """Test test case execution failure (non-zero exit code)."""
        # A real file is needed so execution reaches the mocked subprocess;
        # the missing-file path is covered by the next test.
        test_file = test_generator.test_results_dir / "test_temp_failure.py"
        test_file.write_text("def test_failing():\n    assert False\n")
        try:
            # Mock subprocess execution with a failing exit code
            with patch('asyncio.create_subprocess_shell') as mock_subprocess:
                mock_process = AsyncMock()
                mock_process.communicate.return_value = (b"FAILED", b"Test failed")
                mock_process.returncode = 1
                mock_subprocess.return_value = mock_process
                # Execute test
                result = await test_generator.execute_test_case(test_file)
            # Verify the failure is detected
            assert result["success"] is False
            assert result["exit_code"] == 1
            assert "FAILED" in result["output"]
        finally:
            # Cleanup
            if test_file.exists():
                test_file.unlink()
@pytest.mark.asyncio
async def test_execute_test_case_file_not_found(self, test_generator):
"""Test test case execution with missing file."""
result = await test_generator.execute_test_case("missing_file.py")
assert result["success"] is False
assert "Test file not found" in result["error"]
assert result["results"] is None
@pytest.mark.asyncio
async def test_generate_and_execute_test_success(self, test_generator, mock_ai_router, mock_browser_automation):
"""Test complete generate and execute workflow."""
# Setup successful generation
mock_browser_automation.navigate.return_value = {"success": True}
mock_browser_automation.get_page_info.return_value = {"title": "Test", "url": "https://example.com"}
mock_browser_automation.extract_text.return_value = "content"
mock_ai_response = AIResponse(
content="def test_example(): assert True",
provider="test-provider",
model="test-model",
tokens_used=50,
cost=0.001,
response_time=1.0,
error=None
)
mock_ai_router.generate.return_value = mock_ai_response
# Mock successful execution
with patch.object(test_generator, 'execute_test_case') as mock_execute:
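            # Stubbing execute_test_case isolates the orchestration; patch.object restores it on exit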
mock_execute.return_value = {
"success": True,
"exit_code": 0,
"output": "PASSED",
"errors": None
}
# Execute combined workflow
result = await test_generator.generate_and_execute_test(
url="https://example.com",
description="Test combined workflow"
)
# Verify results
assert result["success"] is True
assert "test_case" in result
assert "execution" in result
assert result["execution"]["success"] is True
@pytest.mark.asyncio
async def test_generate_and_execute_test_generation_failure(self, test_generator, mock_browser_automation):
"""Test generate and execute when generation fails."""
# Setup navigation failure
mock_browser_automation.navigate.return_value = {
"success": False,
"error": "Navigation failed"
}
# Execute combined workflow
result = await test_generator.generate_and_execute_test(
url="https://invalid.com",
description="Test failure case"
)
# Verify generation failure is returned
assert result["success"] is False
assert "Failed to navigate to URL" in result["error"]
@pytest.mark.asyncio
async def test_different_test_types(self, test_generator, mock_ai_router, mock_browser_automation):
"""Test generation of different test types."""
# Setup mocks
mock_browser_automation.navigate.return_value = {"success": True}
mock_browser_automation.get_page_info.return_value = {"title": "Test", "url": "https://example.com"}
mock_browser_automation.extract_text.return_value = "content"
mock_ai_response = AIResponse(
content="test script",
provider="test-provider",
model="test-model",
tokens_used=50,
cost=0.001,
response_time=1.0,
error=None
)
mock_ai_router.generate.return_value = mock_ai_response
test_types = ["navigation", "form", "visual", "accessibility", "e2e"]
for test_type in test_types:
result = await test_generator.generate_test_case(
url="https://example.com",
description=f"Test {test_type}",
test_type=test_type
)
assert result["success"] is True
assert result["test_case"]["type"] == test_type
@pytest.mark.asyncio
async def test_error_handling_in_generation(self, test_generator):
"""Test error handling during test generation."""
with patch.object(test_generator.browser_automation, 'navigate', side_effect=Exception("Unexpected error")):
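            # side_effect makes the mocked navigate raise as soon as it is awaited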
result = await test_generator.generate_test_case(
url="https://example.com",
description="Test error handling"
)
assert result["success"] is False
assert "Failed to generate test case" in result["error"]
assert "Unexpected error" in result["error"]
@pytest.mark.asyncio
async def test_error_handling_in_execution(self, test_generator):
"""Test error handling during test execution."""
# Create a test file first to bypass the file existence check
test_file = test_generator.test_results_dir / "test_file.py"
test_file.write_text("import pytest\n\ndef test_dummy():\n assert True")
        try:
            with patch('asyncio.create_subprocess_shell', side_effect=Exception("Execution error")):
                result = await test_generator.execute_test_case(str(test_file))
            assert result["success"] is False
            assert "Failed to execute test case" in result["error"]
            assert "Execution error" in result["error"]
        finally:
            # Clean up the temporary file, as the success-path test does
            if test_file.exists():
                test_file.unlink()