"""Tests for parsing tools."""
import pytest
import json
from pathlib import Path
from unittest.mock import patch, AsyncMock
from xcsift_mcp.tools.parse import (
_run_xcsift,
BuildError,
BuildWarning,
FailedTest,
BuildSummary,
)
# Load test fixtures
FIXTURES_DIR = Path(__file__).parent / "fixtures"


def load_fixture(name: str) -> str:
    """Return the text contents of the fixture file *name*.

    Args:
        name: File name relative to the ``fixtures`` directory next to
            this test module.

    Returns:
        The fixture's contents decoded as UTF-8.

    Raises:
        FileNotFoundError: If the fixture does not exist.
    """
    # Explicit encoding: the default for read_text() is locale-dependent,
    # which makes fixture decoding vary across platforms (e.g. cp1252 on
    # Windows). Fixtures are authored as UTF-8.
    return (FIXTURES_DIR / name).read_text(encoding="utf-8")
class TestRunXcsift:
    """Tests for the _run_xcsift subprocess wrapper."""

    @staticmethod
    def _fake_process(stdout: bytes, stderr: bytes = b"", returncode: int = 0) -> AsyncMock:
        """Build an AsyncMock standing in for the spawned xcsift process."""
        proc = AsyncMock()
        proc.communicate.return_value = (stdout, stderr)
        proc.returncode = returncode
        return proc

    @pytest.mark.asyncio
    async def test_runs_xcsift_with_json_format(self):
        """xcsift is invoked once with `--format json` and its stdout is returned."""
        expected = '{"status": "succeeded", "summary": {"errors": 0}}'
        with patch("asyncio.create_subprocess_exec") as mock_exec:
            mock_exec.return_value = self._fake_process(expected.encode())
            result = await _run_xcsift(
                output="test output",
                format="json",
                xcsift_path="/usr/local/bin/xcsift",
            )
            assert result == expected
            mock_exec.assert_called_once()
            argv = mock_exec.call_args[0]
            assert "/usr/local/bin/xcsift" in argv
            assert "--format" in argv
            assert "json" in argv

    @pytest.mark.asyncio
    async def test_runs_xcsift_with_toon_format(self):
        """xcsift is invoked with the TOON format and its stdout is returned."""
        expected = "status: succeeded\nsummary:\n errors: 0"
        with patch("asyncio.create_subprocess_exec") as mock_exec:
            mock_exec.return_value = self._fake_process(expected.encode())
            result = await _run_xcsift(
                output="test output",
                format="toon",
                xcsift_path="/usr/local/bin/xcsift",
            )
            assert result == expected
            argv = mock_exec.call_args[0]
            assert "toon" in argv

    @pytest.mark.asyncio
    async def test_includes_warnings_flag(self):
        """Passing warnings=True adds --warnings to the command line."""
        with patch("asyncio.create_subprocess_exec") as mock_exec:
            mock_exec.return_value = self._fake_process(b'{"status": "succeeded"}')
            await _run_xcsift(
                output="test",
                format="json",
                warnings=True,
                xcsift_path="/usr/local/bin/xcsift",
            )
            argv = mock_exec.call_args[0]
            assert "--warnings" in argv

    @pytest.mark.asyncio
    async def test_includes_coverage_flag(self):
        """Passing coverage=True adds --coverage to the command line."""
        with patch("asyncio.create_subprocess_exec") as mock_exec:
            mock_exec.return_value = self._fake_process(b'{"status": "succeeded"}')
            await _run_xcsift(
                output="test",
                format="json",
                coverage=True,
                xcsift_path="/usr/local/bin/xcsift",
            )
            argv = mock_exec.call_args[0]
            assert "--coverage" in argv

    @pytest.mark.asyncio
    async def test_raises_on_failure(self):
        """A nonzero exit status from xcsift surfaces as RuntimeError."""
        with patch("asyncio.create_subprocess_exec") as mock_exec:
            mock_exec.return_value = self._fake_process(
                b"", b"Error: invalid input", returncode=1
            )
            with pytest.raises(RuntimeError) as exc_info:
                await _run_xcsift(
                    output="bad input",
                    format="json",
                    xcsift_path="/usr/local/bin/xcsift",
                )
            assert "xcsift failed" in str(exc_info.value)
class TestBuildModels:
    """Tests for the Pydantic result models used by the parse tools."""

    def test_build_error_model(self):
        """BuildError accepts file/line/message keyword fields."""
        err = BuildError(
            message="use of undeclared identifier",
            file="main.swift",
            line=15,
        )
        assert err.file == "main.swift"
        assert err.line == 15
        assert "undeclared" in err.message

    def test_build_warning_model(self):
        """BuildWarning carries a warning type alongside location fields."""
        warn = BuildWarning(
            type="compile",
            message="unused variable",
            file="view.swift",
            line=20,
        )
        assert warn.file == "view.swift"
        assert warn.type == "compile"

    def test_failed_test_model(self):
        """FailedTest records the test name and the assertion message."""
        failed = FailedTest(
            test="testUserValidation",
            message='XCTAssertEqual failed: ("invalid") is not equal to ("valid")',
        )
        assert failed.test == "testUserValidation"
        assert "XCTAssertEqual" in failed.message

    def test_build_summary_model(self):
        """BuildSummary populates required counters and leaves coverage unset."""
        result = BuildSummary(
            build_time="3.2s",
            errors=3,
            warnings=5,
            failed_tests=2,
            linker_errors=0,
        )
        assert result.errors == 3
        assert result.warnings == 5
        assert result.build_time == "3.2s"
        # coverage_percent was not supplied, so it should default to None.
        assert result.coverage_percent is None