"""
MCP tools with Cequence analytics integration
"""
import time
import asyncio
from typing import Dict, Any
from datetime import datetime, timezone
from fastapi import APIRouter, Depends, Request
import structlog
from src.middleware.auth_middleware import require_scopes
from src.core.descope_auth import AuthContext
from src.core.cequence_integration import track_agent_operation, track_security_event
logger = structlog.get_logger()
router = APIRouter()
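
# The tracking helpers are assumed (based on how they are called below) to have
# roughly these async signatures:
#
#   await track_agent_operation(operation_type, agent_type, correlation_id,
#                               duration_ms, success, metadata)
#   await track_security_event(event_type, severity, description,
#                              correlation_id, client_ip, user_id)
#
# Each tool reads request.state.correlation_id (presumably set by an upstream
# middleware) and falls back to "unknown" when it is absent.
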
@router.post("/tools/generate_application")
async def generate_application_tool(
request: Request,
project_description: str,
project_type: str = "fullstack",
auth: AuthContext = Depends(require_scopes("tools:generate"))
) -> Dict[str, Any]:
"""
Application generation tool with comprehensive monitoring
Required scope: tools:generate
"""
start_time = time.time()
correlation_id = getattr(request.state, 'correlation_id', 'unknown')
try:
logger.info(
"application_generation_started",
user_id=auth.user_id,
project_type=project_type,
description_length=len(project_description),
correlation_id=correlation_id
)
# Simulate application generation
await asyncio.sleep(2) # Placeholder for actual generation
# Calculate processing time
processing_time = (time.time() - start_time) * 1000
# Track successful operation
await track_agent_operation(
operation_type="application_generation",
agent_type="orchestrator",
correlation_id=correlation_id,
duration_ms=processing_time,
success=True,
metadata={
"project_type": project_type,
"description_length": len(project_description),
"user_id": auth.user_id
}
)
return {
"status": "success",
"project_id": correlation_id,
"project_type": project_type,
"estimated_completion": "5 minutes",
"processing_time_ms": processing_time,
"correlation_id": correlation_id
}
except Exception as e:
processing_time = (time.time() - start_time) * 1000
# Track failed operation
await track_agent_operation(
operation_type="application_generation",
agent_type="orchestrator",
correlation_id=correlation_id,
duration_ms=processing_time,
success=False,
metadata={
"error": str(e),
"user_id": auth.user_id
}
)
# Track security event for unexpected errors
await track_security_event(
event_type="generation_error",
severity="warning",
description=f"Application generation failed: {str(e)}",
correlation_id=correlation_id,
client_ip=request.client.host if request.client else "unknown",
user_id=auth.user_id
)
raise
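
# Example call to the generate_application tool above (hypothetical host, mount
# path, and token; the scalar parameters are sent as query parameters):
#
#   import httpx
#   async with httpx.AsyncClient() as client:
#       resp = await client.post(
#           "https://<host>/tools/generate_application",
#           params={
#               "project_description": "<description of the app to generate>",
#               "project_type": "fullstack",
#           },
#           headers={"Authorization": "Bearer <tools:generate-scoped token>"},
#       )
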
@router.post("/tools/self_heal")
async def self_heal_tool(
request: Request,
project_id: str,
issue_description: str,
auth: AuthContext = Depends(require_scopes("tools:fix"))
) -> Dict[str, Any]:
"""
Self-healing tool with detailed operation tracking
Required scope: tools:fix
"""
start_time = time.time()
correlation_id = getattr(request.state, 'correlation_id', 'unknown')
try:
# Track healing attempt start
await track_agent_operation(
operation_type="self_healing_started",
agent_type="reviewer",
correlation_id=correlation_id,
duration_ms=0,
success=True,
            metadata={
                "project_id": project_id,
                "issue_description_length": len(issue_description),
                "issue_type": "automated_detection",
                "user_id": auth.user_id
            }
)
# Simulate healing process
await asyncio.sleep(3) # Placeholder for actual healing
processing_time = (time.time() - start_time) * 1000
# Track successful healing
await track_agent_operation(
operation_type="self_healing_completed",
agent_type="reviewer",
correlation_id=correlation_id,
duration_ms=processing_time,
success=True,
metadata={
"project_id": project_id,
"fixes_applied": 3,
"tests_passed": 5,
"user_id": auth.user_id
}
)
return {
"status": "healed",
"project_id": project_id,
"fixes_applied": 3,
"tests_passed": 5,
"processing_time_ms": processing_time,
"correlation_id": correlation_id
}
except Exception as e:
processing_time = (time.time() - start_time) * 1000
# Track failed healing
await track_agent_operation(
operation_type="self_healing_failed",
agent_type="reviewer",
correlation_id=correlation_id,
duration_ms=processing_time,
success=False,
metadata={
"project_id": project_id,
"error": str(e),
"user_id": auth.user_id
}
)
raise
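
# Note: the self_heal tool above reports three distinct operation types
# ("self_healing_started", "self_healing_completed", "self_healing_failed"),
# which lets the analytics side pair start and end events and derive healing
# duration and failure rate per project.
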
@router.post("/tools/code_review")
async def code_review_tool(
request: Request,
project_id: str,
review_type: str = "security",
auth: AuthContext = Depends(require_scopes("tools:review"))
) -> Dict[str, Any]:
"""
Code review tool with analytics tracking
Required scope: tools:review
"""
start_time = time.time()
correlation_id = getattr(request.state, 'correlation_id', 'unknown')
try:
logger.info(
"code_review_started",
user_id=auth.user_id,
project_id=project_id,
review_type=review_type,
correlation_id=correlation_id
)
# Simulate code review
await asyncio.sleep(1.5)
processing_time = (time.time() - start_time) * 1000
# Track review operation
await track_agent_operation(
operation_type="code_review",
agent_type="reviewer",
correlation_id=correlation_id,
duration_ms=processing_time,
success=True,
metadata={
"project_id": project_id,
"review_type": review_type,
"issues_found": 2,
"severity": "medium",
"user_id": auth.user_id
}
)
return {
"status": "completed",
"project_id": project_id,
"review_type": review_type,
"issues_found": 2,
"severity": "medium",
"processing_time_ms": processing_time,
"correlation_id": correlation_id
}
except Exception as e:
processing_time = (time.time() - start_time) * 1000
await track_agent_operation(
operation_type="code_review",
agent_type="reviewer",
correlation_id=correlation_id,
duration_ms=processing_time,
success=False,
metadata={
"project_id": project_id,
"error": str(e),
"user_id": auth.user_id
}
)
raise
@router.post("/tools/test_generation")
async def test_generation_tool(
request: Request,
project_id: str,
test_type: str = "unit",
coverage_target: float = 90.0,
auth: AuthContext = Depends(require_scopes("tools:generate"))
) -> Dict[str, Any]:
"""
Test generation tool with performance tracking
Required scope: tools:generate
"""
start_time = time.time()
correlation_id = getattr(request.state, 'correlation_id', 'unknown')
try:
# Simulate test generation
await asyncio.sleep(2.5)
processing_time = (time.time() - start_time) * 1000
await track_agent_operation(
operation_type="test_generation",
agent_type="generator",
correlation_id=correlation_id,
duration_ms=processing_time,
success=True,
metadata={
"project_id": project_id,
"test_type": test_type,
"coverage_target": coverage_target,
"tests_generated": 15,
"coverage_achieved": 92.5,
"user_id": auth.user_id
}
)
return {
"status": "generated",
"project_id": project_id,
"test_type": test_type,
"tests_generated": 15,
"coverage_achieved": 92.5,
"processing_time_ms": processing_time,
"correlation_id": correlation_id
}
except Exception as e:
processing_time = (time.time() - start_time) * 1000
await track_agent_operation(
operation_type="test_generation",
agent_type="generator",
correlation_id=correlation_id,
duration_ms=processing_time,
success=False,
metadata={
"project_id": project_id,
"error": str(e),
"user_id": auth.user_id
}
)
raise
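
# Note: coverage_target is accepted by the test_generation tool above, but the
# simulated response always reports 92.5% coverage; a real implementation would
# presumably keep generating tests until the requested target is reached.
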
@router.post("/tools/deployment")
async def deployment_tool(
request: Request,
project_id: str,
environment: str = "staging",
auth: AuthContext = Depends(require_scopes("tools:deploy"))
) -> Dict[str, Any]:
"""
Deployment tool with environment tracking
Required scope: tools:deploy
"""
start_time = time.time()
correlation_id = getattr(request.state, 'correlation_id', 'unknown')
try:
# Simulate deployment
await asyncio.sleep(4)
processing_time = (time.time() - start_time) * 1000
await track_agent_operation(
operation_type="deployment",
agent_type="deployer",
correlation_id=correlation_id,
duration_ms=processing_time,
success=True,
metadata={
"project_id": project_id,
"environment": environment,
"deployment_url": f"https://{project_id}.{environment}.foundry.ai",
"health_check": "passed",
"user_id": auth.user_id
}
)
return {
"status": "deployed",
"project_id": project_id,
"environment": environment,
"deployment_url": f"https://{project_id}.{environment}.foundry.ai",
"health_check": "passed",
"processing_time_ms": processing_time,
"correlation_id": correlation_id
}
except Exception as e:
processing_time = (time.time() - start_time) * 1000
await track_agent_operation(
operation_type="deployment",
agent_type="deployer",
correlation_id=correlation_id,
duration_ms=processing_time,
success=False,
metadata={
"project_id": project_id,
"environment": environment,
"error": str(e),
"user_id": auth.user_id
}
)
raise
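
# Example call to the deployment tool above (hypothetical host and token; both
# parameters travel as query parameters):
#
#   curl -X POST \
#     -H "Authorization: Bearer <tools:deploy-scoped token>" \
#     "https://<host>/tools/deployment?project_id=<project_id>&environment=staging"
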
@router.get("/analytics/dashboard_data")
async def dashboard_data(
request: Request,
auth: AuthContext = Depends(require_scopes("admin:logs"))
) -> Dict[str, Any]:
"""
    Provide dashboard data for the demo presentation
Required scope: admin:logs
"""
correlation_id = getattr(request.state, 'correlation_id', 'unknown')
# Simulated dashboard metrics for demo - in production this would come from Cequence analytics
return {
"real_time_metrics": {
"active_requests": 12,
"requests_per_minute": 45,
"average_response_time_ms": 234,
"success_rate": 98.5,
"active_agents": {
"frontend": 2,
"backend": 3,
"reviewer": 1,
"deployer": 1
}
},
"security_status": {
"threat_level": "low",
"blocked_requests": 0,
"rate_limited_requests": 2,
"authentication_failures": 1,
"security_events_last_hour": 3
},
"generation_stats": {
"total_projects_generated": 157,
"successful_generations": 149,
"self_healing_events": 23,
"average_generation_time_seconds": 45,
"code_reviews_completed": 89,
"tests_generated": 1247
},
"top_users": [
{"user_id": "software_foundry_demo", "requests": 89},
{"user_id": "test_client_001", "requests": 23},
{"user_id": "integration_test", "requests": 12}
],
"recent_operations": [
{
"operation": "application_generation",
"user": "software_foundry_demo",
"status": "success",
"duration_ms": 2341,
"timestamp": datetime.utcnow().isoformat()
},
{
"operation": "self_healing",
"user": "test_client_001",
"status": "success",
"duration_ms": 3102,
"timestamp": datetime.utcnow().isoformat()
},
{
"operation": "code_review",
"user": "integration_test",
"status": "success",
"duration_ms": 1543,
"timestamp": datetime.utcnow().isoformat()
}
],
"performance_metrics": {
"gateway_latency_ms": 12,
"authentication_time_ms": 8,
"agent_orchestration_time_ms": 156,
"total_requests_today": 2847,
"errors_today": 23
},
"correlation_id": correlation_id,
"timestamp": datetime.utcnow().isoformat()
}
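
# This router is expected to be mounted on the main FastAPI application, e.g.
# (assuming a hypothetical module path for this file):
#
#   from src.api.mcp_tools import router as mcp_tools_router  # hypothetical path
#   app.include_router(mcp_tools_router)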