"""
Agent Orchestration Platform - Tool Result Serialization and Validation
This module implements comprehensive serialization and validation for FastMCP tool results,
providing type-safe result handling with security validation and performance optimization.
Architecture Integration:
- Design Patterns: Strategy pattern for different serialization formats, Validator pattern for result validation
- Security Model: Result sanitization and validation to prevent data leakage and injection attacks
- Performance Profile: linear-time serialization in payload size, with result validation caching and efficient validation pipelines
Technical Decisions:
- JSON Serialization: Standard JSON format for MCP protocol compatibility
- Type Validation: Comprehensive type checking with security constraint enforcement
- Error Handling: Secure error serialization with sensitive data protection
- Performance Caching: Result validation caching for optimal performance
Dependencies & Integration:
- External: None beyond standard library for maximum reliability
- Internal: Type system for result types, security contracts for validation, schema generator for validation
Quality Assurance:
- Test Coverage: Property-based testing for all serialization paths and edge cases
- Error Handling: Comprehensive error handling with secure failure modes
Author: Adder_5 | Created: 2025-06-26 | Last Modified: 2025-06-26
"""
import json
import traceback
import uuid
from dataclasses import asdict, dataclass, is_dataclass
from datetime import datetime, timezone
from enum import Enum
from pathlib import Path
from typing import Any, Dict, List, Optional, Type, TypeVar, Union, cast

from src.models.agent import AgentCreationResult, AgentDeletionResult
from src.models.communication import (
    ConversationClearResult,
    ConversationStartResult,
    MessageResult,
)

# Import types for serialization
from src.models.ids import AgentId, SessionId
from src.models.session import (
    SessionCreationResult,
    SessionDeletionResult,
    SessionStatusResult,
)

from .contracts_shim import ensure, require
# Generic type variable; appears unused in the visible portion of this module —
# TODO(review): confirm nothing imports it before removing.
T = TypeVar("T")
class SerializationError(Exception):
    """Raised when a tool result cannot be serialized (or deserialized) safely."""
class ValidationError(Exception):
    """Raised when a tool result violates type, schema, or security constraints."""
@dataclass
class SerializationResult:
    """Outcome of one serialization attempt, plus metadata about when it ran.

    Attributes:
        success: Whether serialization completed without error.
        data: JSON-ready payload on success, or sanitized error data on failure.
        error_message: Human-readable failure description (None on success).
        serialized_at: Timestamp of the serialization attempt.
        content_type: MIME type of the serialized payload.
    """

    success: bool
    data: Optional[Dict[str, Any]]
    error_message: Optional[str]
    serialized_at: datetime
    content_type: str = "application/json"

    def to_dict(self) -> Dict[str, Any]:
        """Render this result as a plain dict; the timestamp becomes ISO-8601."""
        keys = ("success", "data", "error_message", "serialized_at", "content_type")
        values = (
            self.success,
            self.data,
            self.error_message,
            self.serialized_at.isoformat(),
            self.content_type,
        )
        return dict(zip(keys, values))
class SecureJSONEncoder(json.JSONEncoder):
    """
    Secure JSON encoder with type safety and data protection.

    Converts common non-JSON-native types (datetime, Path, Enum, dataclass
    instances, UUID, set, Exception) into safe JSON representations; anything
    else falls back to ``str()``. Tracebacks are never emitted.
    """

    def default(self, obj: Any) -> Any:
        """Convert non-serializable objects to JSON-serializable format.

        Called by json.dumps only for objects the base encoder cannot handle.
        """
        # Handle datetime objects (ISO-8601 text)
        if isinstance(obj, datetime):
            return obj.isoformat()
        # Handle Path objects
        if isinstance(obj, Path):
            return str(obj)
        # Handle Enum objects (emit the underlying value)
        if isinstance(obj, Enum):
            return obj.value
        # Handle dataclass *instances* only: is_dataclass() is also True for
        # dataclass classes themselves, where asdict() raises TypeError.
        if is_dataclass(obj) and not isinstance(obj, type):
            return asdict(obj)
        # Handle UUID objects
        if isinstance(obj, uuid.UUID):
            return str(obj)
        # Handle sets (JSON has no set type; order is unspecified)
        if isinstance(obj, set):
            return list(obj)
        # Handle exceptions with security filtering: type and message only.
        if isinstance(obj, Exception):
            return {
                "error_type": obj.__class__.__name__,
                "message": str(obj),
                "traceback": None,  # Never include traceback in serialized output
            }
        # NOTE: the previous "NewType" branch (hasattr(obj, "__supertype__"))
        # was removed — returning ``obj`` unchanged from default() would
        # recurse forever, and NewType values are plain instances of their
        # base type at runtime, so they never reach this hook anyway.
        # Fallback to string representation for unknown types
        try:
            return str(obj)
        except Exception:
            return f"<non-serializable: {type(obj).__name__}>"
class ToolResultSerializer:
    """
    Comprehensive tool result serializer with validation and security.

    Implements secure serialization of tool results with comprehensive
    type validation, security filtering, and performance optimization.
    """

    def __init__(self):
        """Initialize result serializer with its security configuration."""
        self.encoder = SecureJSONEncoder()
        # NOTE(review): not consulted anywhere in this class yet — reserved
        # for caching validation outcomes; confirm before removing.
        self._validation_cache: Dict[str, bool] = {}
        # Hard limits enforced before any payload is emitted.
        self._security_constraints = {
            "max_result_size": 10 * 1024 * 1024,  # 10MB max
            "max_nesting_depth": 20,
            "max_string_length": 100000,
            "max_array_length": 10000,
            "prohibited_keys": {"password", "secret", "key", "token", "private"},
        }

    @require(lambda result: result is not None)
    @ensure(lambda serialized: isinstance(serialized, SerializationResult))
    def serialize_tool_result(
        self, result: Any, result_type: Optional[Type] = None
    ) -> SerializationResult:
        """
        Serialize tool result with comprehensive validation and security.

        Args:
            result: Tool result object to serialize (must not be None).
            result_type: Optional expected type; when given, ``result`` is
                type-checked against it before serialization.

        Returns:
            SerializationResult: success with JSON-ready ``data``, or a
            failure record carrying sanitized error information. This method
            never raises — all failures are folded into the returned object.

        Contracts:
            Preconditions:
                - Result is not None
                - Result conforms to expected type if provided
            Postconditions:
                - Result is JSON-serializable
                - Security constraints are enforced
            Invariants:
                - Serialized data never exceeds size limits
                - Sensitive data is never included
        """
        try:
            # Validate structure/type first, then redact sensitive keys.
            validated_result = self._validate_result(result, result_type)
            filtered_result = self._apply_security_filtering(validated_result)
            # Round-trip through JSON text to guarantee the payload is truly
            # JSON-representable (the encoder may substitute values).
            try:
                json_data = json.loads(
                    json.dumps(filtered_result, cls=SecureJSONEncoder)
                )
            except (TypeError, ValueError) as e:
                raise SerializationError(f"JSON serialization failed: {e}") from e
            # Enforce the output size cap on the UTF-8 encoded payload.
            serialized_str = json.dumps(json_data)
            if (
                len(serialized_str.encode("utf-8"))
                > self._security_constraints["max_result_size"]
            ):
                raise SerializationError("Serialized result exceeds maximum size limit")
            return SerializationResult(
                success=True,
                data=json_data,
                error_message=None,
                serialized_at=datetime.now(timezone.utc),
            )
        except Exception as e:
            # Secure failure mode: report the error without leaking details.
            error_data = self._create_error_result(e)
            return SerializationResult(
                success=False,
                data=error_data,
                error_message=str(e),
                serialized_at=datetime.now(timezone.utc),
            )

    def _validate_result(self, result: Any, expected_type: Optional[Type]) -> Any:
        """Validate ``result`` against ``expected_type`` and structural limits.

        Raises:
            ValidationError: on type mismatch or constraint violation.
        """
        if expected_type is not None:
            if not self._is_compatible_type(result, expected_type):
                raise ValidationError(
                    f"Result type {type(result)} not compatible with expected type {expected_type}"
                )
        self._validate_structure(result, depth=0)
        return result

    def _validate_structure(self, obj: Any, depth: int = 0) -> None:
        """Recursively enforce depth/length/key constraints on ``obj``.

        Raises:
            ValidationError: when any security constraint is violated.
        """
        if depth > self._security_constraints["max_nesting_depth"]:
            raise ValidationError("Object nesting depth exceeds security limit")
        if isinstance(obj, str):
            if len(obj) > self._security_constraints["max_string_length"]:
                raise ValidationError("String length exceeds security limit")
        elif isinstance(obj, (list, tuple)):
            if len(obj) > self._security_constraints["max_array_length"]:
                raise ValidationError("Array length exceeds security limit")
            for item in obj:
                self._validate_structure(item, depth + 1)
        elif isinstance(obj, dict):
            # Keys that merely *contain* a prohibited substring are rejected.
            for key in obj.keys():
                if isinstance(key, str) and any(
                    prohibited in key.lower()
                    for prohibited in self._security_constraints["prohibited_keys"]
                ):
                    raise ValidationError(f"Prohibited key detected: {key}")
            for value in obj.values():
                self._validate_structure(value, depth + 1)
        elif is_dataclass(obj):
            # Validate each field's (already dict-converted) value one level deeper.
            for field_value in asdict(obj).values():
                self._validate_structure(field_value, depth + 1)

    def _is_compatible_type(self, value: Any, expected_type: Type) -> bool:
        """Best-effort compatibility check of ``value`` against ``expected_type``.

        Deliberately permissive: subscripted generics that cannot be
        isinstance-checked are accepted.
        """
        if value is None:
            # None is accepted for NoneType and for any generic alias
            # (anything carrying __origin__, e.g. Optional[...]).
            return expected_type is type(None) or hasattr(expected_type, "__origin__")
        if expected_type in (str, int, float, bool):
            return isinstance(value, expected_type)
        if is_dataclass(expected_type):
            return isinstance(value, expected_type)
        if expected_type is dict:
            return isinstance(value, dict)
        if expected_type is list:
            return isinstance(value, list)
        try:
            return isinstance(value, expected_type)
        except TypeError:
            # e.g. List[int] — not usable with isinstance(); accept.
            return True

    def _apply_security_filtering(self, obj: Any) -> Any:
        """Return a copy of ``obj`` with values under prohibited keys redacted.

        NOTE(review): via serialize_tool_result this runs only *after*
        _validate_structure, which rejects prohibited keys outright — so the
        "<filtered>" substitution acts as defense-in-depth for direct callers.
        """
        if isinstance(obj, dict):
            filtered_dict = {}
            for key, value in obj.items():
                if isinstance(key, str) and any(
                    prohibited in key.lower()
                    for prohibited in self._security_constraints["prohibited_keys"]
                ):
                    filtered_dict[key] = "<filtered>"
                else:
                    filtered_dict[key] = self._apply_security_filtering(value)
            return filtered_dict
        elif isinstance(obj, (list, tuple)):
            # Tuples are normalized to lists (matches JSON array semantics).
            return [self._apply_security_filtering(item) for item in obj]
        elif is_dataclass(obj):
            # Convert to dict, then filter the converted structure.
            return self._apply_security_filtering(asdict(obj))
        else:
            return obj

    def _create_error_result(self, error: Exception) -> Dict[str, Any]:
        """Build a sanitized, JSON-ready error payload (no traceback/details)."""
        return {
            "error": True,
            "error_type": error.__class__.__name__,
            "message": str(error),
            "timestamp": datetime.now(timezone.utc).isoformat(),
            "details": None,  # Never include detailed error information
        }

    def deserialize_tool_result(
        self, serialized_data: Dict[str, Any], expected_type: Optional[Type] = None
    ) -> Any:
        """
        Deserialize tool result with type validation.

        Args:
            serialized_data: Mapping produced by serialization; the payload is
                expected under the "data" key, with errors flagged by "error".
            expected_type: Optional target type to convert the payload to.

        Raises:
            SerializationError: on any failure — including embedded error
                results and validation problems (wrapped for a uniform API).
        """
        try:
            if not isinstance(serialized_data, dict):
                raise ValidationError("Serialized data must be a dictionary")
            # An embedded error result is surfaced as a failure.
            if serialized_data.get("error"):
                raise SerializationError(
                    serialized_data.get("message", "Unknown error")
                )
            data = serialized_data.get("data")
            if data is None:
                raise ValidationError("No data found in serialized result")
            if expected_type is not None:
                return self._convert_to_type(data, expected_type)
            return data
        except Exception as e:
            raise SerializationError(f"Deserialization failed: {e}") from e

    def _convert_to_type(self, data: Any, target_type: Type) -> Any:
        """Coerce ``data`` to ``target_type``; unknown types pass through as-is.

        Raises:
            ValidationError: when a dataclass cannot be built from ``data``.
        """
        if target_type is str:
            return str(data)
        elif target_type is int:
            return int(data)
        elif target_type is float:
            return float(data)
        elif target_type is bool:
            # NOTE(review): truthiness coercion — bool("false") is True.
            return bool(data)
        if target_type is datetime:
            if isinstance(data, str):
                # Accept a trailing "Z" suffix (fromisoformat on older
                # Pythons cannot parse it directly).
                return datetime.fromisoformat(data.replace("Z", "+00:00"))
            return data
        if is_dataclass(target_type):
            if isinstance(data, dict):
                # Build the dataclass from keyword fields.
                try:
                    return target_type(**data)
                except TypeError as e:
                    raise ValidationError(
                        f"Cannot create {target_type.__name__} from data: {e}"
                    ) from e
            return data
        # For other types, return as-is.
        return data
class ToolResultValidator:
    """
    Tool result validator with comprehensive type and security checking.

    Implements validation pipeline for tool results with security
    constraint enforcement and comprehensive error reporting.
    """

    def __init__(self):
        """Initialize result validator with a private serializer instance."""
        self.serializer = ToolResultSerializer()

    def validate_tool_result(
        self, result: Any, expected_schema: Optional[Dict[str, Any]] = None
    ) -> bool:
        """
        Validate tool result against expected schema and security constraints.

        Returns:
            True if the result is valid.

        Raises:
            ValidationError: if serialization or schema validation fails.
        """
        try:
            # Serializing exercises the full structural/security pipeline.
            serialization_result = self.serializer.serialize_tool_result(result)
            if not serialization_result.success:
                raise ValidationError(
                    f"Result serialization failed: {serialization_result.error_message}"
                )
            # Schema validation if provided
            if expected_schema is not None:
                self._validate_against_schema(
                    serialization_result.data, expected_schema
                )
            return True
        except ValidationError:
            # Already descriptive — re-raise as-is instead of wrapping a
            # ValidationError inside another ValidationError.
            raise
        except Exception as e:
            raise ValidationError(f"Result validation failed: {e}") from e

    def _validate_against_schema(self, data: Any, schema: Dict[str, Any]) -> None:
        """Minimal JSON-schema check: only the top-level "type" keyword.

        Supports object/array/string/number/boolean; unknown or absent types
        pass silently.

        Raises:
            ValidationError: when ``data`` does not match the declared type.
        """
        schema_type = schema.get("type")
        if schema_type == "object" and not isinstance(data, dict):
            raise ValidationError(f"Expected object, got {type(data)}")
        elif schema_type == "array" and not isinstance(data, list):
            raise ValidationError(f"Expected array, got {type(data)}")
        elif schema_type == "string" and not isinstance(data, str):
            raise ValidationError(f"Expected string, got {type(data)}")
        elif schema_type == "number" and not isinstance(data, (int, float)):
            raise ValidationError(f"Expected number, got {type(data)}")
        elif schema_type == "boolean" and not isinstance(data, bool):
            raise ValidationError(f"Expected boolean, got {type(data)}")
# Global instances
# Module-level singletons backing the convenience functions defined below,
# so all callers share one serializer/validator configuration.
_result_serializer = ToolResultSerializer()
_result_validator = ToolResultValidator()
def serialize_tool_result(
    result: Any, result_type: Optional[Type] = None
) -> SerializationResult:
    """Serialize a tool result using the shared module-level serializer.

    Convenience wrapper that delegates to ``_result_serializer``, applying
    the same validation and security filtering as the instance method.
    """
    return _result_serializer.serialize_tool_result(result, result_type)
def deserialize_tool_result(
    serialized_data: Dict[str, Any], expected_type: Optional[Type] = None
) -> Any:
    """Deserialize a tool result using the shared module-level serializer.

    Convenience wrapper that delegates to ``_result_serializer``.
    """
    return _result_serializer.deserialize_tool_result(serialized_data, expected_type)
def validate_tool_result(
    result: Any, expected_schema: Optional[Dict[str, Any]] = None
) -> bool:
    """Validate a tool result using the shared module-level validator.

    Convenience wrapper that delegates to ``_result_validator``.
    """
    return _result_validator.validate_tool_result(result, expected_schema)