Skip to main content
Glama
utils.py — 7.75 kB
"""Quantum Machine Learning utilities.""" import base64 import io import json import logging from typing import Any, Dict, List, Optional import joblib import numpy as np from qiskit import QuantumCircuit from qiskit.primitives import StatevectorSampler from qiskit_machine_learning.kernels import FidelityQuantumKernel from qiskit_machine_learning.algorithms import VQC from qiskit.circuit.library import ZZFeatureMap, RealAmplitudes from qiskit_algorithms.optimizers import COBYLA from sklearn.svm import SVC logger = logging.getLogger(__name__) class QuantumCircuitRunner: """Execute quantum circuits safely.""" def __init__(self, max_qubits: int, max_shots: int): self.max_qubits = max_qubits self.max_shots = max_shots # Reuse sampler instance for better performance self.sampler = StatevectorSampler() def validate_circuit(self, circuit: QuantumCircuit) -> None: """Validate circuit against safety limits.""" if circuit.num_qubits > self.max_qubits: raise ValueError( f"Circuit has {circuit.num_qubits} qubits, " f"exceeds maximum of {self.max_qubits}" ) async def run_circuit( self, circuit: QuantumCircuit, shots: Optional[int] = None ) -> Dict[str, Any]: """Execute a quantum circuit and return results.""" self.validate_circuit(circuit) if shots is None: shots = 1024 if shots > self.max_shots: raise ValueError( f"Requested {shots} shots, exceeds maximum of {self.max_shots}" ) try: # Use reused StatevectorSampler instance job = self.sampler.run([circuit], shots=shots) result = job.result() # Extract counts from the result pub_result = result[0] counts = pub_result.data.meas.get_counts() return { "counts": counts, "shots": shots, "num_qubits": circuit.num_qubits, "success": True } except Exception as e: logger.error(f"Circuit execution failed: {e}") raise class QuantumKernelComputer: """Compute quantum kernels for ML tasks.""" def __init__(self, max_qubits: int): self.max_qubits = max_qubits async def compute_kernel( self, train_data: np.ndarray, test_data: 
Optional[np.ndarray] = None, feature_dimension: Optional[int] = None ) -> Dict[str, Any]: """Compute quantum kernel matrix.""" if feature_dimension is None: feature_dimension = train_data.shape[1] if feature_dimension > self.max_qubits: raise ValueError( f"Feature dimension {feature_dimension} exceeds " f"maximum qubits {self.max_qubits}" ) try: # Create feature map feature_map = ZZFeatureMap( feature_dimension=feature_dimension, reps=2, entanglement="linear" ) # Create quantum kernel kernel = FidelityQuantumKernel(feature_map=feature_map) # Compute kernel matrix if test_data is not None: kernel_matrix = kernel.evaluate(x_vec=train_data, y_vec=test_data) else: kernel_matrix = kernel.evaluate(x_vec=train_data) return { "kernel_matrix": kernel_matrix.tolist(), "train_shape": list(train_data.shape), "test_shape": list(test_data.shape) if test_data is not None else None, "feature_dimension": feature_dimension, "success": True } except Exception as e: logger.error(f"Kernel computation failed: {e}") raise class VQCTrainer: """Train Variational Quantum Classifier.""" def __init__(self, max_qubits: int): self.max_qubits = max_qubits async def train( self, X_train: np.ndarray, y_train: np.ndarray, feature_dimension: Optional[int] = None, max_iter: int = 100 ) -> Dict[str, Any]: """Train a VQC model.""" if feature_dimension is None: feature_dimension = X_train.shape[1] if feature_dimension > self.max_qubits: raise ValueError( f"Feature dimension {feature_dimension} exceeds " f"maximum qubits {self.max_qubits}" ) try: # Create feature map and ansatz feature_map = ZZFeatureMap( feature_dimension=feature_dimension, reps=2, entanglement="linear" ) ansatz = RealAmplitudes( num_qubits=feature_dimension, reps=3, entanglement="linear" ) # Create optimizer optimizer = COBYLA(maxiter=max_iter) # Create and train VQC vqc = VQC( feature_map=feature_map, ansatz=ansatz, optimizer=optimizer, ) vqc.fit(X_train, y_train) # Serialize model to base64 model_buffer = io.BytesIO() 
joblib.dump(vqc, model_buffer) model_buffer.seek(0) model_base64 = base64.b64encode(model_buffer.read()).decode('utf-8') # Get training score train_score = vqc.score(X_train, y_train) return { "model": model_base64, "train_score": float(train_score), "feature_dimension": feature_dimension, "num_samples": int(X_train.shape[0]), "success": True } except Exception as e: logger.error(f"VQC training failed: {e}") raise class ModelEvaluator: """Evaluate trained quantum ML models.""" @staticmethod async def evaluate( model_base64: str, X_test: np.ndarray, y_test: Optional[np.ndarray] = None ) -> Dict[str, Any]: """Evaluate a trained model.""" try: # Deserialize model model_buffer = io.BytesIO(base64.b64decode(model_base64)) model = joblib.load(model_buffer) # Make predictions predictions = model.predict(X_test) result = { "predictions": predictions.tolist(), "num_samples": int(X_test.shape[0]), "success": True } # Compute score if labels provided if y_test is not None: score = model.score(X_test, y_test) result["score"] = float(score) result["accuracy"] = float(score) return result except Exception as e: logger.error(f"Model evaluation failed: {e}") raise def serialize_numpy(obj: Any) -> Any: """Serialize numpy objects to JSON-compatible types.""" if isinstance(obj, np.ndarray): return obj.tolist() elif isinstance(obj, np.integer): return int(obj) elif isinstance(obj, np.floating): return float(obj) elif isinstance(obj, dict): return {key: serialize_numpy(value) for key, value in obj.items()} elif isinstance(obj, (list, tuple)): return [serialize_numpy(item) for item in obj] return obj

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/des137/qml-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.