
MCP Hybrid Forecasting

by j1c4b
fix_optimization_results.py (6.58 kB)
# fix_optimization_results.py - Fix corrupted optimization results files

import json
import pickle
from pathlib import Path
from datetime import datetime


def fix_corrupted_optimization_file(filepath: str):
    """
    Attempt to recover data from a corrupted optimization results file.
    This handles the case where tuples were saved as dictionary keys.
    """
    file_path = Path(filepath)
    if not file_path.exists():
        print(f"❌ File not found: {filepath}")
        return

    print(f"🔧 Attempting to fix: {file_path.name}")

    try:
        # Try to load the file first to see if it's actually corrupted
        with open(file_path, 'r') as f:
            data = json.load(f)
        print(f"✅ File is already valid JSON, no fix needed")
        return str(file_path)
    except json.JSONDecodeError as e:
        print(f"⚠️ JSON decode error: {e}")
        return None
    except TypeError as e:
        if "not JSON serializable" in str(e) or "keys must be str" in str(e):
            print(f"🔧 Detected tuple key issue, attempting recovery...")
            # Try to recover from potential backup or reconstruct from logs
            # For now, suggest manual reconstruction
            print(f"❌ Cannot automatically fix this file type")
            print(f"💡 Suggestion: Re-run the optimization for the affected portfolio")
            return None
        else:
            print(f"❌ Unknown TypeError: {e}")
            return None


def create_sample_fixed_results():
    """Create a sample properly formatted results file for reference."""
    sample_results = {
        "AAPL": {
            "ticker": "AAPL",
            "method": "pmdarima",
            "best_order": [2, 1, 1],  # List instead of tuple for JSON
            "best_avg_rmse": 0.0234,
            "total_iterations": 47,
            "order_frequency": {
                "(2, 1, 1)": 0.4,  # String representation of tuple
                "(1, 1, 1)": 0.3,
                "(1, 1, 2)": 0.3
            },
            "optimization_date": datetime.now().isoformat(),
            "optimization_time_seconds": 12.3
        },
        "MSFT": {
            "ticker": "MSFT",
            "method": "pmdarima",
            "best_order": [1, 1, 2],
            "best_avg_rmse": 0.0198,
            "total_iterations": 49,
            "order_frequency": {
                "(1, 1, 2)": 0.5,
                "(1, 1, 1)": 0.3,
                "(2, 1, 1)": 0.2
            },
            "optimization_date": datetime.now().isoformat(),
            "optimization_time_seconds": 11.8
        }
    }

    # Save sample file
    output_dir = Path("arima_optimization")
    output_dir.mkdir(exist_ok=True)

    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    sample_file = output_dir / f"sample_fixed_format_{timestamp}.json"

    with open(sample_file, 'w') as f:
        json.dump(sample_results, f, indent=2)

    print(f"📝 Created sample file: {sample_file}")
    return str(sample_file)


def validate_optimization_file(filepath: str):
    """Validate that an optimization file is properly formatted."""
    file_path = Path(filepath)

    try:
        with open(file_path, 'r') as f:
            data = json.load(f)

        print(f"✅ File loads successfully: {file_path.name}")

        # Validate structure
        valid_count = 0
        for ticker, result in data.items():
            if isinstance(result, dict) and 'best_order' in result:
                order = result['best_order']
                if isinstance(order, list) and len(order) == 3:
                    valid_count += 1
                    print(f"   ✅ {ticker}: ARIMA{tuple(order)}")
                else:
                    print(f"   ⚠️ {ticker}: Invalid order format: {order}")
            else:
                print(f"   ❌ {ticker}: Missing or invalid structure")

        print(f"📊 Validation complete: {valid_count}/{len(data)} entries valid")
        return valid_count == len(data)

    except Exception as e:
        print(f"❌ Validation failed: {e}")
        return False


def main():
    """Main function to fix optimization results."""
    import argparse

    parser = argparse.ArgumentParser(description="Fix corrupted ARIMA optimization results")
    parser.add_argument('--file', type=str, help='Specific file to fix')
    parser.add_argument('--validate', type=str, help='Validate a specific file')
    parser.add_argument('--create-sample', action='store_true', help='Create sample fixed format')
    parser.add_argument('--scan-all', action='store_true', help='Scan all files in arima_optimization/')

    args = parser.parse_args()

    if args.create_sample:
        create_sample_fixed_results()
        return

    if args.validate:
        validate_optimization_file(args.validate)
        return

    if args.file:
        fix_corrupted_optimization_file(args.file)
        return

    if args.scan_all:
        optimization_dir = Path("arima_optimization")
        if not optimization_dir.exists():
            print("❌ No arima_optimization directory found")
            return

        json_files = list(optimization_dir.glob("*.json"))
        if not json_files:
            print("❌ No JSON files found in arima_optimization/")
            return

        print(f"🔍 Scanning {len(json_files)} files...")
        for json_file in json_files:
            print(f"\n📁 Checking: {json_file.name}")
            try:
                is_valid = validate_optimization_file(str(json_file))
                if not is_valid:
                    print(f"   ⚠️ File may need fixing")
            except Exception as e:
                print(f"   ❌ Error checking file: {e}")
        return

    # No arguments - show help
    print("🔧 ARIMA OPTIMIZATION RESULTS FIXER")
    print("=" * 40)
    print("Options:")
    print("  --file FILE      : Fix specific file")
    print("  --validate FILE  : Validate specific file")
    print("  --create-sample  : Create sample fixed format file")
    print("  --scan-all       : Scan all files in arima_optimization/")
    print("")
    print("Examples:")
    print("  python fix_optimization_results.py --scan-all")
    print("  python fix_optimization_results.py --validate arima_optimization/results.json")
    print("  python fix_optimization_results.py --create-sample")


if __name__ == "__main__":
    main()
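For context, the corruption this script targets typically comes from dumping a results dictionary whose order_frequency keys are Python tuples, which json.dump rejects. The sketch below is not part of the repository; it uses a hypothetical helper named stringify_order_keys to show how tuple keys can be converted to the string form that create_sample_fixed_results and validate_optimization_file expect.

import json

def stringify_order_keys(order_frequency: dict) -> dict:
    """Hypothetical helper: convert tuple keys such as (2, 1, 1) to the
    string form '(2, 1, 1)' so the dict can be serialized with json.dump."""
    return {str(order): freq for order, freq in order_frequency.items()}

# json.dump({(2, 1, 1): 0.4}, f) would raise:
#   TypeError: keys must be str, int, float, bool or None, not tuple
safe_frequency = stringify_order_keys({(2, 1, 1): 0.4, (1, 1, 1): 0.6})
print(json.dumps(safe_frequency, indent=2))  # serializes cleanly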

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/j1c4b/mcp-hybrid-forecasting'
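The same lookup can be made from Python; here is a minimal sketch using the requests library, with the URL taken from the curl example above. The response is printed as-is, since its exact shape is not documented in this section.

import requests

# Query the Glama MCP directory API for this server's entry
response = requests.get(
    "https://glama.ai/api/mcp/v1/servers/j1c4b/mcp-hybrid-forecasting",
    timeout=10,
)
response.raise_for_status()
print(response.json())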
