
MCP Market Statistics Server

by whdghk1907
test_market_anomaly_detector.py • 19.7 kB
"""์‹œ์žฅ ์ด์ƒ ํƒ์ง€๊ธฐ ํ…Œ์ŠคํŠธ""" import pytest import asyncio import time from datetime import datetime, timedelta from unittest.mock import AsyncMock, MagicMock, patch from typing import Dict, List, Any from src.ai.market_anomaly_detector import MarketAnomalyDetector from src.exceptions import ModelNotTrainedError, InsufficientDataError, PredictionError class TestMarketAnomalyDetector: """์‹œ์žฅ ์ด์ƒ ํƒ์ง€๊ธฐ ํ…Œ์ŠคํŠธ""" @pytest.fixture def detector_config(self): """ํƒ์ง€๊ธฐ ์„ค์ •""" return { "anomaly_threshold": 2.5, "window_size": 20, "min_anomaly_duration": 3, "algorithms": ["isolation_forest", "statistical", "lstm_autoencoder"], "sensitivity": 0.8, "ensemble_method": "majority_vote", "feature_weights": { "price": 0.3, "volume": 0.25, "volatility": 0.2, "sentiment": 0.15, "technical_indicators": 0.1 } } @pytest.fixture def anomaly_detector(self, detector_config): """์‹œ์žฅ ์ด์ƒ ํƒ์ง€๊ธฐ ์ธ์Šคํ„ด์Šค""" return MarketAnomalyDetector(detector_config) @pytest.fixture def normal_market_data(self): """์ •์ƒ ์‹œ์žฅ ๋ฐ์ดํ„ฐ""" return [ { "timestamp": "2024-01-15T09:00:00", "symbol": "005930", "price": 75000, "volume": 1000000, "volatility": 0.02, "sentiment_score": 0.1, "rsi": 55 }, { "timestamp": "2024-01-15T09:30:00", "symbol": "005930", "price": 75100, "volume": 1050000, "volatility": 0.021, "sentiment_score": 0.12, "rsi": 56 }, { "timestamp": "2024-01-15T10:00:00", "symbol": "005930", "price": 75200, "volume": 980000, "volatility": 0.019, "sentiment_score": 0.08, "rsi": 57 } ] @pytest.fixture def anomalous_market_data(self): """์ด์ƒ ์‹œ์žฅ ๋ฐ์ดํ„ฐ""" return [ { "timestamp": "2024-01-15T11:00:00", "symbol": "005930", "price": 78000, # ๊ธ‰๋“ฑ "volume": 5000000, # ๊ฑฐ๋ž˜๋Ÿ‰ ๊ธ‰์ฆ "volatility": 0.08, # ๋ณ€๋™์„ฑ ๊ธ‰์ฆ "sentiment_score": 0.7, # ๊ฐ•ํ•œ ๊ธ์ • ๊ฐ์ • "rsi": 85 # ๊ณผ๋งค์ˆ˜ }, { "timestamp": "2024-01-15T11:30:00", "symbol": "005930", "price": 79500, "volume": 6000000, "volatility": 0.09, "sentiment_score": 0.8, "rsi": 90 } ] def test_detector_initialization(self, anomaly_detector, detector_config): """ํƒ์ง€๊ธฐ ์ดˆ๊ธฐํ™” ํ…Œ์ŠคํŠธ""" assert anomaly_detector.anomaly_threshold == detector_config["anomaly_threshold"] assert anomaly_detector.window_size == detector_config["window_size"] assert anomaly_detector.min_anomaly_duration == detector_config["min_anomaly_duration"] assert anomaly_detector.algorithms == detector_config["algorithms"] assert anomaly_detector.sensitivity == detector_config["sensitivity"] assert anomaly_detector.ensemble_method == detector_config["ensemble_method"] assert anomaly_detector.feature_weights == detector_config["feature_weights"] assert anomaly_detector.is_trained == False @pytest.mark.asyncio async def test_model_training(self, anomaly_detector, normal_market_data): """๋ชจ๋ธ ํ›ˆ๋ จ ํ…Œ์ŠคํŠธ""" # ํ›ˆ๋ จ ์ „ ์ƒํƒœ assert anomaly_detector.is_trained == False # ์ถฉ๋ถ„ํ•œ ํ›ˆ๋ จ ๋ฐ์ดํ„ฐ ์ƒ์„ฑ (์ •์ƒ ๋ฐ์ดํ„ฐ) training_data = normal_market_data * 30 # 90๊ฐœ ๋ฐ์ดํ„ฐํฌ์ธํŠธ # ๋ชจ๋ธ ํ›ˆ๋ จ training_result = await anomaly_detector.train(training_data) # ํ›ˆ๋ จ ํ›„ ์ƒํƒœ assert anomaly_detector.is_trained == True assert "models" in training_result assert "training_metrics" in training_result assert "validation_score" in training_result @pytest.mark.asyncio async def test_single_point_anomaly_detection(self, anomaly_detector, normal_market_data, anomalous_market_data): """๋‹จ์ผ ๋ฐ์ดํ„ฐํฌ์ธํŠธ ์ด์ƒ ํƒ์ง€ ํ…Œ์ŠคํŠธ""" # ๋ชจ๋ธ ํ›ˆ๋ จ training_data = normal_market_data * 30 await anomaly_detector.train(training_data) # ์ 
•์ƒ ๋ฐ์ดํ„ฐ ํ…Œ์ŠคํŠธ normal_result = await anomaly_detector.detect_anomaly(normal_market_data[0]) assert "is_anomaly" in normal_result assert "anomaly_score" in normal_result assert "anomaly_type" in normal_result assert "confidence" in normal_result assert normal_result["is_anomaly"] == False # ์ด์ƒ ๋ฐ์ดํ„ฐ ํ…Œ์ŠคํŠธ anomalous_result = await anomaly_detector.detect_anomaly(anomalous_market_data[0]) assert anomalous_result["is_anomaly"] == True assert anomalous_result["anomaly_score"] > normal_result["anomaly_score"] @pytest.mark.asyncio async def test_batch_anomaly_detection(self, anomaly_detector, normal_market_data, anomalous_market_data): """๋ฐฐ์น˜ ์ด์ƒ ํƒ์ง€ ํ…Œ์ŠคํŠธ""" # ๋ชจ๋ธ ํ›ˆ๋ จ training_data = normal_market_data * 30 await anomaly_detector.train(training_data) # ํ˜ผํ•ฉ ๋ฐ์ดํ„ฐ ๋ฐฐ์น˜ mixed_data = normal_market_data + anomalous_market_data batch_results = await anomaly_detector.detect_batch_anomalies(mixed_data) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert len(batch_results) == len(mixed_data) # ์ •์ƒ ๋ฐ์ดํ„ฐ๋Š” ์ด์ƒ์œผ๋กœ ํƒ์ง€๋˜์ง€ ์•Š์•„์•ผ ํ•จ for i in range(len(normal_market_data)): assert batch_results[i]["is_anomaly"] == False # ์ด์ƒ ๋ฐ์ดํ„ฐ๋Š” ์ด์ƒ์œผ๋กœ ํƒ์ง€๋˜์–ด์•ผ ํ•จ for i in range(len(normal_market_data), len(mixed_data)): assert batch_results[i]["is_anomaly"] == True @pytest.mark.asyncio async def test_time_series_anomaly_detection(self, anomaly_detector, normal_market_data, anomalous_market_data): """์‹œ๊ณ„์—ด ์ด์ƒ ํƒ์ง€ ํ…Œ์ŠคํŠธ""" # ๋ชจ๋ธ ํ›ˆ๋ จ training_data = normal_market_data * 30 await anomaly_detector.train(training_data) # ์‹œ๊ณ„์—ด ๋ฐ์ดํ„ฐ time_series_data = normal_market_data + anomalous_market_data + normal_market_data time_series_results = await anomaly_detector.detect_time_series_anomalies( time_series_data, window_size=3 ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "anomaly_periods" in time_series_results assert "anomaly_summary" in time_series_results assert "trend_analysis" in time_series_results # ์ด์ƒ ๊ธฐ๊ฐ„์ด ๊ฐ์ง€๋˜์–ด์•ผ ํ•จ anomaly_periods = time_series_results["anomaly_periods"] assert len(anomaly_periods) > 0 # ๊ฐ ์ด์ƒ ๊ธฐ๊ฐ„์— ๋Œ€ํ•œ ์ •๋ณด ํ™•์ธ for period in anomaly_periods: assert "start_time" in period assert "end_time" in period assert "severity" in period assert "anomaly_type" in period @pytest.mark.asyncio async def test_anomaly_type_classification(self, anomaly_detector, normal_market_data): """์ด์ƒ ์œ ํ˜• ๋ถ„๋ฅ˜ ํ…Œ์ŠคํŠธ""" # ๋ชจ๋ธ ํ›ˆ๋ จ training_data = normal_market_data * 30 await anomaly_detector.train(training_data) # ๋‹ค์–‘ํ•œ ์œ ํ˜•์˜ ์ด์ƒ ๋ฐ์ดํ„ฐ price_anomaly = { "timestamp": "2024-01-15T12:00:00", "symbol": "005930", "price": 85000, # ๊ธ‰๋“ฑ "volume": 1000000, "volatility": 0.02, "sentiment_score": 0.1, "rsi": 55 } volume_anomaly = { "timestamp": "2024-01-15T12:30:00", "symbol": "005930", "price": 75000, "volume": 10000000, # ๊ฑฐ๋ž˜๋Ÿ‰ ๊ธ‰์ฆ "volatility": 0.02, "sentiment_score": 0.1, "rsi": 55 } volatility_anomaly = { "timestamp": "2024-01-15T13:00:00", "symbol": "005930", "price": 75000, "volume": 1000000, "volatility": 0.15, # ๋ณ€๋™์„ฑ ๊ธ‰์ฆ "sentiment_score": 0.1, "rsi": 55 } # ๊ฐ ์œ ํ˜•๋ณ„ ํƒ์ง€ price_result = await anomaly_detector.detect_anomaly(price_anomaly) volume_result = await anomaly_detector.detect_anomaly(volume_anomaly) volatility_result = await anomaly_detector.detect_anomaly(volatility_anomaly) # ์ด์ƒ ์œ ํ˜•์ด ์˜ฌ๋ฐ”๋ฅด๊ฒŒ ๋ถ„๋ฅ˜๋˜๋Š”์ง€ ํ™•์ธ assert price_result["anomaly_type"] in ["price_spike", "multi_feature"] assert volume_result["anomaly_type"] in ["volume_spike", 
"multi_feature"] assert volatility_result["anomaly_type"] in ["volatility_spike", "multi_feature"] @pytest.mark.asyncio async def test_anomaly_severity_scoring(self, anomaly_detector, normal_market_data): """์ด์ƒ ์‹ฌ๊ฐ๋„ ์ ์ˆ˜ ํ…Œ์ŠคํŠธ""" # ๋ชจ๋ธ ํ›ˆ๋ จ training_data = normal_market_data * 30 await anomaly_detector.train(training_data) # ๋‹ค์–‘ํ•œ ์‹ฌ๊ฐ๋„์˜ ์ด์ƒ ๋ฐ์ดํ„ฐ mild_anomaly = { "timestamp": "2024-01-15T14:00:00", "symbol": "005930", "price": 76000, # ์•ฝ๊ฐ„ ์ƒ์Šน "volume": 1200000, # ์•ฝ๊ฐ„ ์ฆ๊ฐ€ "volatility": 0.025, # ์•ฝ๊ฐ„ ์ฆ๊ฐ€ "sentiment_score": 0.2, "rsi": 60 } severe_anomaly = { "timestamp": "2024-01-15T14:30:00", "symbol": "005930", "price": 85000, # ๊ธ‰๋“ฑ "volume": 8000000, # ๊ธ‰์ฆ "volatility": 0.12, # ๊ธ‰์ฆ "sentiment_score": 0.8, "rsi": 95 } mild_result = await anomaly_detector.detect_anomaly(mild_anomaly) severe_result = await anomaly_detector.detect_anomaly(severe_anomaly) # ์‹ฌ๊ฐ๋„ ์ ์ˆ˜ ๋น„๊ต assert severe_result["anomaly_score"] > mild_result["anomaly_score"] assert severe_result["confidence"] >= mild_result["confidence"] @pytest.mark.asyncio async def test_ensemble_method_voting(self, anomaly_detector, normal_market_data, anomalous_market_data): """์•™์ƒ๋ธ” ๋ฐฉ๋ฒ• ํˆฌํ‘œ ํ…Œ์ŠคํŠธ""" # ๋ชจ๋ธ ํ›ˆ๋ จ training_data = normal_market_data * 30 await anomaly_detector.train(training_data) # ์•™์ƒ๋ธ” ๊ฒฐ๊ณผ ํ™•์ธ result = await anomaly_detector.detect_anomaly(anomalous_market_data[0]) assert "ensemble_scores" in result assert "algorithm_votes" in result # ๊ฐ ์•Œ๊ณ ๋ฆฌ์ฆ˜์˜ ๊ฒฐ๊ณผ๊ฐ€ ํฌํ•จ๋˜์–ด์•ผ ํ•จ ensemble_scores = result["ensemble_scores"] for algorithm in anomaly_detector.algorithms: assert algorithm in ensemble_scores @pytest.mark.asyncio async def test_feature_importance_analysis(self, anomaly_detector, normal_market_data, anomalous_market_data): """ํ”ผ์ฒ˜ ์ค‘์š”๋„ ๋ถ„์„ ํ…Œ์ŠคํŠธ""" # ๋ชจ๋ธ ํ›ˆ๋ จ training_data = normal_market_data * 30 await anomaly_detector.train(training_data) # ํ”ผ์ฒ˜ ์ค‘์š”๋„ ๋ถ„์„ importance_result = await anomaly_detector.analyze_feature_importance(anomalous_market_data[0]) assert "feature_contributions" in importance_result assert "top_contributing_features" in importance_result assert "anomaly_explanation" in importance_result # ํ”ผ์ฒ˜ ๊ธฐ์—ฌ๋„ ํ™•์ธ feature_contributions = importance_result["feature_contributions"] assert all(feature in feature_contributions for feature in ["price", "volume", "volatility"]) @pytest.mark.asyncio async def test_anomaly_threshold_adjustment(self, anomaly_detector, normal_market_data): """์ด์ƒ ์ž„๊ณ„๊ฐ’ ์กฐ์ • ํ…Œ์ŠคํŠธ""" # ๋ชจ๋ธ ํ›ˆ๋ จ training_data = normal_market_data * 30 await anomaly_detector.train(training_data) # ์ž„๊ณ„๊ฐ’ ๋ณ€๊ฒฝ ์ „ ์„ค์ • original_threshold = anomaly_detector.anomaly_threshold # ์ž„๊ณ„๊ฐ’ ์กฐ์ • new_threshold = 1.5 await anomaly_detector.adjust_threshold(new_threshold) assert anomaly_detector.anomaly_threshold == new_threshold # ์กฐ์ •๋œ ์ž„๊ณ„๊ฐ’์œผ๋กœ ๊ฒ€์ฆ test_data = { "timestamp": "2024-01-15T15:00:00", "symbol": "005930", "price": 76500, "volume": 1500000, "volatility": 0.03, "sentiment_score": 0.3, "rsi": 65 } result = await anomaly_detector.detect_anomaly(test_data) assert "threshold_used" in result assert result["threshold_used"] == new_threshold @pytest.mark.asyncio async def test_anomaly_pattern_learning(self, anomaly_detector, normal_market_data, anomalous_market_data): """์ด์ƒ ํŒจํ„ด ํ•™์Šต ํ…Œ์ŠคํŠธ""" # ๋ชจ๋ธ ํ›ˆ๋ จ training_data = normal_market_data * 30 await anomaly_detector.train(training_data) # ์ƒˆ๋กœ์šด ์ด์ƒ 
ํŒจํ„ด ํ•™์Šต new_anomaly_patterns = anomalous_market_data * 5 learning_result = await anomaly_detector.learn_new_patterns(new_anomaly_patterns) assert "patterns_learned" in learning_result assert "model_updated" in learning_result assert learning_result["model_updated"] == True # ํ•™์Šต ํ›„ ํƒ์ง€ ์„ฑ๋Šฅ ํ™•์ธ post_learning_result = await anomaly_detector.detect_anomaly(anomalous_market_data[0]) assert post_learning_result["confidence"] > 0.7 @pytest.mark.asyncio async def test_real_time_anomaly_detection(self, anomaly_detector, normal_market_data): """์‹ค์‹œ๊ฐ„ ์ด์ƒ ํƒ์ง€ ํ…Œ์ŠคํŠธ""" # ๋ชจ๋ธ ํ›ˆ๋ จ training_data = normal_market_data * 30 await anomaly_detector.train(training_data) # ์‹ค์‹œ๊ฐ„ ์ŠคํŠธ๋ฆฌ๋ฐ ์‹œ๋ฎฌ๋ ˆ์ด์…˜ streaming_data = [] for i in range(10): data_point = { "timestamp": f"2024-01-15T{16+i}:00:00", "symbol": "005930", "price": 75000 + i * 100, "volume": 1000000 + i * 50000, "volatility": 0.02 + i * 0.001, "sentiment_score": 0.1 + i * 0.05, "rsi": 55 + i } streaming_data.append(data_point) # ์‹ค์‹œ๊ฐ„ ์ฒ˜๋ฆฌ real_time_results = [] for data_point in streaming_data: result = await anomaly_detector.process_real_time_data(data_point) real_time_results.append(result) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert len(real_time_results) == len(streaming_data) for result in real_time_results: assert "is_anomaly" in result assert "processing_time" in result assert result["processing_time"] < 1.0 # 1์ดˆ ์ด๋‚ด ์ฒ˜๋ฆฌ @pytest.mark.asyncio async def test_anomaly_alert_system(self, anomaly_detector, normal_market_data, anomalous_market_data): """์ด์ƒ ์•Œ๋ฆผ ์‹œ์Šคํ…œ ํ…Œ์ŠคํŠธ""" # ๋ชจ๋ธ ํ›ˆ๋ จ training_data = normal_market_data * 30 await anomaly_detector.train(training_data) # ์•Œ๋ฆผ ์„ค์ • alert_config = { "severity_threshold": "medium", "notification_channels": ["email", "webhook"], "cooldown_period": 300 # 5๋ถ„ } await anomaly_detector.configure_alerts(alert_config) # ์ด์ƒ ํƒ์ง€ ๋ฐ ์•Œ๋ฆผ ์ƒ์„ฑ alert_result = await anomaly_detector.detect_with_alerts(anomalous_market_data[0]) assert "alert_triggered" in alert_result assert "alert_details" in alert_result if alert_result["alert_triggered"]: alert_details = alert_result["alert_details"] assert "severity" in alert_details assert "message" in alert_details assert "timestamp" in alert_details @pytest.mark.asyncio async def test_error_handling(self, anomaly_detector): """์˜ค๋ฅ˜ ์ฒ˜๋ฆฌ ํ…Œ์ŠคํŠธ""" # ํ›ˆ๋ จ๋˜์ง€ ์•Š์€ ๋ชจ๋ธ๋กœ ํƒ์ง€ ์‹œ๋„ with pytest.raises(ModelNotTrainedError): await anomaly_detector.detect_anomaly({"price": 100}) # ๋ถˆ์ถฉ๋ถ„ํ•œ ๋ฐ์ดํ„ฐ๋กœ ํ›ˆ๋ จ ์‹œ๋„ insufficient_data = [{"price": 100}] # ๋„ˆ๋ฌด ์ ์€ ๋ฐ์ดํ„ฐ with pytest.raises(InsufficientDataError): await anomaly_detector.train(insufficient_data) # ์ž˜๋ชป๋œ ๋ฐ์ดํ„ฐ ํ˜•์‹ await anomaly_detector.train([{"price": 100, "volume": 1000}] * 50) invalid_data = {"invalid_field": "value"} result = await anomaly_detector.detect_anomaly(invalid_data) assert result["is_anomaly"] == False # ๊ธฐ๋ณธ๊ฐ’์œผ๋กœ ์ฒ˜๋ฆฌ @pytest.mark.asyncio async def test_performance_metrics(self, anomaly_detector, normal_market_data, anomalous_market_data): """์„ฑ๋Šฅ ๋ฉ”ํŠธ๋ฆญ ํ…Œ์ŠคํŠธ""" # ๋ชจ๋ธ ํ›ˆ๋ จ training_data = normal_market_data * 30 await anomaly_detector.train(training_data) # ์„ฑ๋Šฅ ํ…Œ์ŠคํŠธ ๋ฐ์ดํ„ฐ test_data = normal_market_data + anomalous_market_data start_time = time.time() batch_results = await anomaly_detector.detect_batch_anomalies(test_data) end_time = time.time() processing_time = end_time - start_time # ์„ฑ๋Šฅ ๋ฉ”ํŠธ๋ฆญ ํ™•์ธ metrics = 
anomaly_detector.get_performance_metrics() assert "total_detections" in metrics assert "average_processing_time" in metrics assert "accuracy_metrics" in metrics # ์ฒ˜๋ฆฌ ์‹œ๊ฐ„์ด ํ•ฉ๋ฆฌ์ ์ธ์ง€ ํ™•์ธ assert processing_time < 10.0 # 10์ดˆ ์ด๋‚ด assert len(batch_results) == len(test_data) def test_statistical_anomaly_detection(self, anomaly_detector): """ํ†ต๊ณ„์  ์ด์ƒ ํƒ์ง€ ํ…Œ์ŠคํŠธ""" # ์ •์ƒ ๋ฐ์ดํ„ฐ (ํ‰๊ท  100, ํ‘œ์ค€ํŽธ์ฐจ 10) normal_values = [100, 105, 95, 110, 90, 102, 98, 107, 93, 101] # ์ด์ƒ ๋ฐ์ดํ„ฐ anomalous_value = 200 # z-score > 3 # ํ†ต๊ณ„์  ์ด์ƒ ํƒ์ง€ z_score = anomaly_detector._calculate_z_score(anomalous_value, normal_values) is_statistical_anomaly = anomaly_detector._is_statistical_anomaly(z_score) assert abs(z_score) > 3 # ๊ฐ•ํ•œ ์ด์ƒ assert is_statistical_anomaly == True # ์ •์ƒ ๊ฐ’ ํ…Œ์ŠคํŠธ normal_z_score = anomaly_detector._calculate_z_score(105, normal_values) is_normal = anomaly_detector._is_statistical_anomaly(normal_z_score) assert abs(normal_z_score) < 2 assert is_normal == False
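
Because the suite marks its coroutines with @pytest.mark.asyncio, it needs the pytest-asyncio plugin installed; a typical invocation is pytest test_market_anomaly_detector.py -v.

The statistical test at the end pins down the simplest contract for the private helpers _calculate_z_score and _is_statistical_anomaly: a z-score against a reference window, compared to the configured anomaly_threshold. Below is a minimal standalone sketch consistent with those assertions; the actual methods in src/ai/market_anomaly_detector.py are not shown on this page and may differ (e.g. sample vs. population standard deviation), so the names and signatures here are illustrative only.

import statistics

def calculate_z_score(value: float, reference: list) -> float:
    """Z-score of `value` relative to a window of recent observations."""
    mean = statistics.fmean(reference)
    std = statistics.pstdev(reference)  # population std; sample std is equally plausible
    if std == 0:
        return 0.0  # a constant window carries no dispersion to score against
    return (value - mean) / std

def is_statistical_anomaly(z_score: float, threshold: float = 2.5) -> bool:
    """Flag values whose absolute z-score exceeds the configured threshold."""
    return abs(z_score) > threshold

# Matches the fixture's expectations: z(200) ~ 16.7 (anomaly), z(105) ~ 0.8 (normal).
values = [100, 105, 95, 110, 90, 102, 98, 107, 93, 101]
assert is_statistical_anomaly(calculate_z_score(200, values))
assert not is_statistical_anomaly(calculate_z_score(105, values))

Similarly, the "majority_vote" ensemble_method in the fixture, combined with the algorithm_votes field asserted in test_ensemble_method_voting, suggests a combination rule along these lines (again a sketch, not the confirmed implementation):

def majority_vote(votes: dict) -> bool:
    """True when more than half of the per-algorithm boolean votes flag an anomaly."""
    return sum(votes.values()) > len(votes) / 2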

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/whdghk1907/mcp-market-statistics'
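
The same lookup works from any HTTP client. Here is a minimal Python sketch using only the standard library; the response is assumed to be JSON, and its exact field layout is not documented on this page.

import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/whdghk1907/mcp-market-statistics"

# Fetch and decode the directory entry for this server
with urllib.request.urlopen(URL) as response:
    server_info = json.load(response)

print(server_info)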

If you have feedback or need assistance with the MCP directory API, please join our Discord server.