Skip to main content
Glama

MCP Market Statistics Server

by whdghk1907
test_multi_timeframe_analyzer.py • 21.9 kB
"""๋ฉ€ํ‹ฐ ํƒ€์ž„ํ”„๋ ˆ์ž„ ๋ถ„์„๊ธฐ ํ…Œ์ŠคํŠธ""" import pytest from datetime import datetime, timedelta from src.analytics.multi_timeframe_analyzer import MultiTimeframeAnalyzer from src.exceptions import TimeframeError, AnalysisError, InsufficientDataError class TestMultiTimeframeAnalyzer: """๋ฉ€ํ‹ฐ ํƒ€์ž„ํ”„๋ ˆ์ž„ ๋ถ„์„๊ธฐ ํ…Œ์ŠคํŠธ ํด๋ž˜์Šค""" @pytest.fixture def analyzer_config(self): """๋ถ„์„๊ธฐ ์„ค์ •""" return { "timeframes": ["1m", "5m", "15m", "1h", "4h", "1d"], "aggregation_methods": { "volume": "sum", "close": "last", "high": "max", "low": "min", "open": "first" }, "analysis_types": [ "trend_alignment", "momentum_divergence", "volume_profile", "support_resistance" ], "correlation_thresholds": { "strong": 0.8, "moderate": 0.6, "weak": 0.3 } } @pytest.fixture def multi_timeframe_analyzer(self, analyzer_config): """๋ฉ€ํ‹ฐ ํƒ€์ž„ํ”„๋ ˆ์ž„ ๋ถ„์„๊ธฐ ์ธ์Šคํ„ด์Šค""" return MultiTimeframeAnalyzer(analyzer_config) @pytest.fixture def sample_ohlcv_data(self): """์ƒ˜ํ”Œ OHLCV ๋ฐ์ดํ„ฐ (1๋ถ„๋ด‰)""" data = [] base_price = 50000 base_time = datetime(2024, 1, 1, 0, 0, 0) for i in range(1440): # 24์‹œ๊ฐ„ = 1440๋ถ„ time = base_time + timedelta(minutes=i) # ๊ฐ€๊ฒฉ ๋ณ€๋™ ์‹œ๋ฎฌ๋ ˆ์ด์…˜ trend = (i // 60) * 100 # ์‹œ๊ฐ„๋ณ„ ํŠธ๋ Œ๋“œ wave = 500 * ((i % 60) / 60) # ๋ถ„๋ณ„ ๋ณ€๋™ noise = (i % 5) * 20 # ๋…ธ์ด์ฆˆ close = base_price + trend + wave + noise high = close + abs(noise) * 2 low = close - abs(noise) * 1.5 open_price = close - noise volume = 1000000 + (i % 30) * 50000 data.append({ "timestamp": time.isoformat(), "open": open_price, "high": high, "low": low, "close": close, "volume": volume }) return data @pytest.fixture def multi_symbol_data(self): """์—ฌ๋Ÿฌ ์‹ฌ๋ณผ์˜ ๋ฐ์ดํ„ฐ""" symbols = ["BTC/USD", "ETH/USD", "SOL/USD"] data = {} for symbol in symbols: base_price = 50000 if "BTC" in symbol else 3000 if "ETH" in symbol else 100 symbol_data = [] for i in range(100): symbol_data.append({ "timestamp": (datetime(2024, 1, 1, 0, 0, 0) + 
timedelta(hours=i)).isoformat(), "symbol": symbol, "close": base_price + i * 10, "volume": 1000000 + i * 10000 }) data[symbol] = symbol_data return data @pytest.mark.asyncio async def test_timeframe_aggregation(self, multi_timeframe_analyzer, sample_ohlcv_data): """ํƒ€์ž„ํ”„๋ ˆ์ž„ ์ง‘๊ณ„ ํ…Œ์ŠคํŠธ""" # 5๋ถ„๋ด‰์œผ๋กœ ์ง‘๊ณ„ aggregated_5m = await multi_timeframe_analyzer.aggregate_to_timeframe( sample_ohlcv_data, target_timeframe="5m" ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert len(aggregated_5m) == 288 # 24์‹œ๊ฐ„ = 288๊ฐœ 5๋ถ„๋ด‰ # ์ฒซ ๋ฒˆ์งธ 5๋ถ„๋ด‰ ๊ฒ€์ฆ first_candle = aggregated_5m[0] assert "open" in first_candle assert "high" in first_candle assert "low" in first_candle assert "close" in first_candle assert "volume" in first_candle # ์ง‘๊ณ„ ๊ทœ์น™ ๊ฒ€์ฆ # open์€ ์ฒซ ๋ฒˆ์งธ ๊ฐ’ assert first_candle["open"] == sample_ohlcv_data[0]["open"] # close๋Š” ๋งˆ์ง€๋ง‰ ๊ฐ’ assert first_candle["close"] == sample_ohlcv_data[4]["close"] # high๋Š” ์ตœ๋Œ€๊ฐ’ max_high = max(d["high"] for d in sample_ohlcv_data[:5]) assert first_candle["high"] == max_high # low๋Š” ์ตœ์†Œ๊ฐ’ min_low = min(d["low"] for d in sample_ohlcv_data[:5]) assert first_candle["low"] == min_low # volume์€ ํ•ฉ๊ณ„ sum_volume = sum(d["volume"] for d in sample_ohlcv_data[:5]) assert first_candle["volume"] == sum_volume @pytest.mark.asyncio async def test_multi_timeframe_analysis(self, multi_timeframe_analyzer, sample_ohlcv_data): """๋ฉ€ํ‹ฐ ํƒ€์ž„ํ”„๋ ˆ์ž„ ๋ถ„์„ ํ…Œ์ŠคํŠธ""" # ์—ฌ๋Ÿฌ ํƒ€์ž„ํ”„๋ ˆ์ž„ ๋ถ„์„ analysis_results = await multi_timeframe_analyzer.analyze_timeframes( sample_ohlcv_data, timeframes=["5m", "15m", "1h"] ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "timeframe_data" in analysis_results assert "trend_alignment" in analysis_results assert "momentum_analysis" in analysis_results assert "volume_profile" in analysis_results # ํƒ€์ž„ํ”„๋ ˆ์ž„๋ณ„ ๋ฐ์ดํ„ฐ ํ™•์ธ tf_data = analysis_results["timeframe_data"] assert "5m" in tf_data assert "15m" in tf_data assert "1h" in tf_data # ํŠธ๋ Œ๋“œ ์ •๋ ฌ ํ™•์ธ trend_alignment = 
analysis_results["trend_alignment"] assert "alignment_score" in trend_alignment assert "timeframe_trends" in trend_alignment assert 0 <= trend_alignment["alignment_score"] <= 1 @pytest.mark.asyncio async def test_trend_alignment_detection(self, multi_timeframe_analyzer, sample_ohlcv_data): """ํŠธ๋ Œ๋“œ ์ •๋ ฌ ๊ฐ์ง€ ํ…Œ์ŠคํŠธ""" # ํŠธ๋ Œ๋“œ ์ •๋ ฌ ๋ถ„์„ trend_alignment = await multi_timeframe_analyzer.detect_trend_alignment( sample_ohlcv_data, timeframes=["5m", "15m", "1h"], lookback_periods={"5m": 20, "15m": 20, "1h": 20} ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "aligned" in trend_alignment assert "direction" in trend_alignment assert "strength" in trend_alignment assert "timeframe_trends" in trend_alignment # ํƒ€์ž„ํ”„๋ ˆ์ž„๋ณ„ ํŠธ๋ Œ๋“œ tf_trends = trend_alignment["timeframe_trends"] for tf in ["5m", "15m", "1h"]: assert tf in tf_trends assert "direction" in tf_trends[tf] assert "strength" in tf_trends[tf] assert tf_trends[tf]["direction"] in ["up", "down", "neutral"] @pytest.mark.asyncio async def test_momentum_divergence_analysis(self, multi_timeframe_analyzer, sample_ohlcv_data): """๋ชจ๋ฉ˜ํ…€ ๋‹ค์ด๋ฒ„์ „์Šค ๋ถ„์„ ํ…Œ์ŠคํŠธ""" # ๋ชจ๋ฉ˜ํ…€ ๋‹ค์ด๋ฒ„์ „์Šค ๋ถ„์„ divergence_results = await multi_timeframe_analyzer.analyze_momentum_divergence( sample_ohlcv_data, timeframes=["5m", "15m"], momentum_indicator="rsi" ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "divergences" in divergence_results assert "momentum_values" in divergence_results assert "divergence_strength" in divergence_results # ๋‹ค์ด๋ฒ„์ „์Šค ๊ฒ€์ฆ divergences = divergence_results["divergences"] for div in divergences: assert "type" in div # bullish_divergence, bearish_divergence assert "timeframe" in div assert "index" in div assert "strength" in div @pytest.mark.asyncio async def test_volume_profile_analysis(self, multi_timeframe_analyzer, sample_ohlcv_data): """๋ณผ๋ฅจ ํ”„๋กœํŒŒ์ผ ๋ถ„์„ ํ…Œ์ŠคํŠธ""" # ๋ณผ๋ฅจ ํ”„๋กœํŒŒ์ผ ๋ถ„์„ volume_profile = await multi_timeframe_analyzer.analyze_volume_profile( sample_ohlcv_data, 
timeframes=["5m", "15m", "1h"], profile_bins=20 ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "price_levels" in volume_profile assert "volume_distribution" in volume_profile assert "poc" in volume_profile # Point of Control assert "value_area" in volume_profile # POC ๊ฒ€์ฆ poc = volume_profile["poc"] assert "price" in poc assert "volume" in poc # ๊ฐ€์น˜ ์˜์—ญ ๊ฒ€์ฆ value_area = volume_profile["value_area"] assert "high" in value_area assert "low" in value_area assert "volume_percentage" in value_area assert 0 <= value_area["volume_percentage"] <= 1 @pytest.mark.asyncio async def test_support_resistance_across_timeframes(self, multi_timeframe_analyzer, sample_ohlcv_data): """ํƒ€์ž„ํ”„๋ ˆ์ž„๋ณ„ ์ง€์ง€/์ €ํ•ญ ๋ถ„์„ ํ…Œ์ŠคํŠธ""" # ์ง€์ง€/์ €ํ•ญ ๋ ˆ๋ฒจ ๋ถ„์„ sr_levels = await multi_timeframe_analyzer.find_support_resistance_levels( sample_ohlcv_data, timeframes=["5m", "15m", "1h"], min_touches=2 ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "levels" in sr_levels assert "timeframe_levels" in sr_levels assert "confluence_zones" in sr_levels # ๋ ˆ๋ฒจ ๊ฒ€์ฆ levels = sr_levels["levels"] assert len(levels) > 0 for level in levels: assert "price" in level assert "type" in level # support or resistance assert "strength" in level assert "timeframes" in level assert len(level["timeframes"]) > 0 @pytest.mark.asyncio async def test_correlation_matrix_generation(self, multi_timeframe_analyzer, multi_symbol_data): """์ƒ๊ด€๊ด€๊ณ„ ๋งคํŠธ๋ฆญ์Šค ์ƒ์„ฑ ํ…Œ์ŠคํŠธ""" # ์ƒ๊ด€๊ด€๊ณ„ ๋งคํŠธ๋ฆญ์Šค ๊ณ„์‚ฐ correlation_matrix = await multi_timeframe_analyzer.calculate_correlation_matrix( multi_symbol_data, timeframe="1h", correlation_window=24 ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "matrix" in correlation_matrix assert "symbols" in correlation_matrix assert "significant_correlations" in correlation_matrix # ๋งคํŠธ๋ฆญ์Šค ๊ฒ€์ฆ matrix = correlation_matrix["matrix"] symbols = correlation_matrix["symbols"] assert len(matrix) == len(symbols) assert all(len(row) == len(symbols) for row in matrix) # ๋Œ€๊ฐ์„ ์€ 1์ด์–ด์•ผ ํ•จ for i in 
range(len(symbols)): assert matrix[i][i] == 1.0 # ์ƒ๊ด€๊ด€๊ณ„๋Š” -1๊ณผ 1 ์‚ฌ์ด for row in matrix: for corr in row: assert -1 <= corr <= 1 @pytest.mark.asyncio async def test_timeframe_strength_ranking(self, multi_timeframe_analyzer, sample_ohlcv_data): """ํƒ€์ž„ํ”„๋ ˆ์ž„ ๊ฐ•๋„ ์ˆœ์œ„ ํ…Œ์ŠคํŠธ""" # ํƒ€์ž„ํ”„๋ ˆ์ž„๋ณ„ ๊ฐ•๋„ ํ‰๊ฐ€ strength_ranking = await multi_timeframe_analyzer.rank_timeframe_strength( sample_ohlcv_data, timeframes=["5m", "15m", "1h"], criteria=["trend", "momentum", "volume"] ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "rankings" in strength_ranking assert "scores" in strength_ranking assert "strongest_timeframe" in strength_ranking # ์ˆœ์œ„ ๊ฒ€์ฆ rankings = strength_ranking["rankings"] assert len(rankings) == 3 for rank in rankings: assert "timeframe" in rank assert "score" in rank assert "breakdown" in rank assert 0 <= rank["score"] <= 1 @pytest.mark.asyncio async def test_market_regime_identification(self, multi_timeframe_analyzer, sample_ohlcv_data): """์‹œ์žฅ ์ฒด์ œ ์‹๋ณ„ ํ…Œ์ŠคํŠธ""" # ์‹œ์žฅ ์ฒด์ œ ๋ถ„์„ market_regime = await multi_timeframe_analyzer.identify_market_regime( sample_ohlcv_data, timeframes=["15m", "1h", "4h"] ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "regime" in market_regime assert "confidence" in market_regime assert "characteristics" in market_regime assert "timeframe_regimes" in market_regime # ์ฒด์ œ ํƒ€์ž… ๊ฒ€์ฆ assert market_regime["regime"] in ["trending", "ranging", "volatile", "quiet"] assert 0 <= market_regime["confidence"] <= 1 # ํŠน์„ฑ ๊ฒ€์ฆ characteristics = market_regime["characteristics"] assert "volatility" in characteristics assert "trend_strength" in characteristics assert "volume_profile" in characteristics @pytest.mark.asyncio async def test_divergence_confluence_detection(self, multi_timeframe_analyzer, sample_ohlcv_data): """๋‹ค์ด๋ฒ„์ „์Šค ์ปจํ”Œ๋ฃจ์–ธ์Šค ๊ฐ์ง€ ํ…Œ์ŠคํŠธ""" # ๋‹ค์ด๋ฒ„์ „์Šค ์ปจํ”Œ๋ฃจ์–ธ์Šค ๋ถ„์„ confluence = await multi_timeframe_analyzer.detect_divergence_confluence( sample_ohlcv_data, timeframes=["5m", 
"15m", "1h"], indicators=["rsi", "macd"] ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "confluence_zones" in confluence assert "divergence_count" in confluence assert "strength_distribution" in confluence # ์ปจํ”Œ๋ฃจ์–ธ์Šค ์กด ๊ฒ€์ฆ zones = confluence["confluence_zones"] for zone in zones: assert "start_index" in zone assert "end_index" in zone assert "timeframes_involved" in zone assert "indicators_involved" in zone assert "strength" in zone assert zone["strength"] > 0 @pytest.mark.asyncio async def test_optimal_timeframe_selection(self, multi_timeframe_analyzer, sample_ohlcv_data): """์ตœ์  ํƒ€์ž„ํ”„๋ ˆ์ž„ ์„ ํƒ ํ…Œ์ŠคํŠธ""" # ์ตœ์  ํƒ€์ž„ํ”„๋ ˆ์ž„ ์„ ํƒ optimal_tf = await multi_timeframe_analyzer.select_optimal_timeframe( sample_ohlcv_data, trading_style="scalping", # scalping, day_trading, swing_trading market_conditions={"volatility": "high", "trend": "strong"} ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "recommended_timeframe" in optimal_tf assert "reasoning" in optimal_tf assert "alternative_timeframes" in optimal_tf assert "confidence_score" in optimal_tf # ์ถ”์ฒœ ํƒ€์ž„ํ”„๋ ˆ์ž„ ๊ฒ€์ฆ assert optimal_tf["recommended_timeframe"] in ["1m", "5m", "15m", "1h", "4h", "1d"] assert 0 <= optimal_tf["confidence_score"] <= 1 # ๋Œ€์•ˆ ํƒ€์ž„ํ”„๋ ˆ์ž„ ๊ฒ€์ฆ alternatives = optimal_tf["alternative_timeframes"] assert len(alternatives) > 0 for alt in alternatives: assert "timeframe" in alt assert "score" in alt @pytest.mark.asyncio async def test_fractal_analysis(self, multi_timeframe_analyzer, sample_ohlcv_data): """ํ”„๋ž™ํƒˆ ๋ถ„์„ ํ…Œ์ŠคํŠธ""" # ํ”„๋ž™ํƒˆ ํŒจํ„ด ๋ถ„์„ fractal_analysis = await multi_timeframe_analyzer.analyze_fractals( sample_ohlcv_data, timeframes=["5m", "15m", "1h"], fractal_period=5 ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "fractals" in fractal_analysis assert "fractal_dimensions" in fractal_analysis assert "self_similarity_score" in fractal_analysis # ํ”„๋ž™ํƒˆ ๊ฒ€์ฆ fractals = fractal_analysis["fractals"] for tf, tf_fractals in fractals.items(): for fractal in tf_fractals: assert "type" in 
fractal # up or down assert "index" in fractal assert "price" in fractal assert "strength" in fractal @pytest.mark.asyncio async def test_timeframe_transition_analysis(self, multi_timeframe_analyzer, sample_ohlcv_data): """ํƒ€์ž„ํ”„๋ ˆ์ž„ ์ „ํ™˜ ๋ถ„์„ ํ…Œ์ŠคํŠธ""" # ํƒ€์ž„ํ”„๋ ˆ์ž„ ์ „ํ™˜์  ๋ถ„์„ transition_analysis = await multi_timeframe_analyzer.analyze_timeframe_transitions( sample_ohlcv_data, from_timeframe="5m", to_timeframe="15m" ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "transition_points" in transition_analysis assert "signal_quality" in transition_analysis assert "confirmation_rate" in transition_analysis # ์ „ํ™˜์  ๊ฒ€์ฆ transitions = transition_analysis["transition_points"] for point in transitions: assert "index_5m" in point assert "index_15m" in point assert "signal_type" in point assert "confirmed" in point assert isinstance(point["confirmed"], bool) @pytest.mark.asyncio async def test_volatility_across_timeframes(self, multi_timeframe_analyzer, sample_ohlcv_data): """ํƒ€์ž„ํ”„๋ ˆ์ž„๋ณ„ ๋ณ€๋™์„ฑ ๋ถ„์„ ํ…Œ์ŠคํŠธ""" # ๋ณ€๋™์„ฑ ๋ถ„์„ volatility_analysis = await multi_timeframe_analyzer.analyze_volatility_profile( sample_ohlcv_data, timeframes=["5m", "15m", "1h"], volatility_window=20 ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "volatility_by_timeframe" in volatility_analysis assert "volatility_ratio" in volatility_analysis assert "expansion_contraction" in volatility_analysis # ํƒ€์ž„ํ”„๋ ˆ์ž„๋ณ„ ๋ณ€๋™์„ฑ vol_by_tf = volatility_analysis["volatility_by_timeframe"] for tf in ["5m", "15m", "1h"]: assert tf in vol_by_tf assert "average" in vol_by_tf[tf] assert "current" in vol_by_tf[tf] assert "percentile" in vol_by_tf[tf] assert 0 <= vol_by_tf[tf]["percentile"] <= 100 @pytest.mark.asyncio async def test_entry_exit_optimization(self, multi_timeframe_analyzer, sample_ohlcv_data): """์ง„์ž…/์ฒญ์‚ฐ ์ตœ์ ํ™” ํ…Œ์ŠคํŠธ""" # ์ง„์ž…/์ฒญ์‚ฐ ํฌ์ธํŠธ ์ตœ์ ํ™” optimization_results = await multi_timeframe_analyzer.optimize_entry_exit_points( sample_ohlcv_data, primary_timeframe="15m", 
confirmation_timeframes=["5m", "1h"], strategy_type="momentum" ) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "entry_points" in optimization_results assert "exit_points" in optimization_results assert "risk_reward_ratio" in optimization_results assert "win_rate_estimate" in optimization_results # ์ง„์ž…์  ๊ฒ€์ฆ entry_points = optimization_results["entry_points"] for entry in entry_points: assert "index" in entry assert "price" in entry assert "confidence" in entry assert "confirmations" in entry assert len(entry["confirmations"]) > 0 @pytest.mark.asyncio async def test_data_synchronization(self, multi_timeframe_analyzer): """๋ฐ์ดํ„ฐ ๋™๊ธฐํ™” ํ…Œ์ŠคํŠธ""" # ๋น„๋™๊ธฐ ๋ฐ์ดํ„ฐ ์ƒ์„ฑ data_1m = [] data_5m = [] base_time = datetime(2024, 1, 1, 0, 0, 0) # 1๋ถ„ ๋ฐ์ดํ„ฐ (์ •ํ™•ํ•œ ์‹œ๊ฐ„) for i in range(60): data_1m.append({ "timestamp": (base_time + timedelta(minutes=i)).isoformat(), "close": 50000 + i * 10 }) # 5๋ถ„ ๋ฐ์ดํ„ฐ (์•ฝ๊ฐ„ ์–ด๊ธ‹๋‚œ ์‹œ๊ฐ„) for i in range(12): # 2์ดˆ ๋Šฆ์€ ํƒ€์ž„์Šคํƒฌํ”„ timestamp = base_time + timedelta(minutes=i*5, seconds=2) data_5m.append({ "timestamp": timestamp.isoformat(), "close": 50000 + i * 50 }) # ๋ฐ์ดํ„ฐ ๋™๊ธฐํ™” synced_data = await multi_timeframe_analyzer.synchronize_timeframe_data({ "1m": data_1m, "5m": data_5m }) # ๊ฒฐ๊ณผ ๊ฒ€์ฆ assert "synchronized_timestamps" in synced_data assert "aligned_data" in synced_data assert "alignment_quality" in synced_data # ์ •๋ ฌ ํ’ˆ์งˆ ํ™•์ธ assert synced_data["alignment_quality"] > 0.8 @pytest.mark.asyncio async def test_error_handling(self, multi_timeframe_analyzer): """์˜ค๋ฅ˜ ์ฒ˜๋ฆฌ ํ…Œ์ŠคํŠธ""" # ์ž˜๋ชป๋œ ํƒ€์ž„ํ”„๋ ˆ์ž„ with pytest.raises(TimeframeError): await multi_timeframe_analyzer.aggregate_to_timeframe( [], target_timeframe="invalid" ) # ๋ฐ์ดํ„ฐ ๋ถ€์กฑ with pytest.raises(InsufficientDataError): await multi_timeframe_analyzer.analyze_timeframes( [{"timestamp": "2024-01-01", "close": 100}], # 1๊ฐœ ๋ฐ์ดํ„ฐ timeframes=["1h", "4h"] ) # ๋ถ„์„ ์˜ค๋ฅ˜ with pytest.raises(AnalysisError): await 
multi_timeframe_analyzer.detect_trend_alignment( None, # None ๋ฐ์ดํ„ฐ timeframes=["5m"] ) @pytest.mark.asyncio async def test_performance_metrics(self, multi_timeframe_analyzer, sample_ohlcv_data): """์„ฑ๋Šฅ ๋ฉ”ํŠธ๋ฆญ ํ…Œ์ŠคํŠธ""" # ์—ฌ๋Ÿฌ ๋ถ„์„ ์ˆ˜ํ–‰ await multi_timeframe_analyzer.analyze_timeframes( sample_ohlcv_data, timeframes=["5m", "15m"] ) await multi_timeframe_analyzer.detect_trend_alignment( sample_ohlcv_data, timeframes=["5m", "15m"] ) # ์„ฑ๋Šฅ ๋ฉ”ํŠธ๋ฆญ ํ™•์ธ metrics = multi_timeframe_analyzer.get_performance_metrics() assert "analysis_count" in metrics assert "average_processing_time" in metrics assert "cache_hit_rate" in metrics assert metrics["analysis_count"] > 0 assert metrics["average_processing_time"] > 0 assert 0 <= metrics["cache_hit_rate"] <= 1

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/whdghk1907/mcp-market-statistics'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.