
OpenTelemetry MCP Server

prometheus_tools.py (5.7 kB)
"""Prometheus MCP tools.""" import logging from typing import Any, Dict, Optional from ..backends.prometheus import PrometheusClient from ..utils.time_helpers import ( parse_time_range, to_prometheus_time, parse_time ) logger = logging.getLogger(__name__) async def query_prometheus( client: PrometheusClient, query: str, time: Optional[str] = None ) -> Dict[str, Any]: """ Execute raw PromQL instant query. Args: client: Prometheus client query: PromQL query string time: Optional evaluation timestamp Returns: Query results """ try: # Parse time if provided time_value = None if time: parsed_time = parse_time(time) if parsed_time: time_value = to_prometheus_time(parsed_time) result = await client.query(query, time_value) return { "success": True, "query": query, "result": result } except Exception as e: logger.error(f"Error executing Prometheus query: {e}") return { "success": False, "error": str(e), "query": query } async def query_prometheus_range( client: PrometheusClient, query: str, start: Optional[str] = None, end: Optional[str] = None, step: str = "15s" ) -> Dict[str, Any]: """ Execute PromQL range query. Args: client: Prometheus client query: PromQL query string start: Start time (relative like '1h' or absolute) end: End time (relative like 'now' or absolute) step: Query resolution step Returns: Query results with time series data """ try: # Parse time range start_dt, end_dt = parse_time_range(start, end) start_ts = to_prometheus_time(start_dt) end_ts = to_prometheus_time(end_dt) result = await client.query_range(query, start_ts, end_ts, step) return { "success": True, "query": query, "start": start or "auto", "end": end or "now", "step": step, "result": result } except Exception as e: logger.error(f"Error executing Prometheus range query: {e}") return { "success": False, "error": str(e), "query": query } async def list_metrics( client: PrometheusClient, prefix: Optional[str] = None ) -> Dict[str, Any]: """ List all available metrics in Prometheus. Args: client: Prometheus client prefix: Optional prefix to filter metrics Returns: List of metric names """ try: # Query __name__ label to get all metrics result = await client.label_values("__name__") if result.get("status") == "success": metrics = result.get("data", []) # Filter by prefix if provided if prefix: metrics = [m for m in metrics if m.startswith(prefix)] return { "success": True, "count": len(metrics), "metrics": metrics[:1000] # Limit to prevent huge responses } else: return { "success": False, "error": "Failed to fetch metrics" } except Exception as e: logger.error(f"Error listing metrics: {e}") return { "success": False, "error": str(e) } async def list_label_values( client: PrometheusClient, label: str, metric: Optional[str] = None ) -> Dict[str, Any]: """ Get all values for a specific label. 
Args: client: Prometheus client label: Label name (e.g., 'service', 'job', 'namespace') metric: Optional metric to filter label values Returns: List of label values """ try: match = [f"{{{metric}}}"] if metric else None result = await client.label_values(label, match) if result.get("status") == "success": values = result.get("data", []) return { "success": True, "label": label, "count": len(values), "values": values } else: return { "success": False, "error": "Failed to fetch label values" } except Exception as e: logger.error(f"Error listing label values: {e}") return { "success": False, "error": str(e) } async def list_labels( client: PrometheusClient, metric: Optional[str] = None ) -> Dict[str, Any]: """ Get all label names in Prometheus. Args: client: Prometheus client metric: Optional metric to get labels for Returns: List of label names """ try: match = [f"{{{metric}}}"] if metric else None result = await client.labels(match) if result.get("status") == "success": labels = result.get("data", []) return { "success": True, "count": len(labels), "labels": labels } else: return { "success": False, "error": "Failed to fetch labels" } except Exception as e: logger.error(f"Error listing labels: {e}") return { "success": False, "error": str(e) }
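This module only defines the tool coroutines; how they are registered with an MCP server and how the PrometheusClient is constructed are not shown on this page. The snippet below is a minimal sketch of one way to wire them up using FastMCP from the official MCP Python SDK. The package name opentelemetry_mcp, the PrometheusClient(base_url=...) constructor, and the exposed tool names are assumptions for illustration, not taken from this repository.

# Hypothetical wiring sketch -- not part of this repository.
# Assumes the official MCP Python SDK (FastMCP); the package layout and
# the PrometheusClient constructor signature below are guesses.
from mcp.server.fastmcp import FastMCP

from opentelemetry_mcp.backends.prometheus import PrometheusClient  # assumed package name
from opentelemetry_mcp.tools.prometheus_tools import (               # assumed module path
    query_prometheus,
    query_prometheus_range,
)

mcp = FastMCP("opentelemetry-mcp")
client = PrometheusClient(base_url="http://localhost:9090")  # assumed constructor


@mcp.tool()
async def prometheus_query(query: str, time: str | None = None) -> dict:
    """Run a PromQL instant query against the configured Prometheus."""
    return await query_prometheus(client, query, time)


@mcp.tool()
async def prometheus_query_range(
    query: str,
    start: str | None = None,
    end: str | None = None,
    step: str = "15s",
) -> dict:
    """Run a PromQL range query over a relative or absolute time window."""
    return await query_prometheus_range(client, query, start, end, step)


if __name__ == "__main__":
    # Serves the tools over stdio, FastMCP's default transport.
    mcp.run()

With a wrapper like this, an MCP client can call prometheus_query_range with arguments such as start="1h", end="now", step="30s", and the tool returns the same success/error dictionaries the module builds above.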
