layer-cache-inserts.json (9.86 kB)
{ "id": null, "uid": "layer-cache-inserts", "title": "Layer Cache Inserts", "tags": [ "layercache", "cache", "inserts", "metrics" ], "timezone": "browser", "schemaVersion": 39, "version": 0, "refresh": "5s", "annotations": { "list": [ { "builtIn": 1, "datasource": { "type": "grafana", "uid": "-- Grafana --" }, "enable": true, "hide": true, "iconColor": "rgba(0, 211, 255, 1)", "name": "Annotations & Alerts", "type": "dashboard" } ] }, "templating": { "list": [ { "name": "datasource", "type": "datasource", "label": "Data Source", "query": "prometheus", "refresh": 1, "hide": 0 }, { "name": "service", "type": "query", "label": "Service", "datasource": { "type": "prometheus", "uid": "${datasource}" }, "query": "label_values(layer_cache_inserts_total, exported_job)", "refresh": 2, "sort": 1, "multi": true, "includeAll": true, "allValue": ".*" }, { "name": "cache", "type": "query", "label": "Cache", "datasource": { "type": "prometheus", "uid": "${datasource}" }, "query": "label_values(layer_cache_inserts_total, cache_name)", "refresh": 2, "sort": 1, "multi": true, "includeAll": true } ] }, "panels": [ { "id": 1, "type": "piechart", "title": "Data Volume by Cache (1h)", "interval": "1m", "gridPos": { "x": 0, "y": 0, "w": 24, "h": 8 }, "datasource": { "type": "prometheus", "uid": "${datasource}" }, "targets": [ { "datasource": { "type": "prometheus", "uid": "${datasource}" }, "expr": "sum(increase(layer_cache_insert_bytes_total{exported_job=~\"${service}\"}[1h])) by (cache_name)", "legendFormat": "{{cache_name}}", "refId": "A", "description": "Total bytes inserted into each cache over the last hour" } ], "fieldConfig": { "defaults": { "unit": "bytes" } }, "options": { "legend": { "displayMode": "table", "placement": "right", "calcs": [ "lastNotNull" ], "values": [ "value", "percent" ] }, "pieType": "pie", "tooltip": { "mode": "single" } } }, { "id": 2, "type": "timeseries", "title": "Insert Rate by Cache", "interval": "1m", "gridPos": { "x": 0, "y": 8, "w": 12, "h": 8 }, "datasource": { "type": "prometheus", "uid": "${datasource}" }, "targets": [ { "datasource": { "type": "prometheus", "uid": "${datasource}" }, "expr": "sum(rate(layer_cache_inserts_total{exported_job=~\"${service}\"}[$__interval])) by (cache_name)", "legendFormat": "{{cache_name}}", "refId": "A", "description": "Rate of cache insert operations per second (5m average)" } ], "fieldConfig": { "defaults": { "unit": "ops", "color": { "mode": "palette-classic" }, "custom": { "lineWidth": 2, "fillOpacity": 10, "showPoints": "never" } } }, "options": { "tooltip": { "mode": "multi", "sort": "desc" }, "legend": { "displayMode": "table", "placement": "bottom", "calcs": [ "lastNotNull", "max", "mean" ] } } }, { "id": 3, "type": "timeseries", "title": "Byte Throughput by Cache", "interval": "1m", "gridPos": { "x": 12, "y": 8, "w": 12, "h": 8 }, "datasource": { "type": "prometheus", "uid": "${datasource}" }, "targets": [ { "datasource": { "type": "prometheus", "uid": "${datasource}" }, "expr": "sum(rate(layer_cache_insert_bytes_total{exported_job=~\"${service}\"}[$__interval])) by (cache_name)", "legendFormat": "{{cache_name}}", "refId": "A", "description": "Rate of bytes being inserted into each cache per second (5m average)" } ], "fieldConfig": { "defaults": { "unit": "Bps", "color": { "mode": "palette-classic" }, "custom": { "lineWidth": 2, "fillOpacity": 10, "showPoints": "never" } } }, "options": { "tooltip": { "mode": "multi", "sort": "desc" }, "legend": { "displayMode": "table", "placement": "bottom", "calcs": [ "lastNotNull", "max", 
"mean" ] } } }, { "id": 4, "type": "timeseries", "title": "${cache} - Insert Size Distribution", "repeat": "cache", "repeatDirection": "v", "interval": "1m", "gridPos": { "x": 0, "y": 16, "w": 24, "h": 8 }, "datasource": { "type": "prometheus", "uid": "${datasource}" }, "targets": [ { "datasource": { "type": "prometheus", "uid": "${datasource}" }, "expr": "histogram_quantile(0.50, sum(rate(layer_cache_insert_size_bytes_bucket{exported_job=~\"${service}\", cache_name=\"${cache}\"}[$__interval])) by (le))", "legendFormat": "p50", "refId": "A", "description": "50th percentile (median) insert size - half of inserts are smaller than this" }, { "datasource": { "type": "prometheus", "uid": "${datasource}" }, "expr": "histogram_quantile(0.95, sum(rate(layer_cache_insert_size_bytes_bucket{exported_job=~\"${service}\", cache_name=\"${cache}\"}[$__interval])) by (le))", "legendFormat": "p95", "refId": "B", "description": "95th percentile insert size - 95% of inserts are smaller than this" }, { "datasource": { "type": "prometheus", "uid": "${datasource}" }, "expr": "histogram_quantile(0.99, sum(rate(layer_cache_insert_size_bytes_bucket{exported_job=~\"${service}\", cache_name=\"${cache}\"}[$__interval])) by (le))", "legendFormat": "p99", "refId": "C", "description": "99th percentile insert size - 99% of inserts are smaller than this" }, { "datasource": { "type": "prometheus", "uid": "${datasource}" }, "expr": "histogram_quantile(1.0, sum(rate(layer_cache_insert_size_bytes_bucket{exported_job=~\"${service}\", cache_name=\"${cache}\"}[$__interval])) by (le))", "legendFormat": "max", "refId": "D", "description": "Maximum insert size observed in the time window" } ], "fieldConfig": { "defaults": { "unit": "bytes", "color": { "mode": "palette-classic" }, "custom": { "lineWidth": 2, "fillOpacity": 10, "showPoints": "never" } }, "overrides": [ { "matcher": { "id": "byName", "options": "p50" }, "properties": [ { "id": "color", "value": { "mode": "fixed", "fixedColor": "green" } } ] }, { "matcher": { "id": "byName", "options": "p95" }, "properties": [ { "id": "color", "value": { "mode": "fixed", "fixedColor": "yellow" } } ] }, { "matcher": { "id": "byName", "options": "p99" }, "properties": [ { "id": "color", "value": { "mode": "fixed", "fixedColor": "orange" } } ] }, { "matcher": { "id": "byName", "options": "max" }, "properties": [ { "id": "color", "value": { "mode": "fixed", "fixedColor": "red" } } ] } ] }, "options": { "tooltip": { "mode": "multi", "sort": "desc" }, "legend": { "displayMode": "table", "placement": "bottom", "calcs": [ "lastNotNull", "max", "mean" ] } } } ], "time": { "from": "now-15m", "to": "now" }, "timepicker": { "refresh_intervals": [ "5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d" ] } }
