We provide all the information about MCP servers via our MCP API.
curl -X GET 'https://glama.ai/api/mcp/v1/servers/mix0z/Semantic-Search-MCP'
If you have feedback or need assistance with the MCP directory API, please join our Discord server.
{
"timestamp": "20251209_210022",
"stability_mode": true,
"searchers": {
"sgr_gemini_flash_lite": {
"output_file": "eval_results/sgr_gemini_flash_lite_stability_20251209_204726.json",
"num_queries": 26,
"num_runs_per_query": 10,
"metrics": {
"precision": {
"mean": 0.2955769230769231,
"std": 0.3751947471521933,
"cv": 1.2693641413086565
},
"recall": {
"mean": 0.308974358974359,
"std": 0.4095859906792269,
"cv": 1.3256310071775808
},
"f1": {
"mean": 0.2901098901098902,
"std": 0.3777935824871397,
"cv": 1.3022430305427917
},
"success_rate": {
"mean": 0.39999999999999997,
"std": 0.4551922670696418,
"cv": 1.1379806676741044
},
"file_discovery_rate": {
"mean": 0.6144230769230768,
"std": 0.40297199856060917,
"cv": 0.655854269957799
},
"substring_coverage": {
"mean": 0.3531774475524475,
"std": 0.3925991324232528,
"cv": 1.1116200514613863
}
},
"stability": {
"avg_stability_score": 0.7394817323396679,
"stable_queries_count": 16
},
"latency": {
"mean_ms": 20625.602037173052,
"std_ms": 7940.306363731624,
"cv": 0.38497331372053967
}
}
},
"ranking": [
"sgr_gemini_flash_lite"
]
}