We provide all the information about MCP servers via our MCP API.
curl -X GET 'https://glama.ai/api/mcp/v1/servers/sgx-labs/statelessagent'
If you have feedback or need assistance with the MCP directory API, please join our Discord server.
config-openai-compatible.toml•433 B
# The same [embedding] section works unchanged across OpenAI-compatible servers.
# Works with: LM Studio, vLLM, llama.cpp, OpenRouter
[embedding]
provider = "openai-compatible"
base_url = "http://localhost:1234/v1" # LM Studio default
model = "nomic-embed-text-v1.5"
# api_key = "" # optional — only needed if the server enforces authentication
# Alternative: to use OpenRouter instead of a local server, uncomment the two
# lines below (they override base_url and model above):
# base_url = "https://openrouter.ai/api/v1"
# model = "nomic-ai/nomic-embed-text-v1.5"
# api_key is then taken from the OPENAI_API_KEY environment variable