# NOTE(review): the three lines below are stray prose (MCP directory text) that was
# injected above the manifest and made the file invalid TOML; commented out so the
# manifest parses. They should likely be removed entirely — confirm with the team.
# We provide all the information about MCP servers via our MCP API.
# curl -X GET 'https://glama.ai/api/mcp/v1/servers/get-convex/convex-backend'
# If you have feedback or need assistance with the MCP directory API, please join our Discord server
# Package manifest for the load_generator crate (Convex load-testing tool).
[package]
name = "load_generator"
version = "0.1.0"
authors = ["Convex, Inc. <no-reply@convex.dev>"]
edition = "2024"
license = "LicenseRef-FSL-1.1-Apache-2.0"

# Binary target: the executable is named `load-generator` (kebab-case),
# intentionally distinct from the snake_case package name above.
[[bin]]
name = "load-generator"
path = "src/main.rs"

# All versions are inherited from the workspace root; keep keys alphabetized.
[dependencies]
anyhow = { workspace = true }
axum = { workspace = true }
backend_harness = { workspace = true }
clap = { workspace = true }
cmd_util = { workspace = true }
common = { workspace = true }
convex = { workspace = true }
convex_sync_types = { workspace = true }
errors = { workspace = true }
futures = { workspace = true }
health_check = { workspace = true }
log_interleaver = { workspace = true }
maplit = { workspace = true }
metrics = { workspace = true }
performance_stats = { workspace = true }
prometheus = { workspace = true }
rand = { workspace = true }
runtime = { workspace = true }
sentry = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
strum = { workspace = true }
tokio = { workspace = true }
# `native-tls` enables TLS-backed (wss://) websocket connections via the
# platform's native TLS implementation.
tokio-tungstenite = { workspace = true, features = ["native-tls"] }
tracing = { workspace = true }
tungstenite = { workspace = true }

# Inherit lint configuration from the workspace root.
[lints]
workspace = true