
Convex MCP server

Official, by get-convex
Cargo.toml
[package]
name = "value"
description = "Serverside value types supported by Convex (convex.dev)"
version = "0.1.0"
authors = ["Convex, Inc. <no-reply@convex.dev>"]
edition = "2021"
license = "LicenseRef-FSL-1.1-Apache-2.0"

[lib]
doctest = false

[features]
testing = [
    "dep:byteorder",
    "errors/testing",
    "metrics/testing",
    "proptest",
    "proptest-derive",
    "sync_types/testing",
]

[dependencies]
anyhow = { workspace = true }
base-62 = { workspace = true }
base64 = { workspace = true }
byteorder = { workspace = true, optional = true }
bytes = { workspace = true }
compact_str = { workspace = true }
derive_more = { workspace = true }
errors = { workspace = true }
hex = { workspace = true }
humansize = { workspace = true }
imbl = { workspace = true }
metrics = { workspace = true }
paste = { workspace = true }
proptest = { workspace = true, optional = true }
proptest-derive = { workspace = true, optional = true }
serde = { workspace = true }
serde_json = { workspace = true }
sha2 = { workspace = true }
sync_types = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
uuid = { workspace = true }

[target.'cfg(not(target_os="windows"))'.dependencies]
sha2 = { workspace = true, features = ["asm"] }

[dev-dependencies]
# We only use `base32` in tests for checking that our custom implementation matches. We diverge from
# `base32` by using lowercase characters and not being permissive while decoding.
base32 = { workspace = true }
byteorder = { workspace = true }
cmd_util = { workspace = true }
convex = { workspace = true, features = ["testing"] }
criterion = { workspace = true }
errors = { workspace = true, features = ["testing"] }
metrics = { workspace = true, features = ["testing"] }
proptest = { workspace = true }
proptest-derive = { workspace = true }
sync_types = { workspace = true, features = [
    "testing",
] }

[[bench]]
name = "document_id"
harness = false

[[bench]]
name = "json"
harness = false

[[bench]]
name = "base32"
harness = false

[lints]
workspace = true

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/get-convex/convex-backend'
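
The same endpoint can be called programmatically from any HTTP client. Below is a minimal TypeScript sketch using the built-in fetch API (Node 18+, Deno, or a browser); the response is treated as untyped JSON because the exact response schema is not shown in this section and is an assumption left to the reader to inspect.

// Minimal sketch: fetch this server's metadata from the Glama MCP directory API.
// The URL matches the curl example above; the response shape is not specified here,
// so the body is returned as unknown JSON for the caller to inspect.
const url = "https://glama.ai/api/mcp/v1/servers/get-convex/convex-backend";

async function fetchServerInfo(): Promise<unknown> {
  const res = await fetch(url, { method: "GET" });
  if (!res.ok) {
    throw new Error(`Directory API request failed: ${res.status} ${res.statusText}`);
  }
  return res.json();
}

fetchServerInfo()
  .then((info) => console.log(info))
  .catch((err) => console.error(err));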

If you have feedback or need assistance with the MCP directory API, please join our Discord server.