Skip to main content
Glama

CodeGraph CLI MCP Server

by Jakedismo
Cargo.toml — 2.63 kB
[package]
name = "codegraph-mcp"
version.workspace = true
edition.workspace = true
authors.workspace = true
license.workspace = true
description = "Model Context Protocol implementation for CodeGraph"

[dependencies]
tokio = { workspace = true }
tokio-tungstenite = { workspace = true }
futures = { workspace = true }
async-trait = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
thiserror = { workspace = true }
anyhow = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
uuid = { workspace = true }
chrono = { workspace = true }
url = { workspace = true }
dashmap = { workspace = true }
parking_lot = { workspace = true }
tokio-util = { workspace = true }
fastrand = { workspace = true }
reqwest = { workspace = true }
sha2 = { workspace = true }
toml = { workspace = true }
rmcp = { workspace = true }
schemars = { workspace = true }
once_cell = { workspace = true }
lru = { workspace = true }
atty = { workspace = true }

# CLI dependencies
clap = { workspace = true }
colored = { workspace = true }
indicatif = { workspace = true }
which = "8.0"
nix = { version = "0.30.1", features = ["process", "signal"] }
notify = "8.2.0"
walkdir = "2.5"
regex = { workspace = true }
rayon = { workspace = true }
num_cpus = { workspace = true }

# Internal dependencies
codegraph-core = { workspace = true }
codegraph-parser = { workspace = true }
codegraph-graph = { workspace = true }
codegraph-vector = { workspace = true, optional = true, default-features = false }
codegraph-ai = { workspace = true, optional = true } # Re-enabled for AI-powered symbol resolution
faiss = { workspace = true, optional = true }
# codegraph-api not used directly here; avoid pulling heavy deps
## core-rag-mcp-server intentionally not linked to keep binary lean
axum = { workspace = true, optional = true }
hyper = { workspace = true, optional = true }

[dev-dependencies]
tokio-test = { workspace = true }
tempfile = { workspace = true }
anyhow = { workspace = true }
# rmcp already available through main dependencies for testing

[[bin]]
name = "codegraph"
path = "src/bin/codegraph.rs"

[[bin]]
name = "codegraph-official"
path = "src/bin/codegraph-official.rs"

[features]
default = []
faiss = ["dep:faiss"]
embeddings = ["dep:codegraph-vector"]
embeddings-local = ["embeddings", "codegraph-vector/local-embeddings"]
embeddings-openai = ["embeddings", "codegraph-vector/openai"]
embeddings-ollama = ["embeddings", "codegraph-vector/ollama"]
server-http = ["dep:axum", "dep:hyper"]
qwen-integration = []
ai-enhanced = ["dep:codegraph-ai", "faiss", "embeddings"]

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Jakedismo/codegraph-rust'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.