Skip to main content
Glama

CodeGraph CLI MCP Server

by Jakedismo
Cargo.toml (3.6 kB)
[package]
name = "codegraph-mcp"
version.workspace = true
edition.workspace = true
authors.workspace = true
license.workspace = true
description = "Model Context Protocol implementation for CodeGraph"

[dependencies]
tokio = { workspace = true }
tokio-tungstenite = { workspace = true }
futures = { workspace = true }
async-trait = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
serde_yaml = "0.9"
thiserror = { workspace = true }
anyhow = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
tracing-appender = "0.2"
uuid = { workspace = true }
chrono = { workspace = true }
url = { workspace = true }
dashmap = { workspace = true }
parking_lot = { workspace = true }
fastrand = { workspace = true }
reqwest = { workspace = true }
sha2 = { workspace = true }
toml = { workspace = true }
rmcp = { workspace = true }
schemars = { workspace = true }
lru = { workspace = true }
atty = { workspace = true }
rustc-demangle = "0.1"
symbolic-demangle = "12"
syn = { version = "2", features = ["parsing"] }

# CLI dependencies
clap = { workspace = true }
colored = { workspace = true }
indicatif = { workspace = true }
dirs = "6.0"
dotenv = "0.15"
nix = { version = "0.30.1", features = ["process", "signal"] }
notify = "8.2.0"
walkdir = "2.5"
regex = { workspace = true }
rayon = { workspace = true }
num_cpus = { workspace = true }

# Pattern matching for daemon mode
glob-match = "0.2"

# Internal dependencies
codegraph-core = { workspace = true }
codegraph-parser = { workspace = true }
codegraph-graph = { workspace = true, features = ["surrealdb"] }
codegraph-vector = { workspace = true, optional = true, default-features = false }
codegraph-ai = { workspace = true, optional = true } # Re-enabled for AI-powered symbol resolution
codegraph-mcp-autoagents = { path = "../codegraph-mcp-autoagents", optional = true }
semchunk-rs = { workspace = true } # Semantic chunking for long code nodes
tokenizers = { workspace = true } # Qwen2.5-Coder tokenizer for accurate token counting
# codegraph-api not used directly here; avoid pulling heavy deps
## core-rag-mcp-server intentionally not linked to keep binary lean

# AutoAgents framework for agentic workflows
autoagents = { git = "https://github.com/liquidos-ai/AutoAgents", optional = true }
autoagents-derive = { git = "https://github.com/liquidos-ai/AutoAgents", optional = true }
codegraph-mcp-core = { path = "../codegraph-mcp-core" }
codegraph-mcp-tools = { path = "../codegraph-mcp-tools", optional = true }
unicode-bom = "2.0.3"

[dev-dependencies]
tempfile = { workspace = true }
anyhow = { workspace = true }
serial_test = "3.2"
# rmcp already available through main dependencies for testing
codegraph-vector = { workspace = true, features = ["ollama"] }

[features]
default = []
daemon = ["codegraph-parser/watcher-experimental"]
embeddings = ["dep:codegraph-vector", "dep:codegraph-mcp-tools"]
embeddings-local = ["embeddings", "codegraph-vector/local-embeddings"]
embeddings-openai = ["embeddings", "codegraph-vector/openai"]
embeddings-ollama = ["embeddings", "codegraph-vector/ollama"]
embeddings-jina = ["embeddings", "codegraph-vector/jina"]
embeddings-lmstudio = ["embeddings", "codegraph-vector/lmstudio"]
cloud = ["embeddings-jina", "codegraph-graph/surrealdb"]
server-http = []
ai-enhanced = ["dep:codegraph-ai", "embeddings", "autoagents-experimental", "codegraph-ai/openai-compatible"]
autoagents-experimental = ["dep:autoagents", "dep:autoagents-derive"]
autoagents-lats = ["codegraph-mcp-autoagents/autoagents-lats", "autoagents-experimental"]

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Jakedismo/codegraph-rust'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.