CodeGraph CLI MCP Server

by Jakedismo
Cargo.toml (2.96 kB)
[package]
name = "codegraph-mcp-server"
version = "0.1.0"
edition = "2021"
authors = ["Solita CodeGraph"]
license = "Apache-2.0"

[lib]
path = "src/lib.rs"

[features]
default = ["daemon"]
ai-enhanced = [
    "codegraph-mcp-autoagents",
    "codegraph-ai",
    "codegraph-mcp-autoagents/autoagents-experimental",
    "codegraph-ai/all-cloud-providers",
    "codegraph-mcp/ai-enhanced"
]
server-http = ["dep:axum", "dep:hyper", "dep:tower", "dep:http-body-util"]
autoagents-experimental = ["codegraph-mcp-autoagents/autoagents-experimental", "codegraph-mcp/autoagents-experimental"]
autoagents-lats = ["codegraph-mcp-autoagents/autoagents-lats", "codegraph-mcp/autoagents-lats"]
all-agents = ["autoagents-experimental", "autoagents-lats"]
embeddings = ["codegraph-mcp-tools/embeddings", "codegraph-mcp/embeddings"]
embeddings-ollama = ["embeddings", "codegraph-mcp/embeddings-ollama"]
embeddings-openai = ["embeddings", "codegraph-mcp/embeddings-openai"]
embeddings-jina = ["embeddings", "codegraph-mcp/embeddings-jina"]
embeddings-lmstudio = ["embeddings", "codegraph-mcp/embeddings-lmstudio"]
embeddings-local = ["embeddings", "codegraph-mcp/embeddings-local"]
all-embeddings = [
    "embeddings-ollama",
    "embeddings-openai",
    "embeddings-jina",
    "embeddings-lmstudio",
    "embeddings-local"
]
daemon = ["dep:codegraph-mcp-daemon"]
# Full features for cargo install --features full
full = [
    "daemon",
    "ai-enhanced",
    "server-http",
    "all-agents",
    "all-embeddings"
]

[dependencies]
codegraph-mcp-core = { path = "../codegraph-mcp-core" }
codegraph-mcp-tools = { path = "../codegraph-mcp-tools" }
codegraph-mcp-autoagents = { path = "../codegraph-mcp-autoagents", optional = true }
codegraph-mcp-daemon = { path = "../codegraph-mcp-daemon", optional = true }
codegraph-core = { path = "../codegraph-core" }
codegraph-graph = { path = "../codegraph-graph" }
codegraph-vector = { path = "../codegraph-vector" }
codegraph-ai = { path = "../codegraph-ai", optional = true }
rmcp = { workspace = true }
futures = { workspace = true }
schemars = { workspace = true }
uuid = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tracing = { workspace = true }
tokio = { workspace = true }
clap = { workspace = true }
thiserror = { workspace = true }
async-trait = { workspace = true }
axum = { workspace = true, optional = true }
hyper = { workspace = true, optional = true }
tower = { workspace = true, optional = true }
http-body-util = { version = "0.1", optional = true }
colored = { workspace = true }
indicatif = { workspace = true }
atty = { workspace = true }
chrono = { workspace = true }
tracing-subscriber = { workspace = true }
serde_yaml = "0.9"
anyhow = { workspace = true }
codegraph-mcp = { path = "../codegraph-mcp" }
tracing-appender = { workspace = true }
dirs = "6.0.0"
dotenv = "0.15"
nix = { version = "0.30.1", features = ["process", "signal"] }

[dev-dependencies]
serial_test = "3.2"
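
As the comment in [features] notes, the binary is meant to be installed with every optional capability via the full feature, which pulls in daemon, ai-enhanced, server-http, all-agents, and all-embeddings. A minimal sketch of that invocation, assuming you run it from a local checkout of this crate's directory (the surrounding workspace layout is not shown here):

# sketch: install with all optional features enabled
cargo install --path . --features full

Narrower builds can enable individual feature groups instead, for example --features daemon,embeddings-ollama.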

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Jakedismo/codegraph-rust'
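
To inspect the response programmatically, the output can be piped through jq; this sketch assumes the endpoint returns JSON and needs no authentication, since the response shape is not documented here:

# sketch: pretty-print the server record returned by the directory API
curl -s 'https://glama.ai/api/mcp/v1/servers/Jakedismo/codegraph-rust' | jq .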

If you have feedback or need assistance with the MCP directory API, please join our Discord server.