
CodeGraph CLI MCP Server

by Jakedismo
Cargo.toml (2.56 kB)
[package]
name = "codegraph-api"
version.workspace = true
edition.workspace = true
authors.workspace = true
license.workspace = true
description = "REST API server for CodeGraph using Axum"

[dependencies]
codegraph-core = { workspace = true }
codegraph-graph = { workspace = true }
codegraph-parser = { workspace = true }
codegraph-vector = { workspace = true }
memscope-rs = { workspace = true, optional = true }
axum = { workspace = true }
tower = { workspace = true }
# Enable compression features for response compression
tower-http = { workspace = true, features = ["compression-br", "compression-gzip", "compression-deflate", "compression-zstd", "trace", "cors", "set-header"] }
hyper = { workspace = true }
http = { workspace = true }
tokio = { workspace = true }
tokio-util = { workspace = true }
reqwest = { workspace = true }
url = { workspace = true }
parking_lot = { workspace = true }
dashmap = { workspace = true }
async-trait = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
uuid = { workspace = true }
chrono = { workspace = true }
lazy_static = { workspace = true }
utoipa = { workspace = true }
utoipa-swagger-ui = { workspace = true, optional = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
anyhow = { workspace = true }
thiserror = { workspace = true }
sysinfo = { workspace = true }
sha2 = { workspace = true }

# GraphQL
async-graphql = { workspace = true, features = ["chrono", "dataloader"] }
async-graphql-axum = { workspace = true }
async-stream = "0.3.6"

# Streaming responses for large datasets
axum-streams = { version = "0.23.1", features = ["json", "csv", "text"] }
futures = { workspace = true }
zstd = { workspace = true }

# Auth
jsonwebtoken = "9.3.1"
governor = { workspace = true, features = ["std"] }
prometheus = { workspace = true }

[dev-dependencies]
tokio-test = { workspace = true }
hyper = { workspace = true }
criterion = { workspace = true }
axum-test = "18.1"
async-trait = { workspace = true }
tempfile = { workspace = true }

[features]
# Enable runtime memory leak detection, stack traces, and Prometheus metrics.
# This pulls in a global tracking allocator from memscope-rs and adds routes
# for memory stats and leak report exports.
leak-detect = ["dep:memscope-rs"]
openapi-ui = ["dep:utoipa-swagger-ui"]
minimal = []
graphql = []
http2 = []

[[bench]]
name = "api_benchmarks"
harness = false
path = "src/graphql/benchmarks.rs"

[[bench]]
name = "streaming_benchmarks"
harness = false
path = "benches/streaming_benchmarks.rs"
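The manifest wires Axum in as the HTTP framework and enables Brotli, gzip, deflate, and zstd response compression through tower-http. The following is a minimal sketch of how those pieces typically fit together; the route, port, and handler names are illustrative and not taken from the codegraph-api source, and the exact serve API depends on the workspace-pinned axum version (this sketch assumes axum 0.7+).

use axum::{routing::get, Router};
use tower_http::compression::CompressionLayer;

// Illustrative handler; the real codegraph-api routes are not shown here.
async fn health() -> &'static str {
    "ok"
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // CompressionLayer negotiates br/gzip/deflate/zstd with the client,
    // matching the tower-http compression features enabled in Cargo.toml.
    let app = Router::new()
        .route("/health", get(health))
        .layer(CompressionLayer::new());

    // Port 3000 is an arbitrary choice for this sketch.
    let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await?;
    axum::serve(listener, app).await?;
    Ok(())
}

Optional features declared above, such as leak-detect or openapi-ui, would be turned on at build time with Cargo's standard flag, e.g. cargo build -p codegraph-api --features leak-detect.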

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Jakedismo/codegraph-rust'
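Because reqwest and serde_json are already dependencies in the manifest above, the same lookup could be issued from Rust. This is a rough sketch, assuming reqwest's default TLS setup and a tokio runtime; it is not part of the codegraph-api codebase.

use serde_json::Value;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Fetch the Glama MCP directory entry for this server.
    let url = "https://glama.ai/api/mcp/v1/servers/Jakedismo/codegraph-rust";
    let body = reqwest::get(url).await?.text().await?;

    // Parse and pretty-print the JSON payload returned by the directory API.
    let entry: Value = serde_json::from_str(&body)?;
    println!("{}", serde_json::to_string_pretty(&entry)?);
    Ok(())
}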

If you have feedback or need assistance with the MCP directory API, please join our Discord server.