CodeGraph CLI MCP Server

by Jakedismo
Cargo.toml
[package]
name = "codegraph-vector"
version.workspace = true
edition.workspace = true
authors.workspace = true
license.workspace = true
description = "Vector storage and similarity search using FAISS for CodeGraph"

[dependencies]
codegraph-core = { workspace = true }
codegraph-cache = { path = "../codegraph-cache", optional = true }
faiss = { workspace = true, optional = true }
ndarray = { workspace = true, optional = true }
serde = { workspace = true }
serde_json = { workspace = true }
bincode = { workspace = true }
thiserror = { workspace = true }
anyhow = { workspace = true }
tracing = { workspace = true }
async-trait = { workspace = true }
tokio = { workspace = true }
parking_lot = { workspace = true }
uuid = { workspace = true }
memmap2 = { workspace = true, optional = true }
flate2 = { workspace = true, optional = true }
dashmap = { workspace = true }
rayon = { workspace = true }
crossbeam-channel = { workspace = true }
chrono = { workspace = true }
futures = { workspace = true }
num_cpus = { workspace = true }

# Local embeddings (Candle) - optional via feature
candle-core = { version = "0.9.1", optional = true, default-features = false, features = ["metal"] }
candle-nn = { version = "0.9.1", optional = true, default-features = false }
candle-transformers = { version = "0.9.1", optional = true, default-features = false }
tokenizers = { version = "0.22", optional = true }
hf-hub = { version = "0.4", optional = true, default-features = true, features = ["tokio"] }
lru = { workspace = true }

# OpenAI API client
reqwest = { workspace = true, optional = true }

# ONNX Runtime provider (optional)
ort = { version = "2.0.0-rc.10", optional = true, default-features = false, features = ["std", "ndarray", "download-binaries"] }

[dev-dependencies]
tokio-test = { workspace = true }
tempfile = { workspace = true }
approx = { workspace = true }
criterion = { workspace = true }
fastrand = { workspace = true }

[[bench]]
name = "knn_benchmark"
harness = false

[features]
# Minimal defaults; opt-in to FAISS or providers from dependents
default = []
cache = ["dep:codegraph-cache"]
faiss = ["dep:faiss"]
persistent = ["dep:memmap2", "dep:flate2"]
gpu = ["faiss/gpu"]
openai = ["dep:reqwest"]
local-embeddings = [
    "dep:candle-core",
    "dep:candle-nn",
    "dep:candle-transformers",
    "dep:tokenizers",
    "dep:hf-hub",
]
onnx = ["dep:ort", "dep:tokenizers", "dep:hf-hub", "dep:ndarray"]
onnx-coreml = ["onnx"]
ollama = ["dep:reqwest"]

[[example]]
name = "rag_demo"
path = "examples/rag_demo.rs"
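Because the default feature set is empty, dependents opt into FAISS or an embedding provider explicitly at build time. A minimal sketch of what that looks like (the feature and target names are taken from the manifest above; nothing else is assumed):

# Build this crate with FAISS-backed search plus the OpenAI provider.
cargo build -p codegraph-vector --features "faiss,openai"

# Run the bundled RAG demo with local Candle embeddings enabled.
cargo run -p codegraph-vector --example rag_demo --features local-embeddings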

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Jakedismo/codegraph-rust'
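The endpoint returns JSON; piping it through jq pretty-prints it (assuming jq is installed; the exact response schema is not documented here, so this just formats whatever comes back):

curl -s 'https://glama.ai/api/mcp/v1/servers/Jakedismo/codegraph-rust' | jq .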

If you have feedback or need assistance with the MCP directory API, please join our Discord server.