Skip to main content
Glama
Cargo.toml — 1.48 kB
#
# TODO(fnichol): keep this file until we decide to extract this crate into its
# own open source project or port the Cargo features
#

[package]
name = "config-file"
version.workspace = true
authors.workspace = true
license.workspace = true
repository.workspace = true
edition.workspace = true
rust-version.workspace = true
publish.workspace = true

[features]
default = []
# Format-marker features; each `load-*` / `layered-*` feature below builds on
# one of these plus the matching serde crate.
json = []
toml = []
yaml = []
load-str = ["serde"]
load-json = ["json", "serde_json", "load-str"]
load-toml = ["toml", "serde_toml", "load-str"]
load-yaml = ["yaml", "serde_yaml", "load-str"]
load-sync = ["load-str"]
load-async = ["load-str", "tokio"]
layered = ["serde", "config"]
layered-json = ["layered", "json", "config/json"]
layered-toml = ["layered", "toml", "config/toml"]
layered-yaml = ["layered", "yaml", "config/yaml"]

[dependencies]
async-trait = { workspace = true }
config = { workspace = true, optional = true }
directories = { workspace = true }
pathdiff = { workspace = true }
remain = { workspace = true }
serde = { workspace = true, optional = true }
serde_json = { workspace = true, optional = true }
# FIXME(nick): we should move directly onto the "toml" crate and use the
# workspace's version.
# Renamed to `serde_toml` so the dependency does not collide with the `toml`
# feature name above.
serde_toml = { package = "toml", version = "0.8.12", optional = true }
serde_yaml = { workspace = true, optional = true }
thiserror = { workspace = true }
tokio = { workspace = true, optional = true }
tracing = { workspace = true }

[dev-dependencies]
serde = { workspace = true }

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/systeminit/si'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.