Skip to main content
Glama

Raccoon AI MCP Server

Official
by raccoonaihq
pyproject.toml — 604 B
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "raccoonai-mcp-server"
version = "0.0.1"
description = "A MCP server for Raccoon AI LAM API"
readme = "README.md"
requires-python = ">=3.10"
authors = [{ name = "Raccoon AI" }]
maintainers = [{ name = "scorchy38", email = "shubh@flyingraccoon.tech" }]
classifiers = [
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
]
dependencies = [
    "mcp[cli]>=1.3.0",
    "raccoonai>=0.1.0a10",
]

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/raccoonaihq/raccoonai-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.