Skip to main content
Glama

Fal.ai MCP Server

by raveenb
pyproject.toml (2.01 kB)
# pyproject.toml for fal-mcp-server — PEP 621 metadata, hatchling build backend.
# Section order follows packaging convention: [build-system], [project],
# [project.*], then [tool.*]. Dependency arrays are kept sorted.

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "fal-mcp-server"
version = "0.3.0"
description = "MCP server for Fal.ai - Generate images, videos, music and audio with AI models"
readme = "README.md"
requires-python = ">=3.10"
license = {text = "MIT"}
keywords = ["mcp", "fal", "ai", "image-generation", "claude", "llm"]
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
]
# Runtime dependencies (PEP 508 specifiers), sorted alphabetically.
dependencies = [
    "fal-client>=0.4.0",
    "mcp>=1.0.0",
    "sse-starlette>=1.6.0",
    "starlette>=0.27.0",
    "uvicorn>=0.31.0",
]

[project.urls]
homepage = "https://github.com/raveenb/fal-mcp-server"
repository = "https://github.com/raveenb/fal-mcp-server"
issues = "https://github.com/raveenb/fal-mcp-server/issues"

# Console entry points: stdio, HTTP, and dual-transport servers.
[project.scripts]
fal-mcp = "fal_mcp_server.server:main"
fal-mcp-http = "fal_mcp_server.server_http:main"
fal-mcp-dual = "fal_mcp_server.server_dual:main"

[project.optional-dependencies]
# Development/test tooling, installed via `pip install .[dev]`. Sorted.
dev = [
    "black>=23.0.0",
    "mypy>=1.5.0",
    "pytest>=7.0.0",
    "pytest-asyncio>=0.21.0",
    "pytest-cov>=4.1.0",
    "pyyaml>=6.0.0",
    "requests>=2.31.0",
    "ruff>=0.1.0",
    "types-aiofiles>=23.0.0",
]

# Package sources live under src/ (src-layout).
[tool.hatch.build.targets.wheel]
packages = ["src/fal_mcp_server"]

[tool.mypy]
python_version = "3.10"
warn_return_any = true
warn_unused_configs = true
disallow_untyped_defs = true
disallow_any_unimported = false
no_implicit_optional = true
check_untyped_defs = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_no_return = true
follow_imports = "normal"
ignore_missing_imports = true

[tool.black]
line-length = 88
target-version = ["py310"]

[tool.ruff]
line-length = 88
target-version = "py310"

[tool.ruff.lint]
select = ["E", "F", "W", "I", "N", "B"]
# E501 (line length) is delegated to black.
ignore = ["E501"]

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/raveenb/fal-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.