YouTube Transcript MCP Server

by suckerfish
pyproject.toml (881 B)
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "yttranscript-mcp"
version = "0.1.0"
description = "MCP server for fetching YouTube transcripts"
authors = [
    {name = "User", email = "user@example.com"},
]
dependencies = [
    "fastmcp>=0.9.0",
    "pydantic>=2.0.0",
    "uvicorn>=0.24.0",
    "yt-dlp",
    "requests>=2.31.0",
]
requires-python = ">=3.11"
readme = "README.md"
license = {text = "MIT"}

[project.optional-dependencies]
dev = [
    "pytest>=7.0.0",
    "pytest-asyncio>=0.21.0",
    "black>=23.0.0",
    "isort>=5.12.0",
    "mypy>=1.0.0",
]

[tool.hatch.build.targets.wheel]
packages = ["src"]

[tool.black]
line-length = 88
target-version = ['py311']

[tool.isort]
profile = "black"
line_length = 88

[tool.mypy]
python_version = "3.11"
warn_return_any = true
warn_unused_configs = true
disallow_untyped_defs = true
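The configuration above only declares the build backend and dependencies. As a rough illustration of how those dependencies fit together, a minimal FastMCP server wired to yt-dlp might look like the sketch below; the tool name list_caption_tracks and the caption-listing behaviour are assumptions for illustration, not the repository's actual implementation.

# Minimal sketch of an MCP server built on the declared dependencies
# (fastmcp + yt-dlp). Tool name and behaviour are illustrative assumptions,
# not the actual code of yttranscript-mcp.
import yt_dlp
from fastmcp import FastMCP

mcp = FastMCP("yttranscript-mcp")


@mcp.tool()
def list_caption_tracks(video_url: str) -> dict:
    """Return the caption/subtitle track languages yt-dlp reports for a video."""
    opts = {"skip_download": True, "quiet": True}
    with yt_dlp.YoutubeDL(opts) as ydl:
        # Fetch metadata only; no media download.
        info = ydl.extract_info(video_url, download=False)
    return {
        "title": info.get("title"),
        "subtitles": list((info.get("subtitles") or {}).keys()),
        "automatic_captions": list((info.get("automatic_captions") or {}).keys()),
    }


if __name__ == "__main__":
    mcp.run()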

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/suckerfish/yttranscript_mcp'
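The same endpoint can also be queried from Python with the requests library already listed in the project's dependencies. The sketch below assumes the endpoint returns JSON; the response fields are not documented here.

# Fetch this server's directory entry from the Glama MCP API.
# Assumes a JSON response; field names are not specified in this listing.
import requests

resp = requests.get(
    "https://glama.ai/api/mcp/v1/servers/suckerfish/yttranscript_mcp",
    timeout=10,
)
resp.raise_for_status()
print(resp.json())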

If you have feedback or need assistance with the MCP directory API, please join our Discord server.