
content-core

pyproject.toml
[project]
name = "content-core"
version = "1.4.2"
description = "Extract what matters from any media source. Available as Python Library, macOS Service, CLI and MCP Server"
readme = "README.md"
homepage = "https://github.com/lfnovo/content-core"
authors = [
    { name = "LUIS NOVO", email = "lfnovo@gmail.com" }
]
requires-python = ">=3.10"
dependencies = [
    "aiohttp>=3.11",
    "bs4>=0.0.2",
    "esperanto>=1.2.0",
    "jinja2>=3.1.6",
    "langdetect>=1.0.9",
    "loguru>=0.7.3",
    "openpyxl>=3.1.5",
    "pandas>=2.2.3",
    "pymupdf>=1.25.5",
    "python-docx>=1.1.2",
    "python-dotenv>=1.1.0",
    "python-pptx>=1.0.2",
    "youtube-transcript-api>=1.0.3",
    "langgraph>=0.3.29",
    "dicttoxml>=1.7.16",
    "validators>=0.34.0",
    "ai-prompter>=0.2.3",
    "moviepy>=2.1.2",
    "readability-lxml>=0.8.4.1",
    "firecrawl-py>=2.7.0",
    "pillow>=10.4.0",
    "asciidoc>=10.2.1",
    "pytubefix>=9.1.1",
    "fastmcp>=2.10.0",
]

[project.optional-dependencies]
docling = ["docling>=2.34.0"]

[project.scripts]
ccore = "content_core:ccore"
cclean = "content_core:cclean"
csum = "content_core:csum"
content-core-mcp = "content_core.mcp.server:main"

[tool.hatch.metadata]
allow-direct-references = true

[build-system]
requires = ["hatchling", "pip"]
build-backend = "hatchling.build"

[tool.setuptools]
package-dir = {"content_core" = "src/content_core"}

[tool.uv.sources]

[dependency-groups]
dev = [
    "ipykernel>=4.0.1",
    "ipywidgets>=4.0.0",
    "openai>=1.78.1",
    "pyperclip>=1.9.0",
    "pytest>=7.2.0",
    "pytest-asyncio>=0.21.0",
]

[tool.pytest.ini_options]
pythonpath = ["src"]
asyncio_mode = "auto"
asyncio_default_fixture_loop_scope = "function"
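The [project.scripts] table above is what turns an installed content-core package into the ccore, cclean, csum, and content-core-mcp console commands: each entry maps a command name to a callable inside the content_core package. The following is a minimal sketch, not code from the repository, that uses only the standard library to list how those entry points resolve after installation (for example via "pip install content-core", or "pip install content-core[docling]" for the optional docling extra); the command-line arguments each tool accepts are not shown here because they are not part of this file.

    # Sketch: inspect the console_scripts entry points that the
    # [project.scripts] table above registers on installation.
    from importlib.metadata import entry_points

    scripts = entry_points(group="console_scripts")
    for name in ("ccore", "cclean", "csum", "content-core-mcp"):
        for ep in scripts:
            if ep.name == name:
                print(f"{ep.name} -> {ep.value}")  # e.g. ccore -> content_core:ccore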

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/lfnovo/content-core'
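The same record can be fetched from Python. This is a minimal standard-library sketch assuming only the endpoint shown above; the response schema is not documented here, so it simply pretty-prints whatever JSON the API returns.

    # Sketch: fetch the content-core entry from the Glama MCP directory API.
    import json
    import urllib.request

    url = "https://glama.ai/api/mcp/v1/servers/lfnovo/content-core"
    with urllib.request.urlopen(url) as resp:
        data = json.load(resp)

    print(json.dumps(data, indent=2))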

If you have feedback or need assistance with the MCP directory API, please join our Discord server.