Translation MCP Server

by Barnettxxf
pyproject.toml (1.16 kB)
[project]
name = "translation-mcp"
version = "0.1.0"
description = "A Model Context Protocol server providing text translation tools using OpenAI-compatible models"
readme = "README.md"
requires-python = ">=3.10"
authors = [{ name = "Translation MCP Developer" }]
maintainers = [{ name = "Translation MCP Developer" }]
keywords = ["translation", "mcp", "llm", "openai", "langchain"]
license = { text = "MIT" }
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Topic :: Software Development :: Libraries :: Python Modules",
    "Topic :: Text Processing :: Linguistic",
]
dependencies = [
    "mcp>=0.1.0",
    "langchain>=0.1.0",
    "langchain-openai>=0.1.0",
    "openai>=1.0.0",
]

[project.scripts]
translation-mcp = "translation_mcp:main"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.uv]
dev-dependencies = ["pyright>=1.1.389", "ruff>=0.7.3"]

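The [project.scripts] table maps the translation-mcp command to a main() function in the translation_mcp package. The package's actual source is not shown on this page, so the following is only a minimal sketch of what such an entry point could look like, assuming the official MCP Python SDK's FastMCP helper and langchain-openai's ChatOpenAI client. The tool name translate, the model name, the prompt wording, and the environment variables are illustrative assumptions, not taken from the project.

import os

from langchain_openai import ChatOpenAI
from mcp.server.fastmcp import FastMCP

# Hypothetical sketch: one translation tool backed by an OpenAI-compatible
# chat model. Model name, env vars, and prompt are assumptions.
mcp = FastMCP("translation-mcp")

llm = ChatOpenAI(
    model=os.environ.get("TRANSLATION_MODEL", "gpt-4o-mini"),
    base_url=os.environ.get("OPENAI_BASE_URL"),  # any OpenAI-compatible endpoint
    api_key=os.environ.get("OPENAI_API_KEY"),
)

@mcp.tool()
def translate(text: str, target_language: str) -> str:
    """Translate text into the target language."""
    prompt = f"Translate the following text into {target_language}:\n\n{text}"
    return llm.invoke(prompt).content

def main() -> None:
    # Run the server over stdio, the default MCP transport.
    mcp.run()

if __name__ == "__main__":
    main()
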
MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Barnettxxf/translation_mcp'
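The same request can be made from Python. The endpoint below is copied from the curl example above; the response schema is not documented on this page, so the sketch simply pretty-prints whatever JSON the API returns.

import json
import urllib.request

# Fetch this server's directory entry from the Glama MCP API.
URL = "https://glama.ai/api/mcp/v1/servers/Barnettxxf/translation_mcp"

with urllib.request.urlopen(URL) as response:
    data = json.load(response)

print(json.dumps(data, indent=2))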

If you have feedback or need assistance with the MCP directory API, please join our Discord server.