
Optuna MCP Server

Official, by optuna
pyproject.toml (3.04 kB)
[project]
name = "optuna-mcp"
dynamic = ["version"]
description = "Optuna MCP Server operates, manages, and visualizes Optuna studies."
readme = "README.md"
classifiers = [
    "Development Status :: 3 - Alpha",
    "Intended Audience :: Science/Research",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Programming Language :: Python :: 3 :: Only",
    "Topic :: Scientific/Engineering",
    "Topic :: Scientific/Engineering :: Mathematics",
    "Topic :: Scientific/Engineering :: Artificial Intelligence",
    "Topic :: Software Development",
    "Topic :: Software Development :: Libraries",
    "Topic :: Software Development :: Libraries :: Python Modules",
]
requires-python = ">=3.12"
dependencies = [
    "kaleido==0.2.1",
    "mcp[cli]>=1.10.0",
    "optuna>=4.2.1",
    "optuna-dashboard>=0.18.0",
    "pandas>=2.2.3",
    "plotly>=6.0.1",
    "torch>=2.7.0",
    "bottle>=0.13.4",
]

[project.scripts]
optuna-mcp = "optuna_mcp.server:main"

[dependency-groups]
dev = [
    "mypy>=1.15.0",
    "pytest>=8.3.5",
    "pre-commit>=4.2.0",
    "ruff>=0.11.11",
    "trio>=0.30.0",
]

[build-system]
requires = ["setuptools >= 61.1.0", "wheel"]
build-backend = "setuptools.build_meta"

[tool.setuptools.dynamic]
version = {attr = "optuna_mcp.version.__version__"}

[tool.setuptools.packages.find]
include = ["optuna_mcp*"]

[tool.setuptools.package-data]
"optuna_mcp" = [
    "py.typed",
]

[[tool.uv.index]]
name = "pytorch-cpu"
url = "https://download.pytorch.org/whl/cpu"
explicit = true

[tool.uv.sources]
torch = [
    { index = "pytorch-cpu" },
]

[tool.ruff]
line-length = 99
target-version = "py312"
exclude = [
    ".bzr",
    ".direnv",
    ".eggs",
    ".git",
    ".hg",
    ".mypy_cache",
    ".nox",
    ".pants.d",
    ".ruff_cache",
    ".svn",
    ".tox",
    ".venv",
    "__pypackages__",
    "_build",
    "buck-out",
    "build",
    "dist",
    "venv",
]

[tool.ruff.lint]
select = [
    "E",   # pycodestyle errors
    "W",   # pycodestyle warnings
    "F",   # pyflakes
    "I",   # isort
    "N",   # pep8-naming
    "UP",  # pyupgrade
    "B",   # flake8-bugbear
    "C4",  # flake8-comprehensions
    "SIM", # flake8-simplify
]
ignore = [
    "E501",  # line too long (handled by formatter)
]

[tool.ruff.lint.isort]
lines-after-imports = 2
force-single-line = true
force-sort-within-sections = true
order-by-type = false

[tool.ruff.format]
quote-style = "double"
indent-style = "space"
skip-magic-trailing-comma = false
line-ending = "auto"

[tool.mypy]
# Options configure mypy's strict mode.
warn_unused_configs = true
disallow_untyped_calls = true
disallow_untyped_defs = true
disallow_incomplete_defs = true
check_untyped_defs = true
no_implicit_optional = true
warn_redundant_casts = true
strict_equality = true
extra_checks = true
no_implicit_reexport = true
ignore_missing_imports = true
exclude = [".venv", "venv", "build", "docs", "dist"]
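
The [project.scripts] entry above exposes an optuna-mcp console script, so any MCP client can launch the server as a subprocess and talk to it over stdio. The following is a minimal sketch using the official mcp Python SDK (already listed as a dependency); launching the script via uvx is an assumption, and the exact tools reported depend on the server version.

# Sketch: launch the optuna-mcp server over stdio and list the tools it exposes.
# Assumes the `optuna-mcp` console script (from [project.scripts]) is available,
# here started with `uvx`; any launcher that runs the script would work.
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client


async def main() -> None:
    server = StdioServerParameters(command="uvx", args=["optuna-mcp"])
    async with stdio_client(server) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            tools = await session.list_tools()
            for tool in tools.tools:
                print(tool.name)


if __name__ == "__main__":
    asyncio.run(main())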

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/optuna/optuna-mcp'
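
The same endpoint can also be queried from Python. Here is a small standard-library-only sketch; the response is assumed to be a JSON document describing the server.

# Sketch: fetch this server's metadata from the Glama MCP directory API.
# Standard library only; the response is assumed to be JSON.
import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/optuna/optuna-mcp"

with urllib.request.urlopen(URL) as response:
    server_info = json.load(response)

print(json.dumps(server_info, indent=2))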

If you have feedback or need assistance with the MCP directory API, please join our Discord server.