# pyproject.toml
[project]
name = "data-commons-search"
description = "A Model Context Protocol (MCP) server to access data from various open-access data publishers, developed for the EOSC Data Commons project."
readme = "README.md"
license = { file = "LICENSE" }
requires-python = ">=3.10"
authors = [
    { name = "Vincent Emonet", email = "vincent.emonet@gmail.com" },
]
maintainers = [
    { name = "Vincent Emonet", email = "vincent.emonet@gmail.com" },
]
keywords = [
    "EOSC",
    "Data Commons",
    "Search",
    "Datasets",
    "Tools",
    "MCP",
]
classifiers = [
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
]
# Version is resolved at build time from the package source (see [tool.hatch.version])
dynamic = ["version"]
dependencies = [
    "mcp >=1.19.0",
    "pydantic >=2.12.0",
    "pydantic-settings >=2.11.0",
    "httpx >=0.28.1",
    "fastembed >=0.7.3",
    "opensearch-py >=3.0.0",
]
[project.optional-dependencies]
# Extras for serving the conversational agent API (presumably the ag-ui/LangChain stack — see src for usage)
agent = [
    "starlette >=0.48.0",
    "uvicorn[standard] >=0.38.0",
    "ag-ui-protocol >=0.1.9",
    "langchain >=1.0.2",
    "langchain-mcp-adapters >=0.1.10",
    "langchain-openai >=1.0.1",
    "langchain-groq >=1.0.0",
    # "langchain-huggingface", # This will install torch and many heavy nvidia dependencies
]

# GPU-accelerated variant of the embedding backend
gpu = [
    "fastembed-gpu >=0.5.1",
]
[dependency-groups]
# Development-only tooling (tests, typing, hooks)
dev = [
    "pytest >=8.4.2",
    "pytest-asyncio >=1.2.0",
    "pytest-cov >=7.0.0",
    "mypy >=1.18.0",
    "pre-commit >=4.3.0",
    "mcp[cli]",
]

# # Required for onnxruntime-gpu on CUDA 12
# [tool.uv.pip]
# extra-index-url = ["https://aiinfra.pkgs.visualstudio.com/Publicsrc/_packaging/onnxruntime-cuda-12/pypi/simple/"]
[project.urls]
Homepage = "https://github.com/EOSC-Data-Commons/data-commons-search"
Documentation = "https://github.com/EOSC-Data-Commons/data-commons-search"
History = "https://github.com/EOSC-Data-Commons/data-commons-search/releases"
Tracker = "https://github.com/EOSC-Data-Commons/data-commons-search/issues"
Source = "https://github.com/EOSC-Data-Commons/data-commons-search"
# Installs the `data-commons-search` console command, wired to the `cli` callable in mcp_server
[project.scripts]
data-commons-search = "data_commons_search.mcp_server:cli"
## TOOLS
# Hatch reads the dynamic `version` field (declared in [project]) from this file
[tool.hatch.version]
path = "src/data_commons_search/__init__.py"
[tool.hatch.build.targets.wheel]
packages = ["src/data_commons_search"]
# Permit direct references (URL/git requirements) in project metadata
[tool.hatch.metadata]
allow-direct-references = true
[tool.pytest.ini_options]
# Auto-detect async test functions (no per-test @pytest.mark.asyncio needed)
asyncio_mode = "auto"
# Fix: spaces around `=` for consistency with every other assignment in this file
asyncio_default_fixture_loop_scope = "session"
addopts = [
    "-vvv", # Verbose level 3
    "--durations=10", # Show 10 slowest tests durations
    "--cov=src",
    "--color=yes",
    "--cov-report=term-missing",
]
[tool.mypy]
files = ["src/"]
strict = false
implicit_reexport = true
follow_imports = "normal"
ignore_missing_imports = true
pretty = true
show_column_numbers = true
warn_no_return = true
warn_unused_ignores = true
warn_redundant_casts = true
disallow_untyped_defs = true
disallow_any_generics = true
disallow_untyped_calls = false # NOTE(review): original comment cited rdflib's untyped _eval(), but rdflib is not in this project's dependencies — confirm this relaxation is still needed
[tool.ruff]
target-version = "py310"
line-length = 120
exclude = [
"**/__init__.py",
]
[tool.ruff.lint]
select = [
"I", # isort
"N", # pep8-naming
"S", # bandit
"A", # flake8-builtins
"YTT", # flake8-2020
"B", # flake8-bugbear
"C", # flake8-comprehensions
"ICN", # flake8-import-conventions
"SIM", # flake8-simplify
"TID", # flake8-tidy-imports
"Q", # flake8-quotes
# "FBT", # flake8-boolean-trap
"F", # pyflakes
"UP", # pyupgrade
"E", # pycodestyle errors
"W", # pycodestyle warnings
"PLC", # pylint convention
"PLE", # pylint error
"PLW", # pylint warning
"RUF", # ruff specific
"T", # flake8-debugger & flake8-print (T201 is ignored below)
]
ignore = [
"E501", # line too long
"C901", # too complex
"T201", # do not use print
"B008", # do not perform function calls in argument defaults
]
[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["I", "F401"] # module imported but unused
# Tests can use magic values, assertions, and relative imports:
"tests/*" = ["PLR2004", "S101", "S105", "TID252"]
[tool.git-cliff.git]
# NOTE: Add an exclamation mark in your commit message prefix to indicate a BREAKING change https://www.conventionalcommits.org
# e.g. `feat!: changed things` or `feat(python)!: changed things`
# Map conventional-commit prefixes to changelog sections; entries are regexes matched against the commit message
commit_parsers = [
{ message = "^feat", group = "⛰️ Features" },
{ message = "^fix", group = "🐛 Bug Fixes" },
{ message = "^doc", group = "📚 Documentation" },
{ message = "^perf", group = "⚡ Performance" },
{ message = "^refactor", group = "🚜 Refactor" },
{ message = "^style", group = "🎨 Styling" },
{ message = "^test", group = "🧪 Testing" },
{ message = "^ci", group = "⚙️ Continuous Integration" },
{ message = "^chore\\(release\\): prepare for", skip = true },
{ message = "^chore\\(deps\\)", skip = true },
{ message = "^chore\\(pr\\)", skip = true },
{ message = "^chore\\(pull\\)", skip = true },
{ message = "^chore", group = "🛠️ Miscellaneous Tasks" },
{ body = ".*security", group = "🛡️ Security" },
{ message = "^revert", group = "◀️ Revert" },
]
conventional_commits = true
filter_unconventional = false
split_commits = false
# Protect breaking changes from being skipped due to matching a skipping commit_parser
protect_breaking_commits = false
filter_commits = false
# Release tags may be `1.2.3` or `v1.2.3`
tag_pattern = "v?[0-9].*"
topo_order = false
sort_commits = "oldest"
[tool.git-cliff.changelog]
header = """
# 🪵 Changelog\n
"""
# Template for the changelog: https://keats.github.io/tera/docs
body = """
{% if version %}\
{% if previous.version %}\
## [{{ version | trim_start_matches(pat="v") }}](<REPO>/compare/{{ previous.version }}..{{ version }}) - {{ timestamp | date(format="%Y-%m-%d") }}
{% else %}\
## [{{ version | trim_start_matches(pat="v") }}](<REPO>/tree/{{ version }}) - {{ timestamp | date(format="%Y-%m-%d") }}
{% endif %}\
{% else %}\
## [unreleased]
{% endif %}\
{% for group, commits in commits | group_by(attribute="group") %}
### {{ group | upper_first }}
{% for commit in commits %}
- {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message | upper_first | trim }} - ([{{ commit.id | truncate(length=7, end="") }}](<REPO>/commit/{{ commit.id }}))\
{% endfor %}
{% endfor %}\n
"""
# Strip leading/trailing whitespace from the rendered template output
trim = true
footer = """
<!-- generated by git-cliff -->
"""
# Expand the <REPO> placeholder used throughout the template into the repository URL
postprocessors = [
{ pattern = '<REPO>', replace = "https://github.com/EOSC-Data-Commons/data-commons-search" },
]
# Build with Hatchling (configured via the [tool.hatch.*] tables above)
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"