llm-context

by cyberchitta
pyproject.toml (2.65 kB)
[project]
name = "llm-context"
version = "0.5.2"
description = "Share code with LLMs via Model Context Protocol or clipboard. Rule-based customization enables easy switching between different tasks (like code review and documentation). Code outlining support is included as a standard feature."
authors = [
  { name = "restlessronin", email = "88921269+restlessronin@users.noreply.github.com" },
]
readme = "README.md"
requires-python = ">=3.11"
license = "Apache-2.0"
keywords = ["llm", "ai", "context", "code", "clipboard", "chat"]
classifiers = [
  "Development Status :: 4 - Beta",
  "Environment :: Console",
  "Intended Audience :: Developers",
  "Intended Audience :: Information Technology",
  "Intended Audience :: Science/Research",
  "Topic :: Software Development :: Code Generators",
  "Topic :: Utilities",
  "Topic :: Communications :: Chat",
  "Topic :: Scientific/Engineering :: Artificial Intelligence",
]
dependencies = [
  "jinja2>=3.1.6, <4.0",
  "mcp>=1.15.0",
  "packaging>=24.2, <25.0",
  "pathspec>=0.12.1, <0.13.0",
  "pyperclip>=1.11.0, <2.0.0",
  "pyyaml>=6.0.3",
  "tree-sitter>=0.25.2",
  "tree-sitter-language-pack>=0.9.0",
]

[project.urls]
Repository = "https://github.com/cyberchitta/llm-context.py"
"User Guide" = "https://github.com/cyberchitta/llm-context.py/blob/main/docs/user-guide.md"
Changelog = "https://github.com/cyberchitta/llm-context.py/blob/main/CHANGELOG.md"

[project.scripts]
lc-context = "llm_context.cli:context"
lc-changed = "llm_context.cli:changed_files"
lc-init = "llm_context.cli:init_project"
lc-rule-instructions = "llm_context.cli:rule_instructions"
lc-mcp = "llm_context.mcp:run_server"
lc-missing = "llm_context.cli:missing"
lc-outlines = "llm_context.cli:outlines"
lc-prompt = "llm_context.cli:prompt"
lc-select = "llm_context.cli:select"
lc-set-rule = "llm_context.cli:set_rule"
lc-version = "llm_context.cli:version"

[tool.pytest.ini_options]
testpaths = ["tests"]
pythonpath = ["src"]
filterwarnings = ["ignore::FutureWarning"]

[tool.mypy]
python_version = "3.10"
warn_return_any = true
warn_unused_configs = true
ignore_missing_imports = true

[tool.ruff]
line-length = 100
target-version = "py313"

[tool.ruff.lint]
select = ["E", "F", "I"]
ignore = ["E203", "E266", "E501", "F403", "F401"]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[dependency-groups]
dev = [
  "git-cliff>=2.6.1, <3.0",
  "isort>=6.0.1, <7.0",
  "mypy>=1.18.2, <2.0",
  "pytest>=8.3.5, <9.0",
  "types-pyyaml>=6.0.12.20241230",
  "ruff>=0.13.2, <1.0",
  "taplo>=0.9.3, <1.0",
]

[tool.hatch.build]
include = ["src/**"]

[tool.hatch.build.targets.wheel]
sources = ["src"]
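The [project.scripts] table exposes each lc-* command as a console entry point in the llm_context package. As a minimal sketch of how these could be invoked (this assumes the package is published to PyPI under the name llm-context; the comments describe behavior implied by the entry-point names, not verified output):

pipx install llm-context    # or: uv tool install llm-context
cd /path/to/your/project
lc-init                     # init_project: set up per-project configuration
lc-select                   # select: choose files under the active rule
lc-context                  # context: generate context for the clipboard/LLM

The lc-mcp entry point (llm_context.mcp:run_server) is the command an MCP client would launch to run this project as an MCP server.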

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/cyberchitta/llm-context.py'
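For example, to fetch and pretty-print the server record (assuming the endpoint returns JSON and jq is installed):

curl -s 'https://glama.ai/api/mcp/v1/servers/cyberchitta/llm-context.py' | jq .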

If you have feedback or need assistance with the MCP directory API, please join our Discord server.