
Wireshark MCP

pyproject.toml (1.02 kB)
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "wireshark-mcp"
version = "0.1.0"
description = "Wireshark Model Context Protocol for AI assistants like Claude"
readme = "README.md"
authors = [
    {name = "Sarthak Siddha", email = "your.email@example.com"}
]
license = {text = "MIT"}
classifiers = [
    "Programming Language :: Python :: 3",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
]
requires-python = ">=3.8"
dependencies = [
    "pyshark>=0.6.0",
    "scapy>=2.5.0",
    "pydantic>=2.0.0",
    "rich>=12.0.0",
    "fastmcp>=0.4.0",
]

[project.optional-dependencies]
dev = [
    "pytest>=7.0.0",
    "black>=22.0.0",
    "isort>=5.12.0",
    "flake8>=6.0.0",
]

[project.urls]
"Homepage" = "https://github.com/sarthaksiddha/wireshark-mcp"
"Bug Tracker" = "https://github.com/sarthaksiddha/wireshark-mcp/issues"

[tool.black]
line-length = 88
target-version = ["py38"]

[tool.isort]
profile = "black"
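The dependency list above (fastmcp for the MCP server framework, pyshark for packet analysis) suggests how the package is likely wired together. The following is only a minimal sketch under those assumptions; the module layout, tool name, and capture logic are illustrative and are not taken from the repository. The project can be installed for development with pip install -e ".[dev]" to pull in the dev extras.

# Minimal sketch of a FastMCP server using pyshark, based only on the
# dependencies declared in pyproject.toml. The tool name and behavior
# are hypothetical, not the repository's actual code.
import collections

import pyshark
from fastmcp import FastMCP

mcp = FastMCP("Wireshark MCP")


@mcp.tool()
def protocol_summary(pcap_path: str, packet_limit: int = 1000) -> dict:
    """Summarize the protocols seen in a capture file (hypothetical tool)."""
    counts: collections.Counter = collections.Counter()
    capture = pyshark.FileCapture(pcap_path)  # requires tshark on PATH
    try:
        for i, packet in enumerate(capture):
            if i >= packet_limit:
                break
            counts[packet.highest_layer] += 1
    finally:
        capture.close()
    return dict(counts)


if __name__ == "__main__":
    mcp.run()  # serves the tool over MCP (stdio transport by default)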

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/sarthaksiddha/Wireshark-mcp'
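The same endpoint can also be queried from Python. The sketch below uses the requests library and only relies on the URL shown above; no response schema is assumed, the raw JSON is simply printed.

# Fetch this server's directory entry and pretty-print the raw JSON response.
import json

import requests

url = "https://glama.ai/api/mcp/v1/servers/sarthaksiddha/Wireshark-mcp"
response = requests.get(url, timeout=10)
response.raise_for_status()
print(json.dumps(response.json(), indent=2))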

If you have feedback or need assistance with the MCP directory API, please join our Discord server.