We provide all the information about MCP servers via our MCP API.
curl -X GET 'https://glama.ai/api/mcp/v1/servers/vrtornisiello/mcp-camara'
If you have feedback or need assistance with the MCP directory API, please join our Discord server.
from typing import Any
import httpx
from loguru import logger
from mcp_camara.models import Endpoint, Parameter
# Shape of the OpenAPI "paths" object: URL path -> HTTP method -> operation details.
type APIPaths = dict[str, dict[str, dict]]
def load_openapi_spec(url: str) -> dict[str, Any]:
    """Fetch and parse an OpenAPI specification document.

    Args:
        url: Location of the OpenAPI JSON document.

    Returns:
        The parsed specification as a dictionary.

    Raises:
        httpx.HTTPError: On connection failure or a non-2xx response.
    """
    logger.info(f"Loading spec from {url}")
    try:
        response = httpx.get(url)
        response.raise_for_status()
    except Exception:
        # Log with the full traceback, then re-raise: silently falling through
        # returned an implicit None, violating the declared return type and
        # deferring the crash to whichever caller touched the "spec" next.
        logger.exception("Error loading spec:")
        raise
    logger.success("Spec loaded successfully.")
    return response.json()
def get_endpoints(openapi_spec: dict[str, Any]) -> list[Endpoint]:
    """Extract the operations of an OpenAPI spec as ``Endpoint`` models.

    Args:
        openapi_spec: A parsed OpenAPI document (e.g. the return value of
            ``load_openapi_spec``).

    Returns:
        One ``Endpoint`` per operation; only query and path parameters are
        kept on each endpoint.
    """
    logger.info("Parsing endpoints...")
    # HTTP verbs that denote operations in an OpenAPI path item. Path items
    # may also carry non-operation keys ("summary", "description", a shared
    # "parameters" list, "servers", ...) whose values are not operation dicts
    # and would crash the `.get(...)` calls below, so they must be skipped.
    http_methods = {"get", "put", "post", "delete", "options", "head", "patch", "trace"}
    paths: APIPaths = openapi_spec.get("paths", {})
    endpoints: list[Endpoint] = []
    for path, path_methods in paths.items():
        for method, method_details in path_methods.items():
            if method.lower() not in http_methods:
                continue
            # Keep only query/path parameters; header/cookie params are dropped.
            parameters: list[Parameter] = [
                Parameter(**param)
                for param in method_details.get("parameters", [])
                if param.get("in") in {"query", "path"}
            ]
            endpoints.append(
                Endpoint(
                    path=path,
                    method=method.upper(),
                    # Prefer the longer "description"; fall back to "summary".
                    description=method_details.get("description") or method_details.get("summary"),
                    parameters=parameters,
                )
            )
    logger.success("Endpoints parsed successfully.")
    return endpoints