We provide all the information about MCP servers via our MCP API.
curl -X GET 'https://glama.ai/api/mcp/v1/servers/akiani/mock-epic-mcp'
If you have feedback or need assistance with the MCP directory API, please join our Discord server.
"""Base LLM client interface."""
from abc import ABC, abstractmethod
class LLMClient(ABC):
    """Abstract interface that every concrete LLM client must implement.

    Subclasses wrap a specific model provider and supply the actual
    transport/API logic inside :meth:`generate`.
    """

    @abstractmethod
    def generate(self, prompt: str) -> str:
        """Send *prompt* to the underlying model and return its text reply.

        Args:
            prompt: The text prompt forwarded to the LLM.

        Returns:
            The model's generated text response.

        Raises:
            Exception: Propagated if the underlying API call fails.
        """
        ...