We provide all the information about MCP servers via our MCP API.
curl -X GET 'https://glama.ai/api/mcp/v1/servers/livingstaccato/mcp-bbs'
If you have feedback or need assistance with the MCP directory API, please join our Discord server.
__init__.py•1.16 KiB
"""Provider registry and factory."""
from bbsbot.llm.base import LLMProvider
from bbsbot.llm.config import LLMConfig
from bbsbot.llm.exceptions import LLMError
def get_provider(config: LLMConfig) -> LLMProvider:
    """Get provider instance based on configuration.

    Provider classes are imported lazily inside each branch so that the
    dependencies of unused providers are never loaded.

    Args:
        config: LLM configuration

    Returns:
        Initialized provider instance

    Raises:
        LLMError: If the provider type is unsupported, or the selected
            provider's sub-config is missing
    """
    if config.provider == "ollama":
        from bbsbot.llm.providers.ollama import OllamaProvider

        # Validate the sub-config like the openai/gemini branches do;
        # previously a missing ollama config would fail opaquely downstream.
        # NOTE(review): if config.ollama carries a default and can never be
        # None, this guard is simply dead-but-safe defensive code.
        if config.ollama is None:
            raise LLMError("Ollama config required but not provided")
        return OllamaProvider(config.ollama)
    elif config.provider == "openai":
        from bbsbot.llm.providers.openai import OpenAIProvider

        if config.openai is None:
            raise LLMError("OpenAI config required but not provided")
        return OpenAIProvider(config.openai)
    elif config.provider == "gemini":
        from bbsbot.llm.providers.gemini import GeminiProvider

        if config.gemini is None:
            raise LLMError("Gemini config required but not provided")
        return GeminiProvider(config.gemini)
    else:
        raise LLMError(f"Unsupported provider: {config.provider}")
# Public API: the factory function is this package's only intended export.
__all__ = ["get_provider"]