
MemOS-MCP

by qinshu1109
factory.py (3.05 kB)
import importlib
import os
import sys
import types
from typing import Any, ClassVar

from memos.configs.llm import LLMConfigFactory
from memos.llms.base import BaseLLM

# Global cache and configuration
_CACHE = {}
_MINIMAL = bool(os.getenv("MEMOS_MINIMAL_MODE"))

# Backend registry: string paths for lazy import
BACKEND_REGISTRY = {
    "openai": "memos.llms.openai:OpenAILLM",
    "ollama": "memos.llms.ollama:OllamaLLM",
    "huggingface": "memos.llms.hf:HFLLM",
    # Class-name keys are also included to support direct imports
    "OllamaLLM": "memos.llms.ollama:OllamaLLM",
    "OpenAILLM": "memos.llms.openai:OpenAILLM",
    "HFLLM": "memos.llms.hf:HFLLM",
}


def __getattr__(name):
    """
    Module-level hook so that code importing LLM classes directly keeps working safely.

    For example, `from memos.llms.factory import OllamaLLM` only attempts the real
    import on first access; if the backend's dependencies are not installed, a safe
    stub is returned instead.
    """
    if name in _CACHE:
        return _CACHE[name]
    if name not in BACKEND_REGISTRY:
        raise AttributeError(f"module '{__name__}' has no attribute '{name}'")

    module_path = BACKEND_REGISTRY[name]
    module_name, class_name = module_path.rsplit(":", 1)

    try:
        mod = importlib.import_module(module_name)
        obj = getattr(mod, class_name)
    except ModuleNotFoundError:
        # Heavy dependency not installed -> return a safe stub
        class _Stub(types.SimpleNamespace):
            def __init__(self, *args, **kwargs):
                if not _MINIMAL:
                    raise RuntimeError(
                        f"{name} backend requires additional dependencies. "
                        f"Install with: pip install 'memos[llm]'"
                    )
                # In minimal mode, silently create the stub

            def __getattr__(self, item):
                raise RuntimeError(
                    f"{name} backend not installed. "
                    f"Install with: pip install 'memos[llm]'"
                )

        obj = _Stub

    _CACHE[name] = obj
    return obj


class LLMFactory(BaseLLM):
    """Factory class for creating LLM instances with delayed import."""

    backend_to_class: ClassVar[dict[str, str]] = BACKEND_REGISTRY

    @classmethod
    def from_config(cls, config_factory: LLMConfigFactory) -> BaseLLM:
        backend = config_factory.backend
        if backend not in cls.backend_to_class:
            raise ValueError(f"Invalid backend: {backend}")

        # Lazy import: load the concrete LLM class only when it is actually needed
        module_path = cls.backend_to_class[backend]
        module_name, class_name = module_path.rsplit(":", 1)
        try:
            module = importlib.import_module(module_name)
            llm_class = getattr(module, class_name)
            return llm_class(config_factory.config)
        except (ImportError, AttributeError) as e:
            raise ValueError(
                f"Failed to import LLM backend '{backend}' from '{module_path}': {e}"
            ) from e


# Backward-compatible direct access is handled automatically by __getattr__.
# For example, `from memos.llms.factory import OllamaLLM` calls
# __getattr__("OllamaLLM") and returns the real class or a stub.
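
A minimal usage sketch follows. It assumes LLMConfigFactory can be constructed with backend and config keyword arguments and that the ollama backend package is installed; only the .backend and .config attribute reads are confirmed by the file above, so the construction and the config fields shown are illustrative.

# Usage sketch (hypothetical config construction; only the .backend/.config
# attribute accesses are confirmed by factory.py above).
from memos.configs.llm import LLMConfigFactory
from memos.llms.factory import LLMFactory, OllamaLLM  # resolved lazily via __getattr__

# The config field name "model_name_or_path" is an assumption for illustration.
config_factory = LLMConfigFactory(backend="ollama", config={"model_name_or_path": "llama3"})

# The concrete OllamaLLM module is imported only at this call.
llm = LLMFactory.from_config(config_factory)

# If the ollama dependency is missing and MEMOS_MINIMAL_MODE is unset,
# instantiating the imported OllamaLLM (a stub) raises RuntimeError with an
# install hint; with MEMOS_MINIMAL_MODE=1 the stub is created silently and
# only fails when one of its attributes is actually used.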
