
MemOS-MCP

by qinshu1109
Apache 2.0
  • Linux
  • Apple
standalone_mcp_server.py (12.7 kB)
#!/usr/bin/env python3
"""
Standalone MemOS MCP server.
No external dependencies; implemented with the standard library only.
"""
import json
import sys
import asyncio
from typing import Any, Dict, List, Optional


class StandaloneMCPServer:
    """Standalone MCP server."""

    def __init__(self):
        # Simple in-memory store
        self.memories = []
        self.memory_counter = 0

        # Seed the store with sample data
        self._init_sample_data()

    def _init_sample_data(self):
        """Initialize sample data."""
        sample_memories = [
            {"id": 1, "content": "I am using MemOS for intelligent memory management", "tags": ["MemOS", "project"]},
            {"id": 2, "content": "Claude can call MemOS via MCP to retrieve memory context", "tags": ["Claude", "MCP", "tech"]},
            {"id": 3, "content": "The current project goal is AI memory-augmented conversation", "tags": ["project goal", "AI"]},
            {"id": 4, "content": "DeepSeek-V3 is used as the LLM", "tags": ["tech stack", "LLM"]},
            {"id": 5, "content": "SiliconFlow provides the API service", "tags": ["API", "provider"]},
        ]
        self.memories = sample_memories
        self.memory_counter = len(sample_memories)

    def _simple_similarity(self, query: str, content: str) -> float:
        """Simple similarity score (Jaccard overlap of whitespace-split words)."""
        query_words = set(query.lower().split())
        content_words = set(content.lower().split())

        if not query_words or not content_words:
            return 0.0

        intersection = query_words.intersection(content_words)
        union = query_words.union(content_words)

        return len(intersection) / len(union) if union else 0.0

    def search_memories(self, query: str, limit: int = 5) -> List[Dict]:
        """Search memories."""
        if not query:
            return []

        # Score each memory against the query
        scored_memories = []
        for memory in self.memories:
            score = self._simple_similarity(query, memory["content"])
            if score > 0:
                scored_memories.append({
                    **memory,
                    "score": score
                })

        # Sort by similarity, highest first
        scored_memories.sort(key=lambda x: x["score"], reverse=True)

        return scored_memories[:limit]

    def add_memory(self, content: str, tags: List[str] = None) -> int:
        """Add a memory."""
        self.memory_counter += 1
        memory = {
            "id": self.memory_counter,
            "content": content,
            "tags": tags or []
        }
        self.memories.append(memory)
        return self.memory_counter

    def organize_context(self, query: str, memories: List[Dict]) -> str:
        """Organize context (simplified stand-in for LLM processing)."""
        if not memories:
            return f"No memories related to '{query}' were found. This may be a new topic."

        # Simple context assembly
        context = f"Based on {len(memories)} related memories, context for the query '{query}':\n\n"
        for i, memory in enumerate(memories, 1):
            context += f"{i}. {memory['content']}\n"
        context += f"\nThese memories suggest the user is working on or learning about topics related to {query}."

        return context

    async def handle_request(self, request: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Handle an MCP JSON-RPC request."""
        try:
            method = request.get("method")
            params = request.get("params", {})
            request_id = request.get("id")

            # JSON-RPC notifications (e.g. "notifications/initialized") carry no id
            # and must not receive a response.
            if request_id is None:
                return None

            if method == "initialize":
                return {
                    "jsonrpc": "2.0",
                    "id": request_id,
                    "result": {
                        "protocolVersion": "2024-11-05",
                        "capabilities": {
                            "tools": {}
                        },
                        "serverInfo": {
                            "name": "memos-standalone",
                            "version": "1.0.0"
                        }
                    }
                }

            elif method == "tools/list":
                return {
                    "jsonrpc": "2.0",
                    "id": request_id,
                    "result": {
                        "tools": [
                            {
                                "name": "query_memos_context",
                                "description": "Query the MemOS memory store and return organized context",
                                "inputSchema": {
                                    "type": "object",
                                    "properties": {
                                        "query": {
                                            "type": "string",
                                            "description": "The question or keywords to look up"
                                        },
                                        "max_memories": {
                                            "type": "integer",
                                            "description": "Maximum number of memories to return",
                                            "default": 5
                                        }
                                    },
                                    "required": ["query"]
                                }
                            },
                            {
                                "name": "add_memos_memory",
                                "description": "Add a new memory to MemOS",
                                "inputSchema": {
                                    "type": "object",
                                    "properties": {
                                        "content": {
                                            "type": "string",
                                            "description": "Memory content"
                                        },
                                        "tags": {
                                            "type": "array",
                                            "items": {"type": "string"},
                                            "description": "Memory tags",
                                            "default": []
                                        }
                                    },
                                    "required": ["content"]
                                }
                            }
                        ]
                    }
                }

            elif method == "tools/call":
                tool_name = params.get("name")
                arguments = params.get("arguments", {})

                if tool_name == "query_memos_context":
                    return await self.query_memos_context(arguments, request_id)
                elif tool_name == "add_memos_memory":
                    return await self.add_memos_memory(arguments, request_id)
                else:
                    return {
                        "jsonrpc": "2.0",
                        "id": request_id,
                        "error": {
                            "code": -32601,
                            "message": f"Tool not found: {tool_name}"
                        }
                    }

            else:
                return {
                    "jsonrpc": "2.0",
                    "id": request_id,
                    "error": {
                        "code": -32601,
                        "message": f"Method not found: {method}"
                    }
                }

        except Exception as e:
            return {
                "jsonrpc": "2.0",
                "id": request.get("id"),
                "error": {
                    "code": -32603,
                    "message": f"Internal error: {str(e)}"
                }
            }

    async def query_memos_context(self, args: Dict[str, Any], request_id: Any) -> Dict[str, Any]:
        """Query MemOS for organized context."""
        try:
            query = args.get("query", "")
            max_memories = args.get("max_memories", 5)

            if not query:
                return {
                    "jsonrpc": "2.0",
                    "id": request_id,
                    "result": {
                        "content": [
                            {
                                "type": "text",
                                "text": "Error: query must not be empty"
                            }
                        ]
                    }
                }

            # Search for related memories
            memories = self.search_memories(query, max_memories)

            # Organize the context
            llm_context = self.organize_context(query, memories)

            # Format the result
            context_text = f"""MemOS query result:

Query: {query}
Found {len(memories)} related memories

Organized context:
{llm_context}

Related memory details:
"""
            for i, memory in enumerate(memories, 1):
                tags_str = ", ".join(memory['tags']) if memory['tags'] else "no tags"
                context_text += f"{i}. {memory['content']} (relevance: {memory['score']:.2f}, tags: {tags_str})\n"

            return {
                "jsonrpc": "2.0",
                "id": request_id,
                "result": {
                    "content": [
                        {
                            "type": "text",
                            "text": context_text
                        }
                    ]
                }
            }

        except Exception as e:
            return {
                "jsonrpc": "2.0",
                "id": request_id,
                "result": {
                    "content": [
                        {
                            "type": "text",
                            "text": f"Query failed: {str(e)}"
                        }
                    ]
                }
            }

    async def add_memos_memory(self, args: Dict[str, Any], request_id: Any) -> Dict[str, Any]:
        """Add a memory to MemOS."""
        try:
            content = args.get("content", "")
            tags = args.get("tags", [])

            if not content:
                return {
                    "jsonrpc": "2.0",
                    "id": request_id,
                    "result": {
                        "content": [
                            {
                                "type": "text",
                                "text": "Error: memory content must not be empty"
                            }
                        ]
                    }
                }

            # Add the memory
            memory_id = self.add_memory(content, tags)

            return {
                "jsonrpc": "2.0",
                "id": request_id,
                "result": {
                    "content": [
                        {
                            "type": "text",
                            "text": f"Successfully added memory #{memory_id}: {content}"
                        }
                    ]
                }
            }

        except Exception as e:
            return {
                "jsonrpc": "2.0",
                "id": request_id,
                "result": {
                    "content": [
                        {
                            "type": "text",
                            "text": f"Failed to add memory: {str(e)}"
                        }
                    ]
                }
            }


async def main():
    """Entry point: read line-delimited JSON-RPC from stdin, write responses to stdout."""
    server = StandaloneMCPServer()

    # Read JSON-RPC requests from stdin, one JSON object per line
    while True:
        try:
            line = await asyncio.get_running_loop().run_in_executor(None, sys.stdin.readline)
            if not line:
                break

            request = json.loads(line.strip())
            response = await server.handle_request(request)

            # Write the response (notifications produce no response)
            if response is not None:
                print(json.dumps(response), flush=True)

        except json.JSONDecodeError:
            continue
        except Exception as e:
            error_response = {
                "jsonrpc": "2.0",
                "error": {
                    "code": -32603,
                    "message": f"Server error: {str(e)}"
                }
            }
            print(json.dumps(error_response), flush=True)


if __name__ == "__main__":
    asyncio.run(main())
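
Because main() reads one JSON object per line from stdin and writes one response per line to stdout, the server can be exercised without a full MCP client. The following is a minimal smoke-test sketch; it assumes the file above is saved as standalone_mcp_server.py in the current directory and is run with the same Python interpreter as the test script.

# smoke_test.py — a minimal sketch, assuming standalone_mcp_server.py is in the
# current directory and speaks newline-delimited JSON-RPC over stdio as above.
import json
import subprocess
import sys

proc = subprocess.Popen(
    [sys.executable, "standalone_mcp_server.py"],
    stdin=subprocess.PIPE,
    stdout=subprocess.PIPE,
    text=True,
)

requests = [
    {"jsonrpc": "2.0", "id": 1, "method": "initialize", "params": {}},
    {"jsonrpc": "2.0", "id": 2, "method": "tools/list", "params": {}},
    {
        "jsonrpc": "2.0",
        "id": 3,
        "method": "tools/call",
        "params": {
            "name": "query_memos_context",
            "arguments": {"query": "MemOS MCP", "max_memories": 3},
        },
    },
]

for request in requests:
    # One JSON object per line, matching the server's readline()-based loop.
    proc.stdin.write(json.dumps(request) + "\n")
    proc.stdin.flush()
    response = json.loads(proc.stdout.readline())
    print(response["result"])

proc.stdin.close()
proc.wait()

Each request has an id, so the server answers every line; the tools/call response carries the organized context as a single text content block, as built in query_memos_context above.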

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/qinshu1109/memos-MCP'
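
The same lookup can be done from Python's standard library; this is a sketch that assumes the endpoint is publicly readable and returns JSON, as the curl example suggests.

import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/qinshu1109/memos-MCP"

# Fetch the server's directory entry and pretty-print it.
with urllib.request.urlopen(URL) as response:
    server_info = json.load(response)

print(json.dumps(server_info, indent=2))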

If you have feedback or need assistance with the MCP directory API, please join our Discord server.