
Claude Memory MCP Server

by WhenMoon-afk
semantic.py (4.98 kB)
""" Semantic Domain for managing semantic memories. The Semantic Domain is responsible for: - Managing factual information and knowledge - Organizing categorical and conceptual information - Handling entity relationships and attributes - Knowledge consolidation and organization - Abstract concept representation """ from typing import Any, Dict, List from loguru import logger from memory_mcp.domains.persistence import PersistenceDomain class SemanticDomain: """ Manages semantic memories (facts, knowledge, entities). This domain handles factual information, knowledge, and entity-relationship structures. """ def __init__(self, config: Dict[str, Any], persistence_domain: PersistenceDomain) -> None: """ Initialize the semantic domain. Args: config: Configuration dictionary persistence_domain: Reference to the persistence domain """ self.config = config self.persistence_domain = persistence_domain async def initialize(self) -> None: """Initialize the semantic domain.""" logger.info("Initializing Semantic Domain") # Initialization logic will be implemented here async def process_memory(self, memory: Dict[str, Any]) -> Dict[str, Any]: """ Process a semantic memory. This includes extracting key information, generating embeddings, and enriching the memory with additional metadata. Args: memory: The memory to process Returns: Processed memory """ logger.debug(f"Processing semantic memory: {memory['id']}") # Extract text representation for embedding text_content = self._extract_text_content(memory) # Generate embedding embedding = await self.persistence_domain.generate_embedding(text_content) memory["embedding"] = embedding # Additional processing based on memory type if memory["type"] == "entity": memory = self._process_entity_memory(memory) elif memory["type"] == "fact": memory = self._process_fact_memory(memory) return memory def _extract_text_content(self, memory: Dict[str, Any]) -> str: """ Extract text content from a memory for embedding generation. Args: memory: The memory to extract text from Returns: Text representation of the memory """ if memory["type"] == "fact": # For fact memories, use the fact text if "fact" in memory["content"]: return memory["content"]["fact"] elif memory["type"] == "document": # For document memories, combine title and text title = memory["content"].get("title", "") text = memory["content"].get("text", "") return f"{title}\n{text}" elif memory["type"] == "entity": # For entity memories, combine name and attributes name = memory["content"].get("name", "") entity_type = memory["content"].get("entity_type", "") # Extract attributes as text attributes = memory["content"].get("attributes", {}) attr_text = "" if attributes and isinstance(attributes, dict): attr_text = "\n".join([f"{k}: {v}" for k, v in attributes.items()]) return f"{name} ({entity_type})\n{attr_text}" # Fallback: try to convert content to string try: return str(memory["content"]) except: return f"Memory {memory['id']} of type {memory['type']}" def _process_entity_memory(self, memory: Dict[str, Any]) -> Dict[str, Any]: """ Process an entity memory. Args: memory: The entity memory to process Returns: Processed memory """ # Entity-specific processing # This is a placeholder for future implementation return memory def _process_fact_memory(self, memory: Dict[str, Any]) -> Dict[str, Any]: """ Process a fact memory. 
Args: memory: The fact memory to process Returns: Processed memory """ # Fact-specific processing # This is a placeholder for future implementation return memory async def get_stats(self) -> Dict[str, Any]: """ Get statistics about the semantic domain. Returns: Semantic domain statistics """ return { "memory_types": { "fact": 0, "document": 0, "entity": 0 }, "status": "initialized" }
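The sketch below shows how this domain might be driven in isolation. It is not part of the repository file: it assumes the module path memory_mcp.domains.semantic, substitutes a hypothetical StubPersistenceDomain for the real persistence layer (only generate_embedding is stubbed, with an arbitrary vector size), and builds memory dicts whose keys mirror those read by _extract_text_content.

import asyncio
from typing import Any, Dict, List

from memory_mcp.domains.semantic import SemanticDomain  # assumed module path


class StubPersistenceDomain:
    """Hypothetical stand-in for PersistenceDomain used only for this sketch."""

    async def generate_embedding(self, text: str) -> List[float]:
        # Return a fixed zero vector; the dimensionality is an assumption.
        return [0.0] * 384


async def main() -> None:
    domain = SemanticDomain(config={}, persistence_domain=StubPersistenceDomain())
    await domain.initialize()

    # Example memory shapes matching the keys used in _extract_text_content
    fact_memory: Dict[str, Any] = {
        "id": "mem-001",
        "type": "fact",
        "content": {"fact": "The Semantic Domain stores factual knowledge."},
    }
    entity_memory: Dict[str, Any] = {
        "id": "mem-002",
        "type": "entity",
        "content": {
            "name": "Claude",
            "entity_type": "assistant",
            "attributes": {"vendor": "Anthropic"},
        },
    }

    for memory in (fact_memory, entity_memory):
        processed = await domain.process_memory(memory)
        print(processed["id"], len(processed["embedding"]))

    print(await domain.get_stats())


asyncio.run(main())

Because the stub satisfies the only method process_memory calls on the persistence domain, the sketch exercises embedding attachment and the per-type processing hooks without any storage backend.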

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/WhenMoon-afk/claude-memory-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.