# mcp_memory_server.py
#!/usr/bin/env python3
"""
MCP Memory Server - Persistent memory server using FastMCP
Provides semantic search and storage capabilities via MCP protocol
"""
import logging
import sys
from pathlib import Path
from typing import Any, Optional
# Configure logging first — only errors are surfaced, and the chatty ML /
# vector-DB libraries are silenced so they don't flood the server's output.
logging.basicConfig(level=logging.ERROR, format="%(levelname)s: %(message)s")
for lib in ["sentence_transformers", "chromadb", "transformers", "torch"]:
    logging.getLogger(lib).setLevel(logging.ERROR)
# Adjust path for imports
if __name__ == "__main__":
    # When run directly as a script, add the directory three levels up to
    # sys.path so the `continuo` / `mcp.memory` packages below can resolve.
    src_path = Path(__file__).parent.parent.parent
    sys.path.insert(0, str(src_path))
from mcp.server.fastmcp import FastMCP # noqa: E402
# Conditional imports
try:
from continuo.embedding_service import ( # type: ignore[assignment,no-redef]
EmbeddingService,
)
from continuo.hierarchical_compression import ( # type: ignore[assignment,no-redef]
HierarchicalCompressor,
)
from continuo.memory_server import MemoryServer # type: ignore[assignment,no-redef]
except ImportError:
from mcp.memory.embedding_service import ( # type: ignore[assignment,no-redef]
EmbeddingService,
)
from mcp.memory.hierarchical_compression import ( # type: ignore[assignment,no-redef]
HierarchicalCompressor,
)
from mcp.memory.memory_server import MemoryServer # type: ignore[assignment,no-redef]
# Initialize FastMCP
mcp = FastMCP("continuo-memory")
# Global variables for memory components.
# All three are populated lazily by _ensure_initialized() on the first tool
# call; until then they stay None.
_memory: Optional[MemoryServer] = None
_embedding_service: Optional[EmbeddingService] = None
_compressor: Optional[HierarchicalCompressor] = None
# Runtime configuration (provider, db_path, api_key) — filled in by main().
_config: dict[str, Any] = {}
def _ensure_initialized() -> None:
    """Ensure the memory components are initialized (lazy init).

    Reads settings from the module-level ``_config`` (populated by ``main()``)
    and builds the embedding service, hierarchical compressor, and memory
    server exactly once; subsequent calls return immediately.
    """
    global _memory, _embedding_service, _compressor
    if _memory is not None:
        # Already initialized — nothing to do.
        return
    provider = _config.get("provider", "local")
    db_path = _config.get("db_path", "./chroma_db")
    api_key = _config.get("api_key")
    _embedding_service = EmbeddingService(provider=provider, api_key=api_key)
    # Compressor tuning is now overridable via _config; absent keys keep the
    # previous hard-coded defaults (backward compatible).
    _compressor = HierarchicalCompressor(
        max_working_set_size=_config.get("max_working_set_size", 10),
        decay_hours=_config.get("decay_hours", 168),  # 168 h = 7 days
    )
    _memory = MemoryServer(
        embedding_service=_embedding_service,
        db_path=db_path,
        hierarchical_compressor=_compressor,
    )
@mcp.tool()
def search_memory(query: str, top_k: int = 5, level: Optional[str] = None) -> str:
    """
    Semantic search in persistent memory.
    Retrieves relevant knowledge from stored documents.
    Args:
        query: Search query
        top_k: Number of results (default: 5)
        level: Filter by level: N0 (chunks), N1 (micro-summaries), N2 (meta-summaries)
    """
    _ensure_initialized()
    if _memory is None:
        return "Error: Memory not initialized"
    results = _memory.retrieve(query=query, top_k=top_k, level_filter=level)
    docs = results["documents"]
    if not docs:
        return "No results found in memory."
    # Render each hit as a small markdown section: header, optional file/date
    # metadata, then the document text and a separator.
    lines = ["# Persistent Memory Results\n"]
    hits = zip(docs, results["metadatas"], results["distances"])
    for rank, (doc, meta, dist) in enumerate(hits, start=1):
        level_label = meta.get("level", "N0")
        # Cosine distance → similarity for display.
        lines.append(f"## [{rank}] {level_label} | Similarity: {1-dist:.2f}")
        file_info = meta.get("file", "")
        if file_info:
            lines.append(f"**File:** `{file_info}`")
        timestamp = meta.get("timestamp", "")
        if timestamp:
            # Keep only the date portion of an ISO timestamp.
            lines.append(f"**Date:** {timestamp[:10]}")
        lines.append(f"\n{doc}\n")
        lines.append("---\n")
    return "\n".join(lines)
@mcp.tool()
def store_memory(text: str, metadata: Optional[dict[str, Any]] = None, level: str = "N0") -> str:
    """
    Store content in persistent memory.
    Args:
        text: Content to store
        metadata: Additional metadata (file, author, tags, etc)
        level: Hierarchical level (N0=chunk, N1=summary, N2=meta)
    """
    _ensure_initialized()
    if _memory is None:
        return "Error: Memory not initialized"
    # Fall back to an empty metadata dict so the store call always gets a dict.
    meta = metadata if metadata else {}
    doc_id = _memory.store(text=text, metadata=meta, level=level)
    return f"✓ Stored successfully!\nID: {doc_id}\nLevel: {level}"
@mcp.tool()
def get_memory_stats() -> str:
    """
    Get persistent memory statistics.
    """
    _ensure_initialized()
    if _memory is None:
        return "Error: Memory not initialized"
    stats = _memory.get_stats()
    by_level = stats["by_level"]
    # Assemble the markdown report in one pass.
    report = [
        "# Persistent Memory Statistics\n",
        f"**Total documents:** {stats['total_documents']}",
        f"**Database:** `{stats['db_path']}`",
        f"**Collection:** `{stats['collection']}`\n",
        "## By Hierarchical Level:",
        f"- N0 (Chunks): {by_level['N0_chunks']}",
        f"- N1 (Micro-summaries): {by_level['N1_micro_summaries']}",
        f"- N2 (Meta-summaries): {by_level['N2_meta_summaries']}",
    ]
    return "\n".join(report)
def main():
    """Main entry point"""
    import argparse

    parser = argparse.ArgumentParser(description="MCP Memory Server")
    parser.add_argument(
        "--provider",
        choices=["local", "openai"],
        default="local",
        help="Embeddings provider",
    )
    parser.add_argument("--db-path", default="./chroma_db", help="Path to ChromaDB")
    parser.add_argument("--api-key", help="OpenAI API key (if provider=openai)")
    args = parser.parse_args()
    # Record configuration only; heavy initialization is deferred until the
    # first tool call (see _ensure_initialized).
    global _config
    _config = dict(
        provider=args.provider,
        db_path=args.db_path,
        api_key=args.api_key,
    )
    # Run FastMCP server (memory will be initialized on first call)
    mcp.run()
# Standard script entry guard: run the server only when executed directly.
if __name__ == "__main__":
    main()