
Continuo Memory System

by GtOkAi
test_memory_server.py (6.45 kB)
""" Tests for Memory Server """ import shutil import tempfile import pytest from continuo.embedding_service import EmbeddingService from continuo.hierarchical_compression import HierarchicalCompressor from continuo.memory_server import MemoryServer @pytest.fixture def temp_db_path(): """Create temporary directory for tests""" temp_dir = tempfile.mkdtemp() yield temp_dir shutil.rmtree(temp_dir) @pytest.fixture def embedding_service(): """Create embedding service for tests""" return EmbeddingService(provider="local") @pytest.fixture def memory_server(embedding_service, temp_db_path): """Create memory server for tests""" return MemoryServer( embedding_service=embedding_service, db_path=temp_db_path, collection_name="test_collection", ) class TestMemoryServer: """Memory Server Tests""" def test_initialization(self, memory_server): """Test server initialization""" assert memory_server is not None assert memory_server._collection is not None assert memory_server._collection.count() == 0 def test_store_document(self, memory_server): """Test document storage""" doc_id = memory_server.store( text="Storage test", metadata={"type": "test"}, ) assert doc_id is not None assert memory_server._collection.count() == 1 def test_store_empty_text_raises_error(self, memory_server): """Test that empty text raises error""" with pytest.raises(ValueError): memory_server.store(text="") def test_retrieve_documents(self, memory_server): """Test document retrieval""" # Store documents memory_server.store("Python is a programming language", {"lang": "python"}) memory_server.store("JavaScript is used for web", {"lang": "js"}) memory_server.store("Rust is a systems language", {"lang": "rust"}) # Search results = memory_server.retrieve("programming language", top_k=2) assert len(results["documents"]) > 0 assert "Python" in results["documents"][0] or "Rust" in results["documents"][0] def test_hierarchical_levels(self, memory_server): """Test storage in hierarchical levels""" # Store at different levels memory_server.store("Chunk N0", level="N0") memory_server.store("Summary N1", level="N1") memory_server.store("Meta N2", level="N2") # Check stats stats = memory_server.get_stats() assert stats["total_documents"] == 3 def test_level_filtering(self, memory_server): """Test filtering by level""" memory_server.store("Chunk 1", level="N0") memory_server.store("Summary 1", level="N1") # Search only N0 results = memory_server.retrieve("Chunk", level_filter="N0") assert all(meta["level"] == "N0" for meta in results["metadatas"]) def test_access_count_increment(self, memory_server): """Test access counter increment""" doc_id = memory_server.store("Test document") # First search memory_server.retrieve("Test document", top_k=1) # Check increment (internal implementation) result = memory_server._collection.get(ids=[doc_id]) assert result["metadatas"][0]["access_count"] >= 1 def test_get_stats(self, memory_server): """Test statistics retrieval""" memory_server.store("Doc 1", level="N0") memory_server.store("Doc 2", level="N1") stats = memory_server.get_stats() assert "total_documents" in stats assert "by_level" in stats assert stats["total_documents"] == 2 class TestEmbeddingService: """Embedding Service Tests""" def test_local_initialization(self): """Test local initialization""" service = EmbeddingService(provider="local") assert service.provider == "local" assert service._model is not None def test_encode_single_text(self): """Test single text encoding""" service = EmbeddingService(provider="local") embeddings = service.encode("Test 
text") assert len(embeddings) == 1 assert len(embeddings[0]) > 0 # Vector dimension def test_encode_multiple_texts(self): """Test multiple texts encoding""" service = EmbeddingService(provider="local") texts = ["Text 1", "Text 2", "Text 3"] embeddings = service.encode(texts) assert len(embeddings) == 3 def test_get_dimension(self): """Test dimension retrieval""" service = EmbeddingService(provider="local") dim = service.get_dimension() assert dim > 0 assert isinstance(dim, int) class TestHierarchicalCompressor: """Hierarchical Compressor Tests""" def test_initialization(self): """Test initialization""" compressor = HierarchicalCompressor() assert compressor.max_working_set_size == 10 def test_create_micro_summary(self): """Test micro-summary creation""" compressor = HierarchicalCompressor() chunks = [ {"id": "1", "text": "Chunk 1 with information"}, {"id": "2", "text": "Chunk 2 with more data"}, {"id": "3", "text": "Chunk 3 finalizing"}, ] summary = compressor.create_micro_summary(chunks) assert summary["level"] == "N1" assert len(summary["source_chunks"]) == 3 assert "text" in summary def test_create_meta_summary(self): """Test meta-summary creation""" compressor = HierarchicalCompressor() micro_summaries = [ {"id": "n1_1", "text": "Summary 1"}, {"id": "n1_2", "text": "Summary 2"}, ] meta = compressor.create_meta_summary(micro_summaries) assert meta["level"] == "N2" assert len(meta["source_summaries"]) == 2 def test_select_working_set(self): """Test working set selection""" compressor = HierarchicalCompressor(max_working_set_size=2) items = [ {"id": "1", "text": "Item 1", "metadata": {"access_count": 10}}, {"id": "2", "text": "Item 2", "metadata": {"access_count": 5}}, {"id": "3", "text": "Item 3", "metadata": {"access_count": 1}}, ] working_set = compressor.select_working_set(items) assert len(working_set) <= 2

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/GtOkAi/continuo-memory-mcp'
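
The same endpoint can be queried programmatically. The snippet below is a minimal sketch using only the Python standard library; it assumes nothing about the response beyond it being JSON and simply prints whatever the API returns.

# Sketch: fetch this server's entry from the Glama MCP directory API.
import json
import urllib.request

URL = "https://glama.ai/api/mcp/v1/servers/GtOkAi/continuo-memory-mcp"

with urllib.request.urlopen(URL) as resp:
    data = json.load(resp)

print(json.dumps(data, indent=2))  # response fields are not assumed here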

If you have feedback or need assistance with the MCP directory API, please join our Discord server.