
Nextcloud MCP Server

by cbcoutinho
test_deck_vector_search.py (8.74 kB)
"""Integration tests for Deck card vector search. These tests validate that Deck cards are properly indexed and searchable via semantic search. """ import pytest pytestmark = [pytest.mark.integration, pytest.mark.smoke] async def test_deck_card_semantic_search(nc_mcp_client, nc_client, mocker): """Test that Deck cards can be indexed and searched via semantic search. This test: 1. Creates a Deck board with a card 2. Manually triggers indexing (simulates vector sync) 3. Performs semantic search filtering by deck_card doc_type 4. Verifies the card is found in results """ # Skip if vector sync is not enabled settings_response = await nc_mcp_client.call_tool("nc_get_vector_sync_status", {}) if settings_response.isError: pytest.skip("Vector sync not enabled") # Create a test board board_title = "Test Board for Vector Search" board = await nc_client.deck.create_board(title=board_title, color="ff0000") try: # Create a stack for the board stack = await nc_client.deck.create_stack( board_id=board.id, title="Test Stack", order=0 ) # Create a test card with searchable content card_title = "Machine Learning Project Plan" card_description = """ # ML Project Outline ## Phase 1: Data Collection - Gather training data from multiple sources - Clean and preprocess the dataset ## Phase 2: Model Training - Experiment with different neural network architectures - Use gradient descent optimization ## Phase 3: Deployment - Deploy model to production environment - Monitor performance metrics """ card = await nc_client.deck.create_card( board_id=board.id, stack_id=stack.id, title=card_title, description=card_description, ) # Note: In a real integration test with vector sync enabled, # we would wait for the background scanner to index the card. # For now, we'll test the scanning function directly if needed. # TODO: Once vector sync is running in test environment, # add actual semantic search test here # For now, just verify the card was created successfully assert card.id is not None assert card.title == card_title assert card.description == card_description # Test semantic search with deck_card filter # Note: This will only work if vector sync is actually running # and the card has been indexed try: search_result = await nc_mcp_client.call_tool( "nc_semantic_search", { "query": "machine learning neural networks", "doc_types": ["deck_card"], "limit": 10, }, ) # If vector sync is working, we should find the card if not search_result.isError: data = search_result.structuredContent results = data.get("results", []) # Check if our card is in the results found_card = any( r.get("doc_type") == "deck_card" and r.get("title") == card_title for r in results ) # Log result for debugging if found_card: print("✓ Successfully found Deck card in vector search") else: print( "⚠ Deck card not found in search (may need time for indexing)" ) except Exception as e: # If search fails, it might be because indexing hasn't happened yet print(f"⚠ Semantic search failed (indexing may not be complete): {e}") finally: # Cleanup: delete the board try: await nc_client.deck.delete_board(board.id) except Exception as e: print(f"Warning: Failed to cleanup test board: {e}") async def test_deck_card_appears_in_cross_app_search(nc_mcp_client, nc_client): """Test that Deck cards appear in cross-app semantic search (no doc_type filter). This verifies that when searching without specifying doc_types, Deck cards are included in the results alongside notes, files, etc. 
""" # Skip if vector sync is not enabled settings_response = await nc_mcp_client.call_tool("nc_get_vector_sync_status", {}) if settings_response.isError: pytest.skip("Vector sync not enabled") # Create a test board with a distinctive card board_title = "Cross-App Search Test Board" board = await nc_client.deck.create_board(title=board_title, color="00ff00") try: # Create a stack for the board stack = await nc_client.deck.create_stack( board_id=board.id, title="Test Stack", order=0 ) # Use a very distinctive term to make it easy to find unique_term = "xylophone_banana_unicorn_test" _card = await nc_client.deck.create_card( board_id=board.id, stack_id=stack.id, title=f"Test Card with {unique_term}", description=f"This card contains the unique search term: {unique_term}", ) # Test cross-app search (no doc_type filter) try: search_result = await nc_mcp_client.call_tool( "nc_semantic_search", { "query": unique_term, "limit": 20, }, ) if not search_result.isError: data = search_result.structuredContent results = data.get("results", []) # Check if deck_card appears in cross-app results deck_cards_found = [ r for r in results if r.get("doc_type") == "deck_card" ] if deck_cards_found: print( f"✓ Found {len(deck_cards_found)} Deck card(s) in cross-app search" ) else: print( "⚠ No Deck cards in cross-app search (may need time for indexing)" ) except Exception as e: print(f"⚠ Cross-app search failed: {e}") finally: # Cleanup try: await nc_client.deck.delete_board(board.id) except Exception as e: print(f"Warning: Failed to cleanup test board: {e}") async def test_deck_card_chunk_context(nc_client): """Test that Deck card chunk context can be fetched for visualization. This test validates that the vector viz UI can display Deck card previews by fetching the chunk context via the context expansion module. """ from nextcloud_mcp_server.search.context import get_chunk_with_context # Create board, stack, and card board = await nc_client.deck.create_board(title="Test Board", color="ff0000") try: stack = await nc_client.deck.create_stack( board_id=board.id, title="Test Stack", order=0 ) card_title = "Test Card for Context Expansion" card_description = "This is a test description that should be fetched by the context expansion module when displaying chunk previews in the vector visualization UI." card = await nc_client.deck.create_card( board_id=board.id, stack_id=stack.id, title=card_title, description=card_description, ) # Fetch chunk context (simulates viz UI request) # The chunk spans the title, so start=0 and end=len(card_title) context = await get_chunk_with_context( nc_client=nc_client, user_id=nc_client.username, doc_id=card.id, doc_type="deck_card", chunk_start=0, chunk_end=len(card_title), context_chars=100, ) # Verify context was fetched successfully assert context is not None, "Chunk context should not be None" assert card_title in context.chunk_text, ( f"Card title '{card_title}' should be in chunk_text" ) # Verify context includes description assert card_description[:50] in context.after_context, ( "Card description should be in after_context" ) print(f"✓ Successfully fetched chunk context for Deck card {card.id}") finally: # Cleanup try: await nc_client.deck.delete_board(board.id) except Exception as e: print(f"Warning: Failed to cleanup test board: {e}")
