Skip to main content
Glama
example_usage.py7.69 kB
#!/usr/bin/env python3
"""
Example Usage Script for Graphiti MCP Server

This script demonstrates how to use the Graphiti MCP Server tools
programmatically. It shows practical examples of storing memories,
retrieving them, creating relationships, and getting context.

Note: This script requires the MCP server to be running. For direct Neo4j
operations, you can modify this to use the GraphitiMCP class directly.
"""

import asyncio
import json
import os
import sys
from typing import Any, Dict

from dotenv import load_dotenv

# Load environment variables from a local .env file, if present.
load_dotenv()


async def example_direct_usage() -> None:
    """Run six end-to-end examples against GraphitiMCP directly (no MCP protocol).

    Stores two related memories, links them with a relationship, retrieves
    memories by query, synthesizes context, and runs a raw Cypher query.

    Requires NEO4J_URI, NEO4J_PASSWORD, and OPENROUTER_API_KEY (or
    OPENAI_API_KEY) in the environment; prints an error and returns early
    when any are missing or server initialization fails.
    """
    # Imported lazily so the module can still be imported (e.g. for
    # example_mcp_client_usage) when the server's dependencies are absent.
    from graphiti_mcp_server import GraphitiMCP

    # Get configuration from environment
    neo4j_uri = os.getenv("NEO4J_URI")
    neo4j_user = os.getenv("NEO4J_USER", "neo4j")
    neo4j_password = os.getenv("NEO4J_PASSWORD")
    openrouter_api_key = os.getenv("OPENROUTER_API_KEY") or os.getenv("OPENAI_API_KEY")
    model_name = os.getenv("MODEL_NAME", "openai/gpt-4o-mini")
    # OpenRouter is preferred whenever its key is set; OPENAI_API_KEY is the fallback.
    use_openrouter = bool(os.getenv("OPENROUTER_API_KEY"))

    if not neo4j_uri or not neo4j_password or not openrouter_api_key:
        print("Error: Missing required environment variables")
        print("Please set NEO4J_URI, NEO4J_PASSWORD, and OPENROUTER_API_KEY (or OPENAI_API_KEY)")
        return

    print("=" * 60)
    print("Graphiti MCP Server - Example Usage")
    print("=" * 60)
    print()

    # Initialize the server
    print("Initializing Graphiti MCP Server...")
    try:
        server = GraphitiMCP(
            neo4j_uri=neo4j_uri,
            neo4j_user=neo4j_user,
            neo4j_password=neo4j_password,
            openrouter_api_key=openrouter_api_key,
            model_name=model_name,
            use_openrouter=use_openrouter
        )
        print("[SUCCESS] Server initialized!\n")
    except Exception as e:
        print(f"[ERROR] Failed to initialize server: {e}\n")
        return

    # BUGFIX: the original closed the Neo4j driver only on the happy path;
    # the early returns below (failed _store_memory) leaked the connection.
    # try/finally guarantees cleanup on every exit path after init succeeds.
    try:
        # Example 1: Store a memory
        print("=" * 60)
        print("Example 1: Storing a Memory")
        print("=" * 60)

        memory_content = "I'm working on a machine learning project to predict customer churn using XGBoost. The project is scheduled for completion in Q2 2024."
        print(f"Storing memory: {memory_content[:80]}...")

        result1 = await server._store_memory({
            "content": memory_content,
            "tags": ["project", "machine-learning", "churn-prediction"],
            "metadata": {
                "project_name": "Customer Churn Prediction",
                "status": "in-progress",
                "deadline": "2024-06-30"
            }
        })
        print(f"Result: {json.dumps(result1, indent=2)}\n")

        if "error" in result1:
            print("[ERROR] Failed to store memory. Stopping examples.")
            return

        memory1_id = result1.get("id")

        # Example 2: Store another related memory
        print("=" * 60)
        print("Example 2: Storing a Related Memory")
        print("=" * 60)

        memory_content2 = "The churn prediction model uses features like transaction frequency, average order value, customer lifetime value, and support ticket history. Current accuracy: 87%."
        print(f"Storing memory: {memory_content2[:80]}...")

        result2 = await server._store_memory({
            "content": memory_content2,
            "tags": ["project", "machine-learning", "technical"],
            "metadata": {
                "project_name": "Customer Churn Prediction",
                "model_accuracy": "87%",
                "algorithm": "XGBoost"
            }
        })
        print(f"Result: {json.dumps(result2, indent=2)}\n")

        if "error" in result2:
            # Non-fatal: the remaining examples degrade gracefully without it.
            print("[WARNING] Failed to store second memory. Continuing with examples.")
            memory2_id = None
        else:
            memory2_id = result2.get("id")

        # Example 3: Create a relationship (only if both memories exist)
        if memory1_id and memory2_id:
            print("=" * 60)
            print("Example 3: Creating a Relationship")
            print("=" * 60)

            print(f"Creating relationship between memory {memory1_id[:8]}... and {memory2_id[:8]}...")
            result3 = await server._create_relationship({
                "source_id": memory1_id,
                "target_id": memory2_id,
                "relationship_type": "has_details",
                "properties": {
                    "relationship_strength": "strong",
                    "category": "technical"
                }
            })
            print(f"Result: {json.dumps(result3, indent=2)}\n")

        # Example 4: Retrieve memories
        print("=" * 60)
        print("Example 4: Retrieving Memories")
        print("=" * 60)

        query = "What do you know about machine learning projects?"
        print(f"Query: {query}")

        result4 = await server._retrieve_memories({
            "query": query,
            "limit": 5
        })
        print(f"Result: {json.dumps(result4, indent=2)}\n")

        # Example 5: Get synthesized context
        print("=" * 60)
        print("Example 5: Getting Synthesized Context")
        print("=" * 60)

        context_query = "Tell me about the customer churn prediction project"
        print(f"Query: {context_query}")

        result5 = await server._get_context({
            "query": context_query,
            "max_memories": 10
        })
        print(f"Result: {json.dumps(result5, indent=2)}\n")

        # Example 6: Search graph with Cypher
        print("=" * 60)
        print("Example 6: Custom Cypher Query")
        print("=" * 60)

        cypher_query = "MATCH (m:Memory) WHERE 'project' IN m.tags RETURN m.id as id, m.content as content, m.tags as tags ORDER BY m.created_at DESC LIMIT 5"
        print(f"Cypher Query: {cypher_query}")

        result6 = await server._search_graph({
            "cypher_query": cypher_query
        })
        print(f"Result: {json.dumps(result6, indent=2)}\n")

        # Cleanup
        print("=" * 60)
        print("Examples completed!")
        print("=" * 60)
        print("\nNote: The memories stored in these examples remain in your Neo4j database.")
        print("You can query them later or delete them using Cypher queries if needed.")
    finally:
        # Close the driver on every exit path, including early returns.
        server.driver.close()


def example_mcp_client_usage() -> None:
    """Print instructions for using the MCP server as a client.

    Note: This requires the MCP server to be running and accessible.
    For SSE transport, the server should be running on http://localhost:8000/sse
    For stdio transport, you would use the MCP client library.
    """
    print("=" * 60)
    print("MCP Client Usage Example")
    print("=" * 60)
    print()
    # BUGFIX: the original said "two options" but lists three below.
    print("To use the MCP server as a client, you have three options:")
    print()
    print("1. Use with Cursor AI:")
    print("   - Start the server: python graphiti_mcp_server.py --transport sse --port 8000")
    print("   - Configure Cursor's mcp.json to point to http://localhost:8000/sse")
    print("   - Use natural language in Cursor to interact with the tools")
    print()
    print("2. Use with Claude Desktop:")
    print("   - Configure claude_desktop_config.json with stdio transport")
    print("   - Restart Claude Desktop")
    print("   - Use natural language in Claude to interact with the tools")
    print()
    print("3. Programmatic MCP Client (requires mcp client library):")
    print("   - Use the MCP Python client library to connect to the server")
    print("   - Call tools using the MCP protocol")
    print()


async def main() -> None:
    """Entry point: show client instructions with --client, else run the direct examples."""
    if len(sys.argv) > 1 and sys.argv[1] == "--client":
        example_mcp_client_usage()
    else:
        await example_direct_usage()


if __name__ == "__main__":
    asyncio.run(main())

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/apexneural-hansika/graphiti_mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server