Skip to main content
Glama

Tavily Web Search MCP Server

by BQ31X
cal_langgraph_app.pyβ€’6.23 kB
#!/usr/bin/env python3
"""
AI Makerspace HW13 - Cal.com LangGraph Application

A standalone application that demonstrates integrating MCP servers with LangGraph.
This creates a conversational agent that can interact with Cal.com via MCP tools.

Usage:
    python cal_langgraph_app.py

Requirements:
    - .env file with OPENAI_API_KEY and CALCOM_API_KEY
    - myserver.py MCP server in the same directory
"""

import asyncio
import os
from typing import Optional

from dotenv import load_dotenv
from langchain_core.messages import HumanMessage
from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain_openai import ChatOpenAI
from langgraph.prebuilt import create_react_agent


class CalendarAgent:
    """A LangGraph agent that can interact with Cal.com via MCP tools."""

    def __init__(self, model_name: str = "gpt-4o-mini"):
        # Model name is deferred to initialize(); nothing heavy happens here.
        self.model_name = model_name
        self.client: Optional[MultiServerMCPClient] = None
        self.agent = None
        self.tools = None

    async def initialize(self) -> bool:
        """Initialize the MCP client and LangGraph agent.

        Returns True on success, False on any failure (missing env vars,
        MCP connection problems, or agent construction errors).
        """
        try:
            print("🔄 Initializing Calendar Agent...")

            # Fail fast if required credentials are absent from the environment.
            if not os.getenv("OPENAI_API_KEY"):
                print("❌ Missing OPENAI_API_KEY in environment")
                return False
            if not os.getenv("CALCOM_API_KEY"):
                print("❌ Missing CALCOM_API_KEY in environment")
                return False

            # Spawn the local MCP server (myserver.py) over stdio via `uv run`.
            print("📡 Connecting to MCP server...")
            server_config = {
                "cal": {
                    "command": "uv",
                    "args": ["--directory", os.getcwd(), "run", "myserver.py"],
                    "transport": "stdio",
                }
            }
            self.client = MultiServerMCPClient(server_config)

            # Discover the tools the MCP server exposes.
            print("🔧 Loading MCP tools...")
            self.tools = await self.client.get_tools()
            print(f"✅ Loaded {len(self.tools)} tools:")
            for tool in self.tools:
                print(f" - {tool.name}: {tool.description}")

            # Wire the tools into a ReAct-style LangGraph agent.
            print("🤖 Creating LangGraph agent...")
            llm = ChatOpenAI(model=self.model_name, temperature=0)
            self.agent = create_react_agent(llm, self.tools)

            print("✅ Calendar Agent initialized successfully!\n")
            return True
        except Exception as e:
            # Top-level CLI boundary: report and signal failure to the caller.
            print(f"❌ Failed to initialize agent: {e}")
            return False

    async def query(self, user_input: str) -> str:
        """Send a query to the agent and return the final response text."""
        if not self.agent:
            return "❌ Agent not initialized. Call initialize() first."
        try:
            print(f"🧠 Processing: {user_input}")
            result = await self.agent.ainvoke({
                "messages": [HumanMessage(content=user_input)]
            })
            # The agent returns the full message history; the last entry is
            # the final answer.
            return result["messages"][-1].content
        except Exception as e:
            return f"❌ Error processing query: {e}"

    async def interactive_session(self):
        """Run a read-eval-print loop with the agent until the user quits."""
        print("🎯 Starting interactive session. Type 'quit' to exit.\n")
        print("💡 Try asking:")
        print(" - 'List my event types'")
        print(" - 'Show my upcoming bookings'")
        print(" - 'What availability do I have for event type 123 on 2025-01-15?'")
        print(" - 'Search the web for Cal.com pricing'\n")

        while True:
            try:
                user_input = input("You: ").strip()
                if user_input.lower() in ['quit', 'exit', 'bye']:
                    print("👋 Goodbye!")
                    break
                if not user_input:
                    continue
                answer = await self.query(user_input)
                print(f"Agent: {answer}\n")
            except KeyboardInterrupt:
                print("\n👋 Goodbye!")
                break
            except Exception as e:
                print(f"❌ Error: {e}\n")


async def demonstrate_features(agent: CalendarAgent):
    """Demonstrate key features of the agent with a fixed set of queries."""
    print("🎪 === DEMONSTRATION MODE ===\n")

    demos = [
        "List my available event types",
        "Show me my upcoming bookings",
        "Search the web for 'Cal.com API documentation'",
        "What's 3d6 + 2d8?"  # Test dice rolling tool
    ]

    for demo in demos:
        print(f"🎯 Demo Query: {demo}")
        response = await agent.query(demo)
        print(f"🤖 Response: {response}\n")
        print("-" * 60 + "\n")


async def main():
    """Main application entry point: init agent, then dispatch on user choice."""
    print("🚀 Cal.com LangGraph Application")
    print("=" * 40)

    # Load environment variables (.env overrides existing process env).
    load_dotenv(override=True)

    agent = CalendarAgent()
    if not await agent.initialize():
        print("❌ Failed to initialize. Please check your .env file and MCP server.")
        return

    print("Choose an option:")
    print("1. Interactive session")
    print("2. Run demonstrations")
    print("3. Single query")

    try:
        choice = input("\nEnter your choice (1-3): ").strip()
        if choice == "1":
            await agent.interactive_session()
        elif choice == "2":
            await demonstrate_features(agent)
        elif choice == "3":
            query = input("Enter your query: ").strip()
            if query:
                response = await agent.query(query)
                print(f"\n🤖 Response: {response}")
        else:
            print("❌ Invalid choice")
    except KeyboardInterrupt:
        print("\n👋 Goodbye!")


if __name__ == "__main__":
    asyncio.run(main())

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/BQ31X/MCP-Session-Code'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.