#!/usr/bin/env python3
"""
Test FastMCP OpenAPI server with LangChain MCP adapters.
"""
import asyncio
import os
import subprocess
from typing import Optional

from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain_openai import ChatOpenAI
from langgraph.prebuilt import create_react_agent
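
# NOTE: test_fastmcp_server() below expects a FastMCP OpenAPI server that is
# already running at http://127.0.0.1:8081/mcp, plus the OPENAI_API_KEY
# environment variable. start_fastmcp_server() is a convenience helper for
# launching such a server locally; it is not called automatically.
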
async def start_fastmcp_server(port: int = 8080) -> Optional[subprocess.Popen]:
    """Start the FastMCP server."""
    try:
        process = subprocess.Popen([
            "fastmcp-openapi",
            "--spec", "examples/simple_api.json",
            "--transport", "streamable-http",
            "--port", str(port),
            "--debug"
        ])
        # Give server time to start
        await asyncio.sleep(3)

        if process.poll() is None:
            print(f"✅ FastMCP server started on port {port}")
            return process
        else:
            print("❌ Failed to start FastMCP server")
            return None
    except Exception as e:
        print(f"❌ Error starting server: {e}")
        return None
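
# Note: start_fastmcp_server() defaults to port 8080, while test_fastmcp_server()
# below connects to port 8081. To have this script launch the server itself,
# call start_fastmcp_server(port=8081) before creating the MCP client.
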
async def test_fastmcp_server():
    """Test the FastMCP server with LangChain."""
    print("🚀 Testing FastMCP OpenAPI Server with LangChain")

    # Check for OpenAI API key
    if not os.getenv("OPENAI_API_KEY"):
        print("❌ OPENAI_API_KEY environment variable is required")
        print(" Set it with: export OPENAI_API_KEY='your-api-key'")
        return

    try:
        # Connect to existing FastMCP server (should be running on port 8081)
        print("🔌 Connecting to FastMCP Petstore server on port 8081...")
        client = MultiServerMCPClient({
            "petstore": {
                "url": "http://127.0.0.1:8081/mcp",
                "transport": "streamable_http",
            }
        })

        # Get available tools
        print("🔍 Getting available tools...")
        tools = await client.get_tools()
        print(f"✅ Found {len(tools)} tools:")
        for tool in tools[:5]:  # Show first 5 tools
            print(f" - {tool.name}: {tool.description}")
        if len(tools) > 5:
            print(f" ... and {len(tools) - 5} more")

        # Create OpenAI model
        model = ChatOpenAI(model="gpt-4.1", temperature=0)

        # Create agent
        print("🤖 Creating LangChain agent...")
        agent = create_react_agent(model, tools)

        # Test queries for Petstore API
        test_queries = [
            "Find all pets with status 'available' and tell me how many there are",
            "Get information about pet with ID 1"
        ]

        for i, query in enumerate(test_queries, 1):
            print(f"\n📝 Test {i}: {query}")
            try:
                response = await agent.ainvoke({"messages": [{"role": "user", "content": query}]})
                if response and "messages" in response:
                    final_message = response["messages"][-1]
                    print("🤖 Agent response:")
                    print(final_message.content)
                else:
                    print("❌ No response from agent")
            except Exception as e:
                print(f"❌ Error in test {i}: {e}")

        # Interactive mode - allow user to ask custom questions
        print("\n" + "="*60)
        print("🎯 Interactive Mode - Ask your own questions!")
        print(" Example queries:")
        print(" • 'Add a new pet named Buddy with status available'")
        print(" • 'Find all pets with status pending'")
        print(" • 'Update pet with ID 123 to have status sold'")
        print(" • 'Get details of pet with ID 456'")
        print(" • 'List all available dog breeds'")
        print("="*60)

        while True:
            try:
                user_query = await asyncio.to_thread(input, "\n💬 Enter your query (or 'quit' to exit): ")
                user_query = user_query.strip()

                if user_query.lower() in ['quit', 'exit', 'q']:
                    print("👋 Goodbye!")
                    break
                if not user_query:
                    print("Please enter a valid query.")
                    continue

                print(f"\n🔄 Processing: {user_query}")
                response = await agent.ainvoke({"messages": [{"role": "user", "content": user_query}]})
                if response and "messages" in response:
                    final_message = response["messages"][-1]
                    print("🤖 Agent response:")
                    print(final_message.content)
                else:
                    print("❌ No response from agent")
            except KeyboardInterrupt:
                print("\n👋 Goodbye!")
                break
            except Exception as e:
                print(f"❌ Error processing query: {e}")

        print("\n✅ Interactive session completed!")
    except Exception as e:
        print(f"❌ Test failed: {e}")
        print(" Make sure FastMCP server is running on port 8081")

if __name__ == "__main__":
    asyncio.run(test_fastmcp_server())