"""
Chainlit Chatbot Application using Microsoft Agent Framework
Connects to Product Recommendation MCP Server for interactive product queries
"""
import os
import chainlit as cl
from dotenv import load_dotenv
from agent_framework import ChatAgent, MCPStreamableHTTPTool
from agent_framework.azure import AzureOpenAIChatClient
# Load environment variables
load_dotenv()
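# Expected environment variables (typically loaded from a local .env file):
#   AZURE_OPENAI_ENDPOINT        - Azure OpenAI resource endpoint
#   AZURE_OPENAI_DEPLOYMENT_NAME - name of the chat model deployment
#   AZURE_OPENAI_API_KEY         - API key for the Azure OpenAI resource
#   AZURE_OPENAI_API_VERSION     - Azure OpenAI API version string
#   MCP_SERVER_URL               - optional; defaults to http://localhost:8000/mcp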
# MCP Server configuration
MCP_SERVER_URL = os.getenv("MCP_SERVER_URL", "http://localhost:8000/mcp")
# Global agent and MCP tool instances (initialized when a chat session starts)
agent = None
mcp_tool = None

async def initialize_agent():
"""Initialize the Microsoft Agent Framework ChatAgent with MCP tools"""
global agent, mcp_tool
# Initialize MCP Server Tool
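    # The Product Recommendation MCP server is assumed to expose a Streamable HTTP
    # endpoint at MCP_SERVER_URL and must be running before queries are handled.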
    mcp_tool = MCPStreamableHTTPTool(
        name="Product Recommendation MCP",
        url=MCP_SERVER_URL,
    )

    # Initialize Azure OpenAI Chat Client
    chat_client = AzureOpenAIChatClient(
        endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
        deployment_name=os.getenv("AZURE_OPENAI_DEPLOYMENT_NAME"),
        api_key=os.getenv("AZURE_OPENAI_API_KEY"),
        api_version=os.getenv("AZURE_OPENAI_API_VERSION"),
    )
    # Create the agent with MCP tools
    agent = ChatAgent(
        chat_client=chat_client,
        name="ProductRecommendationAgent",
        instructions="""You are a helpful and friendly product recommendation assistant for an outdoor equipment store.

You have access to a comprehensive product catalog through MCP tools that allow you to:
- Search for products using natural language queries
- Filter products by category, brand, and price range
- Get detailed product descriptions and specifications
- List all available categories and brands

When helping customers:
1. Listen carefully to their needs and preferences
2. Use the appropriate tools to find relevant products
3. Provide clear, detailed product information, including:
   - Product name and brand
   - Price
   - Key features and benefits
   - Any special characteristics (waterproof, lightweight, etc.)
4. Make personalized recommendations based on their requirements
5. Be conversational and helpful
6. If they ask about categories or brands, show them what's available
7. For price-sensitive queries, use the price filter tools effectively

Always be concise but informative. Format your responses in a friendly, easy-to-read manner.
""",
    )
    return agent
@cl.on_chat_start
async def on_chat_start():
    """Initialize the agent when chat starts"""
    try:
        # Show a welcome message while initializing
        msg = cl.Message(content="🚀 Initializing Product Recommendation Assistant...")
        await msg.send()

        # Initialize the agent
        await initialize_agent()

        # Update the welcome message
welcome_message = """
# ποΈ Welcome to the Outdoor Equipment Store!
I'm your AI shopping assistant, powered by Microsoft Agent Framework. I can help you find the perfect outdoor gear for your adventures!
**What I can help you with:**
- π Find products by description (e.g., "waterproof tent for 4 people")
- π·οΈ Browse by category or brand
- π° Filter by price range
- π Get detailed product information
- π Recommend products based on your needs
**Try asking me:**
- "Show me affordable backpacks under $100"
- "What camping tents do you have?"
- "I need waterproof hiking boots"
- "What brands do you carry?"
How can I help you today?
"""
        msg.content = welcome_message
        await msg.update()

        # Store the agent and MCP tool in the user session
        cl.user_session.set("agent", agent)
        cl.user_session.set("mcp_tool", mcp_tool)
    except Exception as e:
        error_msg = f"❌ Error initializing assistant: {str(e)}\n\nPlease make sure the MCP server is running at {MCP_SERVER_URL}"
        await cl.Message(content=error_msg).send()
        raise
@cl.on_message
async def on_message(message: cl.Message):
    """Handle incoming chat messages"""
    try:
        # Retrieve the agent and MCP tool from the session
        agent = cl.user_session.get("agent")
        mcp_tool = cl.user_session.get("mcp_tool")
        if not agent or not mcp_tool:
            await cl.Message(content="❌ Agent not initialized. Please refresh the page.").send()
            return

        # Show an interim message while the agent works
        processing_msg = cl.Message(content="🤔 Searching products...")
        await processing_msg.send()

        # Run the agent with the user's query, passing the MCP tool for this run
        result = await agent.run(message.content, tools=mcp_tool)

        # Extract the response text (fall back to str() if the result has no .text attribute)
        response_text = result.text if hasattr(result, 'text') else str(result)

        # Replace the interim message with the final answer
        processing_msg.content = response_text
        await processing_msg.update()
    except Exception as e:
        error_msg = f"❌ Error processing your request: {str(e)}\n\nPlease try again or rephrase your question."
        await cl.Message(content=error_msg).send()
@cl.on_chat_end
async def on_chat_end():
    """Cleanup when chat ends"""
    print("Chat session ended")
if __name__ == "__main__":
    # This is used for testing - normally chainlit is run via the CLI
    from chainlit.cli import run_chainlit
    run_chainlit(__file__)
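# Typical launch from a terminal (the filename app.py is only an assumption here):
#   chainlit run app.py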