# mcp_example.py
"""
Complete example of configuring and invoking MCP
"""
import os
import json
import requests
from mcp_handler import MCPHandler
from tools import get_weather_tool, get_calculator_tool
# LLM API configuration (replace with your provider's details)
LLM_API_KEY = os.environ.get("LLM_API_KEY", "your_api_key_here")
LLM_API_URL = "https://api.llmprovider.com/v1/chat/completions" # Replace with your provider
def main():
    """Interactive demo: register MCP tools, then chat with a (mock) LLM.

    Each turn: collect user input, send the running history to the LLM,
    route any tool calls in the reply through the MCP handler, and record
    the processed reply back into the history.
    """
    handler = configure_mcp()
    history = create_conversation_with_mcp(handler)

    # First turn additionally shows the raw LLM output so the inline
    # tool-call syntax is visible before MCP processes it.
    question = input("Enter your question: ")
    history.append({"role": "user", "content": question})
    raw_reply = get_llm_response(history)
    print("\nLLM raw response:")
    print(raw_reply)
    reply = handler.process_response(raw_reply)
    print("\nProcessed response with tool results:")
    print(reply)
    history.append({"role": "assistant", "content": reply})

    # Follow-up turns until the user types 'exit'.
    while True:
        question = input("\nEnter your next question (or 'exit' to quit): ")
        if question.lower() == 'exit':
            break
        history.append({"role": "user", "content": question})
        reply = handler.process_response(get_llm_response(history))
        print("\nProcessed response:")
        print(reply)
        history.append({"role": "assistant", "content": reply})
def configure_mcp():
    """Build and return an MCPHandler with the demo tool set registered."""
    mcp = MCPHandler()
    # Register each demo tool; the factories come from the local tools module.
    for make_tool in (get_weather_tool, get_calculator_tool):
        mcp.register_tool(make_tool())
    return mcp
def create_conversation_with_mcp(handler):
    """Return the initial message list for a conversation.

    The list holds a single system message that advertises the handler's
    registered tool definitions and the inline tool-call syntax the LLM
    should emit.
    """
    tool_defs = handler.get_tool_definitions()
    system_message = f"""You are an AI assistant with access to tools. Please help the user with their request.
{tool_defs}
To use a tool, use the syntax: tool_name(parameter1="value1", parameter2="value2")
Examples:
- get_weather(location="San Francisco, CA", unit="fahrenheit")
- calculator(expression="2 * (3 + 4)")
Wait for tool execution results before continuing your response."""
    return [{"role": "system", "content": system_message}]
def get_llm_response(messages):
    """Return a (mocked) assistant reply for the given conversation history.

    A production version would POST the history to LLM_API_URL (sketch in
    the comments below). This demo instead pattern-matches the last
    message and returns canned replies containing MCP tool-call syntax.
    Any exception is converted into an error string rather than raised.
    """
    try:
        # Real implementation sketch:
        #   headers = {
        #       "Content-Type": "application/json",
        #       "Authorization": f"Bearer {LLM_API_KEY}",
        #   }
        #   payload = {"model": "model-name", "messages": messages, "max_tokens": 1000}
        #   response = requests.post(LLM_API_URL, json=payload, headers=headers)
        #   return response.json()["choices"][0]["message"]["content"]
        lowered = messages[-1]["content"].lower()
        if "weather" in lowered:
            return """I'll check the weather for you.
get_weather(location="Boston, MA", unit="fahrenheit")
Is there anything else you'd like to know?"""
        if any(term in lowered for term in ("calculate", "math", "compute")):
            # Demo-only expression selection; a real LLM would parse the request.
            expression = "sqrt(16)" if "sqrt" in lowered else "2 * (3 + 4)"
            return f"""I'll calculate that for you.
calculator(expression="{expression}")
Let me know if you need any other calculations."""
        return "I'm an AI assistant with tool capabilities. I can check the weather or perform calculations. What would you like to know?"
    except Exception as e:  # best-effort wrapper: report the failure, don't raise
        return f"Error calling LLM API: {str(e)}"
if __name__ == "__main__":
main()