# example.py
"""
Basic usage example for mcp_use.
This example demonstrates how to use the mcp_use library with MCPClient
to connect any LLM to MCP tools through a unified interface.
Special thanks to https://github.com/microsoft/playwright-mcp for the server.
"""
import asyncio
import os

from dotenv import load_dotenv
from langchain_openai import ChatOpenAI

from mcp_use import MCPAgent, MCPClient


async def main():
"""Run the example using a configuration file."""
# Load environment variables
load_dotenv()
# Create MCPClient from config file
client = MCPClient(
config={
"mcpServers": {
"browser-use-mcp": {
"command": "browser-use-mcp",
"args": ["--model", "gpt-4o"],
"env": {
"OPENAI_API_KEY": os.getenv("OPENAI_API_KEY"),
"DISPLAY": ":0",
},
}
}
}
)
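
    # The same "mcpServers" layout can also live in a JSON file; if your
    # mcp_use version provides the MCPClient.from_config_file() classmethod,
    # the client can be built from that file instead of the inline dict, e.g.:
    #
    #     client = MCPClient.from_config_file("browser_mcp.json")
    #
    # ("browser_mcp.json" is an illustrative filename, not part of this example.)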

    # Create LLM
    llm = ChatOpenAI(model="gpt-4o")

    # Create agent with the client
    agent = MCPAgent(llm=llm, client=client, max_steps=30)

    # Run the query
    result = await agent.run(
        """
        Navigate to https://github.com/mcp-use/mcp-use, give a star to the project and write
        a summary of the project.
        """,
        max_steps=30,
    )
    print(f"\nResult: {result}")
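
    # If your mcp_use version exposes MCPClient.close_all_sessions(), calling it
    # here (or in a try/finally around the steps above) shuts down the spawned
    # MCP server process cleanly before the script exits:
    #
    #     await client.close_all_sessions()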


if __name__ == "__main__":
    # Run the example
    asyncio.run(main())