Skip to main content
Glama

Agentic AI with MCP

by dev484p
client.py (1.53 kB)
"""Interactive chat REPL driving an MCP agent backed by a Groq-hosted LLM."""

import asyncio
import json

from langchain_groq import ChatGroq
from mcp_use import MCPAgent, MCPClient


def _load_groq_key(path: str = "./keys.json") -> str:
    """Read the Groq API key from a local JSON key file.

    Raises FileNotFoundError if the key file is missing and KeyError if
    it has no "GROQ_API" entry.
    """
    with open(path, "r", encoding="utf-8") as file:
        data = json.load(file)
    return data["GROQ_API"]


async def run_memory_chat():
    """Run an interactive chat loop against an MCPAgent with memory enabled.

    Commands: 'exit' / 'quit' end the session, 'clear' wipes the agent's
    conversation history. Any MCP sessions opened by the client are closed
    on the way out, even if the loop exits via an exception.
    """
    groq_api_key = _load_groq_key()

    # Tool/server definitions live in a separate config file.
    client = MCPClient.from_config_file("tools.json")
    llm = ChatGroq(api_key=groq_api_key, model="qwen-qwq-32b")
    agent = MCPAgent(
        llm=llm,
        client=client,
        max_steps=15,
        memory_enabled=True,  # Enable built-in conversation memory
    )

    print("\n===== Interactive MCP Chat =====")
    print("Type 'exit' or 'quit' to end the conversation")
    print("Type 'clear' to clear conversation history")
    print("==================================\n")

    try:
        while True:
            # Ctrl-D (EOFError) and Ctrl-C (KeyboardInterrupt) previously
            # killed the loop with a traceback; treat both as a clean quit.
            try:
                user_input = input("\nYou: ")
            except (EOFError, KeyboardInterrupt):
                print("\nEnding conversation...")
                break

            if user_input.lower() in ["exit", "quit"]:
                print("Ending conversation...")
                break

            if user_input.lower() == "clear":
                agent.clear_conversation_history()
                print("Conversation history cleared.")
                continue

            print("\nAssistant: ", end="", flush=True)
            try:
                response = await agent.run(user_input)
                print(response)
            except Exception as e:
                # Best-effort: report the failure and keep the REPL alive.
                print(f"\nError: {e}")
    finally:
        # Always tear down any MCP sessions that were opened.
        if client and client.sessions:
            await client.close_all_sessions()


if __name__ == "__main__":
    asyncio.run(run_memory_chat())

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/dev484p/AgenticAI_MCP'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.