Skip to main content
Glama

MCP Chat

by rayenamer
chat.py1.5 kB
from core.claude import Claude
from mcp_client import MCPClient
from core.tools import ToolManager
from anthropic.types import MessageParam


class Chat:
    """Drive a conversational loop with Claude over a set of MCP clients.

    Each call to :meth:`run` appends to the shared ``messages`` history and
    keeps invoking Claude — executing any requested MCP tools and feeding
    the results back — until the model returns a plain text answer.
    """

    def __init__(self, claude_service: Claude, clients: dict[str, MCPClient]):
        self.claude_service: Claude = claude_service
        # Mapping of client name -> connected MCP client; treated as fixed
        # for the lifetime of this Chat instance.
        self.clients: dict[str, MCPClient] = clients
        # Full conversation history re-sent to Claude on every turn.
        self.messages: list[MessageParam] = []

    async def _process_query(self, query: str) -> None:
        """Append the user's query to the conversation history."""
        self.messages.append({"role": "user", "content": query})

    async def run(
        self,
        query: str,
    ) -> str:
        """Run a single user query to completion.

        Args:
            query: The user's message.

        Returns:
            Claude's final text response once it stops requesting tools.
        """
        final_text_response = ""
        await self._process_query(query)

        # self.clients never changes, so fetch the tool schemas once
        # instead of re-awaiting ToolManager on every loop iteration.
        tools = await ToolManager.get_all_tools(self.clients)

        while True:
            response = self.claude_service.chat(
                messages=self.messages,
                tools=tools,
            )
            self.claude_service.add_assistant_message(self.messages, response)

            if response.stop_reason == "tool_use":
                # Surface any interim text Claude emitted alongside the tool
                # request, then execute the tools and feed the results back
                # as the next user turn.
                print(self.claude_service.text_from_message(response))
                tool_result_parts = await ToolManager.execute_tool_requests(
                    self.clients, response
                )
                self.claude_service.add_user_message(
                    self.messages, tool_result_parts
                )
            else:
                final_text_response = self.claude_service.text_from_message(
                    response
                )
                break

        return final_text_response

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/rayenamer/MCP_Intro'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.