
Elasticsearch MCP Server

Apache 2.0
""" Client example copied from https://modelcontextprotocol.io/quickstart/client """ import asyncio from typing import Optional from contextlib import AsyncExitStack from mcp import ClientSession, StdioServerParameters from mcp.client.stdio import stdio_client from anthropic import Anthropic from config import get_logger, read_config logger = get_logger(__name__) class MCPClient: def __init__(self): self.session: Optional[ClientSession] = None self.exit_stack = AsyncExitStack() self.anthropic = Anthropic() self.config = read_config() async def connect_to_server(self, server_script_path: str): """Connect to an MCP server Args: server_script_path: Path to the server script (.py or .js) """ is_python = server_script_path.endswith('.py') is_js = server_script_path.endswith('.js') if not (is_python or is_js): raise ValueError("Server script must be a .py or .js file") command = "python" if is_python else "node" server_params = StdioServerParameters(command=command, args=[server_script_path], env=None) stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params)) self.stdio, self.write = stdio_transport self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write)) if self.session is not None: await self.session.initialize() response = await self.session.list_tools() tools = response.tools logger.info(f"\nConnected to server with tools: {', '.join(tool.name for tool in tools)}") async def process_query(self, query: str) -> str: """Process a query using Claude and available tools""" messages = [ { "role": "user", "content": query } ] response = await self.session.list_tools() available_tools = [{ "name": tool.name, "description": tool.description, "input_schema": tool.inputSchema } for tool in response.tools] # Initial Claude API call response = self.anthropic.messages.create( model=self.config.anthropic.model, max_tokens=self.config.anthropic.max_tokens_message, messages=messages, tools=available_tools ) # Process response and handle tool calls final_text = [] assistant_message_content = [] for content in response.content: if content.type == 'text': final_text.append(content.text) assistant_message_content.append(content) elif content.type == 'tool_use': tool_name = content.name tool_args = content.input # Execute tool call result = await self.session.call_tool(tool_name, tool_args) final_text.append(f"[Calling tool {tool_name} with args {tool_args}]") assistant_message_content.append(content) messages.append({ "role": "assistant", "content": assistant_message_content }) messages.append({ "role": "user", "content": [ { "type": "tool_result", "tool_use_id": content.id, "content": result.content } ] }) # Get next response from Claude response = self.anthropic.messages.create( model=self.config.anthropic.model, max_tokens=self.config.anthropic.max_tokens_message, messages=messages, tools=available_tools ) final_text.append(response.content[0].text) return "\n".join(final_text) async def chat_loop(self): """Run an interactive chat loop""" logger.info("\nMCP Client Started!") logger.info("Type your queries or 'quit' to exit.") while True: try: query = input("\nQuery: ").strip() if query.lower() == 'quit': break response = await self.process_query(query) logger.info("\n" + response) except Exception as e: logger.error(f"\nError: {str(e)}") async def cleanup(self): """Clean up resources""" await self.exit_stack.aclose() async def main(): # if len(sys.argv) < 2: # logger.exception("Usage: python client.py <path_to_server_script>") # return 
client = MCPClient() try: await client.connect_to_server(sys.argv[1]) logger.info(f"Connected to the server: {sys.argv[1]}.") await client.chat_loop() finally: await client.cleanup() logger.info(f"Disconnected from the server: {sys.argv[1]}.") if __name__ == "__main__": import sys asyncio.run(main())
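
The client above imports get_logger and read_config from a local config module that is not shown. The following is a minimal sketch of what such a module could look like, inferred only from how MCPClient uses it (config.anthropic.model and config.anthropic.max_tokens_message); the dataclass layout, default values, and logging setup are assumptions, not the repository's actual config.py.

"""Hypothetical config module -- a minimal sketch, not the repository's actual implementation."""
import logging
from dataclasses import dataclass, field


@dataclass
class AnthropicConfig:
    # Field names mirror the attributes MCPClient reads; the defaults are illustrative only.
    model: str = "claude-3-5-sonnet-20241022"
    max_tokens_message: int = 1000


@dataclass
class AppConfig:
    anthropic: AnthropicConfig = field(default_factory=AnthropicConfig)


def get_logger(name: str) -> logging.Logger:
    """Return a console logger; basicConfig only takes effect on the first call."""
    logging.basicConfig(level=logging.INFO, format="%(message)s")
    return logging.getLogger(name)


def read_config() -> AppConfig:
    """Build the configuration object consumed by MCPClient."""
    return AppConfig()

With such a module in place, the client is started with the path to an MCP server script, e.g. python client.py path/to/server.py.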

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/cr7258/elasticsearch-mcp-server'
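
The same endpoint can also be queried from code. Below is a short Python sketch using the requests library; the shape of the JSON response is not documented here, so the example simply pretty-prints whatever the API returns.

import json

import requests

# Fetch the directory entry for cr7258/elasticsearch-mcp-server from the Glama MCP API.
url = "https://glama.ai/api/mcp/v1/servers/cr7258/elasticsearch-mcp-server"
resp = requests.get(url, timeout=10)
resp.raise_for_status()

# The response schema is not documented here, so just pretty-print the JSON payload.
print(json.dumps(resp.json(), indent=2))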

If you have feedback or need assistance with the MCP directory API, please join our Discord server.