Skip to main content
Glama

LLM Tool-Calling Assistant

by o6-webwork
client-stdio.py (1.12 kB)
import asyncio

import nest_asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

# Patch the current event loop so this script also runs inside
# interactive environments (REPL / notebook) that already own a loop.
nest_asyncio.apply()


async def main() -> None:
    """Spawn the MCP server over stdio, list its tools, and invoke `add`."""
    # How to launch the server process we want to talk to.
    params = StdioServerParameters(
        command="python",
        args=["server.py"],
    )

    # stdio_client yields the (read, write) stream pair for the subprocess.
    async with stdio_client(params) as (reader, writer):
        async with ClientSession(reader, writer) as session:
            # Perform the MCP handshake before issuing any requests.
            await session.initialize()

            # Show everything the server exposes.
            listing = await session.list_tools()
            print("Available tools:")
            for tool in listing.tools:
                print(f" - {tool.name}: {tool.description}")

            # Exercise the calculator tool with a concrete example.
            outcome = await session.call_tool("add", arguments={"a": 2, "b": 3})
            print(f"2 + 3 = {outcome.content[0].text}")


if __name__ == "__main__":
    asyncio.run(main())

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/o6-webwork/mcp-template'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.