Skip to main content
Glama
smoke_tools.py (1.42 kB)
"""Smoke-test harness: call a few MCP tools through server.handle_call_tool."""
import asyncio
import os
import sys

# Make the repository root importable no matter where this script is launched.
_repo_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if _repo_root not in sys.path:
    sys.path.insert(0, _repo_root)

# Import server and ensure providers are configured
import server
from server import handle_call_tool

try:
    # Load a .env file when python-dotenv is installed; otherwise carry on.
    try:
        from dotenv import load_dotenv  # type: ignore

        load_dotenv(os.path.join(_repo_root, ".env"))
    except Exception:
        pass
    # Configure providers eagerly (handle_call_tool also lazy-guards this).
    if hasattr(server, "configure_providers"):
        server.configure_providers()
except Exception as e:
    print("[WARN] Provider configuration error:", e)


async def call_tool(name: str, args: dict):
    """Invoke one tool and flatten its content items into a single string.

    Never raises: failures come back as an "ERROR calling ..." message so the
    smoke run continues past a broken tool.
    """
    try:
        contents = await handle_call_tool(name, args)
        pieces = (getattr(item, "text", getattr(item, "content", "")) for item in contents)
        return "\n".join(p for p in pieces if isinstance(p, str))
    except Exception as e:
        return f"ERROR calling {name}: {e}"


async def main():
    """Run each smoke check in order, printing its banner then its output."""
    checks = (
        ("=== Smoke: listmodels ===", "listmodels", {}),
        ("\n=== Smoke: provider_capabilities ===", "provider_capabilities", {"include_tools": True}),
        ("\n=== Smoke: version ===", "version", {}),
    )
    for banner, tool, tool_args in checks:
        print(banner)
        print(await call_tool(tool, tool_args))


if __name__ == "__main__":
    asyncio.run(main())

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/Zazzles2908/EX_AI-mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.