test_orderbook_api.py (2.13 kB)
#!/usr/bin/env python3 """Test orderbook API directly""" import os import sys from pathlib import Path import httpx import json sys.path.insert(0, str(Path(__file__).parent / "src")) from mcp_opinion.config import OpinionConfig async def test_orderbook_api(): """Test orderbook API""" config = OpinionConfig.from_env() # Get markets first to get a token ID async with httpx.AsyncClient(timeout=60) as client: headers = { "apikey": config.api_key, "Content-Type": "application/json" } # Get markets url = f"{config.api_host}/openapi/market" response = await client.get(url, headers=headers) data = response.json() if data.get('errno') == 0 and data.get('result'): markets = data['result'].get('list', []) if markets: market = markets[0] yes_token_id = market.get('yesTokenId') print(f"Market: {market.get('marketTitle')}") print(f"YES Token ID: {yes_token_id}") # Try orderbook with tokenId parameter print(f"\nTrying with 'tokenId' parameter...") url = f"{config.api_host}/openapi/token/orderbook" params = {"tokenId": yes_token_id} response = await client.get(url, headers=headers, params=params) data = response.json() print(f"errno: {data.get('errno')}") print(f"errmsg: {data.get('errmsg')}") if data.get('errno') != 0: # Try with token_id parameter print(f"\nTrying with 'token_id' parameter...") params = {"token_id": yes_token_id} response = await client.get(url, headers=headers, params=params) data = response.json() print(f"errno: {data.get('errno')}") print(f"errmsg: {data.get('errmsg')}") if __name__ == "__main__": import asyncio asyncio.run(test_orderbook_api())


MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/solenyaresearch0000/opinion-MCP'
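
For reference, a rough Python equivalent of the curl call above, using httpx (which the test script already depends on). The URL is taken verbatim from the command; the response shape is not documented here, so the sketch only prints the raw JSON.

"""Fetch this server's MCP directory entry (sketch of the curl call above)."""
import httpx

URL = "https://glama.ai/api/mcp/v1/servers/solenyaresearch0000/opinion-MCP"

response = httpx.get(URL, timeout=30)
response.raise_for_status()  # surface HTTP errors instead of parsing an error page
print(response.json())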

If you have feedback or need assistance with the MCP directory API, please join our Discord server.